v0.5.35: helm updates, copilot improvements, 404 for docs, salesforce fixes, subflow resize clamping
13
.github/workflows/test-build.yml
vendored
@@ -48,6 +48,19 @@ jobs:
|
||||
ENCRYPTION_KEY: '7cf672e460e430c1fba707575c2b0e2ad5a99dddf9b7b7e3b5646e630861db1c' # dummy key for CI only
|
||||
run: bun run test
|
||||
|
||||
- name: Check schema and migrations are in sync
|
||||
working-directory: packages/db
|
||||
run: |
|
||||
bunx drizzle-kit generate --config=./drizzle.config.ts
|
||||
if [ -n "$(git status --porcelain ./migrations)" ]; then
|
||||
echo "❌ Schema and migrations are out of sync!"
|
||||
echo "Run 'cd packages/db && bunx drizzle-kit generate' and commit the new migrations."
|
||||
git status --porcelain ./migrations
|
||||
git diff ./migrations
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Schema and migrations are in sync"
|
||||
|
||||
- name: Build application
|
||||
env:
|
||||
NODE_OPTIONS: '--no-warnings'
|
||||
|
||||
@@ -188,7 +188,7 @@ DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"
|
||||
|
||||
Then run the migrations:
|
||||
```bash
|
||||
cd apps/sim # Required so drizzle picks correct .env file
|
||||
cd packages/db # Required so drizzle picks correct .env file
|
||||
bunx drizzle-kit migrate --config=./drizzle.config.ts
|
||||
```
|
||||
|
||||
|
||||
23
apps/docs/app/[lang]/not-found.tsx
Normal file
@@ -0,0 +1,23 @@
|
||||
import { DocsBody, DocsPage } from 'fumadocs-ui/page'
|
||||
|
||||
export const metadata = {
|
||||
title: 'Page Not Found',
|
||||
}
|
||||
|
||||
export default function NotFound() {
|
||||
return (
|
||||
<DocsPage>
|
||||
<DocsBody>
|
||||
<div className='flex min-h-[60vh] flex-col items-center justify-center text-center'>
|
||||
<h1 className='mb-4 bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] bg-clip-text font-bold text-8xl text-transparent'>
|
||||
404
|
||||
</h1>
|
||||
<h2 className='mb-2 font-semibold text-2xl text-foreground'>Page Not Found</h2>
|
||||
<p className='text-muted-foreground'>
|
||||
The page you're looking for doesn't exist or has been moved.
|
||||
</p>
|
||||
</div>
|
||||
</DocsBody>
|
||||
</DocsPage>
|
||||
)
|
||||
}
|
||||
@@ -573,10 +573,10 @@ export default function LoginPage({
|
||||
<Dialog open={forgotPasswordOpen} onOpenChange={setForgotPasswordOpen}>
|
||||
<DialogContent className='auth-card auth-card-shadow max-w-[540px] rounded-[10px] border backdrop-blur-sm'>
|
||||
<DialogHeader>
|
||||
<DialogTitle className='auth-text-primary font-semibold text-xl tracking-tight'>
|
||||
<DialogTitle className='font-semibold text-black text-xl tracking-tight'>
|
||||
Reset Password
|
||||
</DialogTitle>
|
||||
<DialogDescription className='auth-text-secondary text-sm'>
|
||||
<DialogDescription className='text-muted-foreground text-sm'>
|
||||
Enter your email address and we'll send you a link to reset your password if your
|
||||
account exists.
|
||||
</DialogDescription>
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import Image from 'next/image'
|
||||
import Link from 'next/link'
|
||||
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar'
|
||||
import { getAllPostMeta } from '@/lib/blog/registry'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { PostGrid } from '@/app/(landing)/studio/post-grid'
|
||||
|
||||
export const revalidate = 3600
|
||||
|
||||
@@ -18,7 +17,6 @@ export default async function StudioIndex({
|
||||
const all = await getAllPostMeta()
|
||||
const filtered = tag ? all.filter((p) => p.tags.includes(tag)) : all
|
||||
|
||||
// Sort to ensure featured post is first on page 1
|
||||
const sorted =
|
||||
pageNum === 1
|
||||
? filtered.sort((a, b) => {
|
||||
@@ -63,69 +61,7 @@ export default async function StudioIndex({
|
||||
</div> */}
|
||||
|
||||
{/* Grid layout for consistent rows */}
|
||||
<div className='grid grid-cols-1 gap-4 md:grid-cols-2 md:gap-6 lg:grid-cols-3'>
|
||||
{posts.map((p, i) => {
|
||||
return (
|
||||
<Link key={p.slug} href={`/studio/${p.slug}`} className='group flex flex-col'>
|
||||
<div className='flex h-full flex-col overflow-hidden rounded-xl border border-gray-200 transition-colors duration-300 hover:border-gray-300'>
|
||||
<Image
|
||||
src={p.ogImage}
|
||||
alt={p.title}
|
||||
width={800}
|
||||
height={450}
|
||||
className='h-48 w-full object-cover'
|
||||
sizes='(max-width: 768px) 100vw, (max-width: 1024px) 50vw, 33vw'
|
||||
loading='lazy'
|
||||
unoptimized
|
||||
/>
|
||||
<div className='flex flex-1 flex-col p-4'>
|
||||
<div className='mb-2 text-gray-600 text-xs'>
|
||||
{new Date(p.date).toLocaleDateString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
year: 'numeric',
|
||||
})}
|
||||
</div>
|
||||
<h3 className='shine-text mb-1 font-medium text-lg leading-tight'>{p.title}</h3>
|
||||
<p className='mb-3 line-clamp-3 flex-1 text-gray-700 text-sm'>{p.description}</p>
|
||||
<div className='flex items-center gap-2'>
|
||||
<div className='-space-x-1.5 flex'>
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
|
||||
.slice(0, 3)
|
||||
.map((author, idx) => (
|
||||
<Avatar key={idx} className='size-4 border border-white'>
|
||||
<AvatarImage src={author?.avatarUrl} alt={author?.name} />
|
||||
<AvatarFallback className='border border-white bg-gray-100 text-[10px] text-gray-600'>
|
||||
{author?.name.slice(0, 2)}
|
||||
</AvatarFallback>
|
||||
</Avatar>
|
||||
))}
|
||||
</div>
|
||||
<span className='text-gray-600 text-xs'>
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
|
||||
.slice(0, 2)
|
||||
.map((a) => a?.name)
|
||||
.join(', ')}
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length > 2 && (
|
||||
<>
|
||||
{' '}
|
||||
and{' '}
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2}{' '}
|
||||
other
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2 >
|
||||
1
|
||||
? 's'
|
||||
: ''}
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Link>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
<PostGrid posts={posts} />
|
||||
|
||||
{totalPages > 1 && (
|
||||
<div className='mt-10 flex items-center justify-center gap-3'>
|
||||
|
||||
90
apps/sim/app/(landing)/studio/post-grid.tsx
Normal file
@@ -0,0 +1,90 @@
|
||||
'use client'
|
||||
|
||||
import Image from 'next/image'
|
||||
import Link from 'next/link'
|
||||
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar'
|
||||
|
||||
interface Author {
|
||||
id: string
|
||||
name: string
|
||||
avatarUrl?: string
|
||||
url?: string
|
||||
}
|
||||
|
||||
interface Post {
|
||||
slug: string
|
||||
title: string
|
||||
description: string
|
||||
date: string
|
||||
ogImage: string
|
||||
author: Author
|
||||
authors?: Author[]
|
||||
featured?: boolean
|
||||
}
|
||||
|
||||
export function PostGrid({ posts }: { posts: Post[] }) {
|
||||
return (
|
||||
<div className='grid grid-cols-1 gap-4 md:grid-cols-2 md:gap-6 lg:grid-cols-3'>
|
||||
{posts.map((p, index) => (
|
||||
<Link key={p.slug} href={`/studio/${p.slug}`} className='group flex flex-col'>
|
||||
<div className='flex h-full flex-col overflow-hidden rounded-xl border border-gray-200 transition-colors duration-300 hover:border-gray-300'>
|
||||
{/* Image container with fixed aspect ratio to prevent layout shift */}
|
||||
<div className='relative aspect-video w-full overflow-hidden'>
|
||||
<Image
|
||||
src={p.ogImage}
|
||||
alt={p.title}
|
||||
sizes='(max-width: 768px) 100vw, (max-width: 1024px) 50vw, 33vw'
|
||||
unoptimized
|
||||
priority={index < 6}
|
||||
loading={index < 6 ? undefined : 'lazy'}
|
||||
fill
|
||||
style={{ objectFit: 'cover' }}
|
||||
/>
|
||||
</div>
|
||||
<div className='flex flex-1 flex-col p-4'>
|
||||
<div className='mb-2 text-gray-600 text-xs'>
|
||||
{new Date(p.date).toLocaleDateString('en-US', {
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
year: 'numeric',
|
||||
})}
|
||||
</div>
|
||||
<h3 className='shine-text mb-1 font-medium text-lg leading-tight'>{p.title}</h3>
|
||||
<p className='mb-3 line-clamp-3 flex-1 text-gray-700 text-sm'>{p.description}</p>
|
||||
<div className='flex items-center gap-2'>
|
||||
<div className='-space-x-1.5 flex'>
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
|
||||
.slice(0, 3)
|
||||
.map((author, idx) => (
|
||||
<Avatar key={idx} className='size-4 border border-white'>
|
||||
<AvatarImage src={author?.avatarUrl} alt={author?.name} />
|
||||
<AvatarFallback className='border border-white bg-gray-100 text-[10px] text-gray-600'>
|
||||
{author?.name.slice(0, 2)}
|
||||
</AvatarFallback>
|
||||
</Avatar>
|
||||
))}
|
||||
</div>
|
||||
<span className='text-gray-600 text-xs'>
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
|
||||
.slice(0, 2)
|
||||
.map((a) => a?.name)
|
||||
.join(', ')}
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length > 2 && (
|
||||
<>
|
||||
{' '}
|
||||
and {(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2}{' '}
|
||||
other
|
||||
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2 > 1
|
||||
? 's'
|
||||
: ''}
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -12,6 +12,7 @@ export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
|
||||
pathname === '/' ||
|
||||
pathname.startsWith('/login') ||
|
||||
pathname.startsWith('/signup') ||
|
||||
pathname.startsWith('/reset-password') ||
|
||||
pathname.startsWith('/sso') ||
|
||||
pathname.startsWith('/terms') ||
|
||||
pathname.startsWith('/privacy') ||
|
||||
|
||||
@@ -759,3 +759,24 @@ input[type="search"]::-ms-clear {
|
||||
--surface-elevated: #202020;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove backticks from inline code in prose (Tailwind Typography default)
|
||||
*/
|
||||
.prose code::before,
|
||||
.prose code::after {
|
||||
content: none !important;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove underlines from heading anchor links in prose
|
||||
*/
|
||||
.prose h1 a,
|
||||
.prose h2 a,
|
||||
.prose h3 a,
|
||||
.prose h4 a,
|
||||
.prose h5 a,
|
||||
.prose h6 a {
|
||||
text-decoration: none !important;
|
||||
color: inherit !important;
|
||||
}
|
||||
|
||||
@@ -32,7 +32,17 @@ export async function GET(request: NextRequest) {
|
||||
.from(account)
|
||||
.where(and(...whereConditions))
|
||||
|
||||
return NextResponse.json({ accounts })
|
||||
// Use the user's email as the display name (consistent with credential selector)
|
||||
const userEmail = session.user.email
|
||||
|
||||
const accountsWithDisplayName = accounts.map((acc) => ({
|
||||
id: acc.id,
|
||||
accountId: acc.accountId,
|
||||
providerId: acc.providerId,
|
||||
displayName: userEmail || acc.providerId,
|
||||
}))
|
||||
|
||||
return NextResponse.json({ accounts: accountsWithDisplayName })
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch accounts', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
|
||||
@@ -6,6 +6,10 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'
|
||||
|
||||
vi.mock('@/lib/core/utils/urls', () => ({
|
||||
getBaseUrl: vi.fn(() => 'https://app.example.com'),
|
||||
}))
|
||||
|
||||
describe('Forget Password API Route', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetModules()
|
||||
@@ -15,7 +19,7 @@ describe('Forget Password API Route', () => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('should send password reset email successfully', async () => {
|
||||
it('should send password reset email successfully with same-origin redirectTo', async () => {
|
||||
setupAuthApiMocks({
|
||||
operations: {
|
||||
forgetPassword: { success: true },
|
||||
@@ -24,7 +28,7 @@ describe('Forget Password API Route', () => {
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
email: 'test@example.com',
|
||||
redirectTo: 'https://example.com/reset',
|
||||
redirectTo: 'https://app.example.com/reset',
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
@@ -39,12 +43,36 @@ describe('Forget Password API Route', () => {
|
||||
expect(auth.auth.api.forgetPassword).toHaveBeenCalledWith({
|
||||
body: {
|
||||
email: 'test@example.com',
|
||||
redirectTo: 'https://example.com/reset',
|
||||
redirectTo: 'https://app.example.com/reset',
|
||||
},
|
||||
method: 'POST',
|
||||
})
|
||||
})
|
||||
|
||||
it('should reject external redirectTo URL', async () => {
|
||||
setupAuthApiMocks({
|
||||
operations: {
|
||||
forgetPassword: { success: true },
|
||||
},
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
email: 'test@example.com',
|
||||
redirectTo: 'https://evil.com/phishing',
|
||||
})
|
||||
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.message).toBe('Redirect URL must be a valid same-origin URL')
|
||||
|
||||
const auth = await import('@/lib/auth')
|
||||
expect(auth.auth.api.forgetPassword).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should send password reset email without redirectTo', async () => {
|
||||
setupAuthApiMocks({
|
||||
operations: {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { auth } from '@/lib/auth'
|
||||
import { isSameOrigin } from '@/lib/core/utils/validation'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -13,10 +14,15 @@ const forgetPasswordSchema = z.object({
|
||||
.email('Please provide a valid email address'),
|
||||
redirectTo: z
|
||||
.string()
|
||||
.url('Redirect URL must be a valid URL')
|
||||
.optional()
|
||||
.or(z.literal(''))
|
||||
.transform((val) => (val === '' ? undefined : val)),
|
||||
.transform((val) => (val === '' || val === undefined ? undefined : val))
|
||||
.refine(
|
||||
(val) => val === undefined || (z.string().url().safeParse(val).success && isSameOrigin(val)),
|
||||
{
|
||||
message: 'Redirect URL must be a valid same-origin URL',
|
||||
}
|
||||
),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
|
||||
@@ -11,6 +11,7 @@ import { processInputFileFields } from '@/lib/execution/files'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { ALL_TRIGGER_TYPES } from '@/lib/logs/types'
|
||||
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
||||
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
|
||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||
@@ -30,7 +31,7 @@ const logger = createLogger('WorkflowExecuteAPI')
|
||||
|
||||
const ExecuteWorkflowSchema = z.object({
|
||||
selectedOutputs: z.array(z.string()).optional().default([]),
|
||||
triggerType: z.enum(['api', 'webhook', 'schedule', 'manual', 'chat']).optional(),
|
||||
triggerType: z.enum(ALL_TRIGGER_TYPES).optional(),
|
||||
stream: z.boolean().optional(),
|
||||
useDraftState: z.boolean().optional(),
|
||||
input: z.any().optional(),
|
||||
|
||||
@@ -6,13 +6,14 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { ALL_TRIGGER_TYPES } from '@/lib/logs/types'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
import { MAX_EMAIL_RECIPIENTS, MAX_WORKFLOW_IDS } from '../constants'
|
||||
|
||||
const logger = createLogger('WorkspaceNotificationAPI')
|
||||
|
||||
const levelFilterSchema = z.array(z.enum(['info', 'error']))
|
||||
const triggerFilterSchema = z.array(z.enum(['api', 'webhook', 'schedule', 'manual', 'chat']))
|
||||
const triggerFilterSchema = z.array(z.enum(ALL_TRIGGER_TYPES))
|
||||
|
||||
const alertRuleSchema = z.enum([
|
||||
'consecutive_failures',
|
||||
|
||||
@@ -7,6 +7,7 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { ALL_TRIGGER_TYPES } from '@/lib/logs/types'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
import { MAX_EMAIL_RECIPIENTS, MAX_NOTIFICATIONS_PER_TYPE, MAX_WORKFLOW_IDS } from './constants'
|
||||
|
||||
@@ -14,7 +15,7 @@ const logger = createLogger('WorkspaceNotificationsAPI')
|
||||
|
||||
const notificationTypeSchema = z.enum(['webhook', 'email', 'slack'])
|
||||
const levelFilterSchema = z.array(z.enum(['info', 'error']))
|
||||
const triggerFilterSchema = z.array(z.enum(['api', 'webhook', 'schedule', 'manual', 'chat']))
|
||||
const triggerFilterSchema = z.array(z.enum(ALL_TRIGGER_TYPES))
|
||||
|
||||
const alertRuleSchema = z.enum([
|
||||
'consecutive_failures',
|
||||
@@ -80,7 +81,7 @@ const createNotificationSchema = z
|
||||
workflowIds: z.array(z.string()).max(MAX_WORKFLOW_IDS).default([]),
|
||||
allWorkflows: z.boolean().default(false),
|
||||
levelFilter: levelFilterSchema.default(['info', 'error']),
|
||||
triggerFilter: triggerFilterSchema.default(['api', 'webhook', 'schedule', 'manual', 'chat']),
|
||||
triggerFilter: triggerFilterSchema.default([...ALL_TRIGGER_TYPES]),
|
||||
includeFinalOutput: z.boolean().default(false),
|
||||
includeTraceSpans: z.boolean().default(false),
|
||||
includeRateLimits: z.boolean().default(false),
|
||||
|
||||
@@ -104,6 +104,8 @@ export function SlackChannelSelector({
|
||||
disabled={disabled || channels.length === 0}
|
||||
isLoading={isLoading}
|
||||
error={fetchError}
|
||||
searchable
|
||||
searchPlaceholder='Search channels...'
|
||||
/>
|
||||
{selectedChannel && !fetchError && (
|
||||
<p className='text-[12px] text-[var(--text-muted)]'>
|
||||
|
||||
@@ -22,6 +22,7 @@ import { SlackIcon } from '@/components/icons'
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { ALL_TRIGGER_TYPES, type TriggerType } from '@/lib/logs/types'
|
||||
import { quickValidateEmail } from '@/lib/messaging/email/validation'
|
||||
import {
|
||||
type NotificationSubscription,
|
||||
@@ -43,7 +44,6 @@ const PRIMARY_BUTTON_STYLES =
|
||||
|
||||
type NotificationType = 'webhook' | 'email' | 'slack'
|
||||
type LogLevel = 'info' | 'error'
|
||||
type TriggerType = 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
|
||||
type AlertRule =
|
||||
| 'none'
|
||||
| 'consecutive_failures'
|
||||
@@ -84,7 +84,6 @@ interface NotificationSettingsProps {
|
||||
}
|
||||
|
||||
const LOG_LEVELS: LogLevel[] = ['info', 'error']
|
||||
const TRIGGER_TYPES: TriggerType[] = ['api', 'webhook', 'schedule', 'manual', 'chat']
|
||||
|
||||
function formatAlertConfigLabel(config: {
|
||||
rule: AlertRule
|
||||
@@ -137,7 +136,7 @@ export function NotificationSettings({
|
||||
workflowIds: [] as string[],
|
||||
allWorkflows: true,
|
||||
levelFilter: ['info', 'error'] as LogLevel[],
|
||||
triggerFilter: ['api', 'webhook', 'schedule', 'manual', 'chat'] as TriggerType[],
|
||||
triggerFilter: [...ALL_TRIGGER_TYPES] as TriggerType[],
|
||||
includeFinalOutput: false,
|
||||
includeTraceSpans: false,
|
||||
includeRateLimits: false,
|
||||
@@ -207,7 +206,7 @@ export function NotificationSettings({
|
||||
workflowIds: [],
|
||||
allWorkflows: true,
|
||||
levelFilter: ['info', 'error'],
|
||||
triggerFilter: ['api', 'webhook', 'schedule', 'manual', 'chat'],
|
||||
triggerFilter: [...ALL_TRIGGER_TYPES],
|
||||
includeFinalOutput: false,
|
||||
includeTraceSpans: false,
|
||||
includeRateLimits: false,
|
||||
@@ -768,7 +767,7 @@ export function NotificationSettings({
|
||||
<Combobox
|
||||
options={slackAccounts.map((acc) => ({
|
||||
value: acc.id,
|
||||
label: acc.accountId,
|
||||
label: acc.displayName || 'Slack Workspace',
|
||||
}))}
|
||||
value={formData.slackAccountId}
|
||||
onChange={(value) => {
|
||||
@@ -859,7 +858,7 @@ export function NotificationSettings({
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label className='text-[var(--text-secondary)]'>Trigger Type Filters</Label>
|
||||
<Combobox
|
||||
options={TRIGGER_TYPES.map((trigger) => ({
|
||||
options={ALL_TRIGGER_TYPES.map((trigger) => ({
|
||||
label: trigger.charAt(0).toUpperCase() + trigger.slice(1),
|
||||
value: trigger,
|
||||
}))}
|
||||
|
||||
@@ -101,6 +101,9 @@ const ACTION_VERBS = [
|
||||
'Generated',
|
||||
'Rendering',
|
||||
'Rendered',
|
||||
'Sleeping',
|
||||
'Slept',
|
||||
'Resumed',
|
||||
] as const
|
||||
|
||||
/**
|
||||
@@ -580,6 +583,11 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
(toolCall.state === (ClientToolCallState.executing as any) ||
|
||||
toolCall.state === ('executing' as any))
|
||||
|
||||
const showWake =
|
||||
toolCall.name === 'sleep' &&
|
||||
(toolCall.state === (ClientToolCallState.executing as any) ||
|
||||
toolCall.state === ('executing' as any))
|
||||
|
||||
const handleStateChange = (state: any) => {
|
||||
forceUpdate({})
|
||||
onStateChange?.(state)
|
||||
@@ -1102,6 +1110,37 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
|
||||
Move to Background
|
||||
</Button>
|
||||
</div>
|
||||
) : showWake ? (
|
||||
<div className='mt-[8px]'>
|
||||
<Button
|
||||
onClick={async () => {
|
||||
try {
|
||||
const instance = getClientTool(toolCall.id)
|
||||
// Get elapsed seconds before waking
|
||||
const elapsedSeconds = instance?.getElapsedSeconds?.() || 0
|
||||
// Transition to background state locally so UI updates immediately
|
||||
// Pass elapsed seconds in the result so dynamic text can use it
|
||||
instance?.setState?.((ClientToolCallState as any).background, {
|
||||
result: { _elapsedSeconds: elapsedSeconds },
|
||||
})
|
||||
// Update the tool call params in the store to include elapsed time for display
|
||||
const { updateToolCallParams } = useCopilotStore.getState()
|
||||
updateToolCallParams?.(toolCall.id, { _elapsedSeconds: Math.round(elapsedSeconds) })
|
||||
await instance?.markToolComplete?.(
|
||||
200,
|
||||
`User woke you up after ${Math.round(elapsedSeconds)} seconds`
|
||||
)
|
||||
// Optionally force a re-render; store should sync state from server
|
||||
forceUpdate({})
|
||||
onStateChange?.('background')
|
||||
} catch {}
|
||||
}}
|
||||
variant='primary'
|
||||
title='Wake'
|
||||
>
|
||||
Wake
|
||||
</Button>
|
||||
</div>
|
||||
) : null}
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -90,6 +90,7 @@ export function ShortInput({
|
||||
blockId,
|
||||
triggerId: undefined,
|
||||
isPreview,
|
||||
useWebhookUrl,
|
||||
})
|
||||
|
||||
const wandHook = useWand({
|
||||
|
||||
@@ -74,6 +74,7 @@ export function TriggerSave({
|
||||
blockId,
|
||||
triggerId: effectiveTriggerId,
|
||||
isPreview,
|
||||
useWebhookUrl: true, // to store the webhook url in the store
|
||||
})
|
||||
|
||||
const triggerConfig = useSubBlockStore((state) => state.getValue(blockId, 'triggerConfig'))
|
||||
|
||||
@@ -6,6 +6,61 @@ import { getBlock } from '@/blocks/registry'
|
||||
|
||||
const logger = createLogger('NodeUtilities')
|
||||
|
||||
/**
|
||||
* Estimates block dimensions based on block type.
|
||||
* Uses subblock count to estimate height for blocks that haven't been measured yet.
|
||||
*
|
||||
* @param blockType - The type of block (e.g., 'condition', 'agent')
|
||||
* @returns Estimated width and height for the block
|
||||
*/
|
||||
export function estimateBlockDimensions(blockType: string): { width: number; height: number } {
|
||||
const blockConfig = getBlock(blockType)
|
||||
const subBlockCount = blockConfig?.subBlocks?.length ?? 3
|
||||
// Many subblocks are conditionally rendered (advanced mode, provider-specific, etc.)
|
||||
// Use roughly half the config count as a reasonable estimate, capped between 3-7 rows
|
||||
const estimatedRows = Math.max(3, Math.min(Math.ceil(subBlockCount / 2), 7))
|
||||
const hasErrorRow = blockType !== 'starter' && blockType !== 'response' ? 1 : 0
|
||||
|
||||
const height =
|
||||
BLOCK_DIMENSIONS.HEADER_HEIGHT +
|
||||
BLOCK_DIMENSIONS.WORKFLOW_CONTENT_PADDING +
|
||||
(estimatedRows + hasErrorRow) * BLOCK_DIMENSIONS.WORKFLOW_ROW_HEIGHT
|
||||
|
||||
return {
|
||||
width: BLOCK_DIMENSIONS.FIXED_WIDTH,
|
||||
height: Math.max(height, BLOCK_DIMENSIONS.MIN_HEIGHT),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clamps a position to keep a block fully inside a container's content area.
|
||||
* Content area starts after the header and padding, and ends before the right/bottom padding.
|
||||
*
|
||||
* @param position - Raw position relative to container origin
|
||||
* @param containerDimensions - Container width and height
|
||||
* @param blockDimensions - Block width and height
|
||||
* @returns Clamped position that keeps block inside content area
|
||||
*/
|
||||
export function clampPositionToContainer(
|
||||
position: { x: number; y: number },
|
||||
containerDimensions: { width: number; height: number },
|
||||
blockDimensions: { width: number; height: number }
|
||||
): { x: number; y: number } {
|
||||
const { width: containerWidth, height: containerHeight } = containerDimensions
|
||||
const { width: blockWidth, height: blockHeight } = blockDimensions
|
||||
|
||||
// Content area bounds (where blocks can be placed)
|
||||
const minX = CONTAINER_DIMENSIONS.LEFT_PADDING
|
||||
const minY = CONTAINER_DIMENSIONS.HEADER_HEIGHT + CONTAINER_DIMENSIONS.TOP_PADDING
|
||||
const maxX = containerWidth - CONTAINER_DIMENSIONS.RIGHT_PADDING - blockWidth
|
||||
const maxY = containerHeight - CONTAINER_DIMENSIONS.BOTTOM_PADDING - blockHeight
|
||||
|
||||
return {
|
||||
x: Math.max(minX, Math.min(position.x, Math.max(minX, maxX))),
|
||||
y: Math.max(minY, Math.min(position.y, Math.max(minY, maxY))),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook providing utilities for node position, hierarchy, and dimension calculations
|
||||
*/
|
||||
@@ -21,7 +76,7 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
|
||||
/**
|
||||
* Get the dimensions of a block.
|
||||
* For regular blocks, estimates height based on block config if not yet measured.
|
||||
* For regular blocks, uses stored height or estimates based on block config.
|
||||
*/
|
||||
const getBlockDimensions = useCallback(
|
||||
(blockId: string): { width: number; height: number } => {
|
||||
@@ -41,32 +96,16 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
}
|
||||
}
|
||||
|
||||
// Workflow block nodes have fixed visual width
|
||||
const width = BLOCK_DIMENSIONS.FIXED_WIDTH
|
||||
|
||||
// Prefer deterministic height published by the block component; fallback to estimate
|
||||
let height = block.height
|
||||
|
||||
if (!height) {
|
||||
// Estimate height based on block config's subblock count for more accurate initial sizing
|
||||
// This is critical for subflow containers to size correctly before child blocks are measured
|
||||
const blockConfig = getBlock(block.type)
|
||||
const subBlockCount = blockConfig?.subBlocks?.length ?? 3
|
||||
// Many subblocks are conditionally rendered (advanced mode, provider-specific, etc.)
|
||||
// Use roughly half the config count as a reasonable estimate, capped between 3-7 rows
|
||||
const estimatedRows = Math.max(3, Math.min(Math.ceil(subBlockCount / 2), 7))
|
||||
const hasErrorRow = block.type !== 'starter' && block.type !== 'response' ? 1 : 0
|
||||
|
||||
height =
|
||||
BLOCK_DIMENSIONS.HEADER_HEIGHT +
|
||||
BLOCK_DIMENSIONS.WORKFLOW_CONTENT_PADDING +
|
||||
(estimatedRows + hasErrorRow) * BLOCK_DIMENSIONS.WORKFLOW_ROW_HEIGHT
|
||||
if (block.height) {
|
||||
return {
|
||||
width: BLOCK_DIMENSIONS.FIXED_WIDTH,
|
||||
height: Math.max(block.height, BLOCK_DIMENSIONS.MIN_HEIGHT),
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
width,
|
||||
height: Math.max(height, BLOCK_DIMENSIONS.MIN_HEIGHT),
|
||||
}
|
||||
// Use shared estimation utility for blocks without measured height
|
||||
return estimateBlockDimensions(block.type)
|
||||
},
|
||||
[blocks, isContainerType]
|
||||
)
|
||||
@@ -164,29 +203,36 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
)
|
||||
|
||||
/**
|
||||
* Calculates the relative position of a node to a new parent's content area.
|
||||
* Accounts for header height and padding offsets in container nodes.
|
||||
* Calculates the relative position of a node to a new parent's origin.
|
||||
* React Flow positions children relative to parent origin, so we clamp
|
||||
* to the content area bounds (after header and padding).
|
||||
* @param nodeId ID of the node being repositioned
|
||||
* @param newParentId ID of the new parent
|
||||
* @returns Relative position coordinates {x, y} within the parent's content area
|
||||
* @returns Relative position coordinates {x, y} within the parent
|
||||
*/
|
||||
const calculateRelativePosition = useCallback(
|
||||
(nodeId: string, newParentId: string): { x: number; y: number } => {
|
||||
const nodeAbsPos = getNodeAbsolutePosition(nodeId)
|
||||
const parentAbsPos = getNodeAbsolutePosition(newParentId)
|
||||
const parentNode = getNodes().find((n) => n.id === newParentId)
|
||||
|
||||
// Account for container's header and padding
|
||||
// Children are positioned relative to content area, not container origin
|
||||
const headerHeight = 50
|
||||
const leftPadding = 16
|
||||
const topPadding = 16
|
||||
|
||||
return {
|
||||
x: nodeAbsPos.x - parentAbsPos.x - leftPadding,
|
||||
y: nodeAbsPos.y - parentAbsPos.y - headerHeight - topPadding,
|
||||
// Calculate raw relative position (relative to parent origin)
|
||||
const rawPosition = {
|
||||
x: nodeAbsPos.x - parentAbsPos.x,
|
||||
y: nodeAbsPos.y - parentAbsPos.y,
|
||||
}
|
||||
|
||||
// Get container and block dimensions
|
||||
const containerDimensions = {
|
||||
width: parentNode?.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
|
||||
height: parentNode?.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
|
||||
}
|
||||
const blockDimensions = getBlockDimensions(nodeId)
|
||||
|
||||
// Clamp position to keep block inside content area
|
||||
return clampPositionToContainer(rawPosition, containerDimensions, blockDimensions)
|
||||
},
|
||||
[getNodeAbsolutePosition]
|
||||
[getNodeAbsolutePosition, getNodes, getBlockDimensions]
|
||||
)
|
||||
|
||||
/**
|
||||
@@ -252,7 +298,11 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
*/
|
||||
const calculateLoopDimensions = useCallback(
|
||||
(nodeId: string): { width: number; height: number } => {
|
||||
const childNodes = getNodes().filter((node) => node.parentId === nodeId)
|
||||
// Check both React Flow's node.parentId AND blocks store's data.parentId
|
||||
// This ensures we catch children even if React Flow hasn't re-rendered yet
|
||||
const childNodes = getNodes().filter(
|
||||
(node) => node.parentId === nodeId || blocks[node.id]?.data?.parentId === nodeId
|
||||
)
|
||||
if (childNodes.length === 0) {
|
||||
return {
|
||||
width: CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
|
||||
@@ -265,8 +315,11 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
|
||||
childNodes.forEach((node) => {
|
||||
const { width: nodeWidth, height: nodeHeight } = getBlockDimensions(node.id)
|
||||
maxRight = Math.max(maxRight, node.position.x + nodeWidth)
|
||||
maxBottom = Math.max(maxBottom, node.position.y + nodeHeight)
|
||||
// Use block position from store if available (more up-to-date)
|
||||
const block = blocks[node.id]
|
||||
const position = block?.position || node.position
|
||||
maxRight = Math.max(maxRight, position.x + nodeWidth)
|
||||
maxBottom = Math.max(maxBottom, position.y + nodeHeight)
|
||||
})
|
||||
|
||||
const width = Math.max(
|
||||
@@ -283,7 +336,7 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
|
||||
return { width, height }
|
||||
},
|
||||
[getNodes, getBlockDimensions]
|
||||
[getNodes, getBlockDimensions, blocks]
|
||||
)
|
||||
|
||||
/**
|
||||
|
||||
@@ -18,7 +18,7 @@ import { useShallow } from 'zustand/react/shallow'
|
||||
import type { OAuthConnectEventDetail } from '@/lib/copilot/tools/client/other/oauth-request-access'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { OAuthProvider } from '@/lib/oauth'
|
||||
import { CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
||||
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
||||
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
||||
import { useWorkspacePermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import {
|
||||
@@ -40,6 +40,10 @@ import {
|
||||
useCurrentWorkflow,
|
||||
useNodeUtilities,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import {
|
||||
clampPositionToContainer,
|
||||
estimateBlockDimensions,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities'
|
||||
import { useSocket } from '@/app/workspace/providers/socket-provider'
|
||||
import { getBlock } from '@/blocks'
|
||||
import { isAnnotationOnlyBlock } from '@/executor/constants'
|
||||
@@ -694,17 +698,19 @@ const WorkflowContent = React.memo(() => {
|
||||
return
|
||||
}
|
||||
|
||||
// Calculate position relative to the container's content area
|
||||
// Account for header (50px), left padding (16px), and top padding (16px)
|
||||
const headerHeight = 50
|
||||
const leftPadding = 16
|
||||
const topPadding = 16
|
||||
|
||||
const relativePosition = {
|
||||
x: position.x - containerInfo.loopPosition.x - leftPadding,
|
||||
y: position.y - containerInfo.loopPosition.y - headerHeight - topPadding,
|
||||
// Calculate raw position relative to container origin
|
||||
const rawPosition = {
|
||||
x: position.x - containerInfo.loopPosition.x,
|
||||
y: position.y - containerInfo.loopPosition.y,
|
||||
}
|
||||
|
||||
// Clamp position to keep block inside container's content area
|
||||
const relativePosition = clampPositionToContainer(
|
||||
rawPosition,
|
||||
containerInfo.dimensions,
|
||||
estimateBlockDimensions(data.type)
|
||||
)
|
||||
|
||||
// Capture existing child blocks before adding the new one
|
||||
const existingChildBlocks = Object.values(blocks).filter(
|
||||
(b) => b.data?.parentId === containerInfo.loopId
|
||||
@@ -1910,17 +1916,47 @@ const WorkflowContent = React.memo(() => {
|
||||
})
|
||||
document.body.style.cursor = ''
|
||||
|
||||
// Get the block's current parent (if any)
|
||||
const currentBlock = blocks[node.id]
|
||||
const currentParentId = currentBlock?.data?.parentId
|
||||
|
||||
// Calculate position - clamp if inside a container
|
||||
let finalPosition = node.position
|
||||
if (currentParentId) {
|
||||
// Block is inside a container - clamp position to keep it fully inside
|
||||
const parentNode = getNodes().find((n) => n.id === currentParentId)
|
||||
if (parentNode) {
|
||||
const containerDimensions = {
|
||||
width: parentNode.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
|
||||
height: parentNode.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
|
||||
}
|
||||
const blockDimensions = {
|
||||
width: BLOCK_DIMENSIONS.FIXED_WIDTH,
|
||||
height: Math.max(
|
||||
currentBlock?.height || BLOCK_DIMENSIONS.MIN_HEIGHT,
|
||||
BLOCK_DIMENSIONS.MIN_HEIGHT
|
||||
),
|
||||
}
|
||||
|
||||
finalPosition = clampPositionToContainer(
|
||||
node.position,
|
||||
containerDimensions,
|
||||
blockDimensions
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Emit collaborative position update for the final position
|
||||
// This ensures other users see the smooth final position
|
||||
collaborativeUpdateBlockPosition(node.id, node.position, true)
|
||||
collaborativeUpdateBlockPosition(node.id, finalPosition, true)
|
||||
|
||||
// Record single move entry on drag end to avoid micro-moves
|
||||
const start = getDragStartPosition()
|
||||
if (start && start.id === node.id) {
|
||||
const before = { x: start.x, y: start.y, parentId: start.parentId }
|
||||
const after = {
|
||||
x: node.position.x,
|
||||
y: node.position.y,
|
||||
x: finalPosition.x,
|
||||
y: finalPosition.y,
|
||||
parentId: node.parentId || blocks[node.id]?.data?.parentId,
|
||||
}
|
||||
const moved =
|
||||
|
||||
@@ -3,5 +3,5 @@
|
||||
"name": "Emir Karabeg",
|
||||
"url": "https://x.com/karabegemir",
|
||||
"xHandle": "karabegemir",
|
||||
"avatarUrl": "/studio/authors/emir.png"
|
||||
"avatarUrl": "/studio/authors/emir.jpg"
|
||||
}
|
||||
|
||||
@@ -3,5 +3,5 @@
|
||||
"name": "Siddharth",
|
||||
"url": "https://x.com/sidganesan",
|
||||
"xHandle": "sidganesan",
|
||||
"avatarUrl": "/studio/authors/sid.png"
|
||||
"avatarUrl": "/studio/authors/sid.jpg"
|
||||
}
|
||||
|
||||
@@ -3,5 +3,5 @@
|
||||
"name": "Waleed Latif",
|
||||
"url": "https://x.com/typingwala",
|
||||
"xHandle": "typingwala",
|
||||
"avatarUrl": "/studio/authors/waleed.png"
|
||||
"avatarUrl": "/studio/authors/waleed.jpg"
|
||||
}
|
||||
|
||||
@@ -18,7 +18,7 @@ featured: true
|
||||
draft: false
|
||||
---
|
||||
|
||||

|
||||

|
||||
|
||||
## Why we’re excited
|
||||
|
||||
|
||||
@@ -17,27 +17,32 @@ vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: vi.fn(() => 'test-request-id'),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/execution/isolated-vm', () => ({
|
||||
executeInIsolatedVM: vi.fn(),
|
||||
vi.mock('@/tools', () => ({
|
||||
executeTool: vi.fn(),
|
||||
}))
|
||||
|
||||
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
|
||||
import { executeTool } from '@/tools'
|
||||
|
||||
const mockExecuteInIsolatedVM = executeInIsolatedVM as ReturnType<typeof vi.fn>
|
||||
const mockExecuteTool = executeTool as ReturnType<typeof vi.fn>
|
||||
|
||||
function simulateIsolatedVMExecution(
|
||||
code: string,
|
||||
contextVariables: Record<string, unknown>
|
||||
): { result: unknown; stdout: string; error?: { message: string; name: string } } {
|
||||
/**
|
||||
* Simulates what the function_execute tool does when evaluating condition code
|
||||
*/
|
||||
function simulateConditionExecution(code: string): {
|
||||
success: boolean
|
||||
output?: { result: unknown }
|
||||
error?: string
|
||||
} {
|
||||
try {
|
||||
const fn = new Function(...Object.keys(contextVariables), code)
|
||||
const result = fn(...Object.values(contextVariables))
|
||||
return { result, stdout: '' }
|
||||
// The code is in format: "const context = {...};\nreturn Boolean(...)"
|
||||
// We need to execute it and return the result
|
||||
const fn = new Function(code)
|
||||
const result = fn()
|
||||
return { success: true, output: { result } }
|
||||
} catch (error: any) {
|
||||
return {
|
||||
result: null,
|
||||
stdout: '',
|
||||
error: { message: error.message, name: error.name || 'Error' },
|
||||
success: false,
|
||||
error: error.message,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -143,8 +148,8 @@ describe('ConditionBlockHandler', () => {
|
||||
|
||||
vi.clearAllMocks()
|
||||
|
||||
mockExecuteInIsolatedVM.mockImplementation(async ({ code, contextVariables }) => {
|
||||
return simulateIsolatedVMExecution(code, contextVariables)
|
||||
mockExecuteTool.mockImplementation(async (_toolId: string, params: { code: string }) => {
|
||||
return simulateConditionExecution(params.code)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { BlockOutput } from '@/blocks/types'
|
||||
import { BlockType, CONDITION, DEFAULTS, EDGE } from '@/executor/constants'
|
||||
import type { BlockHandler, ExecutionContext } from '@/executor/types'
|
||||
import type { SerializedBlock } from '@/serializer/types'
|
||||
import { executeTool } from '@/tools'
|
||||
|
||||
const logger = createLogger('ConditionBlockHandler')
|
||||
|
||||
@@ -39,32 +38,38 @@ export async function evaluateConditionExpression(
|
||||
}
|
||||
|
||||
try {
|
||||
const requestId = generateRequestId()
|
||||
const contextSetup = `const context = ${JSON.stringify(evalContext)};`
|
||||
const code = `${contextSetup}\nreturn Boolean(${resolvedConditionValue})`
|
||||
|
||||
const code = `return Boolean(${resolvedConditionValue})`
|
||||
const result = await executeTool(
|
||||
'function_execute',
|
||||
{
|
||||
code,
|
||||
timeout: CONDITION_TIMEOUT_MS,
|
||||
envVars: {},
|
||||
_context: {
|
||||
workflowId: ctx.workflowId,
|
||||
workspaceId: ctx.workspaceId,
|
||||
},
|
||||
},
|
||||
false,
|
||||
false,
|
||||
ctx
|
||||
)
|
||||
|
||||
const result = await executeInIsolatedVM({
|
||||
code,
|
||||
params: {},
|
||||
envVars: {},
|
||||
contextVariables: { context: evalContext },
|
||||
timeoutMs: CONDITION_TIMEOUT_MS,
|
||||
requestId,
|
||||
})
|
||||
|
||||
if (result.error) {
|
||||
logger.error(`Failed to evaluate condition: ${result.error.message}`, {
|
||||
if (!result.success) {
|
||||
logger.error(`Failed to evaluate condition: ${result.error}`, {
|
||||
originalCondition: conditionExpression,
|
||||
resolvedCondition: resolvedConditionValue,
|
||||
evalContext,
|
||||
error: result.error,
|
||||
})
|
||||
throw new Error(
|
||||
`Evaluation error in condition: ${result.error.message}. (Resolved: ${resolvedConditionValue})`
|
||||
`Evaluation error in condition: ${result.error}. (Resolved: ${resolvedConditionValue})`
|
||||
)
|
||||
}
|
||||
|
||||
return Boolean(result.result)
|
||||
return Boolean(result.output?.result)
|
||||
} catch (evalError: any) {
|
||||
logger.error(`Failed to evaluate condition: ${evalError.message}`, {
|
||||
originalCondition: conditionExpression,
|
||||
|
||||
@@ -4,6 +4,7 @@ interface SlackAccount {
|
||||
id: string
|
||||
accountId: string
|
||||
providerId: string
|
||||
displayName?: string
|
||||
}
|
||||
|
||||
interface UseSlackAccountsResult {
|
||||
|
||||
@@ -14,6 +14,7 @@ interface UseWebhookManagementProps {
|
||||
blockId: string
|
||||
triggerId?: string
|
||||
isPreview?: boolean
|
||||
useWebhookUrl?: boolean
|
||||
}
|
||||
|
||||
interface WebhookManagementState {
|
||||
@@ -90,6 +91,7 @@ export function useWebhookManagement({
|
||||
blockId,
|
||||
triggerId,
|
||||
isPreview = false,
|
||||
useWebhookUrl = false,
|
||||
}: UseWebhookManagementProps): WebhookManagementState {
|
||||
const params = useParams()
|
||||
const workflowId = params.workflowId as string
|
||||
@@ -204,9 +206,10 @@ export function useWebhookManagement({
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
loadWebhookOrGenerateUrl()
|
||||
}, [isPreview, triggerId, workflowId, blockId])
|
||||
if (useWebhookUrl) {
|
||||
loadWebhookOrGenerateUrl()
|
||||
}
|
||||
}, [isPreview, triggerId, workflowId, blockId, useWebhookUrl])
|
||||
|
||||
const createWebhook = async (
|
||||
effectiveTriggerId: string | undefined,
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { Check, Copy } from 'lucide-react'
|
||||
import { Code } from '@/components/emcn'
|
||||
|
||||
interface CodeBlockProps {
|
||||
@@ -8,5 +10,36 @@ interface CodeBlockProps {
|
||||
}
|
||||
|
||||
export function CodeBlock({ code, language }: CodeBlockProps) {
|
||||
return <Code.Viewer code={code} showGutter={true} language={language} />
|
||||
const [copied, setCopied] = useState(false)
|
||||
|
||||
const handleCopy = () => {
|
||||
navigator.clipboard.writeText(code)
|
||||
setCopied(true)
|
||||
setTimeout(() => setCopied(false), 2000)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='dark w-full overflow-hidden rounded-md border border-[#2a2a2a] bg-[#1F1F1F] text-sm'>
|
||||
<div className='flex items-center justify-between border-[#2a2a2a] border-b px-4 py-1.5'>
|
||||
<span className='text-[#A3A3A3] text-xs'>{language}</span>
|
||||
<button
|
||||
onClick={handleCopy}
|
||||
className='text-[#A3A3A3] transition-colors hover:text-gray-300'
|
||||
title='Copy code'
|
||||
>
|
||||
{copied ? (
|
||||
<Check className='h-3 w-3' strokeWidth={2} />
|
||||
) : (
|
||||
<Copy className='h-3 w-3' strokeWidth={2} />
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
<Code.Viewer
|
||||
code={code}
|
||||
showGutter
|
||||
language={language}
|
||||
className='[&_pre]:!pb-0 m-0 rounded-none border-0 bg-transparent'
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -67,7 +67,7 @@ export const mdxComponents: MDXRemoteProps['components'] = {
|
||||
a: (props: any) => {
|
||||
const isAnchorLink = props.className?.includes('anchor')
|
||||
if (isAnchorLink) {
|
||||
return <a {...props} />
|
||||
return <a {...props} className={clsx('text-inherit no-underline', props.className)} />
|
||||
}
|
||||
return (
|
||||
<a
|
||||
@@ -113,7 +113,7 @@ export const mdxComponents: MDXRemoteProps['components'] = {
|
||||
const mappedLanguage = languageMap[language.toLowerCase()] || 'javascript'
|
||||
|
||||
return (
|
||||
<div className='my-6'>
|
||||
<div className='not-prose my-6'>
|
||||
<CodeBlock
|
||||
code={typeof codeContent === 'string' ? codeContent.trim() : String(codeContent)}
|
||||
language={mappedLanguage}
|
||||
@@ -129,9 +129,10 @@ export const mdxComponents: MDXRemoteProps['components'] = {
|
||||
<code
|
||||
{...props}
|
||||
className={clsx(
|
||||
'rounded bg-gray-100 px-1.5 py-0.5 font-mono text-[0.9em] text-red-600',
|
||||
'rounded bg-gray-100 px-1.5 py-0.5 font-mono font-normal text-[0.9em] text-red-600',
|
||||
props.className
|
||||
)}
|
||||
style={{ fontWeight: 400 }}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -38,7 +38,9 @@ function slugify(text: string): string {
|
||||
}
|
||||
|
||||
async function scanFrontmatters(): Promise<BlogMeta[]> {
|
||||
if (cachedMeta) return cachedMeta
|
||||
if (cachedMeta) {
|
||||
return cachedMeta
|
||||
}
|
||||
await ensureContentDirs()
|
||||
const entries = await fs.readdir(BLOG_DIR).catch(() => [])
|
||||
const authorsMap = await loadAuthors()
|
||||
|
||||
@@ -33,6 +33,7 @@ export const ToolIds = z.enum([
|
||||
'knowledge_base',
|
||||
'manage_custom_tool',
|
||||
'manage_mcp_tool',
|
||||
'sleep',
|
||||
])
|
||||
export type ToolId = z.infer<typeof ToolIds>
|
||||
|
||||
@@ -252,6 +253,14 @@ export const ToolArgSchemas = {
|
||||
.optional()
|
||||
.describe('Required for add and edit operations. The MCP server configuration.'),
|
||||
}),
|
||||
|
||||
sleep: z.object({
|
||||
seconds: z
|
||||
.number()
|
||||
.min(0)
|
||||
.max(180)
|
||||
.describe('The number of seconds to sleep (0-180, max 3 minutes)'),
|
||||
}),
|
||||
} as const
|
||||
export type ToolArgSchemaMap = typeof ToolArgSchemas
|
||||
|
||||
@@ -318,6 +327,7 @@ export const ToolSSESchemas = {
|
||||
knowledge_base: toolCallSSEFor('knowledge_base', ToolArgSchemas.knowledge_base),
|
||||
manage_custom_tool: toolCallSSEFor('manage_custom_tool', ToolArgSchemas.manage_custom_tool),
|
||||
manage_mcp_tool: toolCallSSEFor('manage_mcp_tool', ToolArgSchemas.manage_mcp_tool),
|
||||
sleep: toolCallSSEFor('sleep', ToolArgSchemas.sleep),
|
||||
} as const
|
||||
export type ToolSSESchemaMap = typeof ToolSSESchemas
|
||||
|
||||
@@ -552,6 +562,11 @@ export const ToolResultSchemas = {
|
||||
serverName: z.string().optional(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
sleep: z.object({
|
||||
success: z.boolean(),
|
||||
seconds: z.number(),
|
||||
message: z.string().optional(),
|
||||
}),
|
||||
} as const
|
||||
export type ToolResultSchemaMap = typeof ToolResultSchemas
|
||||
|
||||
|
||||
144
apps/sim/lib/copilot/tools/client/other/sleep.ts
Normal file
@@ -0,0 +1,144 @@
|
||||
import { Loader2, MinusCircle, Moon, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
/** Maximum sleep duration in seconds (3 minutes) */
|
||||
const MAX_SLEEP_SECONDS = 180
|
||||
|
||||
/** Track sleep start times for calculating elapsed time on wake */
|
||||
const sleepStartTimes: Record<string, number> = {}
|
||||
|
||||
interface SleepArgs {
|
||||
seconds?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Format seconds into a human-readable duration string
|
||||
*/
|
||||
function formatDuration(seconds: number): string {
|
||||
if (seconds >= 60) {
|
||||
return `${Math.round(seconds / 60)} minute${seconds >= 120 ? 's' : ''}`
|
||||
}
|
||||
return `${seconds} second${seconds !== 1 ? 's' : ''}`
|
||||
}
|
||||
|
||||
export class SleepClientTool extends BaseClientTool {
|
||||
static readonly id = 'sleep'
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, SleepClientTool.id, SleepClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Preparing to sleep', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Sleeping', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Sleeping', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Finished sleeping', icon: Moon },
|
||||
[ClientToolCallState.error]: { text: 'Sleep interrupted', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Sleep skipped', icon: MinusCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Sleep aborted', icon: MinusCircle },
|
||||
[ClientToolCallState.background]: { text: 'Resumed', icon: Moon },
|
||||
},
|
||||
// No interrupt - auto-execute immediately
|
||||
getDynamicText: (params, state) => {
|
||||
const seconds = params?.seconds
|
||||
if (typeof seconds === 'number' && seconds > 0) {
|
||||
const displayTime = formatDuration(seconds)
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Slept for ${displayTime}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.pending:
|
||||
return `Sleeping for ${displayTime}`
|
||||
case ClientToolCallState.generating:
|
||||
return `Preparing to sleep for ${displayTime}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to sleep for ${displayTime}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped sleeping for ${displayTime}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted sleeping for ${displayTime}`
|
||||
case ClientToolCallState.background: {
|
||||
// Calculate elapsed time from when sleep started
|
||||
const elapsedSeconds = params?._elapsedSeconds
|
||||
if (typeof elapsedSeconds === 'number' && elapsedSeconds > 0) {
|
||||
return `Resumed after ${formatDuration(Math.round(elapsedSeconds))}`
|
||||
}
|
||||
return 'Resumed early'
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* Get elapsed seconds since sleep started
|
||||
*/
|
||||
getElapsedSeconds(): number {
|
||||
const startTime = sleepStartTimes[this.toolCallId]
|
||||
if (!startTime) return 0
|
||||
return (Date.now() - startTime) / 1000
|
||||
}
|
||||
|
||||
async handleReject(): Promise<void> {
|
||||
await super.handleReject()
|
||||
this.setState(ClientToolCallState.rejected)
|
||||
}
|
||||
|
||||
async handleAccept(args?: SleepArgs): Promise<void> {
|
||||
const logger = createLogger('SleepClientTool')
|
||||
|
||||
// Use a timeout slightly longer than max sleep (3 minutes + buffer)
|
||||
const timeoutMs = (MAX_SLEEP_SECONDS + 30) * 1000
|
||||
|
||||
await this.executeWithTimeout(async () => {
|
||||
const params = args || {}
|
||||
logger.debug('handleAccept() called', {
|
||||
toolCallId: this.toolCallId,
|
||||
state: this.getState(),
|
||||
hasArgs: !!args,
|
||||
seconds: params.seconds,
|
||||
})
|
||||
|
||||
// Validate and clamp seconds
|
||||
let seconds = typeof params.seconds === 'number' ? params.seconds : 0
|
||||
if (seconds < 0) seconds = 0
|
||||
if (seconds > MAX_SLEEP_SECONDS) seconds = MAX_SLEEP_SECONDS
|
||||
|
||||
logger.debug('Starting sleep', { seconds })
|
||||
|
||||
// Track start time for elapsed calculation
|
||||
sleepStartTimes[this.toolCallId] = Date.now()
|
||||
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
try {
|
||||
// Sleep for the specified duration
|
||||
await new Promise((resolve) => setTimeout(resolve, seconds * 1000))
|
||||
|
||||
logger.debug('Sleep completed successfully')
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, `Slept for ${seconds} seconds`)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
logger.error('Sleep failed', { error: message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, message)
|
||||
} finally {
|
||||
// Clean up start time tracking
|
||||
delete sleepStartTimes[this.toolCallId]
|
||||
}
|
||||
}, timeoutMs)
|
||||
}
|
||||
|
||||
async execute(args?: SleepArgs): Promise<void> {
|
||||
// Auto-execute without confirmation - go straight to executing
|
||||
await this.handleAccept(args)
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
|
||||
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
|
||||
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
|
||||
import { getAllBlocks, getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
@@ -49,6 +50,8 @@ type SkippedItemType =
|
||||
| 'invalid_block_type'
|
||||
| 'invalid_edge_target'
|
||||
| 'invalid_edge_source'
|
||||
| 'invalid_source_handle'
|
||||
| 'invalid_target_handle'
|
||||
| 'invalid_subblock_field'
|
||||
| 'missing_required_params'
|
||||
| 'invalid_subflow_parent'
|
||||
@@ -733,8 +736,279 @@ function normalizeResponseFormat(value: any): string {
  }
}

interface EdgeHandleValidationResult {
  valid: boolean
  error?: string
}

/**
 * Validates that a source handle is valid for the source block's type
 * before a connection is added as an edge.
 */
function validateSourceHandleForBlock(
  sourceHandle: string,
  sourceBlockType: string,
  sourceBlock: any
): EdgeHandleValidationResult {
  if (sourceHandle === 'error') {
    return { valid: true }
  }

  switch (sourceBlockType) {
    case 'loop':
      if (sourceHandle === 'loop-start-source' || sourceHandle === 'loop-end-source') {
        return { valid: true }
      }
      return {
        valid: false,
        error: `Invalid source handle "${sourceHandle}" for loop block. Valid handles: loop-start-source, loop-end-source, error`,
      }

    case 'parallel':
      if (sourceHandle === 'parallel-start-source' || sourceHandle === 'parallel-end-source') {
        return { valid: true }
      }
      return {
        valid: false,
        error: `Invalid source handle "${sourceHandle}" for parallel block. Valid handles: parallel-start-source, parallel-end-source, error`,
      }

    case 'condition': {
      if (!sourceHandle.startsWith('condition-')) {
        return {
          valid: false,
          error: `Invalid source handle "${sourceHandle}" for condition block. Must start with "condition-"`,
        }
      }

      const conditionsValue = sourceBlock?.subBlocks?.conditions?.value
      if (!conditionsValue) {
        return {
          valid: false,
          error: `Invalid condition handle "${sourceHandle}" - no conditions defined`,
        }
      }

      return validateConditionHandle(sourceHandle, sourceBlock.id, conditionsValue)
    }

    case 'router':
      if (sourceHandle === 'source' || sourceHandle.startsWith('router-')) {
        return { valid: true }
      }
      return {
        valid: false,
        error: `Invalid source handle "${sourceHandle}" for router block. Valid handles: source, router-{targetId}, error`,
      }

    default:
      if (sourceHandle === 'source') {
        return { valid: true }
      }
      return {
        valid: false,
        error: `Invalid source handle "${sourceHandle}" for ${sourceBlockType} block. Valid handles: source, error`,
      }
  }
}

/**
 * Validates that a condition handle references a valid condition in the block.
 * Accepts both internal IDs (condition-<conditionId>) and semantic keys (condition-<blockId>-else-if)
 */
function validateConditionHandle(
  sourceHandle: string,
  blockId: string,
  conditionsValue: string | any[]
): EdgeHandleValidationResult {
  let conditions: any[]
  if (typeof conditionsValue === 'string') {
    try {
      conditions = JSON.parse(conditionsValue)
    } catch {
      return {
        valid: false,
        error: `Cannot validate condition handle "${sourceHandle}" - conditions is not valid JSON`,
      }
    }
  } else if (Array.isArray(conditionsValue)) {
    conditions = conditionsValue
  } else {
    return {
      valid: false,
      error: `Cannot validate condition handle "${sourceHandle}" - conditions is not an array`,
    }
  }

  if (!Array.isArray(conditions) || conditions.length === 0) {
    return {
      valid: false,
      error: `Invalid condition handle "${sourceHandle}" - no conditions defined`,
    }
  }

  const validHandles = new Set<string>()
  const semanticPrefix = `condition-${blockId}-`
  let elseIfCount = 0

  for (const condition of conditions) {
    if (condition.id) {
      validHandles.add(`condition-${condition.id}`)
    }

    const title = condition.title?.toLowerCase()
    if (title === 'if') {
      validHandles.add(`${semanticPrefix}if`)
    } else if (title === 'else if') {
      elseIfCount++
      validHandles.add(
        elseIfCount === 1 ? `${semanticPrefix}else-if` : `${semanticPrefix}else-if-${elseIfCount}`
      )
    } else if (title === 'else') {
      validHandles.add(`${semanticPrefix}else`)
    }
  }

  if (validHandles.has(sourceHandle)) {
    return { valid: true }
  }

  const validOptions = Array.from(validHandles).slice(0, 5)
  const moreCount = validHandles.size - validOptions.length
  let validOptionsStr = validOptions.join(', ')
  if (moreCount > 0) {
    validOptionsStr += `, ... and ${moreCount} more`
  }

  return {
    valid: false,
    error: `Invalid condition handle "${sourceHandle}". Valid handles: ${validOptionsStr}`,
  }
}

/**
 * Validates target handle is valid (must be 'target')
 */
function validateTargetHandle(targetHandle: string): EdgeHandleValidationResult {
  if (targetHandle === 'target') {
    return { valid: true }
  }
  return {
    valid: false,
    error: `Invalid target handle "${targetHandle}". Expected "target"`,
  }
}
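To make the semantic handle scheme above concrete, the sketch below rebuilds the set of handles the validator would accept for a sample condition block. It is a standalone illustration of the naming convention (`condition-<conditionId>` plus `condition-<blockId>-if`, `-else-if`, `-else-if-2`, `-else`), not an import of the repo function; the sample condition shape is an assumption.

```ts
// Illustrative only: mirrors the handle-naming convention used by validateConditionHandle.
interface ConditionRow {
  id?: string
  title?: string
}

function semanticConditionHandles(blockId: string, conditions: ConditionRow[]): Set<string> {
  const handles = new Set<string>()
  const prefix = `condition-${blockId}-`
  let elseIfCount = 0

  for (const condition of conditions) {
    if (condition.id) handles.add(`condition-${condition.id}`)
    const title = condition.title?.toLowerCase()
    if (title === 'if') handles.add(`${prefix}if`)
    else if (title === 'else if') {
      elseIfCount++
      handles.add(elseIfCount === 1 ? `${prefix}else-if` : `${prefix}else-if-${elseIfCount}`)
    } else if (title === 'else') handles.add(`${prefix}else`)
  }
  return handles
}

// Example: a condition block with if / else if / else branches
console.log(
  semanticConditionHandles('cond1', [
    { id: 'a1', title: 'if' },
    { id: 'b2', title: 'else if' },
    { id: 'c3', title: 'else' },
  ])
)
// -> Set { 'condition-a1', 'condition-cond1-if', 'condition-b2',
//          'condition-cond1-else-if', 'condition-c3', 'condition-cond1-else' }
```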
/**
|
||||
* Creates a validated edge between two blocks.
|
||||
* Returns true if edge was created, false if skipped due to validation errors.
|
||||
*/
|
||||
function createValidatedEdge(
|
||||
modifiedState: any,
|
||||
sourceBlockId: string,
|
||||
targetBlockId: string,
|
||||
sourceHandle: string,
|
||||
targetHandle: string,
|
||||
operationType: string,
|
||||
logger: ReturnType<typeof createLogger>,
|
||||
skippedItems?: SkippedItem[]
|
||||
): boolean {
|
||||
if (!modifiedState.blocks[targetBlockId]) {
|
||||
logger.warn(`Target block "${targetBlockId}" not found. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
sourceHandle,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_edge_target',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: `Edge from "${sourceBlockId}" to "${targetBlockId}" skipped - target block does not exist`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
const sourceBlock = modifiedState.blocks[sourceBlockId]
|
||||
if (!sourceBlock) {
|
||||
logger.warn(`Source block "${sourceBlockId}" not found. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_edge_source',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: `Edge from "${sourceBlockId}" to "${targetBlockId}" skipped - source block does not exist`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
const sourceBlockType = sourceBlock.type
|
||||
if (!sourceBlockType) {
|
||||
logger.warn(`Source block "${sourceBlockId}" has no type. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_edge_source',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: `Edge from "${sourceBlockId}" to "${targetBlockId}" skipped - source block has no type`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
const sourceValidation = validateSourceHandleForBlock(sourceHandle, sourceBlockType, sourceBlock)
|
||||
if (!sourceValidation.valid) {
|
||||
logger.warn(`Invalid source handle. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
sourceHandle,
|
||||
error: sourceValidation.error,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_source_handle',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: sourceValidation.error || `Invalid source handle "${sourceHandle}"`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
const targetValidation = validateTargetHandle(targetHandle)
|
||||
if (!targetValidation.valid) {
|
||||
logger.warn(`Invalid target handle. Edge skipped.`, {
|
||||
sourceBlockId,
|
||||
targetBlockId,
|
||||
targetHandle,
|
||||
error: targetValidation.error,
|
||||
})
|
||||
skippedItems?.push({
|
||||
type: 'invalid_target_handle',
|
||||
operationType,
|
||||
blockId: sourceBlockId,
|
||||
reason: targetValidation.error || `Invalid target handle "${targetHandle}"`,
|
||||
details: { sourceHandle, targetHandle, targetId: targetBlockId },
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
modifiedState.edges.push({
|
||||
id: crypto.randomUUID(),
|
||||
source: sourceBlockId,
|
||||
sourceHandle,
|
||||
target: targetBlockId,
|
||||
targetHandle,
|
||||
type: 'default',
|
||||
})
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds connections as edges for a block
|
||||
*/
|
||||
function addConnectionsAsEdges(
|
||||
modifiedState: any,
|
||||
@@ -746,34 +1020,16 @@ function addConnectionsAsEdges(
|
||||
Object.entries(connections).forEach(([sourceHandle, targets]) => {
|
||||
const targetArray = Array.isArray(targets) ? targets : [targets]
|
||||
targetArray.forEach((targetId: string) => {
|
||||
// Validate target block exists - skip edge if target doesn't exist
|
||||
if (!modifiedState.blocks[targetId]) {
|
||||
logger.warn(
|
||||
`Target block "${targetId}" not found when creating connection from "${blockId}". ` +
|
||||
`Edge skipped.`,
|
||||
{
|
||||
sourceBlockId: blockId,
|
||||
targetBlockId: targetId,
|
||||
existingBlocks: Object.keys(modifiedState.blocks),
|
||||
}
|
||||
)
|
||||
skippedItems?.push({
|
||||
type: 'invalid_edge_target',
|
||||
operationType: 'add_edge',
|
||||
blockId: blockId,
|
||||
reason: `Edge from "${blockId}" to "${targetId}" skipped - target block does not exist`,
|
||||
details: { sourceHandle, targetId },
|
||||
})
|
||||
return
|
||||
}
|
||||
modifiedState.edges.push({
|
||||
id: crypto.randomUUID(),
|
||||
source: blockId,
|
||||
createValidatedEdge(
|
||||
modifiedState,
|
||||
blockId,
|
||||
targetId,
|
||||
sourceHandle,
|
||||
target: targetId,
|
||||
targetHandle: 'target',
|
||||
type: 'default',
|
||||
})
|
||||
'target',
|
||||
'add_edge',
|
||||
logger,
|
||||
skippedItems
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -850,13 +1106,18 @@ function applyOperationsToWorkflowState(
|
||||
* Reorder operations to ensure correct execution sequence:
|
||||
* 1. delete - Remove blocks first to free up IDs and clean state
|
||||
* 2. extract_from_subflow - Extract blocks from subflows before modifications
|
||||
* 3. add - Create new blocks so they exist before being referenced
|
||||
* 3. add - Create new blocks (sorted by connection dependencies)
|
||||
* 4. insert_into_subflow - Insert blocks into subflows (sorted by parent dependency)
|
||||
* 5. edit - Edit existing blocks last, so connections to newly added blocks work
|
||||
*
|
||||
* This ordering is CRITICAL: edit operations may reference blocks being added
|
||||
* in the same batch (e.g., connecting block A to newly added block B).
|
||||
* Without proper ordering, the target block wouldn't exist yet.
|
||||
* This ordering is CRITICAL: operations may reference blocks being added/inserted
|
||||
* in the same batch. Without proper ordering, target blocks wouldn't exist yet.
|
||||
*
|
||||
* For add operations, we use a two-pass approach:
|
||||
* - Pass 1: Create all blocks (without connections)
|
||||
* - Pass 2: Add all connections (now all blocks exist)
|
||||
* This ensures that if block A connects to block B, and both are being added,
|
||||
* B will exist when we try to create the edge from A to B.
|
||||
*/
|
||||
const deletes = operations.filter((op) => op.operation_type === 'delete')
|
||||
const extracts = operations.filter((op) => op.operation_type === 'extract_from_subflow')
|
||||
@@ -868,6 +1129,8 @@ function applyOperationsToWorkflowState(
|
||||
// This handles cases where a loop/parallel is being added along with its children
|
||||
const sortedInserts = topologicalSortInserts(inserts, adds)
|
||||
|
||||
// We'll process add operations in two passes (handled in the switch statement below)
|
||||
// This is tracked via a separate flag to know which pass we're in
|
||||
const orderedOperations: EditWorkflowOperation[] = [
|
||||
...deletes,
|
||||
...extracts,
|
||||
@@ -877,15 +1140,46 @@ function applyOperationsToWorkflowState(
|
||||
]
|
||||
|
||||
logger.info('Operations after reordering:', {
|
||||
order: orderedOperations.map(
|
||||
totalOperations: orderedOperations.length,
|
||||
deleteCount: deletes.length,
|
||||
extractCount: extracts.length,
|
||||
addCount: adds.length,
|
||||
insertCount: sortedInserts.length,
|
||||
editCount: edits.length,
|
||||
operationOrder: orderedOperations.map(
|
||||
(op) =>
|
||||
`${op.operation_type}:${op.block_id}${op.params?.subflowId ? `(parent:${op.params.subflowId})` : ''}`
|
||||
),
|
||||
})
|
||||
|
||||
// Two-pass processing for add operations:
|
||||
// Pass 1: Create all blocks (without connections)
|
||||
// Pass 2: Add all connections (all blocks now exist)
|
||||
const addOperationsWithConnections: Array<{
|
||||
blockId: string
|
||||
connections: Record<string, any>
|
||||
}> = []
|
||||
|
||||
for (const operation of orderedOperations) {
|
||||
const { operation_type, block_id, params } = operation
|
||||
|
||||
// CRITICAL: Validate block_id is a valid string and not "undefined"
|
||||
// This prevents undefined keys from being set in the workflow state
|
||||
if (!isValidKey(block_id)) {
|
||||
logSkippedItem(skippedItems, {
|
||||
type: 'missing_required_params',
|
||||
operationType: operation_type,
|
||||
blockId: String(block_id || 'invalid'),
|
||||
reason: `Invalid block_id "${block_id}" (type: ${typeof block_id}) - operation skipped. Block IDs must be valid non-empty strings.`,
|
||||
})
|
||||
logger.error('Invalid block_id detected in operation', {
|
||||
operation_type,
|
||||
block_id,
|
||||
block_id_type: typeof block_id,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
logger.debug(`Executing operation: ${operation_type} for block ${block_id}`, {
|
||||
params: params ? Object.keys(params) : [],
|
||||
currentBlockCount: Object.keys(modifiedState.blocks).length,
|
||||
@@ -1128,6 +1422,22 @@ function applyOperationsToWorkflowState(
|
||||
|
||||
// Add new nested blocks
|
||||
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
|
||||
// Validate childId is a valid string
|
||||
if (!isValidKey(childId)) {
|
||||
logSkippedItem(skippedItems, {
|
||||
type: 'missing_required_params',
|
||||
operationType: 'add_nested_node',
|
||||
blockId: String(childId || 'invalid'),
|
||||
reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
|
||||
})
|
||||
logger.error('Invalid childId detected in nestedNodes', {
|
||||
parentBlockId: block_id,
|
||||
childId,
|
||||
childId_type: typeof childId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const childBlockState = createBlockFromParams(
|
||||
childId,
|
||||
childBlock,
|
||||
@@ -1202,67 +1512,44 @@ function applyOperationsToWorkflowState(
|
||||
|
||||
// Handle connections update (convert to edges)
|
||||
if (params?.connections) {
|
||||
// Remove existing edges from this block
|
||||
modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)
|
||||
|
||||
// Add new edges based on connections
|
||||
Object.entries(params.connections).forEach(([connectionType, targets]) => {
|
||||
if (targets === null) return
|
||||
|
||||
// Map semantic connection names to actual React Flow handle IDs
|
||||
// 'success' in YAML/connections maps to 'source' handle in React Flow
|
||||
const mapConnectionTypeToHandle = (type: string): string => {
|
||||
if (type === 'success') return 'source'
|
||||
if (type === 'error') return 'error'
|
||||
// Conditions and other types pass through as-is
|
||||
return type
|
||||
}
|
||||
|
||||
const actualSourceHandle = mapConnectionTypeToHandle(connectionType)
|
||||
const sourceHandle = mapConnectionTypeToHandle(connectionType)
|
||||
|
||||
const addEdge = (targetBlock: string, targetHandle?: string) => {
|
||||
// Validate target block exists - skip edge if target doesn't exist
|
||||
if (!modifiedState.blocks[targetBlock]) {
|
||||
logger.warn(
|
||||
`Target block "${targetBlock}" not found when creating connection from "${block_id}". ` +
|
||||
`Edge skipped.`,
|
||||
{
|
||||
sourceBlockId: block_id,
|
||||
targetBlockId: targetBlock,
|
||||
existingBlocks: Object.keys(modifiedState.blocks),
|
||||
}
|
||||
)
|
||||
logSkippedItem(skippedItems, {
|
||||
type: 'invalid_edge_target',
|
||||
operationType: 'edit',
|
||||
blockId: block_id,
|
||||
reason: `Edge from "${block_id}" to "${targetBlock}" skipped - target block does not exist`,
|
||||
details: { sourceHandle: actualSourceHandle, targetId: targetBlock },
|
||||
})
|
||||
return
|
||||
}
|
||||
modifiedState.edges.push({
|
||||
id: crypto.randomUUID(),
|
||||
source: block_id,
|
||||
sourceHandle: actualSourceHandle,
|
||||
target: targetBlock,
|
||||
targetHandle: targetHandle || 'target',
|
||||
type: 'default',
|
||||
})
|
||||
const addEdgeForTarget = (targetBlock: string, targetHandle?: string) => {
|
||||
createValidatedEdge(
|
||||
modifiedState,
|
||||
block_id,
|
||||
targetBlock,
|
||||
sourceHandle,
|
||||
targetHandle || 'target',
|
||||
'edit',
|
||||
logger,
|
||||
skippedItems
|
||||
)
|
||||
}
|
||||
|
||||
if (typeof targets === 'string') {
|
||||
addEdge(targets)
|
||||
addEdgeForTarget(targets)
|
||||
} else if (Array.isArray(targets)) {
|
||||
targets.forEach((target: any) => {
|
||||
if (typeof target === 'string') {
|
||||
addEdge(target)
|
||||
addEdgeForTarget(target)
|
||||
} else if (target?.block) {
|
||||
addEdge(target.block, target.handle)
|
||||
addEdgeForTarget(target.block, target.handle)
|
||||
}
|
||||
})
|
||||
} else if (typeof targets === 'object' && (targets as any)?.block) {
|
||||
addEdge((targets as any).block, (targets as any).handle)
|
||||
addEdgeForTarget((targets as any).block, (targets as any).handle)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1360,6 +1647,22 @@ function applyOperationsToWorkflowState(
|
||||
// Handle nested nodes (for loops/parallels created from scratch)
|
||||
if (params.nestedNodes) {
|
||||
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
|
||||
// Validate childId is a valid string
|
||||
if (!isValidKey(childId)) {
|
||||
logSkippedItem(skippedItems, {
|
||||
type: 'missing_required_params',
|
||||
operationType: 'add_nested_node',
|
||||
blockId: String(childId || 'invalid'),
|
||||
reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
|
||||
})
|
||||
logger.error('Invalid childId detected in nestedNodes', {
|
||||
parentBlockId: block_id,
|
||||
childId,
|
||||
childId_type: typeof childId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const childBlockState = createBlockFromParams(
|
||||
childId,
|
||||
childBlock,
|
||||
@@ -1368,21 +1671,22 @@ function applyOperationsToWorkflowState(
|
||||
)
|
||||
modifiedState.blocks[childId] = childBlockState
|
||||
|
||||
// Defer connection processing to ensure all blocks exist first
|
||||
if (childBlock.connections) {
|
||||
addConnectionsAsEdges(
|
||||
modifiedState,
|
||||
childId,
|
||||
childBlock.connections,
|
||||
logger,
|
||||
skippedItems
|
||||
)
|
||||
addOperationsWithConnections.push({
|
||||
blockId: childId,
|
||||
connections: childBlock.connections,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Add connections as edges
|
||||
// Defer connection processing to ensure all blocks exist first (pass 2)
|
||||
if (params.connections) {
|
||||
addConnectionsAsEdges(modifiedState, block_id, params.connections, logger, skippedItems)
|
||||
addOperationsWithConnections.push({
|
||||
blockId: block_id,
|
||||
connections: params.connections,
|
||||
})
|
||||
}
|
||||
break
|
||||
}
|
||||
@@ -1506,13 +1810,18 @@ function applyOperationsToWorkflowState(
|
||||
modifiedState.blocks[block_id] = newBlock
|
||||
}
|
||||
|
||||
// Add/update connections as edges
|
||||
// Defer connection processing to ensure all blocks exist first
|
||||
// This is particularly important when multiple blocks are being inserted
|
||||
// and they have connections to each other
|
||||
if (params.connections) {
|
||||
// Remove existing edges from this block
|
||||
// Remove existing edges from this block first
|
||||
modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)
|
||||
|
||||
// Add new connections
|
||||
addConnectionsAsEdges(modifiedState, block_id, params.connections, logger, skippedItems)
|
||||
// Add to deferred connections list
|
||||
addOperationsWithConnections.push({
|
||||
blockId: block_id,
|
||||
connections: params.connections,
|
||||
})
|
||||
}
|
||||
break
|
||||
}
|
||||
@@ -1562,6 +1871,34 @@ function applyOperationsToWorkflowState(
    }
  }

  // Pass 2: Add all deferred connections from add/insert operations
  // Now all blocks exist (from add, insert, and edit operations), so connections can be safely created
  // This ensures that if block A connects to block B, and both are being added/inserted,
  // B will exist when we create the edge from A to B
  if (addOperationsWithConnections.length > 0) {
    logger.info('Processing deferred connections from add/insert operations', {
      deferredConnectionCount: addOperationsWithConnections.length,
      totalBlocks: Object.keys(modifiedState.blocks).length,
    })

    for (const { blockId, connections } of addOperationsWithConnections) {
      // Verify the source block still exists (it might have been deleted by a later operation)
      if (!modifiedState.blocks[blockId]) {
        logger.warn('Source block no longer exists for deferred connection', {
          blockId,
          availableBlocks: Object.keys(modifiedState.blocks),
        })
        continue
      }

      addConnectionsAsEdges(modifiedState, blockId, connections, logger, skippedItems)
    }

    logger.info('Finished processing deferred connections', {
      totalEdges: modifiedState.edges.length,
    })
  }

  // Regenerate loops and parallels after modifications
  modifiedState.loops = generateLoopBlocks(modifiedState.blocks)
  modifiedState.parallels = generateParallelBlocks(modifiedState.blocks)
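The deferred-connection logic above follows a two-pass shape: materialize every block first, then create edges once all targets exist. A minimal standalone sketch of that idea, with simplified block and edge types that are assumptions rather than the repo's real state shape:

```ts
// Simplified two-pass construction: blocks first, then edges.
interface PendingBlock {
  id: string
  connections?: Record<string, string[]> // handle -> target block ids
}

interface Edge {
  source: string
  sourceHandle: string
  target: string
}

function buildState(pending: PendingBlock[]) {
  const blocks: Record<string, PendingBlock> = {}
  const edges: Edge[] = []

  // Pass 1: create every block so forward references can resolve
  for (const block of pending) {
    blocks[block.id] = block
  }

  // Pass 2: now that all blocks exist, wire up connections
  for (const block of pending) {
    for (const [sourceHandle, targets] of Object.entries(block.connections ?? {})) {
      for (const target of targets) {
        if (!blocks[target]) continue // skip edges to blocks that were never created
        edges.push({ source: block.id, sourceHandle, target })
      }
    }
  }

  return { blocks, edges }
}

// A connects forward to B even though B is declared later in the batch
const { edges } = buildState([
  { id: 'A', connections: { source: ['B'] } },
  { id: 'B' },
])
console.log(edges) // [{ source: 'A', sourceHandle: 'source', target: 'B' }]
```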
@@ -37,8 +37,28 @@ export const isEmailVerificationEnabled = isTruthy(env.EMAIL_VERIFICATION_ENABLE

/**
 * Is authentication disabled (for self-hosted deployments behind private networks)
 * This flag is blocked when isHosted is true.
 */
export const isAuthDisabled = isTruthy(env.DISABLE_AUTH)
export const isAuthDisabled = isTruthy(env.DISABLE_AUTH) && !isHosted

if (isTruthy(env.DISABLE_AUTH)) {
  import('@/lib/logs/console/logger')
    .then(({ createLogger }) => {
      const logger = createLogger('FeatureFlags')
      if (isHosted) {
        logger.error(
          'DISABLE_AUTH is set but ignored on hosted environment. Authentication remains enabled for security.'
        )
      } else {
        logger.warn(
          'DISABLE_AUTH is enabled. Authentication is bypassed and all requests use an anonymous session. Only use this in trusted private networks.'
        )
      }
    })
    .catch(() => {
      // Fallback during config compilation when logger is unavailable
    })
}

/**
 * Is user registration disabled
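The change above gates an environment toggle behind a deployment check so a dangerous flag cannot take effect in the hosted product. A hedged, generic sketch of the same guard; the `isTruthy` helper and both environment variable names here are stand-ins, not the project's actual configuration:

```ts
// Generic sketch of an env flag that is only honored when self-hosting.
const isTruthy = (value: string | undefined): boolean =>
  value === 'true' || value === '1'

const isHosted = isTruthy(process.env.IS_HOSTED) // assumption: hosted-deployment marker
const authDisabledRequested = isTruthy(process.env.DISABLE_AUTH)

// The flag only takes effect outside the hosted environment.
export const isAuthDisabled = authDisabledRequested && !isHosted

if (authDisabledRequested && isHosted) {
  console.error('DISABLE_AUTH is ignored on hosted deployments; auth stays on.')
}
```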
@@ -31,20 +31,25 @@ vi.mock('crypto', () => ({
  }),
}))

vi.mock('@/lib/core/config/env', () => ({
  env: {
    ENCRYPTION_KEY: '0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef',
    OPENAI_API_KEY_1: 'test-openai-key-1',
    OPENAI_API_KEY_2: 'test-openai-key-2',
    OPENAI_API_KEY_3: 'test-openai-key-3',
    ANTHROPIC_API_KEY_1: 'test-anthropic-key-1',
    ANTHROPIC_API_KEY_2: 'test-anthropic-key-2',
    ANTHROPIC_API_KEY_3: 'test-anthropic-key-3',
    GEMINI_API_KEY_1: 'test-gemini-key-1',
    GEMINI_API_KEY_2: 'test-gemini-key-2',
    GEMINI_API_KEY_3: 'test-gemini-key-3',
  },
}))
vi.mock('@/lib/core/config/env', async (importOriginal) => {
  const actual = await importOriginal<typeof import('@/lib/core/config/env')>()
  return {
    ...actual,
    env: {
      ...actual.env,
      ENCRYPTION_KEY: '0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef', // fake key for testing
      OPENAI_API_KEY_1: 'test-openai-key-1', // fake key for testing
      OPENAI_API_KEY_2: 'test-openai-key-2', // fake key for testing
      OPENAI_API_KEY_3: 'test-openai-key-3', // fake key for testing
      ANTHROPIC_API_KEY_1: 'test-anthropic-key-1', // fake key for testing
      ANTHROPIC_API_KEY_2: 'test-anthropic-key-2', // fake key for testing
      ANTHROPIC_API_KEY_3: 'test-anthropic-key-3', // fake key for testing
      GEMINI_API_KEY_1: 'test-gemini-key-1', // fake key for testing
      GEMINI_API_KEY_2: 'test-gemini-key-2', // fake key for testing
      GEMINI_API_KEY_3: 'test-gemini-key-3', // fake key for testing
    },
  }
})

afterEach(() => {
  vi.clearAllMocks()
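The updated mock keeps the real `env` object and only overrides the keys the test needs, which avoids silently dropping unrelated variables. The same Vitest partial-mock pattern in isolation, against a hypothetical module path:

```ts
import { vi } from 'vitest'

// Partial mock: start from the real module, then override specific fields.
// '@/lib/example/config' and SOME_API_KEY are hypothetical names for illustration.
vi.mock('@/lib/example/config', async (importOriginal) => {
  const actual = await importOriginal<typeof import('@/lib/example/config')>()
  return {
    ...actual,
    env: {
      ...actual.env,
      SOME_API_KEY: 'test-key', // fake value for testing only
    },
  }
})
```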
@@ -1,3 +1,22 @@
import { getBaseUrl } from './urls'

/**
 * Checks if a URL is same-origin with the application's base URL.
 * Used to prevent open redirect vulnerabilities.
 *
 * @param url - The URL to validate
 * @returns True if the URL is same-origin, false otherwise (secure default)
 */
export function isSameOrigin(url: string): boolean {
  try {
    const targetUrl = new URL(url)
    const appUrl = new URL(getBaseUrl())
    return targetUrl.origin === appUrl.origin
  } catch {
    return false
  }
}

/**
 * Validates a name by removing any characters that could cause issues
 * with variable references or node naming.
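A plausible use of the new helper is validating user-supplied redirect targets before following them; anything cross-origin or unparseable falls back to a safe internal path. The resolver name, fallback route, and example URLs below are assumptions for illustration:

```ts
// Sketch: only follow redirects that stay on our own origin.
function isSameOrigin(url: string, baseUrl: string): boolean {
  try {
    return new URL(url).origin === new URL(baseUrl).origin
  } catch {
    return false // unparseable input is treated as unsafe
  }
}

function resolveRedirect(requested: string | null, baseUrl: string): string {
  if (requested && isSameOrigin(requested, baseUrl)) {
    return requested
  }
  return '/workspace' // hypothetical safe default
}

console.log(resolveRedirect('https://evil.example/phish', 'https://app.example.com')) // '/workspace'
console.log(resolveRedirect('https://app.example.com/settings', 'https://app.example.com')) // allowed
```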
@@ -81,8 +81,8 @@ export async function emitWorkflowExecutionCompleted(log: WorkflowExecutionLog):
  )

  for (const subscription of subscriptions) {
    const levelMatches = subscription.levelFilter?.includes(log.level) ?? true
    const triggerMatches = subscription.triggerFilter?.includes(log.trigger) ?? true
    const levelMatches = subscription.levelFilter.includes(log.level)
    const triggerMatches = subscription.triggerFilter.includes(log.trigger)

    if (!levelMatches || !triggerMatches) {
      logger.debug(`Skipping subscription ${subscription.id} due to filter mismatch`)
@@ -98,6 +98,7 @@ export async function emitWorkflowExecutionCompleted(log: WorkflowExecutionLog):
      status: log.level === 'error' ? 'error' : 'success',
      durationMs: log.totalDurationMs || 0,
      cost: (log.cost as { total?: number })?.total || 0,
      triggerFilter: subscription.triggerFilter,
    }

    const shouldAlert = await shouldTriggerAlert(alertConfig, context, subscription.lastAlertAt)

@@ -51,8 +51,11 @@ export interface ExecutionEnvironment {
  workspaceId: string
}

export const ALL_TRIGGER_TYPES = ['api', 'webhook', 'schedule', 'manual', 'chat'] as const
export type TriggerType = (typeof ALL_TRIGGER_TYPES)[number]

export interface ExecutionTrigger {
  type: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' | string
  type: TriggerType | string
  source: string
  data?: Record<string, unknown>
  timestamp: string
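Deriving `TriggerType` from `ALL_TRIGGER_TYPES` keeps the runtime list and the type in lockstep; a type guard can then narrow arbitrary strings read from logs or query parameters. A small sketch of that pattern, where the guard name is an assumption rather than an existing export:

```ts
export const ALL_TRIGGER_TYPES = ['api', 'webhook', 'schedule', 'manual', 'chat'] as const
export type TriggerType = (typeof ALL_TRIGGER_TYPES)[number] // 'api' | 'webhook' | ...

// Hypothetical narrowing helper for values read from logs or query params.
export function isTriggerType(value: string): value is TriggerType {
  return (ALL_TRIGGER_TYPES as readonly string[]).includes(value)
}

const raw = 'webhook'
if (isTriggerType(raw)) {
  const trigger: TriggerType = raw // narrowed, no cast needed
  console.log(trigger)
}
```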
@@ -1,6 +1,6 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflowExecutionLogs } from '@sim/db/schema'
|
||||
import { and, avg, count, desc, eq, gte } from 'drizzle-orm'
|
||||
import { and, avg, count, desc, eq, gte, inArray } from 'drizzle-orm'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('AlertRules')
|
||||
@@ -135,25 +135,29 @@ export function isInCooldown(lastAlertAt: Date | null): boolean {
|
||||
return new Date() < cooldownEnd
|
||||
}
|
||||
|
||||
/**
|
||||
* Context passed to alert check functions
|
||||
*/
|
||||
export interface AlertCheckContext {
|
||||
workflowId: string
|
||||
executionId: string
|
||||
status: 'success' | 'error'
|
||||
durationMs: number
|
||||
cost: number
|
||||
triggerFilter: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if consecutive failures threshold is met
|
||||
*/
|
||||
async function checkConsecutiveFailures(workflowId: string, threshold: number): Promise<boolean> {
|
||||
async function checkConsecutiveFailures(
|
||||
workflowId: string,
|
||||
threshold: number,
|
||||
triggerFilter: string[]
|
||||
): Promise<boolean> {
|
||||
const recentLogs = await db
|
||||
.select({ level: workflowExecutionLogs.level })
|
||||
.from(workflowExecutionLogs)
|
||||
.where(eq(workflowExecutionLogs.workflowId, workflowId))
|
||||
.where(
|
||||
and(
|
||||
eq(workflowExecutionLogs.workflowId, workflowId),
|
||||
inArray(workflowExecutionLogs.trigger, triggerFilter)
|
||||
)
|
||||
)
|
||||
.orderBy(desc(workflowExecutionLogs.createdAt))
|
||||
.limit(threshold)
|
||||
|
||||
@@ -162,13 +166,11 @@ async function checkConsecutiveFailures(workflowId: string, threshold: number):
|
||||
return recentLogs.every((log) => log.level === 'error')
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if failure rate exceeds threshold
|
||||
*/
|
||||
async function checkFailureRate(
|
||||
workflowId: string,
|
||||
ratePercent: number,
|
||||
windowHours: number
|
||||
windowHours: number,
|
||||
triggerFilter: string[]
|
||||
): Promise<boolean> {
|
||||
const windowStart = new Date(Date.now() - windowHours * 60 * 60 * 1000)
|
||||
|
||||
@@ -181,7 +183,8 @@ async function checkFailureRate(
|
||||
.where(
|
||||
and(
|
||||
eq(workflowExecutionLogs.workflowId, workflowId),
|
||||
gte(workflowExecutionLogs.createdAt, windowStart)
|
||||
gte(workflowExecutionLogs.createdAt, windowStart),
|
||||
inArray(workflowExecutionLogs.trigger, triggerFilter)
|
||||
)
|
||||
)
|
||||
.orderBy(workflowExecutionLogs.createdAt)
|
||||
@@ -206,14 +209,12 @@ function checkLatencyThreshold(durationMs: number, thresholdMs: number): boolean
|
||||
return durationMs > thresholdMs
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if execution duration is significantly above average
|
||||
*/
|
||||
async function checkLatencySpike(
|
||||
workflowId: string,
|
||||
currentDurationMs: number,
|
||||
spikePercent: number,
|
||||
windowHours: number
|
||||
windowHours: number,
|
||||
triggerFilter: string[]
|
||||
): Promise<boolean> {
|
||||
const windowStart = new Date(Date.now() - windowHours * 60 * 60 * 1000)
|
||||
|
||||
@@ -226,7 +227,8 @@ async function checkLatencySpike(
|
||||
.where(
|
||||
and(
|
||||
eq(workflowExecutionLogs.workflowId, workflowId),
|
||||
gte(workflowExecutionLogs.createdAt, windowStart)
|
||||
gte(workflowExecutionLogs.createdAt, windowStart),
|
||||
inArray(workflowExecutionLogs.trigger, triggerFilter)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -248,13 +250,11 @@ function checkCostThreshold(cost: number, thresholdDollars: number): boolean {
|
||||
return cost > thresholdDollars
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if error count exceeds threshold within window
|
||||
*/
|
||||
async function checkErrorCount(
|
||||
workflowId: string,
|
||||
threshold: number,
|
||||
windowHours: number
|
||||
windowHours: number,
|
||||
triggerFilter: string[]
|
||||
): Promise<boolean> {
|
||||
const windowStart = new Date(Date.now() - windowHours * 60 * 60 * 1000)
|
||||
|
||||
@@ -265,7 +265,8 @@ async function checkErrorCount(
|
||||
and(
|
||||
eq(workflowExecutionLogs.workflowId, workflowId),
|
||||
eq(workflowExecutionLogs.level, 'error'),
|
||||
gte(workflowExecutionLogs.createdAt, windowStart)
|
||||
gte(workflowExecutionLogs.createdAt, windowStart),
|
||||
inArray(workflowExecutionLogs.trigger, triggerFilter)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -273,9 +274,6 @@ async function checkErrorCount(
|
||||
return errorCount >= threshold
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluates if an alert should be triggered based on the configuration
|
||||
*/
|
||||
export async function shouldTriggerAlert(
|
||||
config: AlertConfig,
|
||||
context: AlertCheckContext,
|
||||
@@ -287,16 +285,21 @@ export async function shouldTriggerAlert(
|
||||
}
|
||||
|
||||
const { rule } = config
|
||||
const { workflowId, status, durationMs, cost } = context
|
||||
const { workflowId, status, durationMs, cost, triggerFilter } = context
|
||||
|
||||
switch (rule) {
|
||||
case 'consecutive_failures':
|
||||
if (status !== 'error') return false
|
||||
return checkConsecutiveFailures(workflowId, config.consecutiveFailures!)
|
||||
return checkConsecutiveFailures(workflowId, config.consecutiveFailures!, triggerFilter)
|
||||
|
||||
case 'failure_rate':
|
||||
if (status !== 'error') return false
|
||||
return checkFailureRate(workflowId, config.failureRatePercent!, config.windowHours!)
|
||||
return checkFailureRate(
|
||||
workflowId,
|
||||
config.failureRatePercent!,
|
||||
config.windowHours!,
|
||||
triggerFilter
|
||||
)
|
||||
|
||||
case 'latency_threshold':
|
||||
return checkLatencyThreshold(durationMs, config.durationThresholdMs!)
|
||||
@@ -306,19 +309,24 @@ export async function shouldTriggerAlert(
|
||||
workflowId,
|
||||
durationMs,
|
||||
config.latencySpikePercent!,
|
||||
config.windowHours!
|
||||
config.windowHours!,
|
||||
triggerFilter
|
||||
)
|
||||
|
||||
case 'cost_threshold':
|
||||
return checkCostThreshold(cost, config.costThresholdDollars!)
|
||||
|
||||
case 'no_activity':
|
||||
// no_activity alerts are handled by the hourly polling job, not execution events
|
||||
return false
|
||||
|
||||
case 'error_count':
|
||||
if (status !== 'error') return false
|
||||
return checkErrorCount(workflowId, config.errorCountThreshold!, config.windowHours!)
|
||||
return checkErrorCount(
|
||||
workflowId,
|
||||
config.errorCountThreshold!,
|
||||
config.windowHours!,
|
||||
triggerFilter
|
||||
)
|
||||
|
||||
default:
|
||||
logger.warn(`Unknown alert rule: ${rule}`)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
workflow,
|
||||
workflowDeploymentVersion,
|
||||
workflowExecutionLogs,
|
||||
workspaceNotificationDelivery,
|
||||
workspaceNotificationSubscription,
|
||||
@@ -9,15 +10,81 @@ import { and, eq, gte, inArray, sql } from 'drizzle-orm'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { TRIGGER_TYPES } from '@/lib/workflows/triggers/triggers'
|
||||
import {
|
||||
executeNotificationDelivery,
|
||||
workspaceNotificationDeliveryTask,
|
||||
} from '@/background/workspace-notification-delivery'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import type { AlertConfig } from './alert-rules'
|
||||
import { isInCooldown } from './alert-rules'
|
||||
|
||||
const logger = createLogger('InactivityPolling')
|
||||
|
||||
const SCHEDULE_BLOCK_TYPES: string[] = [TRIGGER_TYPES.SCHEDULE]
|
||||
const WEBHOOK_BLOCK_TYPES: string[] = [TRIGGER_TYPES.WEBHOOK, TRIGGER_TYPES.GENERIC_WEBHOOK]
|
||||
|
||||
function deploymentHasTriggerType(
|
||||
deploymentState: Pick<WorkflowState, 'blocks'>,
|
||||
triggerFilter: string[]
|
||||
): boolean {
|
||||
const blocks = deploymentState.blocks
|
||||
if (!blocks) return false
|
||||
|
||||
const alwaysAvailable = ['api', 'manual', 'chat']
|
||||
if (triggerFilter.some((t) => alwaysAvailable.includes(t))) {
|
||||
return true
|
||||
}
|
||||
|
||||
for (const block of Object.values(blocks)) {
|
||||
if (triggerFilter.includes('schedule') && SCHEDULE_BLOCK_TYPES.includes(block.type)) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (triggerFilter.includes('webhook')) {
|
||||
if (WEBHOOK_BLOCK_TYPES.includes(block.type)) {
|
||||
return true
|
||||
}
|
||||
if (block.triggerMode === true) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
async function getWorkflowsWithTriggerTypes(
|
||||
workspaceId: string,
|
||||
triggerFilter: string[]
|
||||
): Promise<Set<string>> {
|
||||
const workflowIds = new Set<string>()
|
||||
|
||||
const deployedWorkflows = await db
|
||||
.select({
|
||||
workflowId: workflow.id,
|
||||
deploymentState: workflowDeploymentVersion.state,
|
||||
})
|
||||
.from(workflow)
|
||||
.innerJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(and(eq(workflow.workspaceId, workspaceId), eq(workflow.isDeployed, true)))
|
||||
|
||||
for (const w of deployedWorkflows) {
|
||||
const state = w.deploymentState as WorkflowState | null
|
||||
if (state && deploymentHasTriggerType(state, triggerFilter)) {
|
||||
workflowIds.add(w.workflowId)
|
||||
}
|
||||
}
|
||||
|
||||
return workflowIds
|
||||
}
|
||||
|
||||
interface InactivityCheckResult {
|
||||
subscriptionId: string
|
||||
workflowId: string
|
||||
@@ -25,9 +92,6 @@ interface InactivityCheckResult {
|
||||
reason?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks a single workflow for inactivity and triggers notification if needed
|
||||
*/
|
||||
async function checkWorkflowInactivity(
|
||||
subscription: typeof workspaceNotificationSubscription.$inferSelect,
|
||||
workflowId: string,
|
||||
@@ -141,9 +205,6 @@ async function checkWorkflowInactivity(
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Polls all active no_activity subscriptions and triggers alerts as needed
|
||||
*/
|
||||
export async function pollInactivityAlerts(): Promise<{
|
||||
total: number
|
||||
triggered: number
|
||||
@@ -179,19 +240,30 @@ export async function pollInactivityAlerts(): Promise<{
|
||||
continue
|
||||
}
|
||||
|
||||
const triggerFilter = subscription.triggerFilter as string[]
|
||||
if (!triggerFilter || triggerFilter.length === 0) {
|
||||
logger.warn(`Subscription ${subscription.id} has no trigger filter, skipping`)
|
||||
continue
|
||||
}
|
||||
|
||||
const eligibleWorkflowIds = await getWorkflowsWithTriggerTypes(
|
||||
subscription.workspaceId,
|
||||
triggerFilter
|
||||
)
|
||||
|
||||
let workflowIds: string[] = []
|
||||
|
||||
if (subscription.allWorkflows) {
|
||||
const workflows = await db
|
||||
.select({ id: workflow.id })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.workspaceId, subscription.workspaceId))
|
||||
|
||||
workflowIds = workflows.map((w) => w.id)
|
||||
workflowIds = Array.from(eligibleWorkflowIds)
|
||||
} else {
|
||||
workflowIds = subscription.workflowIds || []
|
||||
workflowIds = (subscription.workflowIds || []).filter((id) => eligibleWorkflowIds.has(id))
|
||||
}
|
||||
|
||||
logger.debug(`Checking ${workflowIds.length} workflows for subscription ${subscription.id}`, {
|
||||
triggerFilter,
|
||||
eligibleCount: eligibleWorkflowIds.size,
|
||||
})
|
||||
|
||||
for (const workflowId of workflowIds) {
|
||||
const result = await checkWorkflowInactivity(subscription, workflowId, alertConfig)
|
||||
results.push(result)
|
||||
|
||||
@@ -81,7 +81,11 @@ async function formatTeamsGraphNotification(
  foundWorkflow: any,
  request: NextRequest
): Promise<any> {
  const notification = body.value[0]
  const notification = body.value?.[0]
  if (!notification) {
    logger.warn('Received empty Teams notification body')
    return null
  }
  const changeType = notification.changeType || 'created'
  const resource = notification.resource || ''
  const subscriptionId = notification.subscriptionId || ''
@@ -2,6 +2,7 @@ import type { Edge } from 'reactflow'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { BlockWithDiff } from '@/lib/workflows/diff/types'
|
||||
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
|
||||
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
@@ -537,6 +538,17 @@ export class WorkflowDiffEngine {
|
||||
|
||||
// First pass: build ID mappings
|
||||
for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
|
||||
// CRITICAL: Skip invalid block IDs to prevent "undefined" keys in workflow state
|
||||
if (!isValidKey(proposedId)) {
|
||||
logger.error('Invalid proposedId detected in proposed state', {
|
||||
proposedId,
|
||||
proposedId_type: typeof proposedId,
|
||||
blockType: proposedBlock?.type,
|
||||
blockName: proposedBlock?.name,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
const key = `${proposedBlock.type}:${proposedBlock.name}`
|
||||
|
||||
// Check if this block exists in current state by type:name
|
||||
@@ -552,7 +564,31 @@ export class WorkflowDiffEngine {
|
||||
|
||||
// Second pass: build final blocks with mapped IDs
|
||||
for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
|
||||
// CRITICAL: Skip invalid block IDs to prevent "undefined" keys in workflow state
|
||||
if (!isValidKey(proposedId)) {
|
||||
logger.error('Invalid proposedId detected in proposed state (second pass)', {
|
||||
proposedId,
|
||||
proposedId_type: typeof proposedId,
|
||||
blockType: proposedBlock?.type,
|
||||
blockName: proposedBlock?.name,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
const finalId = idMap[proposedId]
|
||||
|
||||
// CRITICAL: Validate finalId before using as key
|
||||
if (!isValidKey(finalId)) {
|
||||
logger.error('Invalid finalId generated from idMap', {
|
||||
proposedId,
|
||||
finalId,
|
||||
finalId_type: typeof finalId,
|
||||
blockType: proposedBlock?.type,
|
||||
blockName: proposedBlock?.name,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
const key = `${proposedBlock.type}:${proposedBlock.name}`
|
||||
const existingBlock = existingBlockMap[key]?.block
|
||||
|
||||
@@ -617,6 +653,8 @@ export class WorkflowDiffEngine {
|
||||
const { generateLoopBlocks, generateParallelBlocks } = await import(
|
||||
'@/stores/workflows/workflow/utils'
|
||||
)
|
||||
|
||||
// Build the proposed state
|
||||
const finalProposedState: WorkflowState = {
|
||||
blocks: finalBlocks,
|
||||
edges: finalEdges,
|
||||
@@ -625,6 +663,9 @@ export class WorkflowDiffEngine {
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
|
||||
// Use the proposed state directly - validation happens at the source
|
||||
const fullyCleanedState = finalProposedState
|
||||
|
||||
// Transfer block heights from baseline workflow for better measurements in diff view
|
||||
// If editing on top of diff, this transfers from the diff (which already has good heights)
|
||||
// Otherwise transfers from original workflow
|
||||
@@ -694,7 +735,7 @@ export class WorkflowDiffEngine {
|
||||
'@/lib/workflows/autolayout/constants'
|
||||
)
|
||||
|
||||
const layoutedBlocks = applyTargetedLayout(finalBlocks, finalProposedState.edges, {
|
||||
const layoutedBlocks = applyTargetedLayout(finalBlocks, fullyCleanedState.edges, {
|
||||
changedBlockIds: impactedBlockArray,
|
||||
horizontalSpacing: DEFAULT_HORIZONTAL_SPACING,
|
||||
verticalSpacing: DEFAULT_VERTICAL_SPACING,
|
||||
@@ -742,7 +783,7 @@ export class WorkflowDiffEngine {
|
||||
|
||||
const layoutResult = applyNativeAutoLayout(
|
||||
finalBlocks,
|
||||
finalProposedState.edges,
|
||||
fullyCleanedState.edges,
|
||||
DEFAULT_LAYOUT_OPTIONS
|
||||
)
|
||||
|
||||
@@ -824,7 +865,7 @@ export class WorkflowDiffEngine {
|
||||
})
|
||||
|
||||
// Create edge identifiers for proposed state
|
||||
finalEdges.forEach((edge) => {
|
||||
fullyCleanedState.edges.forEach((edge) => {
|
||||
const edgeId = `${edge.source}-${edge.sourceHandle || 'source'}-${edge.target}-${edge.targetHandle || 'target'}`
|
||||
proposedEdgeSet.add(edgeId)
|
||||
})
|
||||
@@ -863,21 +904,21 @@ export class WorkflowDiffEngine {
|
||||
}
|
||||
}
|
||||
|
||||
// Apply diff markers to blocks
|
||||
// Apply diff markers to blocks in the fully cleaned state
|
||||
if (computed) {
|
||||
for (const id of computed.new_blocks || []) {
|
||||
if (finalBlocks[id]) {
|
||||
finalBlocks[id].is_diff = 'new'
|
||||
if (fullyCleanedState.blocks[id]) {
|
||||
;(fullyCleanedState.blocks[id] as any).is_diff = 'new'
|
||||
}
|
||||
}
|
||||
for (const id of computed.edited_blocks || []) {
|
||||
if (finalBlocks[id]) {
|
||||
finalBlocks[id].is_diff = 'edited'
|
||||
if (fullyCleanedState.blocks[id]) {
|
||||
;(fullyCleanedState.blocks[id] as any).is_diff = 'edited'
|
||||
|
||||
// Also mark specific subblocks that changed
|
||||
if (computed.field_diffs?.[id]) {
|
||||
const fieldDiff = computed.field_diffs[id]
|
||||
const block = finalBlocks[id]
|
||||
const block = fullyCleanedState.blocks[id]
|
||||
|
||||
// Apply diff markers to changed subblocks
|
||||
for (const changedField of fieldDiff.changed_fields) {
|
||||
@@ -889,12 +930,12 @@ export class WorkflowDiffEngine {
|
||||
}
|
||||
}
|
||||
}
|
||||
// Note: We don't remove deleted blocks from finalBlocks, just mark them
|
||||
// Note: We don't remove deleted blocks from fullyCleanedState, just mark them
|
||||
}
|
||||
|
||||
// Store the diff
|
||||
// Store the diff with the fully sanitized state
|
||||
this.currentDiff = {
|
||||
proposedState: finalProposedState,
|
||||
proposedState: fullyCleanedState,
|
||||
diffAnalysis: computed,
|
||||
metadata: {
|
||||
source: 'workflow_state',
|
||||
@@ -903,10 +944,10 @@ export class WorkflowDiffEngine {
|
||||
}
|
||||
|
||||
logger.info('Successfully created diff from workflow state', {
|
||||
blockCount: Object.keys(finalProposedState.blocks).length,
|
||||
edgeCount: finalProposedState.edges.length,
|
||||
hasLoops: Object.keys(finalProposedState.loops || {}).length > 0,
|
||||
hasParallels: Object.keys(finalProposedState.parallels || {}).length > 0,
|
||||
blockCount: Object.keys(fullyCleanedState.blocks).length,
|
||||
edgeCount: fullyCleanedState.edges.length,
|
||||
hasLoops: Object.keys(fullyCleanedState.loops || {}).length > 0,
|
||||
hasParallels: Object.keys(fullyCleanedState.parallels || {}).length > 0,
|
||||
newBlocks: computed?.new_blocks?.length || 0,
|
||||
editedBlocks: computed?.edited_blocks?.length || 0,
|
||||
deletedBlocks: computed?.deleted_blocks?.length || 0,
|
||||
@@ -1096,6 +1137,17 @@ export function stripWorkflowDiffMarkers(state: WorkflowState): WorkflowState {
|
||||
const cleanBlocks: Record<string, BlockState> = {}
|
||||
|
||||
for (const [blockId, block] of Object.entries(state.blocks || {})) {
|
||||
// Validate block ID at the source - skip invalid IDs
|
||||
if (!isValidKey(blockId)) {
|
||||
logger.error('Invalid blockId detected in stripWorkflowDiffMarkers', {
|
||||
blockId,
|
||||
blockId_type: typeof blockId,
|
||||
blockType: block?.type,
|
||||
blockName: block?.name,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
const cleanBlock: BlockState = structuredClone(block)
|
||||
const blockWithDiff = cleanBlock as BlockState & BlockWithDiff
|
||||
blockWithDiff.is_diff = undefined
|
||||
|
||||
9
apps/sim/lib/workflows/sanitization/key-validation.ts
Normal file
@@ -0,0 +1,9 @@
/**
 * Checks if a key is valid (not undefined, null, empty, or literal "undefined"/"null")
 * Use this to validate BEFORE setting a dynamic key on any object.
 */
export function isValidKey(key: unknown): key is string {
  return (
    !!key && typeof key === 'string' && key !== 'undefined' && key !== 'null' && key.trim() !== ''
  )
}
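A likely call site for `isValidKey` is anywhere a dynamic key derived from model output or user input is about to be written into a record, so a literal `"undefined"` never becomes an object key. A short sketch; the record shape and helper name are illustrative:

```ts
export function isValidKey(key: unknown): key is string {
  return (
    !!key && typeof key === 'string' && key !== 'undefined' && key !== 'null' && key.trim() !== ''
  )
}

// Illustrative guard before a dynamic assignment
const blocks: Record<string, { name: string }> = {}

function addBlock(blockId: unknown, name: string): boolean {
  if (!isValidKey(blockId)) {
    console.warn('Skipping block with invalid id', { blockId })
    return false
  }
  blocks[blockId] = { name }
  return true
}

addBlock(undefined, 'broken') // skipped: would have created an "undefined" key
addBlock('agent-1', 'Agent') // ok
```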
@@ -84,6 +84,7 @@ const nextConfig: NextConfig = {
  ],
  outputFileTracingIncludes: {
    '/api/tools/stagehand/*': ['./node_modules/ws/**/*'],
    '/*': ['./node_modules/sharp/**/*', './node_modules/@img/**/*'],
  },
  experimental: {
    optimizeCss: true,
BIN apps/sim/public/studio/authors/emir.jpg (Normal file) | After: 32 KiB | Before: 2.0 MiB
BIN apps/sim/public/studio/authors/sid.jpg (Normal file) | After: 349 KiB | Before: 123 KiB
BIN apps/sim/public/studio/authors/waleed.jpg (Normal file) | After: 33 KiB | Before: 2.4 MiB
BIN apps/sim/public/studio/series-a/team.jpg (Normal file) | After: 515 KiB | Before: 10 MiB
@@ -32,6 +32,7 @@ import { SearchDocumentationClientTool } from '@/lib/copilot/tools/client/other/
import { SearchErrorsClientTool } from '@/lib/copilot/tools/client/other/search-errors'
import { SearchOnlineClientTool } from '@/lib/copilot/tools/client/other/search-online'
import { SearchPatternsClientTool } from '@/lib/copilot/tools/client/other/search-patterns'
import { SleepClientTool } from '@/lib/copilot/tools/client/other/sleep'
import { createExecutionContext, getTool } from '@/lib/copilot/tools/client/registry'
import { GetCredentialsClientTool } from '@/lib/copilot/tools/client/user/get-credentials'
import { SetEnvironmentVariablesClientTool } from '@/lib/copilot/tools/client/user/set-environment-variables'
@@ -104,6 +105,7 @@ const CLIENT_TOOL_INSTANTIATORS: Record<string, (id: string) => any> = {
  navigate_ui: (id) => new NavigateUIClientTool(id),
  manage_custom_tool: (id) => new ManageCustomToolClientTool(id),
  manage_mcp_tool: (id) => new ManageMcpToolClientTool(id),
  sleep: (id) => new SleepClientTool(id),
}

// Read-only static metadata for class-based tools (no instances)
@@ -141,6 +143,7 @@ export const CLASS_TOOL_METADATA: Record<string, BaseClientToolMetadata | undefi
  navigate_ui: (NavigateUIClientTool as any)?.metadata,
  manage_custom_tool: (ManageCustomToolClientTool as any)?.metadata,
  manage_mcp_tool: (ManageMcpToolClientTool as any)?.metadata,
  sleep: (SleepClientTool as any)?.metadata,
}

function ensureClientToolInstance(toolName: string | undefined, toolCallId: string | undefined) {
@@ -2260,6 +2263,22 @@ export const useCopilotStore = create<CopilotStore>()(
        set({ toolCallsById: map })
      } catch {}
    },

    updateToolCallParams: (toolCallId: string, params: Record<string, any>) => {
      try {
        if (!toolCallId) return
        const map = { ...get().toolCallsById }
        const current = map[toolCallId]
        if (!current) return
        const updatedParams = { ...current.params, ...params }
        map[toolCallId] = {
          ...current,
          params: updatedParams,
          display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams),
        }
        set({ toolCallsById: map })
      } catch {}
    },
    updatePreviewToolCallState: (
      toolCallState: 'accepted' | 'rejected' | 'error',
      toolCallId?: string

@@ -178,6 +178,7 @@ export interface CopilotActions {
    toolCallId?: string
  ) => void
  setToolCallState: (toolCall: any, newState: ClientToolCallState, options?: any) => void
  updateToolCallParams: (toolCallId: string, params: Record<string, any>) => void
  sendDocsMessage: (query: string, options?: { stream?: boolean; topK?: number }) => Promise<void>
  saveChatMessages: (chatId: string) => Promise<void>
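The new `updateToolCallParams` action follows the usual Zustand pattern of copying the map, merging params immutably, and recomputing derived display state. A reduced standalone sketch of that merge; the `ToolCall` shape and the `renderDisplay` helper are assumptions, not the store's real types:

```ts
interface ToolCall {
  name: string
  state: string
  params: Record<string, unknown>
  display?: string
}

// Stand-in for the store's display resolver.
const renderDisplay = (call: ToolCall) => `${call.name}(${Object.keys(call.params).join(', ')})`

function updateToolCallParams(
  toolCallsById: Record<string, ToolCall>,
  toolCallId: string,
  params: Record<string, unknown>
): Record<string, ToolCall> {
  const current = toolCallsById[toolCallId]
  if (!current) return toolCallsById

  const updatedParams = { ...current.params, ...params }
  const updated: ToolCall = { ...current, params: updatedParams }
  updated.display = renderDisplay(updated)

  // Return a new map so store subscribers see the change.
  return { ...toolCallsById, [toolCallId]: updated }
}

const next = updateToolCallParams(
  { t1: { name: 'sleep', state: 'pending', params: { seconds: 5 } } },
  't1',
  { seconds: 10 }
)
console.log(next.t1.params) // { seconds: 10 }
```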
@@ -1,39 +1,12 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceCreateAccountParams,
|
||||
SalesforceCreateAccountResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('SalesforceCreateAccount')
|
||||
|
||||
export interface SalesforceCreateAccountParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
name: string
|
||||
type?: string
|
||||
industry?: string
|
||||
phone?: string
|
||||
website?: string
|
||||
billingStreet?: string
|
||||
billingCity?: string
|
||||
billingState?: string
|
||||
billingPostalCode?: string
|
||||
billingCountry?: string
|
||||
description?: string
|
||||
annualRevenue?: string
|
||||
numberOfEmployees?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateAccountResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_account'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateAccountTool: ToolConfig<
|
||||
SalesforceCreateAccountParams,
|
||||
SalesforceCreateAccountResponse
|
||||
|
||||
@@ -1,30 +1,9 @@
|
||||
import type {
|
||||
SalesforceCreateCaseParams,
|
||||
SalesforceCreateCaseResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceCreateCaseParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
subject: string
|
||||
status?: string
|
||||
priority?: string
|
||||
origin?: string
|
||||
contactId?: string
|
||||
accountId?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateCaseResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_case'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateCaseTool: ToolConfig<
|
||||
SalesforceCreateCaseParams,
|
||||
|
||||
@@ -1,38 +1,13 @@
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type {
|
||||
SalesforceCreateContactParams,
|
||||
SalesforceCreateContactResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
const logger = createLogger('SalesforceContacts')
|
||||
|
||||
export interface SalesforceCreateContactParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
lastName: string
|
||||
firstName?: string
|
||||
email?: string
|
||||
phone?: string
|
||||
accountId?: string
|
||||
title?: string
|
||||
department?: string
|
||||
mailingStreet?: string
|
||||
mailingCity?: string
|
||||
mailingState?: string
|
||||
mailingPostalCode?: string
|
||||
mailingCountry?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateContactResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: { operation: 'create_contact' }
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateContactTool: ToolConfig<
|
||||
SalesforceCreateContactParams,
|
||||
SalesforceCreateContactResponse
|
||||
|
||||
@@ -1,32 +1,9 @@
|
||||
import type {
|
||||
SalesforceCreateLeadParams,
|
||||
SalesforceCreateLeadResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceCreateLeadParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
lastName: string
|
||||
company: string
|
||||
firstName?: string
|
||||
email?: string
|
||||
phone?: string
|
||||
status?: string
|
||||
leadSource?: string
|
||||
title?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateLeadResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_lead'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateLeadTool: ToolConfig<
|
||||
SalesforceCreateLeadParams,
|
||||
|
||||
@@ -1,30 +1,9 @@
|
||||
import type {
|
||||
SalesforceCreateOpportunityParams,
|
||||
SalesforceCreateOpportunityResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceCreateOpportunityParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
name: string
|
||||
stageName: string
|
||||
closeDate: string
|
||||
accountId?: string
|
||||
amount?: string
|
||||
probability?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateOpportunityResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_opportunity'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateOpportunityTool: ToolConfig<
|
||||
SalesforceCreateOpportunityParams,
|
||||
|
||||
@@ -1,30 +1,9 @@
|
||||
import type {
|
||||
SalesforceCreateTaskParams,
|
||||
SalesforceCreateTaskResponse,
|
||||
} from '@/tools/salesforce/types'
|
||||
import { getInstanceUrl } from '@/tools/salesforce/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import { getInstanceUrl } from './utils'
|
||||
|
||||
export interface SalesforceCreateTaskParams {
|
||||
accessToken: string
|
||||
idToken?: string
|
||||
instanceUrl?: string
|
||||
subject: string
|
||||
status?: string
|
||||
priority?: string
|
||||
activityDate?: string
|
||||
whoId?: string
|
||||
whatId?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface SalesforceCreateTaskResponse {
|
||||
success: boolean
|
||||
output: {
|
||||
id: string
|
||||
success: boolean
|
||||
created: boolean
|
||||
metadata: {
|
||||
operation: 'create_task'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const salesforceCreateTaskTool: ToolConfig<
|
||||
SalesforceCreateTaskParams,
|
||||
|
||||
@@ -1,26 +1,12 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceDeleteAccountParams,
  SalesforceDeleteAccountResponse,
} from '@/tools/salesforce/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('SalesforceDeleteAccount')

export interface SalesforceDeleteAccountParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  accountId: string
}

export interface SalesforceDeleteAccountResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: {
      operation: 'delete_account'
    }
  }
}

export const salesforceDeleteAccountTool: ToolConfig<
  SalesforceDeleteAccountParams,
  SalesforceDeleteAccountResponse

@@ -1,23 +1,9 @@
import type {
  SalesforceDeleteCaseParams,
  SalesforceDeleteCaseResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceDeleteCaseParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  caseId: string
}

export interface SalesforceDeleteCaseResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: {
      operation: 'delete_case'
    }
  }
}

export const salesforceDeleteCaseTool: ToolConfig<
  SalesforceDeleteCaseParams,

@@ -1,25 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceDeleteContactParams,
  SalesforceDeleteContactResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

const logger = createLogger('SalesforceContacts')

export interface SalesforceDeleteContactParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  contactId: string
}

export interface SalesforceDeleteContactResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: { operation: 'delete_contact' }
  }
}

export const salesforceDeleteContactTool: ToolConfig<
  SalesforceDeleteContactParams,
  SalesforceDeleteContactResponse

@@ -1,23 +1,9 @@
import type {
  SalesforceDeleteLeadParams,
  SalesforceDeleteLeadResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceDeleteLeadParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  leadId: string
}

export interface SalesforceDeleteLeadResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: {
      operation: 'delete_lead'
    }
  }
}

export const salesforceDeleteLeadTool: ToolConfig<
  SalesforceDeleteLeadParams,

@@ -1,23 +1,9 @@
import type {
  SalesforceDeleteOpportunityParams,
  SalesforceDeleteOpportunityResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceDeleteOpportunityParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  opportunityId: string
}

export interface SalesforceDeleteOpportunityResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: {
      operation: 'delete_opportunity'
    }
  }
}

export const salesforceDeleteOpportunityTool: ToolConfig<
  SalesforceDeleteOpportunityParams,

@@ -1,23 +1,9 @@
import type {
  SalesforceDeleteTaskParams,
  SalesforceDeleteTaskResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceDeleteTaskParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  taskId: string
}

export interface SalesforceDeleteTaskResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: {
      operation: 'delete_task'
    }
  }
}

export const salesforceDeleteTaskTool: ToolConfig<
  SalesforceDeleteTaskParams,

@@ -1,38 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceDescribeObjectParams,
  SalesforceDescribeObjectResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceQuery')

export interface SalesforceDescribeObjectParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  objectName: string
}

export interface SalesforceDescribeObjectResponse {
  success: boolean
  output: {
    objectName: string
    label?: string
    labelPlural?: string
    fields?: any[]
    keyPrefix?: string
    queryable?: boolean
    createable?: boolean
    updateable?: boolean
    deletable?: boolean
    childRelationships?: any[]
    recordTypeInfos?: any[]
    metadata: {
      operation: 'describe_object'
      fieldCount: number
    }
    success: boolean
  }
}

/**
 * Describe a Salesforce object to get its metadata/fields
 * Useful for discovering available fields for queries

@@ -1,34 +1,6 @@
import type { SalesforceGetCasesParams, SalesforceGetCasesResponse } from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceGetCasesParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  caseId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetCasesResponse {
  success: boolean
  output: {
    case?: any
    cases?: any[]
    paging?: {
      nextRecordsUrl?: string
      totalSize: number
      done: boolean
    }
    metadata: {
      operation: 'get_cases'
      totalReturned?: number
      hasMore?: boolean
    }
    success: boolean
  }
}

export const salesforceGetCasesTool: ToolConfig<
  SalesforceGetCasesParams,

@@ -1,39 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceGetContactsParams,
  SalesforceGetContactsResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

const logger = createLogger('SalesforceContacts')

export interface SalesforceGetContactsParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  contactId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetContactsResponse {
  success: boolean
  output: {
    contacts?: any[]
    contact?: any
    paging?: {
      nextRecordsUrl?: string
      totalSize: number
      done: boolean
    }
    metadata: {
      operation: 'get_contacts'
      totalReturned?: number
      hasMore?: boolean
      singleContact?: boolean
    }
    success: boolean
  }
}

export const salesforceGetContactsTool: ToolConfig<
  SalesforceGetContactsParams,
  SalesforceGetContactsResponse

@@ -1,32 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceGetDashboardParams,
  SalesforceGetDashboardResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceDashboards')

export interface SalesforceGetDashboardParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  dashboardId: string
}

export interface SalesforceGetDashboardResponse {
  success: boolean
  output: {
    dashboard: any
    dashboardId: string
    components: any[]
    metadata: {
      operation: 'get_dashboard'
      dashboardName?: string
      folderId?: string
      runningUser?: any
    }
    success: boolean
  }
}

/**
 * Get details for a specific dashboard
 * @see https://developer.salesforce.com/docs/atlas.en-us.api_analytics.meta/api_analytics/sforce_analytics_rest_api_dashboard_results.htm

@@ -1,35 +1,6 @@
import type { SalesforceGetLeadsParams, SalesforceGetLeadsResponse } from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceGetLeadsParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  leadId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetLeadsResponse {
  success: boolean
  output: {
    lead?: any
    leads?: any[]
    paging?: {
      nextRecordsUrl?: string
      totalSize: number
      done: boolean
    }
    metadata: {
      operation: 'get_leads'
      totalReturned?: number
      hasMore?: boolean
      singleLead?: boolean
    }
    success: boolean
  }
}

export const salesforceGetLeadsTool: ToolConfig<
  SalesforceGetLeadsParams,

@@ -1,34 +1,9 @@
import type {
  SalesforceGetOpportunitiesParams,
  SalesforceGetOpportunitiesResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceGetOpportunitiesParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  opportunityId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetOpportunitiesResponse {
  success: boolean
  output: {
    opportunity?: any
    opportunities?: any[]
    paging?: {
      nextRecordsUrl?: string
      totalSize: number
      done: boolean
    }
    metadata: {
      operation: 'get_opportunities'
      totalReturned?: number
      hasMore?: boolean
    }
    success: boolean
  }
}

export const salesforceGetOpportunitiesTool: ToolConfig<
  SalesforceGetOpportunitiesParams,

@@ -1,28 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceGetReportParams,
  SalesforceGetReportResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceReports')

export interface SalesforceGetReportParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  reportId: string
}

export interface SalesforceGetReportResponse {
  success: boolean
  output: {
    report: any
    reportId: string
    metadata: {
      operation: 'get_report'
    }
    success: boolean
  }
}

/**
 * Get metadata for a specific report
 * @see https://developer.salesforce.com/docs/atlas.en-us.api_analytics.meta/api_analytics/sforce_analytics_rest_api_get_reportmetadata.htm

@@ -1,34 +1,6 @@
import type { SalesforceGetTasksParams, SalesforceGetTasksResponse } from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceGetTasksParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  taskId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetTasksResponse {
  success: boolean
  output: {
    task?: any
    tasks?: any[]
    paging?: {
      nextRecordsUrl?: string
      totalSize: number
      done: boolean
    }
    metadata: {
      operation: 'get_tasks'
      totalReturned?: number
      hasMore?: boolean
    }
    success: boolean
  }
}

export const salesforceGetTasksTool: ToolConfig<
  SalesforceGetTasksParams,

@@ -1,28 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceListDashboardsParams,
  SalesforceListDashboardsResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceDashboards')

export interface SalesforceListDashboardsParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  folderName?: string
}

export interface SalesforceListDashboardsResponse {
  success: boolean
  output: {
    dashboards: any[]
    metadata: {
      operation: 'list_dashboards'
      totalReturned: number
    }
    success: boolean
  }
}

/**
 * List all dashboards accessible by the current user
 * @see https://developer.salesforce.com/docs/atlas.en-us.api_analytics.meta/api_analytics/sforce_analytics_rest_api_getbasic_dashboardlist.htm

@@ -1,29 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceListObjectsParams,
  SalesforceListObjectsResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceQuery')

export interface SalesforceListObjectsParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
}

export interface SalesforceListObjectsResponse {
  success: boolean
  output: {
    objects: any[]
    encoding?: string
    maxBatchSize?: number
    metadata: {
      operation: 'list_objects'
      totalReturned: number
    }
    success: boolean
  }
}

/**
 * List all available Salesforce objects
 * Useful for discovering what objects are available

@@ -1,27 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceListReportTypesParams,
  SalesforceListReportTypesResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceReports')

export interface SalesforceListReportTypesParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
}

export interface SalesforceListReportTypesResponse {
  success: boolean
  output: {
    reportTypes: any[]
    metadata: {
      operation: 'list_report_types'
      totalReturned: number
    }
    success: boolean
  }
}

/**
 * Get list of available report types
 * @see https://developer.salesforce.com/docs/atlas.en-us.api_analytics.meta/api_analytics/sforce_analytics_rest_api_list_reporttypes.htm

@@ -1,29 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceListReportsParams,
  SalesforceListReportsResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceReports')

export interface SalesforceListReportsParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  folderName?: string
  searchTerm?: string
}

export interface SalesforceListReportsResponse {
  success: boolean
  output: {
    reports: any[]
    metadata: {
      operation: 'list_reports'
      totalReturned: number
    }
    success: boolean
  }
}

/**
 * List all reports accessible by the current user
 * @see https://developer.salesforce.com/docs/atlas.en-us.api_analytics.meta/api_analytics/sforce_analytics_rest_api_get_reportlist.htm

@@ -1,33 +1,10 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { SalesforceQueryParams, SalesforceQueryResponse } from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceQuery')

export interface SalesforceQueryParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  query: string
}

export interface SalesforceQueryResponse {
  success: boolean
  output: {
    records: any[]
    totalSize: number
    done: boolean
    nextRecordsUrl?: string
    query: string
    metadata: {
      operation: 'query'
      totalReturned: number
      hasMore: boolean
    }
    success: boolean
  }
}

/**
 * Execute a custom SOQL query
 * @see https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/dome_query.htm

@@ -1,32 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceQueryMoreParams,
  SalesforceQueryMoreResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceQuery')

export interface SalesforceQueryMoreParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  nextRecordsUrl: string
}

export interface SalesforceQueryMoreResponse {
  success: boolean
  output: {
    records: any[]
    totalSize: number
    done: boolean
    nextRecordsUrl?: string
    metadata: {
      operation: 'query_more'
      totalReturned: number
      hasMore: boolean
    }
    success: boolean
  }
}

/**
 * Retrieve additional query results using the nextRecordsUrl
 * @see https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/dome_query.htm

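The query and query_more tools mirror standard SOQL pagination: a query response carries `records`, `totalSize`, `done`, and an optional `nextRecordsUrl`, and query_more follows that URL until `done` is true. As a rough illustration of how a caller could drain a paged result set against the Salesforce REST API (a hedged sketch, not the tool implementation shipped in this release; the endpoint shape follows Salesforce's documented query resource):

```ts
// Illustrative only — assumes a valid access token and instance URL.
interface SoqlPage {
  records: Record<string, unknown>[]
  totalSize: number
  done: boolean
  nextRecordsUrl?: string
}

async function queryAll(instanceUrl: string, accessToken: string, soql: string) {
  const headers = { Authorization: `Bearer ${accessToken}` }
  let url = `${instanceUrl}/services/data/v59.0/query?q=${encodeURIComponent(soql)}`
  const records: Record<string, unknown>[] = []

  while (true) {
    const res = await fetch(url, { headers })
    if (!res.ok) throw new Error(`SOQL query failed: ${res.status}`)
    const page = (await res.json()) as SoqlPage
    records.push(...page.records)
    if (page.done || !page.nextRecordsUrl) break
    // nextRecordsUrl is a relative path, e.g. /services/data/v59.0/query/<locator>
    url = `${instanceUrl}${page.nextRecordsUrl}`
  }
  return records
}
```
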
@@ -1,32 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceRefreshDashboardParams,
  SalesforceRefreshDashboardResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceDashboards')

export interface SalesforceRefreshDashboardParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  dashboardId: string
}

export interface SalesforceRefreshDashboardResponse {
  success: boolean
  output: {
    dashboard: any
    dashboardId: string
    components: any[]
    status?: any
    metadata: {
      operation: 'refresh_dashboard'
      dashboardName?: string
      refreshDate?: string
    }
    success: boolean
  }
}

/**
 * Refresh a dashboard to get latest data
 * @see https://developer.salesforce.com/docs/atlas.en-us.api_analytics.meta/api_analytics/sforce_analytics_rest_api_refresh_dashboard.htm

@@ -1,38 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceRunReportParams,
  SalesforceRunReportResponse,
} from '@/tools/salesforce/types'
import { extractErrorMessage, getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { extractErrorMessage, getInstanceUrl } from './utils'

const logger = createLogger('SalesforceReports')

export interface SalesforceRunReportParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  reportId: string
  includeDetails?: string
  filters?: string
}

export interface SalesforceRunReportResponse {
  success: boolean
  output: {
    reportId: string
    reportMetadata?: any
    reportExtendedMetadata?: any
    factMap?: any
    groupingsDown?: any
    groupingsAcross?: any
    hasDetailRows?: boolean
    allData?: boolean
    metadata: {
      operation: 'run_report'
      reportName?: string
      reportFormat?: string
    }
    success: boolean
  }
}

/**
 * Run a report and return the results
 * @see https://developer.salesforce.com/docs/atlas.en-us.api_analytics.meta/api_analytics/sforce_analytics_rest_api_get_reportdata.htm

@@ -1,6 +1,23 @@
import type { ToolResponse } from '@/tools/types'

// Common Salesforce types
/**
 * Base parameters shared by all Salesforce operations
 */
export interface BaseSalesforceParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
}

/**
 * Common paging structure for list operations
 */
export interface SalesforcePaging {
  nextRecordsUrl?: string
  totalSize: number
  done: boolean
}

export interface SalesforceAccount {
  Id: string
  Name: string
@@ -22,13 +39,12 @@ export interface SalesforceAccount {
  [key: string]: any
}

export interface SalesforcePaging {
  nextRecordsUrl?: string
  totalSize: number
  done: boolean
export interface SalesforceGetAccountsParams extends BaseSalesforceParams {
  limit?: string
  fields?: string
  orderBy?: string
}

// Get Accounts
export interface SalesforceGetAccountsResponse extends ToolResponse {
  output: {
    accounts: SalesforceAccount[]
@@ -42,16 +58,22 @@ export interface SalesforceGetAccountsResponse extends ToolResponse {
  }
}

export interface SalesforceGetAccountsParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  limit?: string
  fields?: string
  orderBy?: string
export interface SalesforceCreateAccountParams extends BaseSalesforceParams {
  name: string
  type?: string
  industry?: string
  phone?: string
  website?: string
  billingStreet?: string
  billingCity?: string
  billingState?: string
  billingPostalCode?: string
  billingCountry?: string
  description?: string
  annualRevenue?: string
  numberOfEmployees?: string
}

// Create Account
export interface SalesforceCreateAccountResponse {
  success: boolean
  output: {
@@ -64,7 +86,23 @@ export interface SalesforceCreateAccountResponse {
  }
}

// Update Account
export interface SalesforceUpdateAccountParams extends BaseSalesforceParams {
  accountId: string
  name?: string
  type?: string
  industry?: string
  phone?: string
  website?: string
  billingStreet?: string
  billingCity?: string
  billingState?: string
  billingPostalCode?: string
  billingCountry?: string
  description?: string
  annualRevenue?: string
  numberOfEmployees?: string
}

export interface SalesforceUpdateAccountResponse {
  success: boolean
  output: {
@@ -76,7 +114,10 @@ export interface SalesforceUpdateAccountResponse {
  }
}

// Delete Account
export interface SalesforceDeleteAccountParams extends BaseSalesforceParams {
  accountId: string
}

export interface SalesforceDeleteAccountResponse {
  success: boolean
  output: {
@@ -88,17 +129,19 @@ export interface SalesforceDeleteAccountResponse {
  }
}

// Contact types
export interface SalesforceGetContactsParams extends BaseSalesforceParams {
  contactId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetContactsResponse {
  success: boolean
  output: {
    contacts?: any[]
    contact?: any
    paging?: {
      nextRecordsUrl?: string
      totalSize: number
      done: boolean
    }
    paging?: SalesforcePaging
    metadata: {
      operation: 'get_contacts'
      totalReturned?: number
@@ -109,6 +152,22 @@ export interface SalesforceGetContactsResponse {
  }
}

export interface SalesforceCreateContactParams extends BaseSalesforceParams {
  lastName: string
  firstName?: string
  email?: string
  phone?: string
  accountId?: string
  title?: string
  department?: string
  mailingStreet?: string
  mailingCity?: string
  mailingState?: string
  mailingPostalCode?: string
  mailingCountry?: string
  description?: string
}

export interface SalesforceCreateContactResponse {
  success: boolean
  output: {
@@ -119,6 +178,23 @@ export interface SalesforceCreateContactResponse {
  }
}

export interface SalesforceUpdateContactParams extends BaseSalesforceParams {
  contactId: string
  lastName?: string
  firstName?: string
  email?: string
  phone?: string
  accountId?: string
  title?: string
  department?: string
  mailingStreet?: string
  mailingCity?: string
  mailingState?: string
  mailingPostalCode?: string
  mailingCountry?: string
  description?: string
}

export interface SalesforceUpdateContactResponse {
  success: boolean
  output: {
@@ -128,6 +204,10 @@ export interface SalesforceUpdateContactResponse {
  }
}

export interface SalesforceDeleteContactParams extends BaseSalesforceParams {
  contactId: string
}

export interface SalesforceDeleteContactResponse {
  success: boolean
  output: {
@@ -137,7 +217,335 @@ export interface SalesforceDeleteContactResponse {
  }
}

// Report types
export interface SalesforceGetLeadsParams extends BaseSalesforceParams {
  leadId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetLeadsResponse {
  success: boolean
  output: {
    lead?: any
    leads?: any[]
    paging?: SalesforcePaging
    metadata: {
      operation: 'get_leads'
      totalReturned?: number
      hasMore?: boolean
      singleLead?: boolean
    }
    success: boolean
  }
}

export interface SalesforceCreateLeadParams extends BaseSalesforceParams {
  lastName: string
  company: string
  firstName?: string
  email?: string
  phone?: string
  status?: string
  leadSource?: string
  title?: string
  description?: string
}

export interface SalesforceCreateLeadResponse {
  success: boolean
  output: {
    id: string
    success: boolean
    created: boolean
    metadata: {
      operation: 'create_lead'
    }
  }
}

export interface SalesforceUpdateLeadParams extends BaseSalesforceParams {
  leadId: string
  lastName?: string
  company?: string
  firstName?: string
  email?: string
  phone?: string
  status?: string
  leadSource?: string
  title?: string
  description?: string
}

export interface SalesforceUpdateLeadResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: {
      operation: 'update_lead'
    }
  }
}

export interface SalesforceDeleteLeadParams extends BaseSalesforceParams {
  leadId: string
}

export interface SalesforceDeleteLeadResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: {
      operation: 'delete_lead'
    }
  }
}

export interface SalesforceGetOpportunitiesParams extends BaseSalesforceParams {
  opportunityId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetOpportunitiesResponse {
  success: boolean
  output: {
    opportunity?: any
    opportunities?: any[]
    paging?: SalesforcePaging
    metadata: {
      operation: 'get_opportunities'
      totalReturned?: number
      hasMore?: boolean
    }
    success: boolean
  }
}

export interface SalesforceCreateOpportunityParams extends BaseSalesforceParams {
  name: string
  stageName: string
  closeDate: string
  accountId?: string
  amount?: string
  probability?: string
  description?: string
}

export interface SalesforceCreateOpportunityResponse {
  success: boolean
  output: {
    id: string
    success: boolean
    created: boolean
    metadata: {
      operation: 'create_opportunity'
    }
  }
}

export interface SalesforceUpdateOpportunityParams extends BaseSalesforceParams {
  opportunityId: string
  name?: string
  stageName?: string
  closeDate?: string
  accountId?: string
  amount?: string
  probability?: string
  description?: string
}

export interface SalesforceUpdateOpportunityResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: {
      operation: 'update_opportunity'
    }
  }
}

export interface SalesforceDeleteOpportunityParams extends BaseSalesforceParams {
  opportunityId: string
}

export interface SalesforceDeleteOpportunityResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: {
      operation: 'delete_opportunity'
    }
  }
}

export interface SalesforceGetCasesParams extends BaseSalesforceParams {
  caseId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetCasesResponse {
  success: boolean
  output: {
    case?: any
    cases?: any[]
    paging?: SalesforcePaging
    metadata: {
      operation: 'get_cases'
      totalReturned?: number
      hasMore?: boolean
    }
    success: boolean
  }
}

export interface SalesforceCreateCaseParams extends BaseSalesforceParams {
  subject: string
  status?: string
  priority?: string
  origin?: string
  contactId?: string
  accountId?: string
  description?: string
}

export interface SalesforceCreateCaseResponse {
  success: boolean
  output: {
    id: string
    success: boolean
    created: boolean
    metadata: {
      operation: 'create_case'
    }
  }
}

export interface SalesforceUpdateCaseParams extends BaseSalesforceParams {
  caseId: string
  subject?: string
  status?: string
  priority?: string
  description?: string
}

export interface SalesforceUpdateCaseResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: {
      operation: 'update_case'
    }
  }
}

export interface SalesforceDeleteCaseParams extends BaseSalesforceParams {
  caseId: string
}

export interface SalesforceDeleteCaseResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: {
      operation: 'delete_case'
    }
  }
}

export interface SalesforceGetTasksParams extends BaseSalesforceParams {
  taskId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetTasksResponse {
  success: boolean
  output: {
    task?: any
    tasks?: any[]
    paging?: SalesforcePaging
    metadata: {
      operation: 'get_tasks'
      totalReturned?: number
      hasMore?: boolean
    }
    success: boolean
  }
}

export interface SalesforceCreateTaskParams extends BaseSalesforceParams {
  subject: string
  status?: string
  priority?: string
  activityDate?: string
  whoId?: string
  whatId?: string
  description?: string
}

export interface SalesforceCreateTaskResponse {
  success: boolean
  output: {
    id: string
    success: boolean
    created: boolean
    metadata: {
      operation: 'create_task'
    }
  }
}

export interface SalesforceUpdateTaskParams extends BaseSalesforceParams {
  taskId: string
  subject?: string
  status?: string
  priority?: string
  activityDate?: string
  description?: string
}

export interface SalesforceUpdateTaskResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: {
      operation: 'update_task'
    }
  }
}

export interface SalesforceDeleteTaskParams extends BaseSalesforceParams {
  taskId: string
}

export interface SalesforceDeleteTaskResponse {
  success: boolean
  output: {
    id: string
    deleted: boolean
    metadata: {
      operation: 'delete_task'
    }
  }
}

export interface SalesforceListReportsParams extends BaseSalesforceParams {
  folderName?: string
  searchTerm?: string
}

export interface SalesforceListReportsResponse {
  success: boolean
  output: {
@@ -150,6 +558,10 @@ export interface SalesforceListReportsResponse {
  }
}

export interface SalesforceGetReportParams extends BaseSalesforceParams {
  reportId: string
}

export interface SalesforceGetReportResponse {
  success: boolean
  output: {
@@ -162,26 +574,34 @@ export interface SalesforceGetReportResponse {
  }
}

export interface SalesforceRunReportParams extends BaseSalesforceParams {
  reportId: string
  includeDetails?: string
  filters?: string
}

export interface SalesforceRunReportResponse {
  success: boolean
  output: {
    reportId: string
    reportMetadata: any
    reportExtendedMetadata: any
    factMap: any
    groupingsDown: any
    groupingsAcross: any
    hasDetailRows: boolean
    allData: boolean
    reportMetadata?: any
    reportExtendedMetadata?: any
    factMap?: any
    groupingsDown?: any
    groupingsAcross?: any
    hasDetailRows?: boolean
    allData?: boolean
    metadata: {
      operation: 'run_report'
      reportName: string
      reportFormat: string
      reportName?: string
      reportFormat?: string
    }
    success: boolean
  }
}

export interface SalesforceListReportTypesParams extends BaseSalesforceParams {}

export interface SalesforceListReportTypesResponse {
  success: boolean
  output: {
@@ -194,7 +614,10 @@ export interface SalesforceListReportTypesResponse {
  }
}

// Dashboard types
export interface SalesforceListDashboardsParams extends BaseSalesforceParams {
  folderName?: string
}

export interface SalesforceListDashboardsResponse {
  success: boolean
  output: {
@@ -207,6 +630,10 @@ export interface SalesforceListDashboardsResponse {
  }
}

export interface SalesforceGetDashboardParams extends BaseSalesforceParams {
  dashboardId: string
}

export interface SalesforceGetDashboardResponse {
  success: boolean
  output: {
@@ -215,31 +642,38 @@ export interface SalesforceGetDashboardResponse {
    components: any[]
    metadata: {
      operation: 'get_dashboard'
      dashboardName: string
      folderId: string
      runningUser: any
      dashboardName?: string
      folderId?: string
      runningUser?: any
    }
    success: boolean
  }
}

export interface SalesforceRefreshDashboardParams extends BaseSalesforceParams {
  dashboardId: string
}

export interface SalesforceRefreshDashboardResponse {
  success: boolean
  output: {
    dashboard: any
    dashboardId: string
    components: any[]
    status: any
    status?: any
    metadata: {
      operation: 'refresh_dashboard'
      dashboardName: string
      refreshDate: string
      dashboardName?: string
      refreshDate?: string
    }
    success: boolean
  }
}

// Query types
export interface SalesforceQueryParams extends BaseSalesforceParams {
  query: string
}

export interface SalesforceQueryResponse {
  success: boolean
  output: {
@@ -257,6 +691,10 @@ export interface SalesforceQueryResponse {
  }
}

export interface SalesforceQueryMoreParams extends BaseSalesforceParams {
  nextRecordsUrl: string
}

export interface SalesforceQueryMoreResponse {
  success: boolean
  output: {
@@ -273,20 +711,24 @@ export interface SalesforceQueryMoreResponse {
  }
}

export interface SalesforceDescribeObjectParams extends BaseSalesforceParams {
  objectName: string
}

export interface SalesforceDescribeObjectResponse {
  success: boolean
  output: {
    objectName: string
    label: string
    labelPlural: string
    fields: any[]
    keyPrefix: string
    queryable: boolean
    createable: boolean
    updateable: boolean
    deletable: boolean
    childRelationships: any[]
    recordTypeInfos: any[]
    label?: string
    labelPlural?: string
    fields?: any[]
    keyPrefix?: string
    queryable?: boolean
    createable?: boolean
    updateable?: boolean
    deletable?: boolean
    childRelationships?: any[]
    recordTypeInfos?: any[]
    metadata: {
      operation: 'describe_object'
      fieldCount: number
@@ -295,12 +737,14 @@ export interface SalesforceDescribeObjectResponse {
  }
}

export interface SalesforceListObjectsParams extends BaseSalesforceParams {}

export interface SalesforceListObjectsResponse {
  success: boolean
  output: {
    objects: any[]
    encoding: string
    maxBatchSize: number
    encoding?: string
    maxBatchSize?: number
    metadata: {
      operation: 'list_objects'
      totalReturned: number
@@ -309,7 +753,6 @@ export interface SalesforceListObjectsResponse {
  }
}

// Generic Salesforce response type for the block
export type SalesforceResponse =
  | SalesforceGetAccountsResponse
  | SalesforceCreateAccountResponse
@@ -319,6 +762,22 @@ export type SalesforceResponse =
  | SalesforceCreateContactResponse
  | SalesforceUpdateContactResponse
  | SalesforceDeleteContactResponse
  | SalesforceGetLeadsResponse
  | SalesforceCreateLeadResponse
  | SalesforceUpdateLeadResponse
  | SalesforceDeleteLeadResponse
  | SalesforceGetOpportunitiesResponse
  | SalesforceCreateOpportunityResponse
  | SalesforceUpdateOpportunityResponse
  | SalesforceDeleteOpportunityResponse
  | SalesforceGetCasesResponse
  | SalesforceCreateCaseResponse
  | SalesforceUpdateCaseResponse
  | SalesforceDeleteCaseResponse
  | SalesforceGetTasksResponse
  | SalesforceCreateTaskResponse
  | SalesforceUpdateTaskResponse
  | SalesforceDeleteTaskResponse
  | SalesforceListReportsResponse
  | SalesforceGetReportResponse
  | SalesforceRunReportResponse
@@ -330,4 +789,3 @@ export type SalesforceResponse =
  | SalesforceQueryMoreResponse
  | SalesforceDescribeObjectResponse
  | SalesforceListObjectsResponse
  | { success: boolean; output: any } // Generic for leads, opportunities, cases, tasks

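The consolidated types module above replaces the per-file interfaces: the shared `accessToken`/`idToken`/`instanceUrl` trio moves into `BaseSalesforceParams`, and list responses reuse `SalesforcePaging`. A minimal sketch of how a future operation's types could be declared against that pattern (the `get_notes` operation is hypothetical and shown only to illustrate the extension pattern, not something shipped in this release):

```ts
import type { BaseSalesforceParams, SalesforcePaging } from '@/tools/salesforce/types'

// Hypothetical operation: reuses the shared base params and paging shape.
export interface SalesforceGetNotesParams extends BaseSalesforceParams {
  noteId?: string
  limit?: string
  fields?: string
  orderBy?: string
}

export interface SalesforceGetNotesResponse {
  success: boolean
  output: {
    notes?: any[]
    paging?: SalesforcePaging
    metadata: {
      operation: 'get_notes'
      totalReturned?: number
      hasMore?: boolean
    }
  }
}
```
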
@@ -1,39 +1,12 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceUpdateAccountParams,
  SalesforceUpdateAccountResponse,
} from '@/tools/salesforce/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('SalesforceUpdateAccount')

export interface SalesforceUpdateAccountParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  accountId: string
  name?: string
  type?: string
  industry?: string
  phone?: string
  website?: string
  billingStreet?: string
  billingCity?: string
  billingState?: string
  billingPostalCode?: string
  billingCountry?: string
  description?: string
  annualRevenue?: string
  numberOfEmployees?: string
}

export interface SalesforceUpdateAccountResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: {
      operation: 'update_account'
    }
  }
}

export const salesforceUpdateAccountTool: ToolConfig<
  SalesforceUpdateAccountParams,
  SalesforceUpdateAccountResponse

@@ -1,27 +1,9 @@
import type {
  SalesforceUpdateCaseParams,
  SalesforceUpdateCaseResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceUpdateCaseParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  caseId: string
  subject?: string
  status?: string
  priority?: string
  description?: string
}

export interface SalesforceUpdateCaseResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: {
      operation: 'update_case'
    }
  }
}

export const salesforceUpdateCaseTool: ToolConfig<
  SalesforceUpdateCaseParams,

@@ -1,38 +1,13 @@
import { createLogger } from '@/lib/logs/console/logger'
import type {
  SalesforceUpdateContactParams,
  SalesforceUpdateContactResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

const logger = createLogger('SalesforceContacts')

export interface SalesforceUpdateContactParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  contactId: string
  lastName?: string
  firstName?: string
  email?: string
  phone?: string
  accountId?: string
  title?: string
  department?: string
  mailingStreet?: string
  mailingCity?: string
  mailingState?: string
  mailingPostalCode?: string
  mailingCountry?: string
  description?: string
}

export interface SalesforceUpdateContactResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: { operation: 'update_contact' }
  }
}

export const salesforceUpdateContactTool: ToolConfig<
  SalesforceUpdateContactParams,
  SalesforceUpdateContactResponse

@@ -1,32 +1,9 @@
import type {
  SalesforceUpdateLeadParams,
  SalesforceUpdateLeadResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceUpdateLeadParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  leadId: string
  lastName?: string
  company?: string
  firstName?: string
  email?: string
  phone?: string
  status?: string
  leadSource?: string
  title?: string
  description?: string
}

export interface SalesforceUpdateLeadResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: {
      operation: 'update_lead'
    }
  }
}

export const salesforceUpdateLeadTool: ToolConfig<
  SalesforceUpdateLeadParams,

@@ -1,30 +1,9 @@
import type {
  SalesforceUpdateOpportunityParams,
  SalesforceUpdateOpportunityResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceUpdateOpportunityParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  opportunityId: string
  name?: string
  stageName?: string
  closeDate?: string
  accountId?: string
  amount?: string
  probability?: string
  description?: string
}

export interface SalesforceUpdateOpportunityResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: {
      operation: 'update_opportunity'
    }
  }
}

export const salesforceUpdateOpportunityTool: ToolConfig<
  SalesforceUpdateOpportunityParams,

@@ -1,28 +1,9 @@
import type {
  SalesforceUpdateTaskParams,
  SalesforceUpdateTaskResponse,
} from '@/tools/salesforce/types'
import { getInstanceUrl } from '@/tools/salesforce/utils'
import type { ToolConfig } from '@/tools/types'
import { getInstanceUrl } from './utils'

export interface SalesforceUpdateTaskParams {
  accessToken: string
  idToken?: string
  instanceUrl?: string
  taskId: string
  subject?: string
  status?: string
  priority?: string
  activityDate?: string
  description?: string
}

export interface SalesforceUpdateTaskResponse {
  success: boolean
  output: {
    id: string
    updated: boolean
    metadata: {
      operation: 'update_task'
    }
  }
}

export const salesforceUpdateTaskTool: ToolConfig<
  SalesforceUpdateTaskParams,

@@ -44,9 +44,18 @@ app:

NODE_ENV: "production"
NEXT_TELEMETRY_DISABLED: "1"

# AWS-specific environment variables

# AWS S3 Cloud Storage Configuration (RECOMMENDED for production)
# Create S3 buckets in your AWS account and configure IAM permissions
AWS_REGION: "us-west-2"
AWS_ACCESS_KEY_ID: "" # AWS access key (or use IRSA for EKS)
AWS_SECRET_ACCESS_KEY: "" # AWS secret key (or use IRSA for EKS)
S3_BUCKET_NAME: "workspace-files" # Workspace files
S3_KB_BUCKET_NAME: "knowledge-base" # Knowledge base documents
S3_EXECUTION_FILES_BUCKET_NAME: "execution-files" # Workflow execution outputs
S3_CHAT_BUCKET_NAME: "chat-files" # Deployed chat assets
S3_COPILOT_BUCKET_NAME: "copilot-files" # Copilot attachments
S3_PROFILE_PICTURES_BUCKET_NAME: "profile-pictures" # User avatars

# Realtime service
realtime:

@@ -42,10 +42,23 @@ app:
# Optional: API Key Encryption (RECOMMENDED for production)
# Generate 64-character hex string using: openssl rand -hex 32
API_ENCRYPTION_KEY: "your-64-char-hex-api-encryption-key-here" # Optional but recommended

NODE_ENV: "production"
NEXT_TELEMETRY_DISABLED: "1"

# Azure Blob Storage Configuration (RECOMMENDED for production)
# Create a storage account and containers in your Azure subscription
AZURE_ACCOUNT_NAME: "simstudiostorageacct" # Azure storage account name
AZURE_ACCOUNT_KEY: "" # Storage account access key
# Or use connection string instead of account name/key:
# AZURE_CONNECTION_STRING: "DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...;EndpointSuffix=core.windows.net"
AZURE_STORAGE_CONTAINER_NAME: "workspace-files" # Workspace files container
AZURE_STORAGE_KB_CONTAINER_NAME: "knowledge-base" # Knowledge base documents container
AZURE_STORAGE_EXECUTION_FILES_CONTAINER_NAME: "execution-files" # Workflow execution outputs
AZURE_STORAGE_CHAT_CONTAINER_NAME: "chat-files" # Deployed chat assets container
AZURE_STORAGE_COPILOT_CONTAINER_NAME: "copilot-files" # Copilot attachments container
AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME: "profile-pictures" # User avatars container

# Realtime service
realtime:
enabled: true

@@ -116,8 +116,37 @@ app:
# Access Control (leave empty if not restricting login)
ALLOWED_LOGIN_EMAILS: "" # Comma-separated list of allowed email addresses for login
ALLOWED_LOGIN_DOMAINS: "" # Comma-separated list of allowed email domains for login

# SSO Configuration (Enterprise Single Sign-On)
# Set to "true" AFTER running the SSO registration script
SSO_ENABLED: "" # Enable SSO authentication ("true" to enable)
NEXT_PUBLIC_SSO_ENABLED: "" # Show SSO login button in UI ("true" to enable)

# AWS S3 Cloud Storage Configuration (optional - for file storage)
# If configured, files will be stored in S3 instead of local storage
AWS_REGION: "" # AWS region (e.g., "us-east-1")
AWS_ACCESS_KEY_ID: "" # AWS access key ID
AWS_SECRET_ACCESS_KEY: "" # AWS secret access key
S3_BUCKET_NAME: "" # S3 bucket for workspace files
S3_KB_BUCKET_NAME: "" # S3 bucket for knowledge base files
S3_EXECUTION_FILES_BUCKET_NAME: "" # S3 bucket for workflow execution files
S3_CHAT_BUCKET_NAME: "" # S3 bucket for deployed chat files
S3_COPILOT_BUCKET_NAME: "" # S3 bucket for copilot files
S3_PROFILE_PICTURES_BUCKET_NAME: "" # S3 bucket for user profile pictures

# Azure Blob Storage Configuration (optional - for file storage)
# If configured, files will be stored in Azure Blob instead of local storage
# Note: Azure Blob takes precedence over S3 if both are configured
AZURE_ACCOUNT_NAME: "" # Azure storage account name
AZURE_ACCOUNT_KEY: "" # Azure storage account key
AZURE_CONNECTION_STRING: "" # Azure connection string (alternative to account name/key)
AZURE_STORAGE_CONTAINER_NAME: "" # Azure container for workspace files
AZURE_STORAGE_KB_CONTAINER_NAME: "" # Azure container for knowledge base files
AZURE_STORAGE_EXECUTION_FILES_CONTAINER_NAME: "" # Azure container for workflow execution files
AZURE_STORAGE_CHAT_CONTAINER_NAME: "" # Azure container for deployed chat files
AZURE_STORAGE_COPILOT_CONTAINER_NAME: "" # Azure container for copilot files
AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME: "" # Azure container for user profile pictures

# Service configuration
service:
type: ClusterIP

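Both storage blocks in the chart are optional: per the comments above, Azure Blob takes precedence over S3 when both are configured, and local storage remains the fallback when neither is set. A rough sketch of that precedence as selection logic, assuming it is driven by the same environment variables the chart exposes (illustrative only; the application's actual provider resolution may differ):

```ts
// Sketch of the documented precedence: Azure > S3 > local.
type StorageProvider = 'azure' | 's3' | 'local'

function resolveStorageProvider(env: NodeJS.ProcessEnv): StorageProvider {
  const hasAzure = Boolean(
    env.AZURE_CONNECTION_STRING || (env.AZURE_ACCOUNT_NAME && env.AZURE_ACCOUNT_KEY)
  )
  const hasS3 = Boolean(env.AWS_REGION && env.S3_BUCKET_NAME)
  if (hasAzure) return 'azure'
  if (hasS3) return 's3'
  return 'local'
}
```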