mirror of
https://github.com/simstudioai/sim.git
synced 2026-01-22 21:38:05 -05:00
Compare commits
32 Commits
improvemen
...
v0.5.65
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0ce0f98aa5 | ||
|
|
dff1c9d083 | ||
|
|
b09f683072 | ||
|
|
a8bb0db660 | ||
|
|
af82820a28 | ||
|
|
4372841797 | ||
|
|
5e8c843241 | ||
|
|
7bf3d73ee6 | ||
|
|
7ffc11a738 | ||
|
|
be578e2ed7 | ||
|
|
f415e5edc4 | ||
|
|
13a6e6c3fa | ||
|
|
f5ab7f21ae | ||
|
|
bfb6fffe38 | ||
|
|
4fbec0a43f | ||
|
|
585f5e365b | ||
|
|
3792bdd252 | ||
|
|
eb5d1f3e5b | ||
|
|
54ab82c8dd | ||
|
|
f895bf469b | ||
|
|
dd3209af06 | ||
|
|
b6ba3b50a7 | ||
|
|
b304233062 | ||
|
|
57e4b49bd6 | ||
|
|
e12dd204ed | ||
|
|
3d9d9cbc54 | ||
|
|
0f4ec962ad | ||
|
|
4827866f9a | ||
|
|
3e697d9ed9 | ||
|
|
4431a1a484 | ||
|
|
4d1a9a3f22 | ||
|
|
eb07a080fb |
@@ -2,9 +2,10 @@
|
||||
|
||||
import { useEffect, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Eye, EyeOff } from 'lucide-react'
|
||||
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useRouter, useSearchParams } from 'next/navigation'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
@@ -21,10 +22,8 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { quickValidateEmail } from '@/lib/messaging/email/validation'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
|
||||
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
|
||||
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
|
||||
|
||||
const logger = createLogger('LoginForm')
|
||||
|
||||
@@ -106,7 +105,8 @@ export default function LoginPage({
|
||||
const [password, setPassword] = useState('')
|
||||
const [passwordErrors, setPasswordErrors] = useState<string[]>([])
|
||||
const [showValidationError, setShowValidationError] = useState(false)
|
||||
const buttonClass = useBrandedButtonClass()
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
const [callbackUrl, setCallbackUrl] = useState('/workspace')
|
||||
const [isInviteFlow, setIsInviteFlow] = useState(false)
|
||||
@@ -114,6 +114,7 @@ export default function LoginPage({
|
||||
const [forgotPasswordOpen, setForgotPasswordOpen] = useState(false)
|
||||
const [forgotPasswordEmail, setForgotPasswordEmail] = useState('')
|
||||
const [isSubmittingReset, setIsSubmittingReset] = useState(false)
|
||||
const [isResetButtonHovered, setIsResetButtonHovered] = useState(false)
|
||||
const [resetStatus, setResetStatus] = useState<{
|
||||
type: 'success' | 'error' | null
|
||||
message: string
|
||||
@@ -122,7 +123,6 @@ export default function LoginPage({
|
||||
const [email, setEmail] = useState('')
|
||||
const [emailErrors, setEmailErrors] = useState<string[]>([])
|
||||
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
|
||||
const [resetSuccessMessage, setResetSuccessMessage] = useState<string | null>(null)
|
||||
|
||||
useEffect(() => {
|
||||
setMounted(true)
|
||||
@@ -139,12 +139,32 @@ export default function LoginPage({
|
||||
|
||||
const inviteFlow = searchParams.get('invite_flow') === 'true'
|
||||
setIsInviteFlow(inviteFlow)
|
||||
}
|
||||
|
||||
const resetSuccess = searchParams.get('resetSuccess') === 'true'
|
||||
if (resetSuccess) {
|
||||
setResetSuccessMessage('Password reset successful. Please sign in with your new password.')
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [searchParams])
|
||||
|
||||
useEffect(() => {
|
||||
@@ -182,13 +202,6 @@ export default function LoginPage({
|
||||
e.preventDefault()
|
||||
setIsLoading(true)
|
||||
|
||||
const redirectToVerify = (emailToVerify: string) => {
|
||||
if (typeof window !== 'undefined') {
|
||||
sessionStorage.setItem('verificationEmail', emailToVerify)
|
||||
}
|
||||
router.push('/verify')
|
||||
}
|
||||
|
||||
const formData = new FormData(e.currentTarget)
|
||||
const emailRaw = formData.get('email') as string
|
||||
const email = emailRaw.trim().toLowerCase()
|
||||
@@ -208,7 +221,6 @@ export default function LoginPage({
|
||||
|
||||
try {
|
||||
const safeCallbackUrl = validateCallbackUrl(callbackUrl) ? callbackUrl : '/workspace'
|
||||
let errorHandled = false
|
||||
|
||||
const result = await client.signIn.email(
|
||||
{
|
||||
@@ -219,16 +231,11 @@ export default function LoginPage({
|
||||
{
|
||||
onError: (ctx) => {
|
||||
logger.error('Login error:', ctx.error)
|
||||
|
||||
if (ctx.error.code?.includes('EMAIL_NOT_VERIFIED')) {
|
||||
errorHandled = true
|
||||
redirectToVerify(email)
|
||||
return
|
||||
}
|
||||
|
||||
errorHandled = true
|
||||
const errorMessage: string[] = ['Invalid email or password']
|
||||
|
||||
if (ctx.error.code?.includes('EMAIL_NOT_VERIFIED')) {
|
||||
return
|
||||
}
|
||||
if (
|
||||
ctx.error.code?.includes('BAD_REQUEST') ||
|
||||
ctx.error.message?.includes('Email and password sign in is not enabled')
|
||||
@@ -264,7 +271,6 @@ export default function LoginPage({
|
||||
errorMessage.push('Too many requests. Please wait a moment before trying again.')
|
||||
}
|
||||
|
||||
setResetSuccessMessage(null)
|
||||
setPasswordErrors(errorMessage)
|
||||
setShowValidationError(true)
|
||||
},
|
||||
@@ -272,25 +278,15 @@ export default function LoginPage({
|
||||
)
|
||||
|
||||
if (!result || result.error) {
|
||||
// Show error if not already handled by onError callback
|
||||
if (!errorHandled) {
|
||||
setResetSuccessMessage(null)
|
||||
const errorMessage = result?.error?.message || 'Login failed. Please try again.'
|
||||
setPasswordErrors([errorMessage])
|
||||
setShowValidationError(true)
|
||||
}
|
||||
setIsLoading(false)
|
||||
return
|
||||
}
|
||||
|
||||
// Clear reset success message on successful login
|
||||
setResetSuccessMessage(null)
|
||||
|
||||
// Explicit redirect fallback if better-auth doesn't redirect
|
||||
router.push(safeCallbackUrl)
|
||||
} catch (err: any) {
|
||||
if (err.message?.includes('not verified') || err.code?.includes('EMAIL_NOT_VERIFIED')) {
|
||||
redirectToVerify(email)
|
||||
if (typeof window !== 'undefined') {
|
||||
sessionStorage.setItem('verificationEmail', email)
|
||||
}
|
||||
router.push('/verify')
|
||||
return
|
||||
}
|
||||
|
||||
@@ -404,13 +400,6 @@ export default function LoginPage({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Password reset success message */}
|
||||
{resetSuccessMessage && (
|
||||
<div className={`${inter.className} mt-1 space-y-1 text-[#4CAF50] text-xs`}>
|
||||
<p>{resetSuccessMessage}</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Email/Password Form - show unless explicitly disabled */}
|
||||
{!isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED')) && (
|
||||
<form onSubmit={onSubmit} className={`${inter.className} mt-8 space-y-8`}>
|
||||
@@ -493,14 +482,24 @@ export default function LoginPage({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<BrandedButton
|
||||
<Button
|
||||
type='submit'
|
||||
onMouseEnter={() => setIsButtonHovered(true)}
|
||||
onMouseLeave={() => setIsButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
disabled={isLoading}
|
||||
loading={isLoading}
|
||||
loadingText='Signing in'
|
||||
>
|
||||
Sign in
|
||||
</BrandedButton>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isLoading ? 'Signing in...' : 'Sign in'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
</form>
|
||||
)}
|
||||
|
||||
@@ -611,15 +610,25 @@ export default function LoginPage({
|
||||
<p>{resetStatus.message}</p>
|
||||
</div>
|
||||
)}
|
||||
<BrandedButton
|
||||
<Button
|
||||
type='button'
|
||||
onClick={handleForgotPassword}
|
||||
onMouseEnter={() => setIsResetButtonHovered(true)}
|
||||
onMouseLeave={() => setIsResetButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
disabled={isSubmittingReset}
|
||||
loading={isSubmittingReset}
|
||||
loadingText='Sending'
|
||||
>
|
||||
Send Reset Link
|
||||
</BrandedButton>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isSubmittingReset ? 'Sending...' : 'Send Reset Link'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isResetButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
</div>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import { Eye, EyeOff } from 'lucide-react'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
|
||||
interface RequestResetFormProps {
|
||||
email: string
|
||||
@@ -27,6 +27,36 @@ export function RequestResetForm({
|
||||
statusMessage,
|
||||
className,
|
||||
}: RequestResetFormProps) {
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [])
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
onSubmit(email)
|
||||
@@ -64,14 +94,24 @@ export function RequestResetForm({
|
||||
)}
|
||||
</div>
|
||||
|
||||
<BrandedButton
|
||||
<Button
|
||||
type='submit'
|
||||
disabled={isSubmitting}
|
||||
loading={isSubmitting}
|
||||
loadingText='Sending'
|
||||
onMouseEnter={() => setIsButtonHovered(true)}
|
||||
onMouseLeave={() => setIsButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
>
|
||||
Send Reset Link
|
||||
</BrandedButton>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isSubmitting ? 'Sending...' : 'Send Reset Link'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
@@ -98,6 +138,35 @@ export function SetNewPasswordForm({
|
||||
const [validationMessage, setValidationMessage] = useState('')
|
||||
const [showPassword, setShowPassword] = useState(false)
|
||||
const [showConfirmPassword, setShowConfirmPassword] = useState(false)
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [])
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
@@ -227,14 +296,24 @@ export function SetNewPasswordForm({
|
||||
)}
|
||||
</div>
|
||||
|
||||
<BrandedButton
|
||||
type='submit'
|
||||
<Button
|
||||
disabled={isSubmitting || !token}
|
||||
loading={isSubmitting}
|
||||
loadingText='Resetting'
|
||||
type='submit'
|
||||
onMouseEnter={() => setIsButtonHovered(true)}
|
||||
onMouseLeave={() => setIsButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
>
|
||||
Reset Password
|
||||
</BrandedButton>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isSubmitting ? 'Resetting...' : 'Reset Password'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -2,9 +2,10 @@
|
||||
|
||||
import { Suspense, useEffect, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Eye, EyeOff } from 'lucide-react'
|
||||
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useRouter, useSearchParams } from 'next/navigation'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { client, useSession } from '@/lib/auth/auth-client'
|
||||
@@ -13,10 +14,8 @@ import { cn } from '@/lib/core/utils/cn'
|
||||
import { quickValidateEmail } from '@/lib/messaging/email/validation'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
|
||||
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
|
||||
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
|
||||
|
||||
const logger = createLogger('SignupForm')
|
||||
|
||||
@@ -96,7 +95,8 @@ function SignupFormContent({
|
||||
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
|
||||
const [redirectUrl, setRedirectUrl] = useState('')
|
||||
const [isInviteFlow, setIsInviteFlow] = useState(false)
|
||||
const buttonClass = useBrandedButtonClass()
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
const [name, setName] = useState('')
|
||||
const [nameErrors, setNameErrors] = useState<string[]>([])
|
||||
@@ -126,6 +126,31 @@ function SignupFormContent({
|
||||
if (inviteFlowParam === 'true') {
|
||||
setIsInviteFlow(true)
|
||||
}
|
||||
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [searchParams])
|
||||
|
||||
const validatePassword = (passwordValue: string): string[] => {
|
||||
@@ -475,14 +500,24 @@ function SignupFormContent({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<BrandedButton
|
||||
<Button
|
||||
type='submit'
|
||||
onMouseEnter={() => setIsButtonHovered(true)}
|
||||
onMouseLeave={() => setIsButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
disabled={isLoading}
|
||||
loading={isLoading}
|
||||
loadingText='Creating account'
|
||||
>
|
||||
Create account
|
||||
</BrandedButton>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isLoading ? 'Creating account' : 'Create account'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
</form>
|
||||
)}
|
||||
|
||||
|
||||
@@ -13,7 +13,6 @@ import { cn } from '@/lib/core/utils/cn'
|
||||
import { quickValidateEmail } from '@/lib/messaging/email/validation'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
|
||||
|
||||
const logger = createLogger('SSOForm')
|
||||
|
||||
@@ -58,7 +57,7 @@ export default function SSOForm() {
|
||||
const [email, setEmail] = useState('')
|
||||
const [emailErrors, setEmailErrors] = useState<string[]>([])
|
||||
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
|
||||
const buttonClass = useBrandedButtonClass()
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [callbackUrl, setCallbackUrl] = useState('/workspace')
|
||||
|
||||
useEffect(() => {
|
||||
@@ -91,6 +90,31 @@ export default function SSOForm() {
|
||||
setShowEmailValidationError(true)
|
||||
}
|
||||
}
|
||||
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [searchParams])
|
||||
|
||||
const handleEmailChange = (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
|
||||
@@ -8,7 +8,6 @@ import { cn } from '@/lib/core/utils/cn'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { useVerification } from '@/app/(auth)/verify/use-verification'
|
||||
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
|
||||
|
||||
interface VerifyContentProps {
|
||||
hasEmailService: boolean
|
||||
@@ -59,7 +58,34 @@ function VerificationForm({
|
||||
setCountdown(30)
|
||||
}
|
||||
|
||||
const buttonClass = useBrandedButtonClass()
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
|
||||
useEffect(() => {
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [])
|
||||
|
||||
return (
|
||||
<>
|
||||
|
||||
@@ -4,6 +4,7 @@ import { useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { X } from 'lucide-react'
|
||||
import { Textarea } from '@/components/emcn'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import {
|
||||
@@ -17,7 +18,6 @@ import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { quickValidateEmail } from '@/lib/messaging/email/validation'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
import Footer from '@/app/(landing)/components/footer/footer'
|
||||
import Nav from '@/app/(landing)/components/nav/nav'
|
||||
|
||||
@@ -493,17 +493,18 @@ export default function CareersPage() {
|
||||
|
||||
{/* Submit Button */}
|
||||
<div className='flex justify-end pt-2'>
|
||||
<BrandedButton
|
||||
<Button
|
||||
type='submit'
|
||||
disabled={isSubmitting || submitStatus === 'success'}
|
||||
loading={isSubmitting}
|
||||
loadingText='Submitting'
|
||||
showArrow={false}
|
||||
fullWidth={false}
|
||||
className='min-w-[200px]'
|
||||
className='min-w-[200px] rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all duration-300 hover:opacity-90 disabled:opacity-50'
|
||||
size='lg'
|
||||
>
|
||||
{submitStatus === 'success' ? 'Submitted' : 'Submit Application'}
|
||||
</BrandedButton>
|
||||
{isSubmitting
|
||||
? 'Submitting...'
|
||||
: submitStatus === 'success'
|
||||
? 'Submitted'
|
||||
: 'Submit Application'}
|
||||
</Button>
|
||||
</div>
|
||||
</form>
|
||||
</section>
|
||||
|
||||
@@ -11,7 +11,6 @@ import { useBrandConfig } from '@/lib/branding/branding'
|
||||
import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { getFormattedGitHubStars } from '@/app/(landing)/actions/github'
|
||||
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
|
||||
|
||||
const logger = createLogger('nav')
|
||||
|
||||
@@ -21,12 +20,11 @@ interface NavProps {
|
||||
}
|
||||
|
||||
export default function Nav({ hideAuthButtons = false, variant = 'landing' }: NavProps = {}) {
|
||||
const [githubStars, setGithubStars] = useState('25.8k')
|
||||
const [githubStars, setGithubStars] = useState('25.1k')
|
||||
const [isHovered, setIsHovered] = useState(false)
|
||||
const [isLoginHovered, setIsLoginHovered] = useState(false)
|
||||
const router = useRouter()
|
||||
const brand = useBrandConfig()
|
||||
const buttonClass = useBrandedButtonClass()
|
||||
|
||||
useEffect(() => {
|
||||
if (variant !== 'landing') return
|
||||
@@ -185,7 +183,7 @@ export default function Nav({ hideAuthButtons = false, variant = 'landing' }: Na
|
||||
href='/signup'
|
||||
onMouseEnter={() => setIsHovered(true)}
|
||||
onMouseLeave={() => setIsHovered(false)}
|
||||
className={`${buttonClass} group inline-flex items-center justify-center gap-2 rounded-[10px] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white transition-all`}
|
||||
className='group inline-flex items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[14px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all sm:text-[16px]'
|
||||
aria-label='Get started with Sim - Sign up for free'
|
||||
prefetch={true}
|
||||
>
|
||||
|
||||
@@ -4,11 +4,6 @@ import { createLogger } from '@sim/logger'
|
||||
import { and, desc, eq, inArray } from 'drizzle-orm'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { refreshOAuthToken } from '@/lib/oauth'
|
||||
import {
|
||||
getMicrosoftRefreshTokenExpiry,
|
||||
isMicrosoftProvider,
|
||||
PROACTIVE_REFRESH_THRESHOLD_DAYS,
|
||||
} from '@/lib/oauth/microsoft'
|
||||
|
||||
const logger = createLogger('OAuthUtilsAPI')
|
||||
|
||||
@@ -210,32 +205,15 @@ export async function refreshAccessTokenIfNeeded(
|
||||
}
|
||||
|
||||
// Decide if we should refresh: token missing OR expired
|
||||
const accessTokenExpiresAt = credential.accessTokenExpiresAt
|
||||
const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
|
||||
const expiresAt = credential.accessTokenExpiresAt
|
||||
const now = new Date()
|
||||
|
||||
// Check if access token needs refresh (missing or expired)
|
||||
const accessTokenNeedsRefresh =
|
||||
!!credential.refreshToken &&
|
||||
(!credential.accessToken || (accessTokenExpiresAt && accessTokenExpiresAt <= now))
|
||||
|
||||
// Check if we should proactively refresh to prevent refresh token expiry
|
||||
// This applies to Microsoft providers whose refresh tokens expire after 90 days of inactivity
|
||||
const proactiveRefreshThreshold = new Date(
|
||||
now.getTime() + PROACTIVE_REFRESH_THRESHOLD_DAYS * 24 * 60 * 60 * 1000
|
||||
)
|
||||
const refreshTokenNeedsProactiveRefresh =
|
||||
!!credential.refreshToken &&
|
||||
isMicrosoftProvider(credential.providerId) &&
|
||||
refreshTokenExpiresAt &&
|
||||
refreshTokenExpiresAt <= proactiveRefreshThreshold
|
||||
|
||||
const shouldRefresh = accessTokenNeedsRefresh || refreshTokenNeedsProactiveRefresh
|
||||
const shouldRefresh =
|
||||
!!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))
|
||||
|
||||
const accessToken = credential.accessToken
|
||||
|
||||
if (shouldRefresh) {
|
||||
logger.info(`[${requestId}] Refreshing token for credential`)
|
||||
logger.info(`[${requestId}] Token expired, attempting to refresh for credential`)
|
||||
try {
|
||||
const refreshedToken = await refreshOAuthToken(
|
||||
credential.providerId,
|
||||
@@ -249,15 +227,11 @@ export async function refreshAccessTokenIfNeeded(
|
||||
userId: credential.userId,
|
||||
hasRefreshToken: !!credential.refreshToken,
|
||||
})
|
||||
if (!accessTokenNeedsRefresh && accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return accessToken
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
// Prepare update data
|
||||
const updateData: Record<string, unknown> = {
|
||||
const updateData: any = {
|
||||
accessToken: refreshedToken.accessToken,
|
||||
accessTokenExpiresAt: new Date(Date.now() + refreshedToken.expiresIn * 1000),
|
||||
updatedAt: new Date(),
|
||||
@@ -269,10 +243,6 @@ export async function refreshAccessTokenIfNeeded(
|
||||
updateData.refreshToken = refreshedToken.refreshToken
|
||||
}
|
||||
|
||||
if (isMicrosoftProvider(credential.providerId)) {
|
||||
updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
|
||||
}
|
||||
|
||||
// Update the token in the database
|
||||
await db.update(account).set(updateData).where(eq(account.id, credentialId))
|
||||
|
||||
@@ -286,10 +256,6 @@ export async function refreshAccessTokenIfNeeded(
|
||||
credentialId,
|
||||
userId: credential.userId,
|
||||
})
|
||||
if (!accessTokenNeedsRefresh && accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return accessToken
|
||||
}
|
||||
return null
|
||||
}
|
||||
} else if (!accessToken) {
|
||||
@@ -311,27 +277,10 @@ export async function refreshTokenIfNeeded(
|
||||
credentialId: string
|
||||
): Promise<{ accessToken: string; refreshed: boolean }> {
|
||||
// Decide if we should refresh: token missing OR expired
|
||||
const accessTokenExpiresAt = credential.accessTokenExpiresAt
|
||||
const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
|
||||
const expiresAt = credential.accessTokenExpiresAt
|
||||
const now = new Date()
|
||||
|
||||
// Check if access token needs refresh (missing or expired)
|
||||
const accessTokenNeedsRefresh =
|
||||
!!credential.refreshToken &&
|
||||
(!credential.accessToken || (accessTokenExpiresAt && accessTokenExpiresAt <= now))
|
||||
|
||||
// Check if we should proactively refresh to prevent refresh token expiry
|
||||
// This applies to Microsoft providers whose refresh tokens expire after 90 days of inactivity
|
||||
const proactiveRefreshThreshold = new Date(
|
||||
now.getTime() + PROACTIVE_REFRESH_THRESHOLD_DAYS * 24 * 60 * 60 * 1000
|
||||
)
|
||||
const refreshTokenNeedsProactiveRefresh =
|
||||
!!credential.refreshToken &&
|
||||
isMicrosoftProvider(credential.providerId) &&
|
||||
refreshTokenExpiresAt &&
|
||||
refreshTokenExpiresAt <= proactiveRefreshThreshold
|
||||
|
||||
const shouldRefresh = accessTokenNeedsRefresh || refreshTokenNeedsProactiveRefresh
|
||||
const shouldRefresh =
|
||||
!!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))
|
||||
|
||||
// If token appears valid and present, return it directly
|
||||
if (!shouldRefresh) {
|
||||
@@ -344,17 +293,13 @@ export async function refreshTokenIfNeeded(
|
||||
|
||||
if (!refreshResult) {
|
||||
logger.error(`[${requestId}] Failed to refresh token for credential`)
|
||||
if (!accessTokenNeedsRefresh && credential.accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return { accessToken: credential.accessToken, refreshed: false }
|
||||
}
|
||||
throw new Error('Failed to refresh token')
|
||||
}
|
||||
|
||||
const { accessToken: refreshedToken, expiresIn, refreshToken: newRefreshToken } = refreshResult
|
||||
|
||||
// Prepare update data
|
||||
const updateData: Record<string, unknown> = {
|
||||
const updateData: any = {
|
||||
accessToken: refreshedToken,
|
||||
accessTokenExpiresAt: new Date(Date.now() + expiresIn * 1000), // Use provider's expiry
|
||||
updatedAt: new Date(),
|
||||
@@ -366,10 +311,6 @@ export async function refreshTokenIfNeeded(
|
||||
updateData.refreshToken = newRefreshToken
|
||||
}
|
||||
|
||||
if (isMicrosoftProvider(credential.providerId)) {
|
||||
updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
|
||||
}
|
||||
|
||||
await db.update(account).set(updateData).where(eq(account.id, credentialId))
|
||||
|
||||
logger.info(`[${requestId}] Successfully refreshed access token`)
|
||||
@@ -390,11 +331,6 @@ export async function refreshTokenIfNeeded(
|
||||
}
|
||||
}
|
||||
|
||||
if (!accessTokenNeedsRefresh && credential.accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return { accessToken: credential.accessToken, refreshed: false }
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Refresh failed and no valid token found in DB`, error)
|
||||
throw error
|
||||
}
|
||||
|
||||
@@ -15,8 +15,7 @@ const resetPasswordSchema = z.object({
|
||||
.max(100, 'Password must not exceed 100 characters')
|
||||
.regex(/[A-Z]/, 'Password must contain at least one uppercase letter')
|
||||
.regex(/[a-z]/, 'Password must contain at least one lowercase letter')
|
||||
.regex(/[0-9]/, 'Password must contain at least one number')
|
||||
.regex(/[^A-Za-z0-9]/, 'Password must contain at least one special character'),
|
||||
.regex(/[0-9]/, 'Password must contain at least one number'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
|
||||
@@ -11,7 +11,7 @@ import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
|
||||
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
|
||||
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
||||
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
|
||||
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
@@ -36,7 +36,7 @@ async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
|
||||
.from(workflowBlocks)
|
||||
.where(eq(workflowBlocks.workflowId, workflowId))
|
||||
|
||||
const startBlock = blocks.find((block) => isInputDefinitionTrigger(block.type))
|
||||
const startBlock = blocks.find((block) => isValidStartBlockType(block.type))
|
||||
|
||||
if (!startBlock) {
|
||||
return []
|
||||
|
||||
@@ -276,11 +276,8 @@ describe('Function Execute API Route', () => {
|
||||
it.concurrent('should resolve tag variables with <tag_name> syntax', async () => {
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return <email>',
|
||||
blockData: {
|
||||
'block-123': { id: '123', subject: 'Test Email' },
|
||||
},
|
||||
blockNameMapping: {
|
||||
email: 'block-123',
|
||||
params: {
|
||||
email: { id: '123', subject: 'Test Email' },
|
||||
},
|
||||
})
|
||||
|
||||
@@ -308,13 +305,9 @@ describe('Function Execute API Route', () => {
|
||||
it.concurrent('should only match valid variable names in angle brackets', async () => {
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return <validVar> + "<invalid@email.com>" + <another_valid>',
|
||||
blockData: {
|
||||
'block-1': 'hello',
|
||||
'block-2': 'world',
|
||||
},
|
||||
blockNameMapping: {
|
||||
validvar: 'block-1',
|
||||
another_valid: 'block-2',
|
||||
params: {
|
||||
validVar: 'hello',
|
||||
another_valid: 'world',
|
||||
},
|
||||
})
|
||||
|
||||
@@ -328,22 +321,28 @@ describe('Function Execute API Route', () => {
|
||||
it.concurrent(
|
||||
'should handle Gmail webhook data with email addresses containing angle brackets',
|
||||
async () => {
|
||||
const emailData = {
|
||||
id: '123',
|
||||
from: 'Waleed Latif <waleed@sim.ai>',
|
||||
to: 'User <user@example.com>',
|
||||
subject: 'Test Email',
|
||||
bodyText: 'Hello world',
|
||||
const gmailData = {
|
||||
email: {
|
||||
id: '123',
|
||||
from: 'Waleed Latif <waleed@sim.ai>',
|
||||
to: 'User <user@example.com>',
|
||||
subject: 'Test Email',
|
||||
bodyText: 'Hello world',
|
||||
},
|
||||
rawEmail: {
|
||||
id: '123',
|
||||
payload: {
|
||||
headers: [
|
||||
{ name: 'From', value: 'Waleed Latif <waleed@sim.ai>' },
|
||||
{ name: 'To', value: 'User <user@example.com>' },
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return <email>',
|
||||
blockData: {
|
||||
'block-email': emailData,
|
||||
},
|
||||
blockNameMapping: {
|
||||
email: 'block-email',
|
||||
},
|
||||
params: gmailData,
|
||||
})
|
||||
|
||||
const response = await POST(req)
|
||||
@@ -357,20 +356,17 @@ describe('Function Execute API Route', () => {
|
||||
it.concurrent(
|
||||
'should properly serialize complex email objects with special characters',
|
||||
async () => {
|
||||
const emailData = {
|
||||
from: 'Test User <test@example.com>',
|
||||
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
|
||||
bodyText: 'Text with\nnewlines\tand\ttabs',
|
||||
const complexEmailData = {
|
||||
email: {
|
||||
from: 'Test User <test@example.com>',
|
||||
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
|
||||
bodyText: 'Text with\nnewlines\tand\ttabs',
|
||||
},
|
||||
}
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return <email>',
|
||||
blockData: {
|
||||
'block-email': emailData,
|
||||
},
|
||||
blockNameMapping: {
|
||||
email: 'block-email',
|
||||
},
|
||||
params: complexEmailData,
|
||||
})
|
||||
|
||||
const response = await POST(req)
|
||||
@@ -523,23 +519,18 @@ describe('Function Execute API Route', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should handle JSON serialization edge cases', async () => {
|
||||
const complexData = {
|
||||
special: 'chars"with\'quotes',
|
||||
unicode: '🎉 Unicode content',
|
||||
nested: {
|
||||
deep: {
|
||||
value: 'test',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return <complexData>',
|
||||
blockData: {
|
||||
'block-complex': complexData,
|
||||
},
|
||||
blockNameMapping: {
|
||||
complexdata: 'block-complex',
|
||||
params: {
|
||||
complexData: {
|
||||
special: 'chars"with\'quotes',
|
||||
unicode: '🎉 Unicode content',
|
||||
nested: {
|
||||
deep: {
|
||||
value: 'test',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
|
||||
@@ -6,10 +6,10 @@ import { executeInE2B } from '@/lib/execution/e2b'
|
||||
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
|
||||
import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
|
||||
import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
|
||||
import { type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
|
||||
import {
|
||||
createEnvVarPattern,
|
||||
createWorkflowVariablePattern,
|
||||
resolveEnvVarReferences,
|
||||
} from '@/executor/utils/reference-validation'
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
@@ -18,8 +18,8 @@ export const MAX_DURATION = 210
|
||||
|
||||
const logger = createLogger('FunctionExecuteAPI')
|
||||
|
||||
const E2B_JS_WRAPPER_LINES = 3
|
||||
const E2B_PYTHON_WRAPPER_LINES = 1
|
||||
const E2B_JS_WRAPPER_LINES = 3 // Lines before user code: ';(async () => {', ' try {', ' const __sim_result = await (async () => {'
|
||||
const E2B_PYTHON_WRAPPER_LINES = 1 // Lines before user code: 'def __sim_main__():'
|
||||
|
||||
type TypeScriptModule = typeof import('typescript')
|
||||
|
||||
@@ -134,21 +134,33 @@ function extractEnhancedError(
|
||||
if (error.stack) {
|
||||
enhanced.stack = error.stack
|
||||
|
||||
// Parse stack trace to extract line and column information
|
||||
// Handle both compilation errors and runtime errors
|
||||
const stackLines: string[] = error.stack.split('\n')
|
||||
|
||||
for (const line of stackLines) {
|
||||
// Pattern 1: Compilation errors - "user-function.js:6"
|
||||
let match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
|
||||
// Pattern 2: Runtime errors - "at user-function.js:5:12"
|
||||
if (!match) {
|
||||
match = line.match(/at\s+user-function\.js:(\d+):(\d+)/)
|
||||
}
|
||||
|
||||
// Pattern 3: Generic patterns for any line containing our filename
|
||||
if (!match) {
|
||||
match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
}
|
||||
|
||||
if (match) {
|
||||
const stackLine = Number.parseInt(match[1], 10)
|
||||
const stackColumn = match[2] ? Number.parseInt(match[2], 10) : undefined
|
||||
|
||||
// Adjust line number to account for wrapper code
|
||||
// The user code starts at a specific line in our wrapper
|
||||
const adjustedLine = stackLine - userCodeStartLine + 1
|
||||
|
||||
// Check if this is a syntax error in wrapper code caused by incomplete user code
|
||||
const isWrapperSyntaxError =
|
||||
stackLine > userCodeStartLine &&
|
||||
error.name === 'SyntaxError' &&
|
||||
@@ -156,6 +168,7 @@ function extractEnhancedError(
|
||||
error.message.includes('Unexpected end of input'))
|
||||
|
||||
if (isWrapperSyntaxError && userCode) {
|
||||
// Map wrapper syntax errors to the last line of user code
|
||||
const codeLines = userCode.split('\n')
|
||||
const lastUserLine = codeLines.length
|
||||
enhanced.line = lastUserLine
|
||||
@@ -168,6 +181,7 @@ function extractEnhancedError(
|
||||
enhanced.line = adjustedLine
|
||||
enhanced.column = stackColumn
|
||||
|
||||
// Extract the actual line content from user code
|
||||
if (userCode) {
|
||||
const codeLines = userCode.split('\n')
|
||||
if (adjustedLine <= codeLines.length) {
|
||||
@@ -178,6 +192,7 @@ function extractEnhancedError(
|
||||
}
|
||||
|
||||
if (stackLine <= userCodeStartLine) {
|
||||
// Error is in wrapper code itself
|
||||
enhanced.line = stackLine
|
||||
enhanced.column = stackColumn
|
||||
break
|
||||
@@ -185,6 +200,7 @@ function extractEnhancedError(
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up stack trace to show user-relevant information
|
||||
const cleanedStackLines: string[] = stackLines
|
||||
.filter(
|
||||
(line: string) =>
|
||||
@@ -198,6 +214,9 @@ function extractEnhancedError(
|
||||
}
|
||||
}
|
||||
|
||||
// Keep original message without adding error type prefix
|
||||
// The error type will be added later in createUserFriendlyErrorMessage
|
||||
|
||||
return enhanced
|
||||
}
|
||||
|
||||
@@ -212,6 +231,7 @@ function formatE2BError(
|
||||
userCode: string,
|
||||
prologueLineCount: number
|
||||
): { formattedError: string; cleanedOutput: string } {
|
||||
// Calculate line offset based on language and prologue
|
||||
const wrapperLines =
|
||||
language === CodeLanguage.Python ? E2B_PYTHON_WRAPPER_LINES : E2B_JS_WRAPPER_LINES
|
||||
const totalOffset = prologueLineCount + wrapperLines
|
||||
@@ -221,20 +241,27 @@ function formatE2BError(
|
||||
let cleanErrorMsg = ''
|
||||
|
||||
if (language === CodeLanguage.Python) {
|
||||
// Python error format: "Cell In[X], line Y" followed by error details
|
||||
// Extract line number from the Cell reference
|
||||
const cellMatch = errorOutput.match(/Cell In\[\d+\], line (\d+)/)
|
||||
if (cellMatch) {
|
||||
const originalLine = Number.parseInt(cellMatch[1], 10)
|
||||
userLine = originalLine - totalOffset
|
||||
}
|
||||
|
||||
// Extract clean error message from the error string
|
||||
// Remove file references like "(detected at line X) (file.py, line Y)"
|
||||
cleanErrorMsg = errorMessage
|
||||
.replace(/\s*\(detected at line \d+\)/g, '')
|
||||
.replace(/\s*\([^)]+\.py, line \d+\)/g, '')
|
||||
.trim()
|
||||
} else if (language === CodeLanguage.JavaScript) {
|
||||
// JavaScript error format from E2B: "SyntaxError: /path/file.ts: Message. (line:col)\n\n 9 | ..."
|
||||
// First, extract the error type and message from the first line
|
||||
const firstLineEnd = errorMessage.indexOf('\n')
|
||||
const firstLine = firstLineEnd > 0 ? errorMessage.substring(0, firstLineEnd) : errorMessage
|
||||
|
||||
// Parse: "SyntaxError: /home/user/index.ts: Missing semicolon. (11:9)"
|
||||
const jsErrorMatch = firstLine.match(/^(\w+Error):\s*[^:]+:\s*([^(]+)\.\s*\((\d+):(\d+)\)/)
|
||||
if (jsErrorMatch) {
|
||||
cleanErrorType = jsErrorMatch[1]
|
||||
@@ -242,11 +269,13 @@ function formatE2BError(
|
||||
const originalLine = Number.parseInt(jsErrorMatch[3], 10)
|
||||
userLine = originalLine - totalOffset
|
||||
} else {
|
||||
// Fallback: look for line number in the arrow pointer line (> 11 |)
|
||||
const arrowMatch = errorMessage.match(/^>\s*(\d+)\s*\|/m)
|
||||
if (arrowMatch) {
|
||||
const originalLine = Number.parseInt(arrowMatch[1], 10)
|
||||
userLine = originalLine - totalOffset
|
||||
}
|
||||
// Try to extract error type and message
|
||||
const errorMatch = firstLine.match(/^(\w+Error):\s*(.+)/)
|
||||
if (errorMatch) {
|
||||
cleanErrorType = errorMatch[1]
|
||||
@@ -260,11 +289,13 @@ function formatE2BError(
|
||||
}
|
||||
}
|
||||
|
||||
// Build the final clean error message
|
||||
const finalErrorMsg =
|
||||
cleanErrorType && cleanErrorMsg
|
||||
? `${cleanErrorType}: ${cleanErrorMsg}`
|
||||
: cleanErrorMsg || errorMessage
|
||||
|
||||
// Format with line number if available
|
||||
let formattedError = finalErrorMsg
|
||||
if (userLine && userLine > 0) {
|
||||
const codeLines = userCode.split('\n')
|
||||
@@ -280,6 +311,7 @@ function formatE2BError(
|
||||
}
|
||||
}
|
||||
|
||||
// For stdout, just return the clean error message without the full traceback
|
||||
const cleanedOutput = finalErrorMsg
|
||||
|
||||
return { formattedError, cleanedOutput }
|
||||
@@ -295,6 +327,7 @@ function createUserFriendlyErrorMessage(
|
||||
): string {
|
||||
let errorMessage = enhanced.message
|
||||
|
||||
// Add line information if available
|
||||
if (enhanced.line !== undefined) {
|
||||
let lineInfo = `Line ${enhanced.line}`
|
||||
|
||||
@@ -305,14 +338,18 @@ function createUserFriendlyErrorMessage(
|
||||
|
||||
errorMessage = `${lineInfo} - ${errorMessage}`
|
||||
} else {
|
||||
// If no line number, try to extract it from stack trace for display
|
||||
if (enhanced.stack) {
|
||||
const stackMatch = enhanced.stack.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
if (stackMatch) {
|
||||
const line = Number.parseInt(stackMatch[1], 10)
|
||||
let lineInfo = `Line ${line}`
|
||||
|
||||
// Try to get line content if we have userCode
|
||||
if (userCode) {
|
||||
const codeLines = userCode.split('\n')
|
||||
// Note: stackMatch gives us VM line number, need to adjust
|
||||
// This is a fallback case, so we might not have perfect line mapping
|
||||
if (line <= codeLines.length) {
|
||||
const lineContent = codeLines[line - 1]?.trim()
|
||||
if (lineContent) {
|
||||
@@ -326,6 +363,7 @@ function createUserFriendlyErrorMessage(
|
||||
}
|
||||
}
|
||||
|
||||
// Add error type prefix with consistent naming
|
||||
if (enhanced.name !== 'Error') {
|
||||
const errorTypePrefix =
|
||||
enhanced.name === 'SyntaxError'
|
||||
@@ -336,6 +374,7 @@ function createUserFriendlyErrorMessage(
|
||||
? 'Reference Error'
|
||||
: enhanced.name
|
||||
|
||||
// Only add prefix if not already present
|
||||
if (!errorMessage.toLowerCase().includes(errorTypePrefix.toLowerCase())) {
|
||||
errorMessage = `${errorTypePrefix}: ${errorMessage}`
|
||||
}
|
||||
@@ -344,6 +383,9 @@ function createUserFriendlyErrorMessage(
|
||||
return errorMessage
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves workflow variables with <variable.name> syntax
|
||||
*/
|
||||
function resolveWorkflowVariables(
|
||||
code: string,
|
||||
workflowVariables: Record<string, any>,
|
||||
@@ -363,35 +405,39 @@ function resolveWorkflowVariables(
|
||||
while ((match = regex.exec(code)) !== null) {
|
||||
const variableName = match[1].trim()
|
||||
|
||||
// Find the variable by name (workflowVariables is indexed by ID, values are variable objects)
|
||||
const foundVariable = Object.entries(workflowVariables).find(
|
||||
([_, variable]) => normalizeName(variable.name || '') === variableName
|
||||
)
|
||||
|
||||
if (!foundVariable) {
|
||||
const availableVars = Object.values(workflowVariables)
|
||||
.map((v) => v.name)
|
||||
.filter(Boolean)
|
||||
throw new Error(
|
||||
`Variable "${variableName}" doesn't exist.` +
|
||||
(availableVars.length > 0 ? ` Available: ${availableVars.join(', ')}` : '')
|
||||
)
|
||||
}
|
||||
let variableValue: unknown = ''
|
||||
if (foundVariable) {
|
||||
const variable = foundVariable[1]
|
||||
variableValue = variable.value
|
||||
|
||||
const variable = foundVariable[1]
|
||||
let variableValue: unknown = variable.value
|
||||
|
||||
if (variable.value !== undefined && variable.value !== null) {
|
||||
const type = variable.type === 'string' ? 'plain' : variable.type
|
||||
|
||||
if (type === 'number') {
|
||||
variableValue = Number(variableValue)
|
||||
} else if (type === 'boolean') {
|
||||
variableValue = variableValue === 'true' || variableValue === true
|
||||
} else if (type === 'json' && typeof variableValue === 'string') {
|
||||
if (variable.value !== undefined && variable.value !== null) {
|
||||
try {
|
||||
variableValue = JSON.parse(variableValue)
|
||||
// Handle 'string' type the same as 'plain' for backward compatibility
|
||||
const type = variable.type === 'string' ? 'plain' : variable.type
|
||||
|
||||
// For plain text, use exactly what's entered without modifications
|
||||
if (type === 'plain' && typeof variableValue === 'string') {
|
||||
// Use as-is for plain text
|
||||
} else if (type === 'number') {
|
||||
variableValue = Number(variableValue)
|
||||
} else if (type === 'boolean') {
|
||||
variableValue = variableValue === 'true' || variableValue === true
|
||||
} else if (type === 'json') {
|
||||
try {
|
||||
variableValue =
|
||||
typeof variableValue === 'string' ? JSON.parse(variableValue) : variableValue
|
||||
} catch {
|
||||
// Keep original value if JSON parsing fails
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Keep as-is
|
||||
// Fallback to original value on error
|
||||
variableValue = variable.value
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -404,9 +450,11 @@ function resolveWorkflowVariables(
|
||||
})
|
||||
}
|
||||
|
||||
// Process replacements in reverse order to maintain correct indices
|
||||
for (let i = replacements.length - 1; i >= 0; i--) {
|
||||
const { match: matchStr, index, variableName, variableValue } = replacements[i]
|
||||
|
||||
// Use variable reference approach
|
||||
const safeVarName = `__variable_${variableName.replace(/[^a-zA-Z0-9_]/g, '_')}`
|
||||
contextVariables[safeVarName] = variableValue
|
||||
resolvedCode =
|
||||
@@ -416,6 +464,9 @@ function resolveWorkflowVariables(
|
||||
return resolvedCode
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves environment variables with {{var_name}} syntax
|
||||
*/
|
||||
function resolveEnvironmentVariables(
|
||||
code: string,
|
||||
params: Record<string, any>,
|
||||
@@ -431,28 +482,32 @@ function resolveEnvironmentVariables(
|
||||
|
||||
const resolverVars: Record<string, string> = {}
|
||||
Object.entries(params).forEach(([key, value]) => {
|
||||
if (value !== undefined && value !== null) {
|
||||
if (value) {
|
||||
resolverVars[key] = String(value)
|
||||
}
|
||||
})
|
||||
Object.entries(envVars).forEach(([key, value]) => {
|
||||
if (value !== undefined && value !== null) {
|
||||
if (value) {
|
||||
resolverVars[key] = value
|
||||
}
|
||||
})
|
||||
|
||||
while ((match = regex.exec(code)) !== null) {
|
||||
const varName = match[1].trim()
|
||||
|
||||
if (!(varName in resolverVars)) {
|
||||
continue
|
||||
}
|
||||
|
||||
const resolved = resolveEnvVarReferences(match[0], resolverVars, {
|
||||
allowEmbedded: true,
|
||||
resolveExactMatch: true,
|
||||
trimKeys: true,
|
||||
onMissing: 'empty',
|
||||
deep: false,
|
||||
})
|
||||
const varValue =
|
||||
typeof resolved === 'string' ? resolved : resolved == null ? '' : String(resolved)
|
||||
replacements.push({
|
||||
match: match[0],
|
||||
index: match.index,
|
||||
varName,
|
||||
varValue: resolverVars[varName],
|
||||
varValue: String(varValue),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -468,59 +523,64 @@ function resolveEnvironmentVariables(
|
||||
return resolvedCode
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves tags with <tag_name> syntax (including nested paths like <block.response.data>)
|
||||
*/
|
||||
function resolveTagVariables(
|
||||
code: string,
|
||||
blockData: Record<string, unknown>,
|
||||
params: Record<string, any>,
|
||||
blockData: Record<string, any>,
|
||||
blockNameMapping: Record<string, string>,
|
||||
blockOutputSchemas: Record<string, OutputSchema>,
|
||||
contextVariables: Record<string, unknown>,
|
||||
language = 'javascript'
|
||||
contextVariables: Record<string, any>
|
||||
): string {
|
||||
let resolvedCode = code
|
||||
const undefinedLiteral = language === 'python' ? 'None' : 'undefined'
|
||||
|
||||
const tagPattern = new RegExp(
|
||||
`${REFERENCE.START}([a-zA-Z_](?:[a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])?)${REFERENCE.END}`,
|
||||
`${REFERENCE.START}([a-zA-Z_][a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])${REFERENCE.END}`,
|
||||
'g'
|
||||
)
|
||||
const tagMatches = resolvedCode.match(tagPattern) || []
|
||||
|
||||
for (const match of tagMatches) {
|
||||
const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
|
||||
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
|
||||
const blockName = pathParts[0]
|
||||
const fieldPath = pathParts.slice(1)
|
||||
|
||||
const result = resolveBlockReference(blockName, fieldPath, {
|
||||
blockNameMapping,
|
||||
blockData,
|
||||
blockOutputSchemas,
|
||||
})
|
||||
// Handle nested paths like "getrecord.response.data" or "function1.response.result"
|
||||
// First try params, then blockData directly, then try with block name mapping
|
||||
let tagValue = getNestedValue(params, tagName) || getNestedValue(blockData, tagName) || ''
|
||||
|
||||
if (!result) {
|
||||
continue
|
||||
}
|
||||
// If not found and the path starts with a block name, try mapping the block name to ID
|
||||
if (!tagValue && tagName.includes(REFERENCE.PATH_DELIMITER)) {
|
||||
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
|
||||
const normalizedBlockName = pathParts[0] // This should already be normalized like "function1"
|
||||
|
||||
let tagValue = result.value
|
||||
// Direct lookup using normalized block name
|
||||
const blockId = blockNameMapping[normalizedBlockName] ?? null
|
||||
|
||||
if (tagValue === undefined) {
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), undefinedLiteral)
|
||||
continue
|
||||
}
|
||||
|
||||
if (typeof tagValue === 'string') {
const trimmed = tagValue.trimStart()
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
try {
tagValue = JSON.parse(tagValue)
} catch {
// Keep as string if not valid JSON
}
if (blockId) {
const remainingPath = pathParts.slice(1).join('.')
const fullPath = `${blockId}.${remainingPath}`
tagValue = getNestedValue(blockData, fullPath) || ''
}
}

const safeVarName = `__tag_${tagName.replace(/_/g, '_1').replace(/\./g, '_0')}`
// If the value is a stringified JSON, parse it back to object
if (
typeof tagValue === 'string' &&
tagValue.length > 100 &&
(tagValue.startsWith('{') || tagValue.startsWith('['))
) {
try {
tagValue = JSON.parse(tagValue)
} catch (e) {
// Keep as string if parsing fails
}
}

// Instead of injecting large JSON directly, create a variable reference
const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
contextVariables[safeVarName] = tagValue

// Replace the template with a variable reference
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
}

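The hunk above swaps each `<block.path>` reference for a generated context-variable name instead of inlining the resolved JSON. A minimal standalone sketch of that substitution idea follows; it uses a simplified `<name.path>` syntax and a plain lookup object, not the project's actual `resolveBlockReference` helper or `REFERENCE` constants, which are assumed here for illustration only.

```ts
// Minimal sketch of the tag-substitution idea (assumptions noted in the lead-in).
function sketchResolveTags(
  code: string,
  blockData: Record<string, unknown>,
  contextVariables: Record<string, unknown>
): string {
  const tagPattern = /<([a-zA-Z_][a-zA-Z0-9_.]*)>/g
  return code.replace(tagPattern, (match, tagName: string) => {
    // Walk the dot-separated path through blockData.
    const value = tagName
      .split('.')
      .reduce<unknown>(
        (cur, key) =>
          cur && typeof cur === 'object' ? (cur as Record<string, unknown>)[key] : undefined,
        blockData as unknown
      )
    if (value === undefined) return 'undefined'
    // Bind the value to a sanitized variable name instead of inlining JSON.
    const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
    contextVariables[safeVarName] = value
    return safeVarName
  })
}

// Example: "<getrecord.response.data>" becomes "__tag_getrecord_response_data",
// and the actual value travels via contextVariables into the execution prologue.
```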
@@ -536,31 +596,44 @@ function resolveTagVariables(
*/
function resolveCodeVariables(
code: string,
params: Record<string, unknown>,
params: Record<string, any>,
envVars: Record<string, string> = {},
blockData: Record<string, unknown> = {},
blockData: Record<string, any> = {},
blockNameMapping: Record<string, string> = {},
blockOutputSchemas: Record<string, OutputSchema> = {},
workflowVariables: Record<string, unknown> = {},
language = 'javascript'
): { resolvedCode: string; contextVariables: Record<string, unknown> } {
workflowVariables: Record<string, any> = {}
): { resolvedCode: string; contextVariables: Record<string, any> } {
let resolvedCode = code
const contextVariables: Record<string, unknown> = {}
const contextVariables: Record<string, any> = {}

// Resolve workflow variables with <variable.name> syntax first
resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)

// Resolve environment variables with {{var_name}} syntax
resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)

// Resolve tags with <tag_name> syntax (including nested paths like <block.response.data>)
resolvedCode = resolveTagVariables(
resolvedCode,
params,
blockData,
blockNameMapping,
blockOutputSchemas,
contextVariables,
language
contextVariables
)

return { resolvedCode, contextVariables }
}

/**
* Get nested value from object using dot notation path
*/
function getNestedValue(obj: any, path: string): any {
if (!obj || !path) return undefined

return path.split('.').reduce((current, key) => {
return current && typeof current === 'object' ? current[key] : undefined
}, obj)
}

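A quick usage check of the dot-path helper above, using hypothetical data purely for illustration:

```ts
// Hypothetical data, just to show how the dot-path lookup behaves.
const blockData = { getrecord: { response: { data: { id: 42 } } } }

getNestedValue(blockData, 'getrecord.response.data.id') // => 42
getNestedValue(blockData, 'getrecord.response.missing') // => undefined
getNestedValue(null, 'anything') // => undefined (guard clause)
```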
/**
|
||||
* Remove one trailing newline from stdout
|
||||
* This handles the common case where print() or console.log() adds a trailing \n
|
||||
@@ -593,12 +666,12 @@ export async function POST(req: NextRequest) {
|
||||
envVars = {},
|
||||
blockData = {},
|
||||
blockNameMapping = {},
|
||||
blockOutputSchemas = {},
|
||||
workflowVariables = {},
|
||||
workflowId,
|
||||
isCustomTool = false,
|
||||
} = body
|
||||
|
||||
// Extract internal parameters that shouldn't be passed to the execution context
|
||||
const executionParams = { ...params }
|
||||
executionParams._context = undefined
|
||||
|
||||
@@ -610,21 +683,21 @@ export async function POST(req: NextRequest) {
|
||||
isCustomTool,
|
||||
})
|
||||
|
||||
const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
|
||||
|
||||
// Resolve variables in the code with workflow environment variables
|
||||
const codeResolution = resolveCodeVariables(
|
||||
code,
|
||||
executionParams,
|
||||
envVars,
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
blockOutputSchemas,
|
||||
workflowVariables,
|
||||
lang
|
||||
workflowVariables
|
||||
)
|
||||
resolvedCode = codeResolution.resolvedCode
|
||||
const contextVariables = codeResolution.contextVariables
|
||||
|
||||
const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
|
||||
|
||||
// Extract imports once for JavaScript code (reuse later to avoid double extraction)
|
||||
let jsImports = ''
|
||||
let jsRemainingCode = resolvedCode
|
||||
let hasImports = false
|
||||
@@ -634,22 +707,31 @@ export async function POST(req: NextRequest) {
|
||||
jsImports = extractionResult.imports
|
||||
jsRemainingCode = extractionResult.remainingCode
|
||||
|
||||
// Check for ES6 imports or CommonJS require statements
// ES6 imports are extracted by the TypeScript parser
// Also check for require() calls which indicate external dependencies
const hasRequireStatements = /require\s*\(\s*['"`]/.test(resolvedCode)
hasImports = jsImports.trim().length > 0 || hasRequireStatements
}

// Python always requires E2B
if (lang === CodeLanguage.Python && !isE2bEnabled) {
throw new Error(
'Python execution requires E2B to be enabled. Please contact your administrator to enable E2B, or use JavaScript instead.'
)
}

// JavaScript with imports requires E2B
if (lang === CodeLanguage.JavaScript && hasImports && !isE2bEnabled) {
throw new Error(
'JavaScript code with import statements requires E2B to be enabled. Please remove the import statements, or contact your administrator to enable E2B.'
)
}

// Use E2B if:
// - E2B is enabled AND
// - Not a custom tool AND
// - (Python OR JavaScript with imports)
const useE2B =
isE2bEnabled &&
!isCustomTool &&
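The displayed hunk cuts off after `!isCustomTool &&`, but the comment block spells out the routing rule. A hedged reconstruction of the full boolean, with the final clause inferred from that comment rather than copied from the file:

```ts
// Reconstructed from the comment above; the hunk is truncated, so treat the
// last clause as an assumption rather than the file's exact text.
const useE2B =
  isE2bEnabled &&
  !isCustomTool &&
  (lang === CodeLanguage.Python || (lang === CodeLanguage.JavaScript && hasImports))
```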
@@ -662,10 +744,13 @@ export async function POST(req: NextRequest) {
|
||||
language: lang,
|
||||
})
|
||||
let prologue = ''
|
||||
const epilogue = ''
|
||||
|
||||
if (lang === CodeLanguage.JavaScript) {
|
||||
// Track prologue lines for error adjustment
|
||||
let prologueLineCount = 0
|
||||
|
||||
// Reuse the imports we already extracted earlier
|
||||
const imports = jsImports
|
||||
const remainingCode = jsRemainingCode
|
||||
|
||||
@@ -680,11 +765,7 @@ export async function POST(req: NextRequest) {
|
||||
prologue += `const environmentVariables = JSON.parse(${JSON.stringify(JSON.stringify(envVars))});\n`
|
||||
prologueLineCount++
|
||||
for (const [k, v] of Object.entries(contextVariables)) {
|
||||
if (v === undefined) {
|
||||
prologue += `const ${k} = undefined;\n`
|
||||
} else {
|
||||
prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
|
||||
}
|
||||
prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
|
||||
prologueLineCount++
|
||||
}
|
||||
|
||||
@@ -701,7 +782,7 @@ export async function POST(req: NextRequest) {
|
||||
' }',
|
||||
'})();',
|
||||
].join('\n')
|
||||
const codeForE2B = importSection + prologue + wrapped
|
||||
const codeForE2B = importSection + prologue + wrapped + epilogue
|
||||
|
||||
const execStart = Date.now()
|
||||
const {
|
||||
@@ -723,6 +804,7 @@ export async function POST(req: NextRequest) {
|
||||
error: e2bError,
|
||||
})
|
||||
|
||||
// If there was an execution error, format it properly
|
||||
if (e2bError) {
|
||||
const { formattedError, cleanedOutput } = formatE2BError(
|
||||
e2bError,
|
||||
@@ -746,7 +828,7 @@ export async function POST(req: NextRequest) {
|
||||
output: { result: e2bResult ?? null, stdout: cleanStdout(stdout), executionTime },
|
||||
})
|
||||
}
|
||||
|
||||
// Track prologue lines for error adjustment
|
||||
let prologueLineCount = 0
|
||||
prologue += 'import json\n'
|
||||
prologueLineCount++
|
||||
@@ -755,11 +837,7 @@ export async function POST(req: NextRequest) {
|
||||
prologue += `environmentVariables = json.loads(${JSON.stringify(JSON.stringify(envVars))})\n`
|
||||
prologueLineCount++
|
||||
for (const [k, v] of Object.entries(contextVariables)) {
|
||||
if (v === undefined) {
|
||||
prologue += `${k} = None\n`
|
||||
} else {
|
||||
prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
|
||||
}
|
||||
prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
|
||||
prologueLineCount++
|
||||
}
|
||||
const wrapped = [
|
||||
@@ -768,7 +846,7 @@ export async function POST(req: NextRequest) {
|
||||
'__sim_result__ = __sim_main__()',
|
||||
"print('__SIM_RESULT__=' + json.dumps(__sim_result__))",
|
||||
].join('\n')
|
||||
const codeForE2B = prologue + wrapped
|
||||
const codeForE2B = prologue + wrapped + epilogue
|
||||
|
||||
const execStart = Date.now()
|
||||
const {
|
||||
@@ -790,6 +868,7 @@ export async function POST(req: NextRequest) {
|
||||
error: e2bError,
|
||||
})
|
||||
|
||||
// If there was an execution error, format it properly
|
||||
if (e2bError) {
|
||||
const { formattedError, cleanedOutput } = formatE2BError(
|
||||
e2bError,
|
||||
@@ -818,6 +897,7 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
const wrapperLines = ['(async () => {', ' try {']
|
||||
if (isCustomTool) {
|
||||
wrapperLines.push(' // For custom tools, make parameters directly accessible')
|
||||
Object.keys(executionParams).forEach((key) => {
|
||||
wrapperLines.push(` const ${key} = params.${key};`)
|
||||
})
|
||||
@@ -851,10 +931,12 @@ export async function POST(req: NextRequest) {
|
||||
})
|
||||
|
||||
const ivmError = isolatedResult.error
|
||||
// Adjust line number for prepended param destructuring in custom tools
|
||||
let adjustedLine = ivmError.line
|
||||
let adjustedLineContent = ivmError.lineContent
|
||||
if (prependedLineCount > 0 && ivmError.line !== undefined) {
|
||||
adjustedLine = Math.max(1, ivmError.line - prependedLineCount)
|
||||
// Get line content from original user code, not the prepended code
|
||||
const codeLines = resolvedCode.split('\n')
|
||||
if (adjustedLine <= codeLines.length) {
|
||||
adjustedLineContent = codeLines[adjustedLine - 1]?.trim()
|
||||
|
||||
@@ -5,7 +5,6 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
bulkDocumentOperation,
|
||||
bulkDocumentOperationByFilter,
|
||||
createDocumentRecords,
|
||||
createSingleDocument,
|
||||
getDocuments,
|
||||
@@ -58,20 +57,13 @@ const BulkCreateDocumentsSchema = z.object({
bulk: z.literal(true),
})

const BulkUpdateDocumentsSchema = z
.object({
operation: z.enum(['enable', 'disable', 'delete']),
documentIds: z
.array(z.string())
.min(1, 'At least one document ID is required')
.max(100, 'Cannot operate on more than 100 documents at once')
.optional(),
selectAll: z.boolean().optional(),
enabledFilter: z.enum(['all', 'enabled', 'disabled']).optional(),
})
.refine((data) => data.selectAll || (data.documentIds && data.documentIds.length > 0), {
message: 'Either selectAll must be true or documentIds must be provided',
})
const BulkUpdateDocumentsSchema = z.object({
operation: z.enum(['enable', 'disable', 'delete']),
documentIds: z
.array(z.string())
.min(1, 'At least one document ID is required')
.max(100, 'Cannot operate on more than 100 documents at once'),
})

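To illustrate how the simplified schema above behaves, here is a small check with `safeParse`; the payloads are illustrative only:

```ts
// Illustrative payloads only; safeParse reports success/failure without throwing.
const ok = BulkUpdateDocumentsSchema.safeParse({
  operation: 'disable',
  documentIds: ['doc_1', 'doc_2'],
})
// ok.success === true

const missingIds = BulkUpdateDocumentsSchema.safeParse({ operation: 'delete' })
// missingIds.success === false — documentIds is now required, since the
// selectAll / enabledFilter branch was removed in this change.
```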
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
@@ -98,17 +90,14 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
}
|
||||
|
||||
const url = new URL(req.url)
|
||||
const enabledFilter = url.searchParams.get('enabledFilter') as
|
||||
| 'all'
|
||||
| 'enabled'
|
||||
| 'disabled'
|
||||
| null
|
||||
const includeDisabled = url.searchParams.get('includeDisabled') === 'true'
|
||||
const search = url.searchParams.get('search') || undefined
|
||||
const limit = Number.parseInt(url.searchParams.get('limit') || '50')
|
||||
const offset = Number.parseInt(url.searchParams.get('offset') || '0')
|
||||
const sortByParam = url.searchParams.get('sortBy')
|
||||
const sortOrderParam = url.searchParams.get('sortOrder')
|
||||
|
||||
// Validate sort parameters
|
||||
const validSortFields: DocumentSortField[] = [
|
||||
'filename',
|
||||
'fileSize',
|
||||
@@ -116,7 +105,6 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
'chunkCount',
|
||||
'uploadedAt',
|
||||
'processingStatus',
|
||||
'enabled',
|
||||
]
|
||||
const validSortOrders: SortOrder[] = ['asc', 'desc']
|
||||
|
||||
@@ -132,7 +120,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
const result = await getDocuments(
|
||||
knowledgeBaseId,
|
||||
{
|
||||
enabledFilter: enabledFilter || undefined,
|
||||
includeDisabled,
|
||||
search,
|
||||
limit,
|
||||
offset,
|
||||
@@ -202,7 +190,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
const createdDocuments = await createDocumentRecords(
|
||||
validatedData.documents,
|
||||
knowledgeBaseId,
|
||||
requestId
|
||||
requestId,
|
||||
userId
|
||||
)
|
||||
|
||||
logger.info(
|
||||
@@ -261,10 +250,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
throw validationError
|
||||
}
|
||||
} else {
|
||||
// Handle single document creation
|
||||
try {
|
||||
const validatedData = CreateDocumentSchema.parse(body)
|
||||
|
||||
const newDocument = await createSingleDocument(validatedData, knowledgeBaseId, requestId)
|
||||
const newDocument = await createSingleDocument(
|
||||
validatedData,
|
||||
knowledgeBaseId,
|
||||
requestId,
|
||||
userId
|
||||
)
|
||||
|
||||
try {
|
||||
const { PlatformEvents } = await import('@/lib/core/telemetry')
|
||||
@@ -299,6 +294,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error creating document`, error)
|
||||
|
||||
// Check if it's a storage limit error
|
||||
const errorMessage = error instanceof Error ? error.message : 'Failed to create document'
|
||||
const isStorageLimitError =
|
||||
errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit')
|
||||
@@ -335,20 +331,16 @@ export async function PATCH(req: NextRequest, { params }: { params: Promise<{ id
|
||||
|
||||
try {
|
||||
const validatedData = BulkUpdateDocumentsSchema.parse(body)
|
||||
const { operation, documentIds, selectAll, enabledFilter } = validatedData
|
||||
const { operation, documentIds } = validatedData
|
||||
|
||||
try {
|
||||
let result
|
||||
if (selectAll) {
|
||||
result = await bulkDocumentOperationByFilter(
|
||||
knowledgeBaseId,
|
||||
operation,
|
||||
enabledFilter,
|
||||
requestId
|
||||
)
|
||||
} else {
|
||||
result = await bulkDocumentOperation(knowledgeBaseId, operation, documentIds!, requestId)
|
||||
}
|
||||
const result = await bulkDocumentOperation(
|
||||
knowledgeBaseId,
|
||||
operation,
|
||||
documentIds,
|
||||
requestId,
|
||||
session.user.id
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import Link from 'next/link'
|
||||
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
|
||||
|
||||
interface BrandedLinkProps {
|
||||
href: string
|
||||
children: React.ReactNode
|
||||
className?: string
|
||||
target?: string
|
||||
rel?: string
|
||||
}
|
||||
|
||||
export function BrandedLink({ href, children, className = '', target, rel }: BrandedLinkProps) {
|
||||
const buttonClass = useBrandedButtonClass()
|
||||
|
||||
return (
|
||||
<Link
|
||||
href={href}
|
||||
target={target}
|
||||
rel={rel}
|
||||
className={`${buttonClass} group inline-flex items-center justify-center gap-2 rounded-[10px] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white transition-all ${className}`}
|
||||
>
|
||||
{children}
|
||||
</Link>
|
||||
)
|
||||
}
|
||||
@@ -2,7 +2,6 @@ import { BookOpen, Github, Rss } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedLink } from '@/app/changelog/components/branded-link'
|
||||
import ChangelogList from '@/app/changelog/components/timeline-list'
|
||||
|
||||
export interface ChangelogEntry {
|
||||
@@ -67,24 +66,25 @@ export default async function ChangelogContent() {
|
||||
<hr className='mt-6 border-border' />
|
||||
|
||||
<div className='mt-6 flex flex-wrap items-center gap-3 text-sm'>
|
||||
<BrandedLink
|
||||
<Link
|
||||
href='https://github.com/simstudioai/sim/releases'
|
||||
target='_blank'
|
||||
rel='noopener noreferrer'
|
||||
className='group inline-flex items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[14px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all sm:text-[16px]'
|
||||
>
|
||||
<Github className='h-4 w-4' />
|
||||
View on GitHub
|
||||
</BrandedLink>
|
||||
</Link>
|
||||
<Link
|
||||
href='https://docs.sim.ai'
|
||||
className='inline-flex items-center gap-2 rounded-[10px] border border-border py-[6px] pr-[10px] pl-[12px] text-[15px] transition-all hover:bg-muted'
|
||||
className='inline-flex items-center gap-2 rounded-md border border-border px-3 py-1.5 hover:bg-muted'
|
||||
>
|
||||
<BookOpen className='h-4 w-4' />
|
||||
Documentation
|
||||
</Link>
|
||||
<Link
|
||||
href='/changelog.xml'
|
||||
className='inline-flex items-center gap-2 rounded-[10px] border border-border py-[6px] pr-[10px] pl-[12px] text-[15px] transition-all hover:bg-muted'
|
||||
className='inline-flex items-center gap-2 rounded-md border border-border px-3 py-1.5 hover:bg-muted'
|
||||
>
|
||||
<Rss className='h-4 w-4' />
|
||||
RSS Feed
|
||||
|
||||
@@ -117,7 +117,7 @@ export default function ChatClient({ identifier }: { identifier: string }) {
const [error, setError] = useState<string | null>(null)
const messagesEndRef = useRef<HTMLDivElement>(null)
const messagesContainerRef = useRef<HTMLDivElement>(null)
const [starCount, setStarCount] = useState('25.8k')
const [starCount, setStarCount] = useState('25.1k')
const [conversationId, setConversationId] = useState('')

const [showScrollButton, setShowScrollButton] = useState(false)
|
||||
|
||||
@@ -207,6 +207,7 @@ function TemplateCardInner({
|
||||
isPannable={false}
|
||||
defaultZoom={0.8}
|
||||
fitPadding={0.2}
|
||||
lightweight
|
||||
/>
|
||||
) : (
|
||||
<div className='h-full w-full bg-[var(--surface-4)]' />
|
||||
|
||||
@@ -61,7 +61,6 @@ export function EditChunkModal({
|
||||
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
|
||||
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
|
||||
const [tokenizerOn, setTokenizerOn] = useState(false)
|
||||
const [hoveredTokenIndex, setHoveredTokenIndex] = useState<number | null>(null)
|
||||
const textareaRef = useRef<HTMLTextAreaElement>(null)
|
||||
|
||||
const error = mutationError?.message ?? null
|
||||
@@ -255,8 +254,6 @@ export function EditChunkModal({
|
||||
style={{
|
||||
backgroundColor: getTokenBgColor(index),
|
||||
}}
|
||||
onMouseEnter={() => setHoveredTokenIndex(index)}
|
||||
onMouseLeave={() => setHoveredTokenIndex(null)}
|
||||
>
|
||||
{token}
|
||||
</span>
|
||||
@@ -284,11 +281,6 @@ export function EditChunkModal({
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>Tokenizer</span>
|
||||
<Switch checked={tokenizerOn} onCheckedChange={setTokenizerOn} />
|
||||
{tokenizerOn && hoveredTokenIndex !== null && (
|
||||
<span className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
Token #{hoveredTokenIndex + 1}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>
|
||||
{tokenCount.toLocaleString()}
|
||||
|
||||
@@ -36,7 +36,6 @@ import {
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { SearchHighlight } from '@/components/ui/search-highlight'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
|
||||
import type { ChunkData } from '@/lib/knowledge/types'
|
||||
import {
|
||||
ChunkContextMenu,
|
||||
@@ -59,6 +58,55 @@ import {
|
||||
|
||||
const logger = createLogger('Document')
|
||||
|
||||
/**
|
||||
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
|
||||
*/
|
||||
function formatRelativeTime(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
|
||||
|
||||
if (diffInSeconds < 60) {
|
||||
return 'just now'
|
||||
}
|
||||
if (diffInSeconds < 3600) {
|
||||
const minutes = Math.floor(diffInSeconds / 60)
|
||||
return `${minutes}m ago`
|
||||
}
|
||||
if (diffInSeconds < 86400) {
|
||||
const hours = Math.floor(diffInSeconds / 3600)
|
||||
return `${hours}h ago`
|
||||
}
|
||||
if (diffInSeconds < 604800) {
|
||||
const days = Math.floor(diffInSeconds / 86400)
|
||||
return `${days}d ago`
|
||||
}
|
||||
if (diffInSeconds < 2592000) {
|
||||
const weeks = Math.floor(diffInSeconds / 604800)
|
||||
return `${weeks}w ago`
|
||||
}
|
||||
if (diffInSeconds < 31536000) {
|
||||
const months = Math.floor(diffInSeconds / 2592000)
|
||||
return `${months}mo ago`
|
||||
}
|
||||
const years = Math.floor(diffInSeconds / 31536000)
|
||||
return `${years}y ago`
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to absolute format for tooltip display
|
||||
*/
|
||||
function formatAbsoluteDate(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
return date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})
|
||||
}
|
||||
|
||||
interface DocumentProps {
|
||||
knowledgeBaseId: string
|
||||
documentId: string
|
||||
@@ -256,6 +304,7 @@ export function Document({
|
||||
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('')
|
||||
const [isSearching, setIsSearching] = useState(false)
|
||||
|
||||
const {
|
||||
chunks: initialChunks,
|
||||
@@ -295,6 +344,7 @@ export function Document({
|
||||
const handler = setTimeout(() => {
|
||||
startTransition(() => {
|
||||
setDebouncedSearchQuery(searchQuery)
|
||||
setIsSearching(searchQuery.trim().length > 0)
|
||||
})
|
||||
}, 200)
|
||||
|
||||
@@ -303,7 +353,6 @@ export function Document({
|
||||
}
|
||||
}, [searchQuery])
|
||||
|
||||
const isSearching = debouncedSearchQuery.trim().length > 0
|
||||
const showingSearch = isSearching && searchQuery.trim().length > 0 && searchResults.length > 0
|
||||
const SEARCH_PAGE_SIZE = 50
|
||||
const maxSearchPages = Math.ceil(searchResults.length / SEARCH_PAGE_SIZE)
|
||||
|
||||
@@ -27,10 +27,6 @@ import {
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverItem,
|
||||
PopoverTrigger,
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
@@ -44,11 +40,8 @@ import { Input } from '@/components/ui/input'
|
||||
import { SearchHighlight } from '@/components/ui/search-highlight'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
|
||||
import { ALL_TAG_SLOTS, type AllTagSlot, getFieldTypeForSlot } from '@/lib/knowledge/constants'
|
||||
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
|
||||
import type { DocumentData } from '@/lib/knowledge/types'
|
||||
import { formatFileSize } from '@/lib/uploads/utils/file-utils'
|
||||
import {
|
||||
ActionBar,
|
||||
AddDocumentsModal,
|
||||
@@ -196,8 +189,8 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<Skeleton className='mt-[4px] h-[21px] w-[300px] rounded-[4px]' />
|
||||
<div className='mt-[4px]'>
|
||||
<Skeleton className='h-[21px] w-[300px] rounded-[4px]' />
|
||||
</div>
|
||||
|
||||
<div className='mt-[16px] flex items-center gap-[8px]'>
|
||||
@@ -215,12 +208,9 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
|
||||
className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
/>
|
||||
</div>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Skeleton className='h-[32px] w-[52px] rounded-[6px]' />
|
||||
<Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'>
|
||||
Add Documents
|
||||
</Button>
|
||||
</div>
|
||||
<Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'>
|
||||
Add Documents
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className='mt-[12px] flex flex-1 flex-col overflow-hidden'>
|
||||
@@ -232,11 +222,73 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
|
||||
*/
|
||||
function formatRelativeTime(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
|
||||
|
||||
if (diffInSeconds < 60) {
|
||||
return 'just now'
|
||||
}
|
||||
if (diffInSeconds < 3600) {
|
||||
const minutes = Math.floor(diffInSeconds / 60)
|
||||
return `${minutes}m ago`
|
||||
}
|
||||
if (diffInSeconds < 86400) {
|
||||
const hours = Math.floor(diffInSeconds / 3600)
|
||||
return `${hours}h ago`
|
||||
}
|
||||
if (diffInSeconds < 604800) {
|
||||
const days = Math.floor(diffInSeconds / 86400)
|
||||
return `${days}d ago`
|
||||
}
|
||||
if (diffInSeconds < 2592000) {
|
||||
const weeks = Math.floor(diffInSeconds / 604800)
|
||||
return `${weeks}w ago`
|
||||
}
|
||||
if (diffInSeconds < 31536000) {
|
||||
const months = Math.floor(diffInSeconds / 2592000)
|
||||
return `${months}mo ago`
|
||||
}
|
||||
const years = Math.floor(diffInSeconds / 31536000)
|
||||
return `${years}y ago`
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to absolute format for tooltip display
|
||||
*/
|
||||
function formatAbsoluteDate(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
return date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})
|
||||
}
|
||||
|
||||
interface KnowledgeBaseProps {
|
||||
id: string
|
||||
knowledgeBaseName?: string
|
||||
}
|
||||
|
||||
function getFileIcon(mimeType: string, filename: string) {
|
||||
const IconComponent = getDocumentIcon(mimeType, filename)
|
||||
return <IconComponent className='h-6 w-5 flex-shrink-0' />
|
||||
}
|
||||
|
||||
function formatFileSize(bytes: number): string {
|
||||
if (bytes === 0) return '0 Bytes'
|
||||
const k = 1024
|
||||
const sizes = ['Bytes', 'KB', 'MB', 'GB']
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
||||
return `${Number.parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`
|
||||
}
|
||||
|
||||
const AnimatedLoader = ({ className }: { className?: string }) => (
|
||||
<Loader2 className={cn(className, 'animate-spin')} />
|
||||
)
|
||||
@@ -284,24 +336,53 @@ const getStatusBadge = (doc: DocumentData) => {
|
||||
}
|
||||
}
|
||||
|
||||
const TAG_SLOTS = [
|
||||
'tag1',
|
||||
'tag2',
|
||||
'tag3',
|
||||
'tag4',
|
||||
'tag5',
|
||||
'tag6',
|
||||
'tag7',
|
||||
'number1',
|
||||
'number2',
|
||||
'number3',
|
||||
'number4',
|
||||
'number5',
|
||||
'date1',
|
||||
'date2',
|
||||
'boolean1',
|
||||
'boolean2',
|
||||
'boolean3',
|
||||
] as const
|
||||
|
||||
type TagSlot = (typeof TAG_SLOTS)[number]
|
||||
|
||||
interface TagValue {
|
||||
slot: AllTagSlot
|
||||
slot: TagSlot
|
||||
displayName: string
|
||||
value: string
|
||||
}
|
||||
|
||||
const TAG_FIELD_TYPES: Record<string, string> = {
|
||||
tag: 'text',
|
||||
number: 'number',
|
||||
date: 'date',
|
||||
boolean: 'boolean',
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes tag values for a document
|
||||
*/
|
||||
function getDocumentTags(doc: DocumentData, definitions: TagDefinition[]): TagValue[] {
|
||||
const result: TagValue[] = []
|
||||
|
||||
for (const slot of ALL_TAG_SLOTS) {
|
||||
for (const slot of TAG_SLOTS) {
|
||||
const raw = doc[slot]
|
||||
if (raw == null) continue
|
||||
|
||||
const def = definitions.find((d) => d.tagSlot === slot)
|
||||
const fieldType = def?.fieldType || getFieldTypeForSlot(slot) || 'text'
|
||||
const fieldType = def?.fieldType || TAG_FIELD_TYPES[slot.replace(/\d+$/, '')] || 'text'
|
||||
|
||||
let value: string
|
||||
if (fieldType === 'date') {
|
||||
@@ -343,8 +424,6 @@ export function KnowledgeBase({
|
||||
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const [showTagsModal, setShowTagsModal] = useState(false)
|
||||
const [enabledFilter, setEnabledFilter] = useState<'all' | 'enabled' | 'disabled'>('all')
|
||||
const [isFilterPopoverOpen, setIsFilterPopoverOpen] = useState(false)
|
||||
|
||||
/**
|
||||
* Memoize the search query setter to prevent unnecessary re-renders
|
||||
@@ -355,7 +434,6 @@ export function KnowledgeBase({
|
||||
}, [])
|
||||
|
||||
const [selectedDocuments, setSelectedDocuments] = useState<Set<string>>(new Set())
|
||||
const [isSelectAllMode, setIsSelectAllMode] = useState(false)
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false)
|
||||
const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false)
|
||||
@@ -382,6 +460,7 @@ export function KnowledgeBase({
|
||||
error: knowledgeBaseError,
|
||||
refresh: refreshKnowledgeBase,
|
||||
} = useKnowledgeBase(id)
|
||||
const [hasProcessingDocuments, setHasProcessingDocuments] = useState(false)
|
||||
|
||||
const {
|
||||
documents,
|
||||
@@ -390,7 +469,6 @@ export function KnowledgeBase({
|
||||
isFetching: isFetchingDocuments,
|
||||
isPlaceholderData: isPlaceholderDocuments,
|
||||
error: documentsError,
|
||||
hasProcessingDocuments,
|
||||
updateDocument,
|
||||
refreshDocuments,
|
||||
} = useKnowledgeBaseDocuments(id, {
|
||||
@@ -399,14 +477,7 @@ export function KnowledgeBase({
|
||||
offset: (currentPage - 1) * DOCUMENTS_PER_PAGE,
|
||||
sortBy,
|
||||
sortOrder,
|
||||
refetchInterval: (data) => {
|
||||
if (isDeleting) return false
|
||||
const hasPending = data?.documents?.some(
|
||||
(doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
|
||||
)
|
||||
return hasPending ? 3000 : false
|
||||
},
|
||||
enabledFilter,
|
||||
refetchInterval: hasProcessingDocuments && !isDeleting ? 3000 : false,
|
||||
})
|
||||
|
||||
const { tagDefinitions } = useKnowledgeBaseTagDefinitions(id)
|
||||
@@ -472,52 +543,52 @@ export function KnowledgeBase({
|
||||
</TableHead>
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
const processing = documents.some(
|
||||
(doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
|
||||
)
|
||||
setHasProcessingDocuments(processing)
|
||||
|
||||
if (processing) {
|
||||
checkForDeadProcesses()
|
||||
}
|
||||
}, [documents])
|
||||
|
||||
/**
|
||||
* Checks for documents with stale processing states and marks them as failed
|
||||
*/
|
||||
const checkForDeadProcesses = useCallback(
|
||||
(docsToCheck: DocumentData[]) => {
|
||||
const now = new Date()
|
||||
const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes
|
||||
const checkForDeadProcesses = () => {
|
||||
const now = new Date()
|
||||
const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes
|
||||
|
||||
const staleDocuments = docsToCheck.filter((doc) => {
|
||||
if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) {
|
||||
return false
|
||||
}
|
||||
const staleDocuments = documents.filter((doc) => {
|
||||
if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) {
|
||||
return false
|
||||
}
|
||||
|
||||
const processingDuration = now.getTime() - new Date(doc.processingStartedAt).getTime()
|
||||
return processingDuration > DEAD_PROCESS_THRESHOLD_MS
|
||||
})
|
||||
const processingDuration = now.getTime() - new Date(doc.processingStartedAt).getTime()
|
||||
return processingDuration > DEAD_PROCESS_THRESHOLD_MS
|
||||
})
|
||||
|
||||
if (staleDocuments.length === 0) return
|
||||
if (staleDocuments.length === 0) return
|
||||
|
||||
logger.warn(`Found ${staleDocuments.length} documents with dead processes`)
|
||||
logger.warn(`Found ${staleDocuments.length} documents with dead processes`)
|
||||
|
||||
staleDocuments.forEach((doc) => {
|
||||
updateDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
documentId: doc.id,
|
||||
updates: { markFailedDueToTimeout: true },
|
||||
staleDocuments.forEach((doc) => {
|
||||
updateDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
documentId: doc.id,
|
||||
updates: { markFailedDueToTimeout: true },
|
||||
},
|
||||
{
|
||||
onSuccess: () => {
|
||||
logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
|
||||
},
|
||||
{
|
||||
onSuccess: () => {
|
||||
logger.info(
|
||||
`Successfully marked dead process as failed for document: ${doc.filename}`
|
||||
)
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
[id, updateDocumentMutation]
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
if (hasProcessingDocuments) {
|
||||
checkForDeadProcesses(documents)
|
||||
}
|
||||
}, [hasProcessingDocuments, documents, checkForDeadProcesses])
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
const handleToggleEnabled = (docId: string) => {
|
||||
const document = documents.find((doc) => doc.id === docId)
|
||||
@@ -677,7 +748,6 @@ export function KnowledgeBase({
|
||||
setSelectedDocuments(new Set(documents.map((doc) => doc.id)))
|
||||
} else {
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -723,26 +793,6 @@ export function KnowledgeBase({
|
||||
* Handles bulk enabling of selected documents
|
||||
*/
|
||||
const handleBulkEnable = () => {
|
||||
if (isSelectAllMode) {
|
||||
bulkDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
operation: 'enable',
|
||||
selectAll: true,
|
||||
enabledFilter,
|
||||
},
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
logger.info(`Successfully enabled ${result.successCount} documents`)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
refreshDocuments()
|
||||
},
|
||||
}
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const documentsToEnable = documents.filter(
|
||||
(doc) => selectedDocuments.has(doc.id) && !doc.enabled
|
||||
)
|
||||
@@ -771,26 +821,6 @@ export function KnowledgeBase({
|
||||
* Handles bulk disabling of selected documents
|
||||
*/
|
||||
const handleBulkDisable = () => {
|
||||
if (isSelectAllMode) {
|
||||
bulkDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
operation: 'disable',
|
||||
selectAll: true,
|
||||
enabledFilter,
|
||||
},
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
logger.info(`Successfully disabled ${result.successCount} documents`)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
refreshDocuments()
|
||||
},
|
||||
}
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const documentsToDisable = documents.filter(
|
||||
(doc) => selectedDocuments.has(doc.id) && doc.enabled
|
||||
)
|
||||
@@ -815,35 +845,18 @@ export function KnowledgeBase({
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the bulk delete confirmation modal
|
||||
*/
|
||||
const handleBulkDelete = () => {
|
||||
if (selectedDocuments.size === 0) return
|
||||
setShowBulkDeleteModal(true)
|
||||
}
|
||||
|
||||
/**
|
||||
* Confirms and executes the bulk deletion of selected documents
|
||||
*/
|
||||
const confirmBulkDelete = () => {
|
||||
if (isSelectAllMode) {
|
||||
bulkDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
operation: 'delete',
|
||||
selectAll: true,
|
||||
enabledFilter,
|
||||
},
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
logger.info(`Successfully deleted ${result.successCount} documents`)
|
||||
refreshDocuments()
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
},
|
||||
onSettled: () => {
|
||||
setShowBulkDeleteModal(false)
|
||||
},
|
||||
}
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id))
|
||||
|
||||
if (documentsToDelete.length === 0) return
|
||||
@@ -868,17 +881,14 @@ export function KnowledgeBase({
|
||||
}
|
||||
|
||||
const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id))
|
||||
const enabledCount = isSelectAllMode
|
||||
? enabledFilter === 'disabled'
|
||||
? 0
|
||||
: pagination.total
|
||||
: selectedDocumentsList.filter((doc) => doc.enabled).length
|
||||
const disabledCount = isSelectAllMode
|
||||
? enabledFilter === 'enabled'
|
||||
? 0
|
||||
: pagination.total
|
||||
: selectedDocumentsList.filter((doc) => !doc.enabled).length
|
||||
const enabledCount = selectedDocumentsList.filter((doc) => doc.enabled).length
|
||||
const disabledCount = selectedDocumentsList.filter((doc) => !doc.enabled).length
|
||||
|
||||
/**
|
||||
* Handle right-click on a document row
|
||||
* If right-clicking on an unselected document, select only that document
|
||||
* If right-clicking on a selected document with multiple selections, keep all selections
|
||||
*/
|
||||
const handleDocumentContextMenu = useCallback(
|
||||
(e: React.MouseEvent, doc: DocumentData) => {
|
||||
const isCurrentlySelected = selectedDocuments.has(doc.id)
|
||||
@@ -995,13 +1005,11 @@ export function KnowledgeBase({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
{knowledgeBase?.description && (
|
||||
<p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'>
|
||||
{knowledgeBase.description}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
{knowledgeBase?.description && (
|
||||
<p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'>
|
||||
{knowledgeBase.description}
|
||||
</p>
|
||||
)}
|
||||
|
||||
<div className='mt-[16px] flex items-center gap-[8px]'>
|
||||
<span className='text-[14px] text-[var(--text-muted)]'>
|
||||
@@ -1044,76 +1052,21 @@ export function KnowledgeBase({
|
||||
))}
|
||||
</div>
|
||||
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Popover open={isFilterPopoverOpen} onOpenChange={setIsFilterPopoverOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
<Button variant='default' className='h-[32px] rounded-[6px]'>
|
||||
{enabledFilter === 'all'
|
||||
? 'All'
|
||||
: enabledFilter === 'enabled'
|
||||
? 'Enabled'
|
||||
: 'Disabled'}
|
||||
<ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent align='end' side='bottom' sideOffset={4}>
|
||||
<div className='flex flex-col gap-[2px]'>
|
||||
<PopoverItem
|
||||
active={enabledFilter === 'all'}
|
||||
onClick={() => {
|
||||
setEnabledFilter('all')
|
||||
setIsFilterPopoverOpen(false)
|
||||
setCurrentPage(1)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
}}
|
||||
>
|
||||
All
|
||||
</PopoverItem>
|
||||
<PopoverItem
|
||||
active={enabledFilter === 'enabled'}
|
||||
onClick={() => {
|
||||
setEnabledFilter('enabled')
|
||||
setIsFilterPopoverOpen(false)
|
||||
setCurrentPage(1)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
}}
|
||||
>
|
||||
Enabled
|
||||
</PopoverItem>
|
||||
<PopoverItem
|
||||
active={enabledFilter === 'disabled'}
|
||||
onClick={() => {
|
||||
setEnabledFilter('disabled')
|
||||
setIsFilterPopoverOpen(false)
|
||||
setCurrentPage(1)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
}}
|
||||
>
|
||||
Disabled
|
||||
</PopoverItem>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
onClick={handleAddDocuments}
|
||||
disabled={userPermissions.canEdit !== true}
|
||||
variant='tertiary'
|
||||
className='h-[32px] rounded-[6px]'
|
||||
>
|
||||
Add Documents
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
{userPermissions.canEdit !== true && (
|
||||
<Tooltip.Content>Write permission required to add documents</Tooltip.Content>
|
||||
)}
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
onClick={handleAddDocuments}
|
||||
disabled={userPermissions.canEdit !== true}
|
||||
variant='tertiary'
|
||||
className='h-[32px] rounded-[6px]'
|
||||
>
|
||||
Add Documents
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
{userPermissions.canEdit !== true && (
|
||||
<Tooltip.Content>Write permission required to add documents</Tooltip.Content>
|
||||
)}
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
|
||||
{error && !isLoadingKnowledgeBase && (
|
||||
@@ -1136,20 +1089,14 @@ export function KnowledgeBase({
|
||||
<div className='mt-[10px] flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
|
||||
<div className='text-center'>
|
||||
<p className='font-medium text-[var(--text-secondary)] text-sm'>
|
||||
{searchQuery
|
||||
? 'No documents found'
|
||||
: enabledFilter !== 'all'
|
||||
? 'Nothing matches your filter'
|
||||
: 'No documents yet'}
|
||||
{searchQuery ? 'No documents found' : 'No documents yet'}
|
||||
</p>
|
||||
<p className='mt-1 text-[var(--text-muted)] text-xs'>
|
||||
{searchQuery
|
||||
? 'Try a different search term'
|
||||
: enabledFilter !== 'all'
|
||||
? 'Try changing the filter'
|
||||
: userPermissions.canEdit === true
|
||||
? 'Add documents to get started'
|
||||
: 'Documents will appear here once added'}
|
||||
: userPermissions.canEdit === true
|
||||
? 'Add documents to get started'
|
||||
: 'Documents will appear here once added'}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -1173,7 +1120,7 @@ export function KnowledgeBase({
|
||||
{renderSortableHeader('tokenCount', 'Tokens', 'hidden w-[8%] lg:table-cell')}
|
||||
{renderSortableHeader('chunkCount', 'Chunks', 'w-[8%]')}
|
||||
{renderSortableHeader('uploadedAt', 'Uploaded', 'w-[11%]')}
|
||||
{renderSortableHeader('enabled', 'Status', 'w-[10%]')}
|
||||
{renderSortableHeader('processingStatus', 'Status', 'w-[10%]')}
|
||||
<TableHead className='w-[12%] px-[12px] py-[8px] text-[12px] text-[var(--text-secondary)]'>
|
||||
Tags
|
||||
</TableHead>
|
||||
@@ -1217,10 +1164,7 @@ export function KnowledgeBase({
|
||||
</TableCell>
|
||||
<TableCell className='w-[180px] max-w-[180px] px-[12px] py-[8px]'>
|
||||
<div className='flex min-w-0 items-center gap-[8px]'>
|
||||
{(() => {
|
||||
const IconComponent = getDocumentIcon(doc.mimeType, doc.filename)
|
||||
return <IconComponent className='h-6 w-5 flex-shrink-0' />
|
||||
})()}
|
||||
{getFileIcon(doc.mimeType, doc.filename)}
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<span
|
||||
@@ -1564,14 +1508,6 @@ export function KnowledgeBase({
|
||||
enabledCount={enabledCount}
|
||||
disabledCount={disabledCount}
|
||||
isLoading={isBulkOperating}
|
||||
totalCount={pagination.total}
|
||||
isAllPageSelected={isAllSelected}
|
||||
isAllSelected={isSelectAllMode}
|
||||
onSelectAll={() => setIsSelectAllMode(true)}
|
||||
onClearSelectAll={() => {
|
||||
setIsSelectAllMode(false)
|
||||
setSelectedDocuments(new Set())
|
||||
}}
|
||||
/>
|
||||
|
||||
<DocumentContextMenu
|
||||
|
||||
@@ -13,11 +13,6 @@ interface ActionBarProps {
|
||||
disabledCount?: number
|
||||
isLoading?: boolean
|
||||
className?: string
|
||||
totalCount?: number
|
||||
isAllPageSelected?: boolean
|
||||
isAllSelected?: boolean
|
||||
onSelectAll?: () => void
|
||||
onClearSelectAll?: () => void
|
||||
}
|
||||
|
||||
export function ActionBar({
|
||||
@@ -29,21 +24,14 @@ export function ActionBar({
|
||||
disabledCount = 0,
|
||||
isLoading = false,
|
||||
className,
|
||||
totalCount = 0,
|
||||
isAllPageSelected = false,
|
||||
isAllSelected = false,
|
||||
onSelectAll,
|
||||
onClearSelectAll,
|
||||
}: ActionBarProps) {
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
if (selectedCount === 0 && !isAllSelected) return null
|
||||
if (selectedCount === 0) return null
|
||||
|
||||
const canEdit = userPermissions.canEdit
|
||||
const showEnableButton = disabledCount > 0 && onEnable && canEdit
|
||||
const showDisableButton = enabledCount > 0 && onDisable && canEdit
|
||||
const showSelectAllOption =
|
||||
isAllPageSelected && !isAllSelected && totalCount > selectedCount && onSelectAll
|
||||
|
||||
return (
|
||||
<motion.div
|
||||
@@ -55,31 +43,7 @@ export function ActionBar({
|
||||
>
|
||||
<div className='flex items-center gap-[8px] rounded-[10px] border border-[var(--border)] bg-[var(--surface-2)] px-[8px] py-[6px]'>
|
||||
<span className='px-[4px] text-[13px] text-[var(--text-secondary)]'>
|
||||
{isAllSelected ? totalCount : selectedCount} selected
|
||||
{showSelectAllOption && (
|
||||
<>
|
||||
{' · '}
|
||||
<button
|
||||
type='button'
|
||||
onClick={onSelectAll}
|
||||
className='text-[var(--brand-primary)] hover:underline'
|
||||
>
|
||||
Select all
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
{isAllSelected && onClearSelectAll && (
|
||||
<>
|
||||
{' · '}
|
||||
<button
|
||||
type='button'
|
||||
onClick={onClearSelectAll}
|
||||
className='text-[var(--brand-primary)] hover:underline'
|
||||
>
|
||||
Clear
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
{selectedCount} selected
|
||||
</span>
|
||||
|
||||
<div className='flex items-center gap-[5px]'>
|
||||
|
||||
@@ -123,11 +123,7 @@ export function RenameDocumentModal({
>
Cancel
</Button>
<Button
variant='tertiary'
type='submit'
disabled={isSubmitting || !name?.trim() || name.trim() === initialName}
>
<Button variant='tertiary' type='submit' disabled={isSubmitting || !name?.trim()}>
{isSubmitting ? 'Renaming...' : 'Rename'}
</Button>
</div>

@@ -3,7 +3,6 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { Badge, DocumentAttachment, Tooltip } from '@/components/emcn'
|
||||
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
|
||||
import { BaseTagsModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
@@ -22,6 +21,55 @@ interface BaseCardProps {
|
||||
onDelete?: (id: string) => Promise<void>
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
|
||||
*/
|
||||
function formatRelativeTime(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
|
||||
|
||||
if (diffInSeconds < 60) {
|
||||
return 'just now'
|
||||
}
|
||||
if (diffInSeconds < 3600) {
|
||||
const minutes = Math.floor(diffInSeconds / 60)
|
||||
return `${minutes}m ago`
|
||||
}
|
||||
if (diffInSeconds < 86400) {
|
||||
const hours = Math.floor(diffInSeconds / 3600)
|
||||
return `${hours}h ago`
|
||||
}
|
||||
if (diffInSeconds < 604800) {
|
||||
const days = Math.floor(diffInSeconds / 86400)
|
||||
return `${days}d ago`
|
||||
}
|
||||
if (diffInSeconds < 2592000) {
|
||||
const weeks = Math.floor(diffInSeconds / 604800)
|
||||
return `${weeks}w ago`
|
||||
}
|
||||
if (diffInSeconds < 31536000) {
|
||||
const months = Math.floor(diffInSeconds / 2592000)
|
||||
return `${months}mo ago`
|
||||
}
|
||||
const years = Math.floor(diffInSeconds / 31536000)
|
||||
return `${years}y ago`
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to absolute format for tooltip display
|
||||
*/
|
||||
function formatAbsoluteDate(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
return date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Skeleton placeholder for a knowledge base card
|
||||
*/
|
||||
|
||||
@@ -344,51 +344,53 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
|
||||
<Textarea
|
||||
id='description'
|
||||
placeholder='Describe this knowledge base (optional)'
|
||||
rows={4}
|
||||
rows={3}
|
||||
{...register('description')}
|
||||
className={cn(errors.description && 'border-[var(--text-error)]')}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='grid grid-cols-2 gap-[12px]'>
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label>
|
||||
<Input
|
||||
id='minChunkSize'
|
||||
placeholder='100'
|
||||
{...register('minChunkSize', { valueAsNumber: true })}
|
||||
className={cn(errors.minChunkSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='min-chunk-size'
|
||||
/>
|
||||
<div className='space-y-[12px] rounded-[6px] bg-[var(--surface-5)] px-[12px] py-[14px]'>
|
||||
<div className='grid grid-cols-2 gap-[12px]'>
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label>
|
||||
<Input
|
||||
id='minChunkSize'
|
||||
placeholder='100'
|
||||
{...register('minChunkSize', { valueAsNumber: true })}
|
||||
className={cn(errors.minChunkSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='min-chunk-size'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='maxChunkSize'>Max Chunk Size (tokens)</Label>
|
||||
<Input
|
||||
id='maxChunkSize'
|
||||
placeholder='1024'
|
||||
{...register('maxChunkSize', { valueAsNumber: true })}
|
||||
className={cn(errors.maxChunkSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='max-chunk-size'
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='maxChunkSize'>Max Chunk Size (tokens)</Label>
|
||||
<Label htmlFor='overlapSize'>Overlap (tokens)</Label>
|
||||
<Input
|
||||
id='maxChunkSize'
|
||||
placeholder='1024'
|
||||
{...register('maxChunkSize', { valueAsNumber: true })}
|
||||
className={cn(errors.maxChunkSize && 'border-[var(--text-error)]')}
|
||||
id='overlapSize'
|
||||
placeholder='200'
|
||||
{...register('overlapSize', { valueAsNumber: true })}
|
||||
className={cn(errors.overlapSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='max-chunk-size'
|
||||
name='overlap-size'
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='overlapSize'>Overlap (tokens)</Label>
|
||||
<Input
|
||||
id='overlapSize'
|
||||
placeholder='200'
|
||||
{...register('overlapSize', { valueAsNumber: true })}
|
||||
className={cn(errors.overlapSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='overlap-size'
|
||||
/>
|
||||
<p className='text-[11px] text-[var(--text-muted)]'>
|
||||
1 token ≈ 4 characters. Max chunk size and overlap are in tokens.
|
||||
</p>
|
||||
|
||||
@@ -59,7 +59,7 @@ export function EditKnowledgeBaseModal({
|
||||
handleSubmit,
|
||||
reset,
|
||||
watch,
|
||||
formState: { errors, isDirty },
|
||||
formState: { errors },
|
||||
} = useForm<FormValues>({
|
||||
resolver: zodResolver(FormSchema),
|
||||
defaultValues: {
|
||||
@@ -127,7 +127,7 @@ export function EditKnowledgeBaseModal({
|
||||
<Textarea
|
||||
id='description'
|
||||
placeholder='Describe this knowledge base (optional)'
|
||||
rows={4}
|
||||
rows={3}
|
||||
{...register('description')}
|
||||
className={cn(errors.description && 'border-[var(--text-error)]')}
|
||||
/>
|
||||
@@ -161,7 +161,7 @@ export function EditKnowledgeBaseModal({
|
||||
<Button
|
||||
variant='tertiary'
|
||||
type='submit'
|
||||
disabled={isSubmitting || !nameValue?.trim() || !isDirty}
|
||||
disabled={isSubmitting || !nameValue?.trim()}
|
||||
>
|
||||
{isSubmitting ? 'Saving...' : 'Save'}
|
||||
</Button>
|
||||
|
||||
@@ -16,8 +16,8 @@ import {
|
||||
import { redactApiKeys } from '@/lib/core/security/redaction'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import {
|
||||
BlockDetailsSidebar,
|
||||
getLeftmostBlockId,
|
||||
PreviewEditor,
|
||||
WorkflowPreview,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/preview'
|
||||
import { useExecutionSnapshot } from '@/hooks/queries/logs'
|
||||
@@ -248,10 +248,11 @@ export function ExecutionSnapshot({
|
||||
cursorStyle='pointer'
|
||||
executedBlocks={blockExecutions}
|
||||
selectedBlockId={pinnedBlockId}
|
||||
lightweight
|
||||
/>
|
||||
</div>
|
||||
{pinnedBlockId && workflowState.blocks[pinnedBlockId] && (
|
||||
<PreviewEditor
|
||||
<BlockDetailsSidebar
|
||||
block={workflowState.blocks[pinnedBlockId]}
|
||||
executionData={blockExecutions[pinnedBlockId]}
|
||||
allBlockExecutions={blockExecutions}
|
||||
|
||||
@@ -18,7 +18,6 @@ import {
|
||||
} from '@/components/emcn'
|
||||
import { WorkflowIcon } from '@/components/icons'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||
import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config'
|
||||
import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
|
||||
import { getBlock, getBlockByToolName } from '@/blocks'
|
||||
@@ -143,6 +142,14 @@ function normalizeAndSortSpans(spans: TraceSpan[]): TraceSpan[] {
|
||||
|
||||
const DEFAULT_BLOCK_COLOR = '#6b7280'
|
||||
|
||||
/**
|
||||
* Formats duration in ms
|
||||
*/
|
||||
function formatDuration(ms: number): string {
|
||||
if (ms < 1000) return `${ms}ms`
|
||||
return `${(ms / 1000).toFixed(2)}s`
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets icon and color for a span type using block config
|
||||
*/
|
||||
@@ -307,7 +314,7 @@ function ExpandableRowHeader({
|
||||
</span>
|
||||
</div>
|
||||
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
|
||||
{formatDuration(duration, { precision: 2 })}
|
||||
{formatDuration(duration)}
|
||||
</span>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -213,6 +213,7 @@ function TemplateCardInner({
|
||||
isPannable={false}
|
||||
defaultZoom={0.8}
|
||||
fitPadding={0.2}
|
||||
lightweight
|
||||
cursorStyle='pointer'
|
||||
/>
|
||||
) : (
|
||||
|
||||
@@ -2,12 +2,12 @@ import { memo, useCallback } from 'react'
|
||||
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
|
||||
import { Button, Copy, Tooltip, Trash2 } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
||||
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { validateTriggerPaste } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useNotificationStore } from '@/stores/notifications'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { getUniqueBlockName, prepareDuplicateBlockState } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
const DEFAULT_DUPLICATE_OFFSET = { x: 50, y: 50 }
|
||||
@@ -48,38 +48,29 @@ export const ActionBar = memo(
collaborativeBatchToggleBlockEnabled,
collaborativeBatchToggleBlockHandles,
} = useCollaborativeWorkflow()
const { setPendingSelection } = useWorkflowRegistry()

const addNotification = useNotificationStore((s) => s.addNotification)
const { activeWorkflowId, setPendingSelection } = useWorkflowRegistry()

const handleDuplicateBlock = useCallback(() => {
const { copyBlocks, preparePasteData, activeWorkflowId } = useWorkflowRegistry.getState()
const existingBlocks = useWorkflowStore.getState().blocks
copyBlocks([blockId])
const blocks = useWorkflowStore.getState().blocks
const sourceBlock = blocks[blockId]
if (!sourceBlock) return

const pasteData = preparePasteData(DEFAULT_DUPLICATE_OFFSET)
if (!pasteData) return
const newId = crypto.randomUUID()
const newName = getUniqueBlockName(sourceBlock.name, blocks)
const subBlockValues =
useSubBlockStore.getState().workflowValues[activeWorkflowId || '']?.[blockId] || {}

const blocks = Object.values(pasteData.blocks)
const validation = validateTriggerPaste(blocks, existingBlocks, 'duplicate')
if (!validation.isValid) {
addNotification({
level: 'error',
message: validation.message!,
workflowId: activeWorkflowId || undefined,
})
return
}
const { block, subBlockValues: filteredValues } = prepareDuplicateBlockState({
sourceBlock,
newId,
newName,
positionOffset: DEFAULT_DUPLICATE_OFFSET,
subBlockValues,
})

setPendingSelection(blocks.map((b) => b.id))
collaborativeBatchAddBlocks(
blocks,
pasteData.edges,
pasteData.loops,
pasteData.parallels,
pasteData.subBlockValues
)
}, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection])
setPendingSelection([newId])
collaborativeBatchAddBlocks([block], [], {}, {}, { [newId]: filteredValues })
}, [blockId, activeWorkflowId, collaborativeBatchAddBlocks, setPendingSelection])

const { isEnabled, horizontalHandles, parentId, parentType } = useWorkflowStore(
useCallback(
@@ -99,7 +90,7 @@ export const ActionBar = memo(

const userPermissions = useUserPermissionsContext()

const isStartBlock = isInputDefinitionTrigger(blockType)
const isStartBlock = isValidStartBlockType(blockType)
const isResponseBlock = blockType === 'response'
const isNoteBlock = blockType === 'note'
const isSubflowBlock = blockType === 'loop' || blockType === 'parallel'
@@ -151,29 +142,6 @@ export const ActionBar = memo(
</Tooltip.Root>
)}

{isSubflowBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeBatchToggleBlockEnabled([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled}
>
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
</Tooltip.Content>
</Tooltip.Root>
)}

{!isStartBlock && !isResponseBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
@@ -245,6 +213,29 @@ export const ActionBar = memo(
</Tooltip.Root>
)}

{isSubflowBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeBatchToggleBlockEnabled([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled}
>
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
</Tooltip.Content>
</Tooltip.Root>
)}

<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
@@ -8,7 +8,7 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'

/**
* Block information for context menu actions
@@ -74,16 +74,12 @@ export function BlockMenu({
const allEnabled = selectedBlocks.every((b) => b.enabled)
const allDisabled = selectedBlocks.every((b) => !b.enabled)

const hasSingletonBlock = selectedBlocks.some(
(b) =>
TriggerUtils.requiresSingleInstance(b.type) || TriggerUtils.isSingleInstanceBlockType(b.type)
)
const hasTriggerBlock = selectedBlocks.some((b) => TriggerUtils.isTriggerBlock(b))
const hasStarterBlock = selectedBlocks.some((b) => isValidStartBlockType(b.type))
const allNoteBlocks = selectedBlocks.every((b) => b.type === 'note')
const isSubflow =
isSingleBlock && (selectedBlocks[0]?.type === 'loop' || selectedBlocks[0]?.type === 'parallel')

const canRemoveFromSubflow = showRemoveFromSubflow && !hasTriggerBlock
const canRemoveFromSubflow = showRemoveFromSubflow && !hasStarterBlock

const getToggleEnabledLabel = () => {
if (allEnabled) return 'Disable'
@@ -131,7 +127,7 @@ export function BlockMenu({
<span>Paste</span>
<span className='ml-auto opacity-70 group-hover:opacity-100'>⌘V</span>
</PopoverItem>
{!hasSingletonBlock && (
{!hasStarterBlock && (
<PopoverItem
disabled={disableEdit}
onClick={() => {
@@ -138,24 +138,18 @@ export const Notifications = memo(function Notifications() {
}`}
>
<div className='flex h-full flex-col justify-between px-[8px] pt-[6px] pb-[8px]'>
<div className='flex items-start gap-[8px]'>
<div
className={`min-w-0 flex-1 font-medium text-[12px] leading-[16px] ${
hasAction ? 'line-clamp-2' : 'line-clamp-4'
}`}
>
{notification.level === 'error' && (
<span className='mr-[6px] mb-[2.75px] inline-block h-[6px] w-[6px] rounded-[2px] bg-[var(--text-error)] align-middle' />
)}
{notification.message}
</div>
<div
className={`font-medium text-[12px] leading-[16px] ${
hasAction ? 'line-clamp-2' : 'line-clamp-4'
}`}
>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={() => removeNotification(notification.id)}
aria-label='Dismiss notification'
className='!p-1.5 -m-1.5 shrink-0'
className='!p-1.5 -m-1.5 float-right ml-[16px]'
>
<X className='h-3 w-3' />
</Button>
@@ -164,6 +158,10 @@ export const Notifications = memo(function Notifications() {
<Tooltip.Shortcut keys='⌘E'>Clear all</Tooltip.Shortcut>
</Tooltip.Content>
</Tooltip.Root>
{notification.level === 'error' && (
<span className='mr-[6px] mb-[2.75px] inline-block h-[6px] w-[6px] rounded-[2px] bg-[var(--text-error)] align-middle' />
)}
{notification.message}
</div>
{hasAction && (
<Button
@@ -78,7 +78,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
mode,
setMode,
isAborting,
maskCredentialValue,
} = useCopilotStore()

const messageCheckpoints = isUser ? allMessageCheckpoints[message.id] || [] : []
@@ -211,10 +210,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
const isLastTextBlock =
index === message.contentBlocks!.length - 1 && block.type === 'text'
const parsed = parseSpecialTags(block.content)
// Mask credential IDs in the displayed content
const cleanBlockContent = maskCredentialValue(
parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
)
const cleanBlockContent = parsed.cleanContent.replace(/\n{3,}/g, '\n\n')

if (!cleanBlockContent.trim()) return null

@@ -242,7 +238,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
return (
<div key={blockKey} className='w-full'>
<ThinkingBlock
content={maskCredentialValue(block.content)}
content={block.content}
isStreaming={isActivelyStreaming}
hasFollowingContent={hasFollowingContent}
hasSpecialTags={hasSpecialTags}
@@ -265,7 +261,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
}
return null
})
}, [message.contentBlocks, isActivelyStreaming, parsedTags, isLastMessage, maskCredentialValue])
}, [message.contentBlocks, isActivelyStreaming, parsedTags, isLastMessage])

if (isUser) {
return (

@@ -782,7 +782,6 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
const [isExpanded, setIsExpanded] = useState(true)
const [duration, setDuration] = useState(0)
const startTimeRef = useRef<number>(Date.now())
const maskCredentialValue = useCopilotStore((s) => s.maskCredentialValue)
const wasStreamingRef = useRef(false)

// Only show streaming animations for current message
@@ -817,16 +816,14 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
currentText += parsed.cleanContent
} else if (block.type === 'subagent_tool_call' && block.toolCall) {
if (currentText.trim()) {
// Mask any credential IDs in the accumulated text before displaying
segments.push({ type: 'text', content: maskCredentialValue(currentText) })
segments.push({ type: 'text', content: currentText })
currentText = ''
}
segments.push({ type: 'tool', block })
}
}
if (currentText.trim()) {
// Mask any credential IDs in the accumulated text before displaying
segments.push({ type: 'text', content: maskCredentialValue(currentText) })
segments.push({ type: 'text', content: currentText })
}

const allParsed = parseSpecialTags(allRawText)
@@ -955,7 +952,6 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
toolCall: CopilotToolCall
}) {
const blocks = useWorkflowStore((s) => s.blocks)
const maskCredentialValue = useCopilotStore((s) => s.maskCredentialValue)

const cachedBlockInfoRef = useRef<Record<string, { name: string; type: string }>>({})

@@ -987,7 +983,6 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
title: string
value: any
isPassword?: boolean
isCredential?: boolean
}

interface BlockChange {
@@ -1096,7 +1091,6 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
title: subBlockConfig.title ?? subBlockConfig.id,
value,
isPassword: subBlockConfig.password === true,
isCredential: subBlockConfig.type === 'oauth-input',
})
}
}
@@ -1178,15 +1172,8 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
{subBlocksToShow && subBlocksToShow.length > 0 && (
<div className='border-[var(--border-1)] border-t px-2.5 py-1.5'>
{subBlocksToShow.map((sb) => {
// Mask password fields and credential IDs
let displayValue: string
if (sb.isPassword) {
displayValue = '•••'
} else {
// Get display value first, then mask any credential IDs that might be in it
const rawValue = getDisplayValue(sb.value)
displayValue = maskCredentialValue(rawValue)
}
// Mask password fields like the canvas does
const displayValue = sb.isPassword ? '•••' : getDisplayValue(sb.value)
return (
<div key={sb.id} className='flex items-start gap-1.5 py-0.5 text-[11px]'>
<span
@@ -1425,13 +1412,10 @@ function RunSkipButtons({
setIsProcessing(true)
setButtonsHidden(true)
try {
// Add to auto-allowed list - this also executes all pending integration tools of this type
// Add to auto-allowed list first
await addAutoAllowedTool(toolCall.name)
// For client tools with interrupts (not integration tools), we still need to call handleRun
// since executeIntegrationTool only works for server-side tools
if (!isIntegrationTool(toolCall.name)) {
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
}
// Then execute
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
} finally {
setIsProcessing(false)
actionInProgressRef.current = false
@@ -1454,10 +1438,10 @@ function RunSkipButtons({

if (buttonsHidden) return null

// Show "Always Allow" for all tools that require confirmation
const showAlwaysAllow = true
// Hide "Always Allow" for integration tools (only show for client tools with interrupts)
const showAlwaysAllow = !isIntegrationTool(toolCall.name)

// Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
// Standardized buttons for all interrupt tools: Allow, (Always Allow for client tools only), Skip
return (
<div className='mt-[10px] flex gap-[6px]'>
<Button onClick={onRun} disabled={isProcessing} variant='tertiary'>

@@ -105,10 +105,10 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
isSendingMessage,
])

/** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
/** Load auto-allowed tools once on mount */
const hasLoadedAutoAllowedToolsRef = useRef(false)
useEffect(() => {
if (!hasLoadedAutoAllowedToolsRef.current) {
if (hasMountedRef.current && !hasLoadedAutoAllowedToolsRef.current) {
hasLoadedAutoAllowedToolsRef.current = true
loadAutoAllowedTools().catch((err) => {
logger.warn('[Copilot] Failed to load auto-allowed tools', err)
@@ -17,7 +17,7 @@ import { Skeleton } from '@/components/ui'
import { isDev } from '@/lib/core/config/feature-flags'
import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl, getEmailDomain } from '@/lib/core/utils/urls'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import {
type FieldConfig,
useCreateForm,
@@ -147,7 +147,7 @@ export function FormDeploy({

useEffect(() => {
const blocks = Object.values(useWorkflowStore.getState().blocks)
const startBlock = blocks.find((b) => isInputDefinitionTrigger(b.type))
const startBlock = blocks.find((b) => isValidStartBlockType(b.type))

if (startBlock) {
const inputFormat = useSubBlockStore.getState().getValue(startBlock.id, 'inputFormat')

@@ -14,7 +14,7 @@ import {
Textarea,
} from '@/components/emcn'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import type { InputFormatField } from '@/lib/workflows/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
@@ -52,7 +52,7 @@ export function ApiInfoModal({ open, onOpenChange, workflowId }: ApiInfoModalPro
for (const [blockId, block] of Object.entries(blocks)) {
if (!block || typeof block !== 'object') continue
const blockType = (block as { type?: string }).type
if (blockType && isInputDefinitionTrigger(blockType)) {
if (blockType && isValidStartBlockType(blockType)) {
return blockId
}
}

@@ -18,8 +18,8 @@ import {
import { Skeleton } from '@/components/ui'
import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils'
import {
BlockDetailsSidebar,
getLeftmostBlockId,
PreviewEditor,
WorkflowPreview,
} from '@/app/workspace/[workspaceId]/w/components/preview'
import { useDeploymentVersionState, useRevertToVersion } from '@/hooks/queries/workflows'
@@ -337,7 +337,7 @@ export function GeneralDeploy({
/>
</div>
{expandedSelectedBlockId && workflowToShow.blocks?.[expandedSelectedBlockId] && (
<PreviewEditor
<BlockDetailsSidebar
block={workflowToShow.blocks[expandedSelectedBlockId]}
workflowVariables={workflowToShow.variables}
loops={workflowToShow.loops}

@@ -15,7 +15,7 @@ import {
import { Skeleton } from '@/components/ui'
import { generateToolInputSchema, sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import type { InputFormatField } from '@/lib/workflows/types'
import {
useAddWorkflowMcpTool,
@@ -107,7 +107,7 @@ export function McpDeploy({
for (const [blockId, block] of Object.entries(blocks)) {
if (!block || typeof block !== 'object') continue
const blockType = (block as { type?: string }).type
if (blockType && isInputDefinitionTrigger(blockType)) {
if (blockType && isValidStartBlockType(blockType)) {
return blockId
}
}

@@ -446,6 +446,7 @@ const OGCaptureContainer = forwardRef<HTMLDivElement>((_, ref) => {
isPannable={false}
defaultZoom={0.8}
fitPadding={0.2}
lightweight
/>
</div>
)
@@ -35,9 +35,9 @@ import { WandPromptBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/comp
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { useWand } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-wand'
import type { GenerationType } from '@/blocks/types'
import { normalizeName } from '@/executor/constants'
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
import { useTagSelection } from '@/hooks/kb/use-tag-selection'
import { normalizeName } from '@/stores/workflows/utils'

const logger = createLogger('Code')

@@ -1,7 +1,6 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { isEqual } from 'lodash'
import { useReactFlow } from 'reactflow'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
@@ -103,8 +102,7 @@ export const ComboBox = memo(function ComboBox({
[blockConfig?.subBlocks]
)
const canonicalModeOverrides = blockState?.data?.canonicalModes
const dependencyValues = useStoreWithEqualityFn(
useSubBlockStore,
const dependencyValues = useSubBlockStore(
useCallback(
(state) => {
if (dependsOnFields.length === 0 || !activeWorkflowId) return []

@@ -32,9 +32,9 @@ import {
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { normalizeName } from '@/executor/constants'
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
import { useTagSelection } from '@/hooks/kb/use-tag-selection'
import { normalizeName } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('ConditionInput')

@@ -1,6 +1,5 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { isEqual } from 'lodash'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Badge } from '@/components/emcn'
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
@@ -101,8 +100,7 @@ export const Dropdown = memo(function Dropdown({
[blockConfig?.subBlocks]
)
const canonicalModeOverrides = blockState?.data?.canonicalModes
const dependencyValues = useStoreWithEqualityFn(
useSubBlockStore,
const dependencyValues = useSubBlockStore(
useCallback(
(state) => {
if (dependsOnFields.length === 0 || !activeWorkflowId) return []

@@ -2,8 +2,9 @@

import type { ReactNode } from 'react'
import { splitReferenceSegment } from '@/lib/workflows/sanitization/references'
import { normalizeName, REFERENCE } from '@/executor/constants'
import { REFERENCE } from '@/executor/constants'
import { createCombinedPattern } from '@/executor/utils/reference-validation'
import { normalizeName } from '@/stores/workflows/utils'

export interface HighlightContext {
accessiblePrefixes?: Set<string>

@@ -34,4 +34,3 @@ export { Text } from './text/text'
export { TimeInput } from './time-input/time-input'
export { ToolInput } from './tool-input/tool-input'
export { VariablesInput } from './variables-input/variables-input'
export { WorkflowSelectorInput } from './workflow-selector/workflow-selector-input'

@@ -2,13 +2,12 @@ import { useMemo, useRef, useState } from 'react'
import { Badge, Input } from '@/components/emcn'
import { Label } from '@/components/ui/label'
import { cn } from '@/lib/core/utils/cn'
import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
import { TagDropdown } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown'
import { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { useWorkflowState } from '@/hooks/queries/workflows'
import { useWorkflowInputFields } from '@/hooks/queries/workflows'

/**
* Props for the InputMappingField component
@@ -71,11 +70,7 @@ export function InputMapping({
const overlayRefs = useRef<Map<string, HTMLDivElement>>(new Map())

const workflowId = typeof selectedWorkflowId === 'string' ? selectedWorkflowId : undefined
const { data: workflowState, isLoading } = useWorkflowState(workflowId)
const childInputFields = useMemo(
() => (workflowState?.blocks ? extractInputFieldsFromBlocks(workflowState.blocks) : []),
[workflowState?.blocks]
)
const { data: childInputFields = [], isLoading } = useWorkflowInputFields(workflowId)
const [collapsedFields, setCollapsedFields] = useState<Record<string, boolean>>({})

const valueObj: Record<string, string> = useMemo(() => {
@@ -1,12 +1,4 @@
import {
useCallback,
useEffect,
useImperativeHandle,
useLayoutEffect,
useMemo,
useRef,
useState,
} from 'react'
import { useCallback, useEffect, useImperativeHandle, useMemo, useRef, useState } from 'react'
import { isEqual } from 'lodash'
import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
import { Button, Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
@@ -390,137 +382,92 @@ export function MessagesInput({
textareaRefs.current[fieldId]?.focus()
}, [])

const syncOverlay = useCallback((fieldId: string) => {
const autoResizeTextarea = useCallback((fieldId: string) => {
const textarea = textareaRefs.current[fieldId]
if (!textarea) return
const overlay = overlayRefs.current[fieldId]
if (!textarea || !overlay) return

overlay.style.width = `${textarea.clientWidth}px`
overlay.scrollTop = textarea.scrollTop
overlay.scrollLeft = textarea.scrollLeft
// If user has manually resized, respect their chosen height and only sync overlay.
if (userResizedRef.current[fieldId]) {
const currentHeight =
textarea.offsetHeight || Number.parseFloat(textarea.style.height) || MIN_TEXTAREA_HEIGHT_PX
const clampedHeight = Math.max(MIN_TEXTAREA_HEIGHT_PX, currentHeight)
textarea.style.height = `${clampedHeight}px`
if (overlay) {
overlay.style.height = `${clampedHeight}px`
}
return
}

textarea.style.height = 'auto'
const naturalHeight = textarea.scrollHeight || MIN_TEXTAREA_HEIGHT_PX
const nextHeight = Math.min(
MAX_TEXTAREA_HEIGHT_PX,
Math.max(MIN_TEXTAREA_HEIGHT_PX, naturalHeight)
)
textarea.style.height = `${nextHeight}px`

if (overlay) {
overlay.style.height = `${nextHeight}px`
}
}, [])

const autoResizeTextarea = useCallback(
(fieldId: string) => {
const textarea = textareaRefs.current[fieldId]
const overlay = overlayRefs.current[fieldId]
if (!textarea) return
const handleResizeStart = useCallback((fieldId: string, e: React.MouseEvent<HTMLDivElement>) => {
e.preventDefault()
e.stopPropagation()

if (!textarea.value.trim()) {
userResizedRef.current[fieldId] = false
const textarea = textareaRefs.current[fieldId]
if (!textarea) return

const startHeight = textarea.offsetHeight || textarea.scrollHeight || MIN_TEXTAREA_HEIGHT_PX

isResizingRef.current = true
resizeStateRef.current = {
fieldId,
startY: e.clientY,
startHeight,
}

const handleMouseMove = (moveEvent: MouseEvent) => {
if (!isResizingRef.current || !resizeStateRef.current) return

const { fieldId: activeFieldId, startY, startHeight } = resizeStateRef.current
const deltaY = moveEvent.clientY - startY
const nextHeight = Math.max(MIN_TEXTAREA_HEIGHT_PX, startHeight + deltaY)

const activeTextarea = textareaRefs.current[activeFieldId]
if (activeTextarea) {
activeTextarea.style.height = `${nextHeight}px`
}

if (userResizedRef.current[fieldId]) {
if (overlay) {
overlay.style.height = `${textarea.offsetHeight}px`
}
syncOverlay(fieldId)
return
}

textarea.style.height = 'auto'
const scrollHeight = textarea.scrollHeight
const height = Math.min(
MAX_TEXTAREA_HEIGHT_PX,
Math.max(MIN_TEXTAREA_HEIGHT_PX, scrollHeight)
)

textarea.style.height = `${height}px`
const overlay = overlayRefs.current[activeFieldId]
if (overlay) {
overlay.style.height = `${height}px`
overlay.style.height = `${nextHeight}px`
}
}

const handleMouseUp = () => {
if (resizeStateRef.current) {
const { fieldId: activeFieldId } = resizeStateRef.current
userResizedRef.current[activeFieldId] = true
}

syncOverlay(fieldId)
},
[syncOverlay]
)
isResizingRef.current = false
resizeStateRef.current = null
document.removeEventListener('mousemove', handleMouseMove)
document.removeEventListener('mouseup', handleMouseUp)
}

const handleResizeStart = useCallback(
(fieldId: string, e: React.MouseEvent<HTMLDivElement>) => {
e.preventDefault()
e.stopPropagation()

const textarea = textareaRefs.current[fieldId]
if (!textarea) return

const startHeight = textarea.offsetHeight || textarea.scrollHeight || MIN_TEXTAREA_HEIGHT_PX

isResizingRef.current = true
resizeStateRef.current = {
fieldId,
startY: e.clientY,
startHeight,
}

const handleMouseMove = (moveEvent: MouseEvent) => {
if (!isResizingRef.current || !resizeStateRef.current) return

const { fieldId: activeFieldId, startY, startHeight } = resizeStateRef.current
const deltaY = moveEvent.clientY - startY
const nextHeight = Math.max(MIN_TEXTAREA_HEIGHT_PX, startHeight + deltaY)

const activeTextarea = textareaRefs.current[activeFieldId]
const overlay = overlayRefs.current[activeFieldId]

if (activeTextarea) {
activeTextarea.style.height = `${nextHeight}px`
}

if (overlay) {
overlay.style.height = `${nextHeight}px`
if (activeTextarea) {
overlay.scrollTop = activeTextarea.scrollTop
overlay.scrollLeft = activeTextarea.scrollLeft
}
}
}

const handleMouseUp = () => {
if (resizeStateRef.current) {
const { fieldId: activeFieldId } = resizeStateRef.current
userResizedRef.current[activeFieldId] = true
syncOverlay(activeFieldId)
}

isResizingRef.current = false
resizeStateRef.current = null
document.removeEventListener('mousemove', handleMouseMove)
document.removeEventListener('mouseup', handleMouseUp)
}

document.addEventListener('mousemove', handleMouseMove)
document.addEventListener('mouseup', handleMouseUp)
},
[syncOverlay]
)

useLayoutEffect(() => {
currentMessages.forEach((_, index) => {
autoResizeTextarea(`message-${index}`)
})
}, [currentMessages, autoResizeTextarea])
document.addEventListener('mousemove', handleMouseMove)
document.addEventListener('mouseup', handleMouseUp)
}, [])

useEffect(() => {
const observers: ResizeObserver[] = []

for (let i = 0; i < currentMessages.length; i++) {
const fieldId = `message-${i}`
const textarea = textareaRefs.current[fieldId]
const overlay = overlayRefs.current[fieldId]

if (textarea && overlay) {
const observer = new ResizeObserver(() => {
overlay.style.width = `${textarea.clientWidth}px`
})
observer.observe(textarea)
observers.push(observer)
}
}

return () => {
observers.forEach((observer) => observer.disconnect())
}
}, [currentMessages.length])
currentMessages.forEach((_, index) => {
const fieldId = `message-${index}`
autoResizeTextarea(fieldId)
})
}, [currentMessages, autoResizeTextarea])

return (
<div className='flex w-full flex-col gap-[10px]'>
@@ -674,15 +621,19 @@ export function MessagesInput({
</div>

{/* Content Input with overlay for variable highlighting */}
<div className='relative w-full overflow-hidden'>
<div className='relative w-full'>
<textarea
ref={(el) => {
textareaRefs.current[fieldId] = el
}}
className='relative z-[2] m-0 box-border h-auto min-h-[80px] w-full resize-none overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-words border-none bg-transparent px-[8px] py-[8px] font-medium font-sans text-sm text-transparent leading-[1.5] caret-[var(--text-primary)] outline-none [-ms-overflow-style:none] [scrollbar-width:none] placeholder:text-[var(--text-muted)] focus:outline-none focus-visible:outline-none disabled:cursor-not-allowed [&::-webkit-scrollbar]:hidden'
className='allow-scroll box-border min-h-[80px] w-full resize-none whitespace-pre-wrap break-words border-none bg-transparent px-[8px] pt-[8px] font-[inherit] font-medium text-sm text-transparent leading-[inherit] caret-[var(--text-primary)] outline-none placeholder:text-[var(--text-muted)] focus:outline-none focus-visible:outline-none disabled:cursor-not-allowed'
rows={3}
placeholder='Enter message content...'
value={message.content}
onChange={fieldHandlers.onChange}
onChange={(e) => {
fieldHandlers.onChange(e)
autoResizeTextarea(fieldId)
}}
onKeyDown={(e) => {
if (e.key === 'Tab' && !isPreview && !disabled) {
e.preventDefault()
@@ -719,13 +670,12 @@ export function MessagesInput({
ref={(el) => {
overlayRefs.current[fieldId] = el
}}
className='pointer-events-none absolute top-0 left-0 z-[1] m-0 box-border w-full overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-words border-none bg-transparent px-[8px] py-[8px] font-medium font-sans text-[var(--text-primary)] text-sm leading-[1.5] [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden'
className='scrollbar-none pointer-events-none absolute top-0 left-0 box-border w-full overflow-auto whitespace-pre-wrap break-words border-none bg-transparent px-[8px] pt-[8px] font-[inherit] font-medium text-[var(--text-primary)] text-sm leading-[inherit]'
>
{formatDisplayText(message.content, {
accessiblePrefixes,
highlightAll: !accessiblePrefixes,
})}
{message.content.endsWith('\n') && '\u200B'}
</div>

{/* Env var dropdown for this message */}
@@ -755,7 +705,7 @@ export function MessagesInput({

{!isPreview && !disabled && (
<div
className='absolute right-1 bottom-1 z-[3] flex h-4 w-4 cursor-ns-resize items-center justify-center rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] dark:bg-[var(--surface-5)]'
className='absolute right-1 bottom-1 flex h-4 w-4 cursor-ns-resize items-center justify-center rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] dark:bg-[var(--surface-5)]'
onMouseDown={(e) => handleResizeStart(fieldId, e)}
onDragStart={(e) => {
e.preventDefault()
@@ -35,11 +35,11 @@ import type {
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { getBlock } from '@/blocks'
import type { BlockConfig } from '@/blocks/types'
import { normalizeName } from '@/executor/constants'
import type { Variable } from '@/stores/panel'
import { useVariablesStore } from '@/stores/panel'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { normalizeName } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types'

@@ -29,7 +29,6 @@ import {
type OAuthProvider,
type OAuthService,
} from '@/lib/oauth'
import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import {
CheckboxList,
@@ -66,7 +65,7 @@ import { useForceRefreshMcpTools, useMcpServers, useStoredMcpTools } from '@/hoo
import {
useChildDeploymentStatus,
useDeployChildWorkflow,
useWorkflowState,
useWorkflowInputFields,
useWorkflows,
} from '@/hooks/queries/workflows'
import { usePermissionConfig } from '@/hooks/use-permission-config'
@@ -772,11 +771,7 @@ function WorkflowInputMapperSyncWrapper({
disabled: boolean
workflowId: string
}) {
const { data: workflowState, isLoading } = useWorkflowState(workflowId)
const inputFields = useMemo(
() => (workflowState?.blocks ? extractInputFieldsFromBlocks(workflowState.blocks) : []),
[workflowState?.blocks]
)
const { data: inputFields = [], isLoading } = useWorkflowInputFields(workflowId)

const parsedValue = useMemo(() => {
try {
@@ -1,45 +0,0 @@
'use client'

import { useMemo } from 'react'
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
import type { SubBlockConfig } from '@/blocks/types'
import type { SelectorContext } from '@/hooks/selectors/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

interface WorkflowSelectorInputProps {
blockId: string
subBlock: SubBlockConfig
disabled?: boolean
isPreview?: boolean
previewValue?: string | null
}

export function WorkflowSelectorInput({
blockId,
subBlock,
disabled = false,
isPreview = false,
previewValue,
}: WorkflowSelectorInputProps) {
const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId)

const context: SelectorContext = useMemo(
() => ({
excludeWorkflowId: activeWorkflowId ?? undefined,
}),
[activeWorkflowId]
)

return (
<SelectorCombobox
blockId={blockId}
subBlock={subBlock}
selectorKey='sim.workflows'
selectorContext={context}
disabled={disabled}
isPreview={isPreview}
previewValue={previewValue}
placeholder={subBlock.placeholder || 'Select workflow...'}
/>
)
}
@@ -2,7 +2,6 @@

import { useCallback, useMemo } from 'react'
import { isEqual } from 'lodash'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import {
buildCanonicalIndex,
isNonEmptyValue,
@@ -152,7 +151,7 @@ export function useDependsOnGate(

// Get values for all dependency fields (both all and any)
// Use isEqual to prevent re-renders when dependency values haven't actually changed
const dependencyValuesMap = useStoreWithEqualityFn(useSubBlockStore, dependencySelector, isEqual)
const dependencyValuesMap = useSubBlockStore(dependencySelector, isEqual)

const depsSatisfied = useMemo(() => {
// Check all fields (AND logic) - all must be satisfied

@@ -2,7 +2,6 @@ import { useCallback, useEffect, useRef } from 'react'
import { createLogger } from '@sim/logger'
import { isEqual } from 'lodash'
import { useShallow } from 'zustand/react/shallow'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { getProviderFromModel } from '@/providers/utils'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
@@ -59,8 +58,7 @@ export function useSubBlockValue<T = any>(
const streamingValueRef = useRef<T | null>(null)
const wasStreamingRef = useRef<boolean>(false)

const storeValue = useStoreWithEqualityFn(
useSubBlockStore,
const storeValue = useSubBlockStore(
useCallback(
(state) => {
// If the active workflow ID isn't available yet, return undefined so we can fall back to initialValue
@@ -94,8 +92,7 @@ export function useSubBlockValue<T = any>(

// Always call this hook unconditionally - don't wrap it in a condition
// Optimized: only re-render if model value actually changes
const modelSubBlockValue = useStoreWithEqualityFn(
useSubBlockStore,
const modelSubBlockValue = useSubBlockStore(
useCallback((state) => (blockId ? state.getValue(blockId, 'model') : null), [blockId]),
(a, b) => a === b
)
@@ -40,7 +40,6 @@ import {
TimeInput,
ToolInput,
VariablesInput,
WorkflowSelectorInput,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import type { SubBlockConfig } from '@/blocks/types'
@@ -91,6 +90,7 @@ const isFieldRequired = (config: SubBlockConfig, subBlockValues?: Record<string,
if (!config.required) return false
if (typeof config.required === 'boolean') return config.required

// Helper function to evaluate a condition
const evalCond = (
cond: {
field: string
@@ -132,6 +132,7 @@ const isFieldRequired = (config: SubBlockConfig, subBlockValues?: Record<string,
return match
}

// If required is a condition object or function, evaluate it
const condition = typeof config.required === 'function' ? config.required() : config.required
return evalCond(condition, subBlockValues || {})
}
@@ -377,6 +378,7 @@ function SubBlockComponent({
setIsValidJson(isValid)
}

// Check if wand is enabled for this sub-block
const isWandEnabled = config.wandConfig?.enabled ?? false

/**
@@ -436,6 +438,8 @@ function SubBlockComponent({
| null
| undefined

// Use dependsOn gating to compute final disabled state
// Only pass previewContextValues when in preview mode to avoid format mismatches
const { finalDisabled: gatedDisabled } = useDependsOnGate(blockId, config, {
disabled,
isPreview,
@@ -865,17 +869,6 @@ function SubBlockComponent({
/>
)

case 'workflow-selector':
return (
<WorkflowSelectorInput
blockId={blockId}
subBlock={config}
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue as string | null}
/>
)

case 'mcp-server-selector':
return (
<McpServerSelector

@@ -68,7 +68,7 @@ export function SubflowEditor({
<div className='flex flex-1 flex-col overflow-hidden pt-[0px]'>
{/* Subflow Editor Section */}
<div ref={subBlocksRef} className='subblocks-section flex flex-1 flex-col overflow-hidden'>
<div className='flex-1 overflow-y-auto overflow-x-hidden px-[8px] pt-[9px] pb-[8px]'>
<div className='flex-1 overflow-y-auto overflow-x-hidden px-[8px] pt-[5px] pb-[8px]'>
{/* Type Selection */}
<div>
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
@@ -2,18 +2,8 @@

import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { isEqual } from 'lodash'
import {
BookOpen,
Check,
ChevronDown,
ChevronUp,
ExternalLink,
Loader2,
Pencil,
} from 'lucide-react'
import { useParams } from 'next/navigation'
import { BookOpen, Check, ChevronDown, ChevronUp, Pencil } from 'lucide-react'
import { useShallow } from 'zustand/react/shallow'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Button, Tooltip } from '@/components/emcn'
import {
buildCanonicalIndex,
@@ -38,10 +28,8 @@ import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/component
import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
import { getSubBlockStableKey } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/utils'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { WorkflowPreview } from '@/app/workspace/[workspaceId]/w/components/preview'
import { getBlock } from '@/blocks/registry'
import type { SubBlockType } from '@/blocks/types'
import { useWorkflowState } from '@/hooks/queries/workflows'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { usePanelEditorStore } from '@/stores/panel'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -96,14 +84,6 @@ export function Editor() {
// Get subflow display properties from configs
const subflowConfig = isSubflow ? (currentBlock.type === 'loop' ? LoopTool : ParallelTool) : null

// Check if selected block is a workflow block
const isWorkflowBlock =
currentBlock && (currentBlock.type === 'workflow' || currentBlock.type === 'workflow_input')

// Get workspace ID from params
const params = useParams()
const workspaceId = params.workspaceId as string

// Refs for resize functionality
const subBlocksRef = useRef<HTMLDivElement>(null)

@@ -119,8 +99,7 @@ export function Editor() {
currentWorkflow.isSnapshotView
)

const blockSubBlockValues = useStoreWithEqualityFn(
useSubBlockStore,
const blockSubBlockValues = useSubBlockStore(
useCallback(
(state) => {
if (!activeWorkflowId || !currentBlockId) return EMPTY_SUBBLOCK_VALUES
@@ -273,11 +252,11 @@ export function Editor() {

// Trigger rename mode when signaled from context menu
useEffect(() => {
if (shouldFocusRename && currentBlock) {
if (shouldFocusRename && currentBlock && !isSubflow) {
handleStartRename()
setShouldFocusRename(false)
}
}, [shouldFocusRename, currentBlock, handleStartRename, setShouldFocusRename])
}, [shouldFocusRename, currentBlock, isSubflow, handleStartRename, setShouldFocusRename])

/**
* Handles opening documentation link in a new secure tab.
@@ -289,22 +268,6 @@ export function Editor() {
}
}

// Get child workflow ID for workflow blocks
const childWorkflowId = isWorkflowBlock ? blockSubBlockValues?.workflowId : null

// Fetch child workflow state for preview (only for workflow blocks with a selected workflow)
const { data: childWorkflowState, isLoading: isLoadingChildWorkflow } =
useWorkflowState(childWorkflowId)

/**
* Handles opening the child workflow in a new tab.
*/
const handleOpenChildWorkflow = useCallback(() => {
if (childWorkflowId && workspaceId) {
window.open(`/workspace/${workspaceId}/w/${childWorkflowId}`, '_blank', 'noopener,noreferrer')
}
}, [childWorkflowId, workspaceId])

// Determine if connections are at minimum height (collapsed state)
const isConnectionsAtMinHeight = connectionsHeight <= 35

@@ -357,7 +320,7 @@ export function Editor() {
</div>
<div className='flex shrink-0 items-center gap-[8px]'>
{/* Rename button */}
{currentBlock && (
{currentBlock && !isSubflow && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
@@ -443,66 +406,7 @@ export function Editor() {
className='subblocks-section flex flex-1 flex-col overflow-hidden'
>
<div className='flex-1 overflow-y-auto overflow-x-hidden px-[8px] pt-[12px] pb-[8px] [overflow-anchor:none]'>
{/* Workflow Preview - only for workflow blocks with a selected child workflow */}
{isWorkflowBlock && childWorkflowId && (
<>
<div className='subblock-content flex flex-col gap-[9.5px]'>
<div className='pl-[2px] font-medium text-[13px] text-[var(--text-primary)] leading-none'>
Workflow Preview
</div>
<div className='relative h-[160px] overflow-hidden rounded-[4px] border border-[var(--border)]'>
{isLoadingChildWorkflow ? (
<div className='flex h-full items-center justify-center bg-[var(--surface-3)]'>
<Loader2 className='h-5 w-5 animate-spin text-[var(--text-tertiary)]' />
</div>
) : childWorkflowState ? (
<>
<div className='[&_*:active]:!cursor-grabbing [&_*]:!cursor-grab [&_.react-flow__handle]:!hidden h-full w-full'>
<WorkflowPreview
workflowState={childWorkflowState}
height={160}
width='100%'
isPannable={true}
defaultZoom={0.6}
fitPadding={0.15}
cursorStyle='grab'
/>
</div>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='ghost'
onClick={handleOpenChildWorkflow}
className='absolute right-[6px] bottom-[6px] z-10 h-[24px] w-[24px] cursor-pointer border border-[var(--border)] bg-[var(--surface-2)] p-0 hover:bg-[var(--surface-4)]'
>
<ExternalLink className='h-[12px] w-[12px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>Open workflow</Tooltip.Content>
</Tooltip.Root>
</>
) : (
<div className='flex h-full items-center justify-center bg-[var(--surface-3)]'>
<span className='text-[13px] text-[var(--text-tertiary)]'>
Unable to load preview
</span>
</div>
)}
</div>
</div>
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
<div
className='h-[1.25px]'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
</div>
</>
)}
{subBlocks.length === 0 && !isWorkflowBlock ? (
{subBlocks.length === 0 ? (
<div className='flex h-full items-center justify-center text-center text-[#8D8D8D] text-[13px]'>
This block has no subblocks
</div>
@@ -1,4 +1,5 @@
import { useShallow } from 'zustand/react/shallow'
import { useCallback } from 'react'
import { shallow } from 'zustand/shallow'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

@@ -12,26 +13,35 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
*/
export function useEditorBlockProperties(blockId: string | null, isSnapshotView: boolean) {
const normalBlockProps = useWorkflowStore(
useShallow((state) => {
if (!blockId) return { advancedMode: false, triggerMode: false }
const block = state.blocks?.[blockId]
return {
advancedMode: block?.advancedMode ?? false,
triggerMode: block?.triggerMode ?? false,
}
})
useCallback(
(state) => {
if (!blockId) return { advancedMode: false, triggerMode: false }
const block = state.blocks?.[blockId]
return {
advancedMode: block?.advancedMode ?? false,
triggerMode: block?.triggerMode ?? false,
}
},
[blockId]
),
shallow
)

const baselineBlockProps = useWorkflowDiffStore(
useShallow((state) => {
if (!blockId) return { advancedMode: false, triggerMode: false }
const block = state.baselineWorkflow?.blocks?.[blockId]
return {
advancedMode: block?.advancedMode ?? false,
triggerMode: block?.triggerMode ?? false,
}
})
useCallback(
(state) => {
if (!blockId) return { advancedMode: false, triggerMode: false }
const block = state.baselineWorkflow?.blocks?.[blockId]
return {
advancedMode: block?.advancedMode ?? false,
triggerMode: block?.triggerMode ?? false,
}
},
[blockId]
),
shallow
)

// Use the appropriate props based on view mode
return isSnapshotView ? baselineBlockProps : normalBlockProps
}
@@ -3,7 +3,6 @@ import { createLogger } from '@sim/logger'
import { isEqual } from 'lodash'
import { useParams } from 'next/navigation'
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Badge, Tooltip } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -527,8 +526,7 @@ const SubBlockRow = memo(function SubBlockRow({
* Subscribe only to variables for this workflow to avoid re-renders from other workflows.
* Uses isEqual for deep comparison since Object.fromEntries creates a new object each time.
*/
const workflowVariables = useStoreWithEqualityFn(
useVariablesStore,
const workflowVariables = useVariablesStore(
useCallback(
(state) => {
if (!workflowId) return {}
@@ -731,8 +729,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
const isStarterBlock = type === 'starter'
const isWebhookTriggerBlock = type === 'webhook' || type === 'generic_webhook'

const blockSubBlockValues = useStoreWithEqualityFn(
useSubBlockStore,
const blockSubBlockValues = useSubBlockStore(
useCallback(
(state) => {
if (!activeWorkflowId) return EMPTY_SUBBLOCK_VALUES

@@ -2,7 +2,7 @@ import { useMemo } from 'react'
import { useShallow } from 'zustand/react/shallow'
import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator'
import { SYSTEM_REFERENCE_PREFIXES } from '@/lib/workflows/sanitization/references'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { normalizeName } from '@/executor/constants'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
@@ -27,7 +27,7 @@ export function useAccessibleReferencePrefixes(blockId?: string | null): Set<str
const accessibleIds = new Set<string>(ancestorIds)
accessibleIds.add(blockId)

const starterBlock = Object.values(blocks).find((block) => isInputDefinitionTrigger(block.type))
const starterBlock = Object.values(blocks).find((block) => isValidStartBlockType(block.type))
if (starterBlock && ancestorIds.includes(starterBlock.id)) {
accessibleIds.add(starterBlock.id)
}
@@ -180,21 +180,6 @@ function mapEdgesByNode(edges: Edge[], nodeIds: Set<string>): Map<string, Edge[]
return result
}

/**
* Syncs the panel editor with the current selection state.
* Shows block details when exactly one block is selected, clears otherwise.
*/
function syncPanelWithSelection(selectedIds: string[]) {
const { currentBlockId, clearCurrentBlock, setCurrentBlockId } = usePanelEditorStore.getState()
if (selectedIds.length === 1 && selectedIds[0] !== currentBlockId) {
setCurrentBlockId(selectedIds[0])
} else if (selectedIds.length === 0 && currentBlockId) {
clearCurrentBlock()
} else if (selectedIds.length > 1 && currentBlockId) {
clearCurrentBlock()
}
}

/** Custom node types for ReactFlow. */
const nodeTypes: NodeTypes = {
workflowBlock: WorkflowBlock,
@@ -2090,10 +2075,7 @@ const WorkflowContent = React.memo(() => {
...node,
selected: pendingSet.has(node.id),
}))
const resolved = resolveParentChildSelectionConflicts(withSelection, blocks)
setDisplayNodes(resolved)
const selectedIds = resolved.filter((node) => node.selected).map((node) => node.id)
syncPanelWithSelection(selectedIds)
setDisplayNodes(resolveParentChildSelectionConflicts(withSelection, blocks))
return
}

@@ -2193,7 +2175,13 @@ const WorkflowContent = React.memo(() => {
})
const selectedIds = selectedIdsRef.current as string[] | null
if (selectedIds !== null) {
syncPanelWithSelection(selectedIds)
const { currentBlockId, clearCurrentBlock, setCurrentBlockId } =
usePanelEditorStore.getState()
if (selectedIds.length === 1 && selectedIds[0] !== currentBlockId) {
setCurrentBlockId(selectedIds[0])
} else if (selectedIds.length === 0 && currentBlockId) {
clearCurrentBlock()
}
}
},
[blocks]
@@ -13,7 +13,6 @@ import {
|
||||
import { ReactFlowProvider } from 'reactflow'
|
||||
import { Badge, Button, ChevronDown, Code, Combobox, Input, Label } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||
import { extractReferencePrefixes } from '@/lib/workflows/sanitization/references'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
@@ -686,7 +685,7 @@ interface WorkflowVariable {
|
||||
value: unknown
|
||||
}
|
||||
|
||||
interface PreviewEditorProps {
|
||||
interface BlockDetailsSidebarProps {
|
||||
block: BlockState
|
||||
executionData?: ExecutionData
|
||||
/** All block execution data for resolving variable references */
|
||||
@@ -705,6 +704,14 @@ interface PreviewEditorProps {
|
||||
onClose?: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Format duration for display
|
||||
*/
|
||||
function formatDuration(ms: number): string {
|
||||
if (ms < 1000) return `${ms}ms`
|
||||
return `${(ms / 1000).toFixed(2)}s`
|
||||
}
|
||||
|
||||
/** Minimum height for the connections section (header only) */
|
||||
const MIN_CONNECTIONS_HEIGHT = 30
|
||||
/** Maximum height for the connections section */
|
||||
@@ -715,7 +722,7 @@ const DEFAULT_CONNECTIONS_HEIGHT = 150
|
||||
/**
|
||||
* Readonly sidebar panel showing block configuration using SubBlock components.
|
||||
*/
|
||||
function PreviewEditorContent({
|
||||
function BlockDetailsSidebarContent({
|
||||
block,
|
||||
executionData,
|
||||
allBlockExecutions,
|
||||
@@ -725,7 +732,7 @@ function PreviewEditorContent({
|
||||
parallels,
|
||||
isExecutionMode = false,
|
||||
onClose,
|
||||
}: PreviewEditorProps) {
|
||||
}: BlockDetailsSidebarProps) {
|
||||
// Convert Record<string, Variable> to Array<Variable> for iteration
|
||||
const normalizedWorkflowVariables = useMemo(() => {
|
||||
if (!workflowVariables) return []
|
||||
@@ -991,22 +998,6 @@ function PreviewEditorContent({
|
||||
})
|
||||
}, [extractedRefs.envVars])
|
||||
|
||||
const rawValues = useMemo(() => {
|
||||
return Object.entries(subBlockValues).reduce<Record<string, unknown>>((acc, [key, entry]) => {
|
||||
if (entry && typeof entry === 'object' && 'value' in entry) {
|
||||
acc[key] = (entry as { value: unknown }).value
|
||||
} else {
|
||||
acc[key] = entry
|
||||
}
|
||||
return acc
|
||||
}, {})
|
||||
}, [subBlockValues])
|
||||
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(blockConfig?.subBlocks || []),
|
||||
[blockConfig?.subBlocks]
|
||||
)
|
||||
|
||||
// Check if this is a subflow block (loop or parallel)
|
||||
const isSubflow = block.type === 'loop' || block.type === 'parallel'
|
||||
const loopConfig = block.type === 'loop' ? loops?.[block.id] : undefined
|
||||
@@ -1088,6 +1079,21 @@ function PreviewEditorContent({
|
||||
)
|
||||
}
|
||||
|
||||
const rawValues = useMemo(() => {
|
||||
return Object.entries(subBlockValues).reduce<Record<string, unknown>>((acc, [key, entry]) => {
|
||||
if (entry && typeof entry === 'object' && 'value' in entry) {
|
||||
acc[key] = (entry as { value: unknown }).value
|
||||
} else {
|
||||
acc[key] = entry
|
||||
}
|
||||
return acc
|
||||
}, {})
|
||||
}, [subBlockValues])
|
||||
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(blockConfig.subBlocks),
|
||||
[blockConfig.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = block.data?.canonicalModes
|
||||
const effectiveAdvanced =
|
||||
(block.advancedMode ?? false) ||
|
||||
@@ -1173,7 +1179,7 @@ function PreviewEditorContent({
|
||||
)}
|
||||
{executionData.durationMs !== undefined && (
|
||||
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
|
||||
{formatDuration(executionData.durationMs, { precision: 2 })}
|
||||
{formatDuration(executionData.durationMs)}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
@@ -1365,12 +1371,12 @@ function PreviewEditorContent({
|
||||
}
|
||||
|
||||
/**
|
||||
* Preview editor wrapped in ReactFlowProvider for hook compatibility.
|
||||
* Block details sidebar wrapped in ReactFlowProvider for hook compatibility.
|
||||
*/
|
||||
export function PreviewEditor(props: PreviewEditorProps) {
|
||||
export function BlockDetailsSidebar(props: BlockDetailsSidebarProps) {
|
||||
return (
|
||||
<ReactFlowProvider>
|
||||
<PreviewEditorContent {...props} />
|
||||
<BlockDetailsSidebarContent {...props} />
|
||||
</ReactFlowProvider>
|
||||
)
|
||||
}
|
||||
@@ -21,9 +21,11 @@ interface WorkflowPreviewBlockData {
}

/**
 * Preview block component for workflow visualization.
 * Renders block header, subblocks skeleton, and handles without
 * hooks, store subscriptions, or interactive features.
 * Lightweight block component for workflow previews.
 * Renders block header, dummy subblocks skeleton, and handles.
 * Respects horizontalHandles and enabled state from workflow.
 * No heavy hooks, store subscriptions, or interactive features.
 * Used in template cards and other preview contexts for performance.
 */
function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>) {
  const {

@@ -15,9 +15,10 @@ interface WorkflowPreviewSubflowData {
}

/**
 * Preview subflow component for workflow visualization.
 * Renders loop/parallel containers without hooks, store subscriptions,
 * or interactive features.
 * Lightweight subflow component for workflow previews.
 * Matches the styling of the actual SubflowNodeComponent but without
 * hooks, store subscriptions, or interactive features.
 * Used in template cards and other preview contexts for performance.
 */
function WorkflowPreviewSubflowInner({ data }: NodeProps<WorkflowPreviewSubflowData>) {
  const { name, width = 500, height = 300, kind, isPreviewSelected = false } = data

@@ -1,2 +1,2 @@
export { PreviewEditor } from './components/preview-editor'
export { BlockDetailsSidebar } from './components/block-details-sidebar'
export { getLeftmostBlockId, WorkflowPreview } from './preview'

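The preview.tsx hunks that follow switch the ReactFlow node registry between the full interactive components and these lightweight preview components. A condensed sketch of that toggle; the two maps mirror the `fullNodeTypes` and `lightweightNodeTypes` objects named in the diff, while the `pickNodeTypes` helper is only an illustration (the diff inlines the ternary inside WorkflowPreview):

import type { NodeTypes } from 'reactflow'
import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
import { NoteBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/note-block/note-block'
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
import { WorkflowPreviewBlock } from '@/app/workspace/[workspaceId]/w/components/preview/components/block'
import { WorkflowPreviewSubflow } from '@/app/workspace/[workspaceId]/w/components/preview/components/subflow'

/** Full node types with interactive WorkflowBlock for detailed previews. */
const fullNodeTypes: NodeTypes = {
  workflowBlock: WorkflowBlock,
  noteBlock: NoteBlock,
  subflowNode: SubflowNodeComponent,
}

/** Lightweight node types for template cards and other high-volume previews. */
const lightweightNodeTypes: NodeTypes = {
  workflowBlock: WorkflowPreviewBlock,
  noteBlock: WorkflowPreviewBlock,
  subflowNode: WorkflowPreviewSubflow,
}

/** Pick the registry per preview instance, as the WorkflowPreview hunk does. */
export function pickNodeTypes(lightweight: boolean): NodeTypes {
  return lightweight ? lightweightNodeTypes : fullNodeTypes
}
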
@@ -15,10 +15,14 @@ import 'reactflow/dist/style.css'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
||||
import { NoteBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/note-block/note-block'
|
||||
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
|
||||
import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
|
||||
import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge'
|
||||
import { estimateBlockDimensions } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
|
||||
import { WorkflowPreviewBlock } from '@/app/workspace/[workspaceId]/w/components/preview/components/block'
|
||||
import { WorkflowPreviewSubflow } from '@/app/workspace/[workspaceId]/w/components/preview/components/subflow'
|
||||
import { getBlock } from '@/blocks'
|
||||
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('WorkflowPreview')
|
||||
@@ -130,6 +134,8 @@ interface WorkflowPreviewProps {
|
||||
onNodeContextMenu?: (blockId: string, mousePosition: { x: number; y: number }) => void
|
||||
/** Callback when the canvas (empty area) is clicked */
|
||||
onPaneClick?: () => void
|
||||
/** Use lightweight blocks for better performance in template cards */
|
||||
lightweight?: boolean
|
||||
/** Cursor style to show when hovering the canvas */
|
||||
cursorStyle?: 'default' | 'pointer' | 'grab'
|
||||
/** Map of executed block IDs to their status for highlighting the execution path */
|
||||
@@ -139,10 +145,19 @@ interface WorkflowPreviewProps {
|
||||
}
|
||||
|
||||
/**
|
||||
* Preview node types using minimal components without hooks or store subscriptions.
|
||||
* This prevents interaction issues while allowing canvas panning and node clicking.
|
||||
* Full node types with interactive WorkflowBlock for detailed previews
|
||||
*/
|
||||
const previewNodeTypes: NodeTypes = {
|
||||
const fullNodeTypes: NodeTypes = {
|
||||
workflowBlock: WorkflowBlock,
|
||||
noteBlock: NoteBlock,
|
||||
subflowNode: SubflowNodeComponent,
|
||||
}
|
||||
|
||||
/**
|
||||
* Lightweight node types for template cards and other high-volume previews.
|
||||
* Uses minimal components without hooks or store subscriptions.
|
||||
*/
|
||||
const lightweightNodeTypes: NodeTypes = {
|
||||
workflowBlock: WorkflowPreviewBlock,
|
||||
noteBlock: WorkflowPreviewBlock,
|
||||
subflowNode: WorkflowPreviewSubflow,
|
||||
@@ -157,19 +172,17 @@ const edgeTypes: EdgeTypes = {
|
||||
interface FitViewOnChangeProps {
|
||||
nodeIds: string
|
||||
fitPadding: number
|
||||
containerRef: React.RefObject<HTMLDivElement | null>
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper component that calls fitView when the set of nodes changes or when the container resizes.
|
||||
* Helper component that calls fitView when the set of nodes changes.
|
||||
* Only triggers on actual node additions/removals, not on selection changes.
|
||||
* Must be rendered inside ReactFlowProvider.
|
||||
*/
|
||||
function FitViewOnChange({ nodeIds, fitPadding, containerRef }: FitViewOnChangeProps) {
|
||||
function FitViewOnChange({ nodeIds, fitPadding }: FitViewOnChangeProps) {
|
||||
const { fitView } = useReactFlow()
|
||||
const lastNodeIdsRef = useRef<string | null>(null)
|
||||
|
||||
// Fit view when nodes change
|
||||
useEffect(() => {
|
||||
if (!nodeIds.length) return
|
||||
const shouldFit = lastNodeIdsRef.current !== nodeIds
|
||||
@@ -182,27 +195,6 @@ function FitViewOnChange({ nodeIds, fitPadding, containerRef }: FitViewOnChangeP
|
||||
return () => clearTimeout(timeoutId)
|
||||
}, [nodeIds, fitPadding, fitView])
|
||||
|
||||
// Fit view when container resizes (debounced to avoid excessive calls during drag)
|
||||
useEffect(() => {
|
||||
const container = containerRef.current
|
||||
if (!container) return
|
||||
|
||||
let timeoutId: ReturnType<typeof setTimeout> | null = null
|
||||
|
||||
const resizeObserver = new ResizeObserver(() => {
|
||||
if (timeoutId) clearTimeout(timeoutId)
|
||||
timeoutId = setTimeout(() => {
|
||||
fitView({ padding: fitPadding, duration: 150 })
|
||||
}, 100)
|
||||
})
|
||||
|
||||
resizeObserver.observe(container)
|
||||
return () => {
|
||||
if (timeoutId) clearTimeout(timeoutId)
|
||||
resizeObserver.disconnect()
|
||||
}
|
||||
}, [containerRef, fitPadding, fitView])
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -218,12 +210,12 @@ export function WorkflowPreview({
|
||||
onNodeClick,
|
||||
onNodeContextMenu,
|
||||
onPaneClick,
|
||||
lightweight = false,
|
||||
cursorStyle = 'grab',
|
||||
executedBlocks,
|
||||
selectedBlockId,
|
||||
}: WorkflowPreviewProps) {
|
||||
const containerRef = useRef<HTMLDivElement>(null)
|
||||
const nodeTypes = previewNodeTypes
|
||||
const nodeTypes = lightweight ? lightweightNodeTypes : fullNodeTypes
|
||||
const isValidWorkflowState = workflowState?.blocks && workflowState.edges
|
||||
|
||||
const blocksStructure = useMemo(() => {
|
||||
@@ -293,28 +285,119 @@ export function WorkflowPreview({
|
||||
|
||||
const absolutePosition = calculateAbsolutePosition(block, workflowState.blocks)
|
||||
|
||||
// Handle loop/parallel containers
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
if (lightweight) {
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
const isSelected = selectedBlockId === blockId
|
||||
const dimensions = calculateContainerDimensions(blockId, workflowState.blocks)
|
||||
nodeArray.push({
|
||||
id: blockId,
|
||||
type: 'subflowNode',
|
||||
position: absolutePosition,
|
||||
draggable: false,
|
||||
data: {
|
||||
name: block.name,
|
||||
width: dimensions.width,
|
||||
height: dimensions.height,
|
||||
kind: block.type as 'loop' | 'parallel',
|
||||
isPreviewSelected: isSelected,
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const isSelected = selectedBlockId === blockId
|
||||
|
||||
let lightweightExecutionStatus: ExecutionStatus | undefined
|
||||
if (executedBlocks) {
|
||||
const blockExecution = executedBlocks[blockId]
|
||||
if (blockExecution) {
|
||||
if (blockExecution.status === 'error') {
|
||||
lightweightExecutionStatus = 'error'
|
||||
} else if (blockExecution.status === 'success') {
|
||||
lightweightExecutionStatus = 'success'
|
||||
} else {
|
||||
lightweightExecutionStatus = 'not-executed'
|
||||
}
|
||||
} else {
|
||||
lightweightExecutionStatus = 'not-executed'
|
||||
}
|
||||
}
|
||||
|
||||
nodeArray.push({
|
||||
id: blockId,
|
||||
type: 'workflowBlock',
|
||||
position: absolutePosition,
|
||||
draggable: false,
|
||||
// Blocks inside subflows need higher z-index to appear above the container
|
||||
zIndex: block.data?.parentId ? 10 : undefined,
|
||||
data: {
|
||||
type: block.type,
|
||||
name: block.name,
|
||||
isTrigger: block.triggerMode === true,
|
||||
horizontalHandles: block.horizontalHandles ?? false,
|
||||
enabled: block.enabled ?? true,
|
||||
isPreviewSelected: isSelected,
|
||||
executionStatus: lightweightExecutionStatus,
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (block.type === 'loop') {
|
||||
const isSelected = selectedBlockId === blockId
|
||||
const dimensions = calculateContainerDimensions(blockId, workflowState.blocks)
|
||||
nodeArray.push({
|
||||
id: blockId,
|
||||
type: 'subflowNode',
|
||||
position: absolutePosition,
|
||||
parentId: block.data?.parentId,
|
||||
extent: block.data?.extent || undefined,
|
||||
draggable: false,
|
||||
data: {
|
||||
...block.data,
|
||||
name: block.name,
|
||||
width: dimensions.width,
|
||||
height: dimensions.height,
|
||||
kind: block.type as 'loop' | 'parallel',
|
||||
state: 'valid',
|
||||
isPreview: true,
|
||||
isPreviewSelected: isSelected,
|
||||
kind: 'loop',
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Handle regular blocks
|
||||
const isSelected = selectedBlockId === blockId
|
||||
if (block.type === 'parallel') {
|
||||
const isSelected = selectedBlockId === blockId
|
||||
const dimensions = calculateContainerDimensions(blockId, workflowState.blocks)
|
||||
nodeArray.push({
|
||||
id: blockId,
|
||||
type: 'subflowNode',
|
||||
position: absolutePosition,
|
||||
parentId: block.data?.parentId,
|
||||
extent: block.data?.extent || undefined,
|
||||
draggable: false,
|
||||
data: {
|
||||
...block.data,
|
||||
name: block.name,
|
||||
width: dimensions.width,
|
||||
height: dimensions.height,
|
||||
state: 'valid',
|
||||
isPreview: true,
|
||||
isPreviewSelected: isSelected,
|
||||
kind: 'parallel',
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const blockConfig = getBlock(block.type)
|
||||
if (!blockConfig) {
|
||||
logger.error(`No configuration found for block type: ${block.type}`, { blockId })
|
||||
return
|
||||
}
|
||||
|
||||
const nodeType = block.type === 'note' ? 'noteBlock' : 'workflowBlock'
|
||||
|
||||
let executionStatus: ExecutionStatus | undefined
|
||||
if (executedBlocks) {
|
||||
@@ -332,20 +415,24 @@ export function WorkflowPreview({
|
||||
}
|
||||
}
|
||||
|
||||
const isSelected = selectedBlockId === blockId
|
||||
|
||||
nodeArray.push({
|
||||
id: blockId,
|
||||
type: 'workflowBlock',
|
||||
type: nodeType,
|
||||
position: absolutePosition,
|
||||
draggable: false,
|
||||
// Blocks inside subflows need higher z-index to appear above the container
|
||||
zIndex: block.data?.parentId ? 10 : undefined,
|
||||
data: {
|
||||
type: block.type,
|
||||
config: blockConfig,
|
||||
name: block.name,
|
||||
isTrigger: block.triggerMode === true,
|
||||
horizontalHandles: block.horizontalHandles ?? false,
|
||||
enabled: block.enabled ?? true,
|
||||
blockState: block,
|
||||
canEdit: false,
|
||||
isPreview: true,
|
||||
isPreviewSelected: isSelected,
|
||||
subBlockValues: block.subBlocks ?? {},
|
||||
executionStatus,
|
||||
},
|
||||
})
|
||||
@@ -358,6 +445,7 @@ export function WorkflowPreview({
|
||||
parallelsStructure,
|
||||
workflowState.blocks,
|
||||
isValidWorkflowState,
|
||||
lightweight,
|
||||
executedBlocks,
|
||||
selectedBlockId,
|
||||
])
|
||||
@@ -415,7 +503,6 @@ export function WorkflowPreview({
|
||||
return (
|
||||
<ReactFlowProvider>
|
||||
<div
|
||||
ref={containerRef}
|
||||
style={{ height, width, backgroundColor: 'var(--bg)' }}
|
||||
className={cn('preview-mode', onNodeClick && 'interactive-nodes', className)}
|
||||
>
|
||||
@@ -426,20 +513,6 @@ export function WorkflowPreview({
|
||||
.preview-mode .react-flow__selectionpane { cursor: ${cursorStyle} !important; }
|
||||
.preview-mode .react-flow__renderer { cursor: ${cursorStyle}; }
|
||||
|
||||
/* Active/grabbing cursor when dragging */
|
||||
${
|
||||
cursorStyle === 'grab'
|
||||
? `
|
||||
.preview-mode .react-flow:active { cursor: grabbing; }
|
||||
.preview-mode .react-flow__pane:active { cursor: grabbing !important; }
|
||||
.preview-mode .react-flow__selectionpane:active { cursor: grabbing !important; }
|
||||
.preview-mode .react-flow__renderer:active { cursor: grabbing; }
|
||||
.preview-mode .react-flow__node:active { cursor: grabbing !important; }
|
||||
.preview-mode .react-flow__node:active * { cursor: grabbing !important; }
|
||||
`
|
||||
: ''
|
||||
}
|
||||
|
||||
/* Node cursor - pointer on nodes when onNodeClick is provided */
|
||||
.preview-mode.interactive-nodes .react-flow__node { cursor: pointer !important; }
|
||||
.preview-mode.interactive-nodes .react-flow__node > div { cursor: pointer !important; }
|
||||
@@ -487,11 +560,7 @@ export function WorkflowPreview({
|
||||
}
|
||||
onPaneClick={onPaneClick}
|
||||
/>
|
||||
<FitViewOnChange
|
||||
nodeIds={blocksStructure.ids}
|
||||
fitPadding={fitPadding}
|
||||
containerRef={containerRef}
|
||||
/>
|
||||
<FitViewOnChange nodeIds={blocksStructure.ids} fitPadding={fitPadding} />
|
||||
</div>
|
||||
</ReactFlowProvider>
|
||||
)
|
||||
|
||||
@@ -16,7 +16,6 @@ import {
|
||||
} from '@/components/emcn'
|
||||
import { Input, Skeleton } from '@/components/ui'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { formatDate } from '@/lib/core/utils/formatting'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import {
|
||||
type ApiKey,
|
||||
@@ -134,9 +133,13 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
|
||||
}
|
||||
}, [shouldScrollToBottom])
|
||||
|
||||
const formatLastUsed = (dateString?: string) => {
|
||||
const formatDate = (dateString?: string) => {
|
||||
if (!dateString) return 'Never'
|
||||
return formatDate(new Date(dateString))
|
||||
return new Date(dateString).toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
})
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -213,7 +216,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
|
||||
{key.name}
|
||||
</span>
|
||||
<span className='text-[13px] text-[var(--text-secondary)]'>
|
||||
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
|
||||
(last used: {formatDate(key.lastUsed).toLowerCase()})
|
||||
</span>
|
||||
</div>
|
||||
<p className='truncate text-[13px] text-[var(--text-muted)]'>
|
||||
@@ -248,7 +251,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
|
||||
{key.name}
|
||||
</span>
|
||||
<span className='text-[13px] text-[var(--text-secondary)]'>
|
||||
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
|
||||
(last used: {formatDate(key.lastUsed).toLowerCase()})
|
||||
</span>
|
||||
</div>
|
||||
<p className='truncate text-[13px] text-[var(--text-muted)]'>
|
||||
@@ -288,7 +291,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
|
||||
{key.name}
|
||||
</span>
|
||||
<span className='text-[13px] text-[var(--text-secondary)]'>
|
||||
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
|
||||
(last used: {formatDate(key.lastUsed).toLowerCase()})
|
||||
</span>
|
||||
</div>
|
||||
<p className='truncate text-[13px] text-[var(--text-muted)]'>
|
||||
|
||||
@@ -13,7 +13,6 @@ import {
|
||||
ModalHeader,
|
||||
} from '@/components/emcn'
|
||||
import { Input, Skeleton } from '@/components/ui'
|
||||
import { formatDate } from '@/lib/core/utils/formatting'
|
||||
import {
|
||||
type CopilotKey,
|
||||
useCopilotKeys,
|
||||
@@ -116,9 +115,13 @@ export function Copilot() {
|
||||
}
|
||||
}
|
||||
|
||||
const formatLastUsed = (dateString?: string | null) => {
|
||||
const formatDate = (dateString?: string | null) => {
|
||||
if (!dateString) return 'Never'
|
||||
return formatDate(new Date(dateString))
|
||||
return new Date(dateString).toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
})
|
||||
}
|
||||
|
||||
const hasKeys = keys.length > 0
|
||||
@@ -177,7 +180,7 @@ export function Copilot() {
|
||||
{key.name || 'Unnamed Key'}
|
||||
</span>
|
||||
<span className='text-[13px] text-[var(--text-secondary)]'>
|
||||
(last used: {formatLastUsed(key.lastUsed).toLowerCase()})
|
||||
(last used: {formatDate(key.lastUsed).toLowerCase()})
|
||||
</span>
|
||||
</div>
|
||||
<p className='truncate text-[13px] text-[var(--text-muted)]'>
|
||||
|
||||
@@ -11,7 +11,7 @@ import { useSidebarStore } from '@/stores/sidebar/store'
|
||||
* Avatar display configuration for responsive layout.
|
||||
*/
|
||||
const AVATAR_CONFIG = {
|
||||
MIN_COUNT: 4,
|
||||
MIN_COUNT: 3,
|
||||
MAX_COUNT: 12,
|
||||
WIDTH_PER_AVATAR: 20,
|
||||
} as const
|
||||
@@ -106,9 +106,7 @@ export function Avatars({ workflowId }: AvatarsProps) {
|
||||
}, [presenceUsers, currentWorkflowId, workflowId, currentSocketId])
|
||||
|
||||
/**
|
||||
* Calculate visible users and overflow count.
|
||||
* Shows up to maxVisible avatars, with overflow indicator for any remaining.
|
||||
* Users are reversed so new avatars appear on the left (keeping right side stable).
|
||||
* Calculate visible users and overflow count
|
||||
*/
|
||||
const { visibleUsers, overflowCount } = useMemo(() => {
|
||||
if (workflowUsers.length === 0) {
|
||||
@@ -118,8 +116,7 @@ export function Avatars({ workflowId }: AvatarsProps) {
|
||||
const visible = workflowUsers.slice(0, maxVisible)
|
||||
const overflow = Math.max(0, workflowUsers.length - maxVisible)
|
||||
|
||||
// Reverse so rightmost avatars stay stable as new ones are revealed on the left
|
||||
return { visibleUsers: [...visible].reverse(), overflowCount: overflow }
|
||||
return { visibleUsers: visible, overflowCount: overflow }
|
||||
}, [workflowUsers, maxVisible])
|
||||
|
||||
if (visibleUsers.length === 0) {
|
||||
@@ -142,8 +139,9 @@ export function Avatars({ workflowId }: AvatarsProps) {
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
|
||||
{visibleUsers.map((user, index) => (
|
||||
<UserAvatar key={user.socketId} user={user} index={index} />
|
||||
<UserAvatar key={user.socketId} user={user} index={overflowCount > 0 ? index + 1 : index} />
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -347,7 +347,7 @@ export function WorkflowItem({
|
||||
) : (
|
||||
<div
|
||||
className={clsx(
|
||||
'min-w-0 truncate font-medium',
|
||||
'min-w-0 flex-1 truncate font-medium',
|
||||
active
|
||||
? 'text-[var(--text-primary)]'
|
||||
: 'text-[var(--text-tertiary)] group-hover:text-[var(--text-primary)]'
|
||||
|
||||
@@ -242,9 +242,15 @@ Return ONLY the email body - no explanations, no extra text.`,
      id: 'messageId',
      title: 'Message ID',
      type: 'short-input',
      placeholder: 'Read specific email by ID (overrides label/folder)',
      condition: { field: 'operation', value: 'read_gmail' },
      mode: 'advanced',
      placeholder: 'Enter message ID to read (optional)',
      condition: {
        field: 'operation',
        value: 'read_gmail',
        and: {
          field: 'folder',
          value: '',
        },
      },
    },
    // Search Fields
    {

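The Message ID field above is gated by a nested condition: `operation === 'read_gmail'` combined with an `and` clause on `folder`. A minimal sketch of how a `{ field, value, and }` condition of this shape could be evaluated against the current sub-block values; both the `SubBlockCondition` type and the `matchesCondition` evaluator are assumptions for illustration, not the block framework's actual implementation:

// Illustrative only: assumed shape of the condition object used in the hunk above.
interface SubBlockCondition {
  field: string
  value: string | string[]
  and?: SubBlockCondition
}

// Hypothetical evaluator: a field matches when its current value equals (or is
// included in) the condition's value, and any nested `and` clause also matches.
function matchesCondition(
  condition: SubBlockCondition,
  values: Record<string, string | undefined>
): boolean {
  const current = values[condition.field] ?? ''
  const matches = Array.isArray(condition.value)
    ? condition.value.includes(current)
    : current === condition.value
  if (!matches) return false
  return condition.and ? matchesCondition(condition.and, values) : true
}

// Example: the field would show for read_gmail with no folder selected.
matchesCondition(
  { field: 'operation', value: 'read_gmail', and: { field: 'folder', value: '' } },
  { operation: 'read_gmail', folder: '' }
) // => true
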
@@ -129,9 +129,12 @@ ROUTING RULES:
3. If the context is even partially related to a route's description, select that route
4. ONLY output NO_MATCH if the context is completely unrelated to ALL route descriptions

Respond with a JSON object containing:
- route: EXACTLY one route ID (copied exactly as shown above) OR "NO_MATCH"
- reasoning: A brief explanation (1-2 sentences) of why you chose this route`
OUTPUT FORMAT:
- Output EXACTLY one route ID (copied exactly as shown above) OR "NO_MATCH"
- No explanation, no punctuation, no additional text
- Just the route ID or NO_MATCH

Your response:`
}

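This prompt change asks the model for a bare route ID (or NO_MATCH) rather than a JSON object with reasoning. A minimal sketch of how such a completion could be validated against the known route IDs; the `resolveRoute` helper is hypothetical and not part of this diff, shown only to illustrate why the prompt insists on "just the route ID" with no extra text:

// Hypothetical helper: normalize a raw completion into a known route ID or 'NO_MATCH'.
function resolveRoute(rawCompletion: string, knownRouteIds: string[]): string {
  // Strip whitespace and stray quotes/backticks the model might add.
  const candidate = rawCompletion.trim().replace(/^["'`]+|["'`]+$/g, '')
  if (candidate === 'NO_MATCH') return 'NO_MATCH'
  // Only accept an exact match against the route IDs listed in the prompt.
  return knownRouteIds.includes(candidate) ? candidate : 'NO_MATCH'
}
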
/**
@@ -269,7 +272,6 @@ interface RouterV2Response extends ToolResponse {
    total: number
  }
  selectedRoute: string
  reasoning: string
  selectedPath: {
    blockId: string
    blockType: string
@@ -353,7 +355,6 @@ export const RouterV2Block: BlockConfig<RouterV2Response> = {
    tokens: { type: 'json', description: 'Token usage' },
    cost: { type: 'json', description: 'Cost information' },
    selectedRoute: { type: 'string', description: 'Selected route ID' },
    reasoning: { type: 'string', description: 'Explanation of why this route was chosen' },
    selectedPath: { type: 'json', description: 'Selected routing path' },
  },
}

@@ -1,5 +1,30 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { WorkflowIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
const logger = createLogger('WorkflowBlock')
|
||||
|
||||
// Helper function to get available workflows for the dropdown
|
||||
const getAvailableWorkflows = (): Array<{ label: string; id: string }> => {
|
||||
try {
|
||||
const { workflows, activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
|
||||
// Filter out the current workflow to prevent recursion
|
||||
const availableWorkflows = Object.entries(workflows)
|
||||
.filter(([id]) => id !== activeWorkflowId)
|
||||
.map(([id, workflow]) => ({
|
||||
label: workflow.name || `Workflow ${id.slice(0, 8)}`,
|
||||
id: id,
|
||||
}))
|
||||
.sort((a, b) => a.label.localeCompare(b.label))
|
||||
|
||||
return availableWorkflows
|
||||
} catch (error) {
|
||||
logger.error('Error getting available workflows:', error)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
export const WorkflowBlock: BlockConfig = {
|
||||
type: 'workflow',
|
||||
@@ -13,7 +38,8 @@ export const WorkflowBlock: BlockConfig = {
|
||||
{
|
||||
id: 'workflowId',
|
||||
title: 'Select Workflow',
|
||||
type: 'workflow-selector',
|
||||
type: 'combobox',
|
||||
options: getAvailableWorkflows,
|
||||
placeholder: 'Search workflows...',
|
||||
required: true,
|
||||
},
|
||||
|
||||
@@ -1,5 +1,18 @@
|
||||
import { WorkflowIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
const getAvailableWorkflows = (): Array<{ label: string; id: string }> => {
|
||||
try {
|
||||
const { workflows, activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
return Object.entries(workflows)
|
||||
.filter(([id]) => id !== activeWorkflowId)
|
||||
.map(([id, w]) => ({ label: w.name || `Workflow ${id.slice(0, 8)}`, id }))
|
||||
.sort((a, b) => a.label.localeCompare(b.label))
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
export const WorkflowInputBlock: BlockConfig = {
|
||||
type: 'workflow_input',
|
||||
@@ -12,19 +25,21 @@ export const WorkflowInputBlock: BlockConfig = {
|
||||
`,
|
||||
category: 'blocks',
|
||||
docsLink: 'https://docs.sim.ai/blocks/workflow',
|
||||
bgColor: '#6366F1',
|
||||
bgColor: '#6366F1', // Indigo - modern and professional
|
||||
icon: WorkflowIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'workflowId',
|
||||
title: 'Select Workflow',
|
||||
type: 'workflow-selector',
|
||||
type: 'combobox',
|
||||
options: getAvailableWorkflows,
|
||||
placeholder: 'Search workflows...',
|
||||
required: true,
|
||||
},
|
||||
// Renders dynamic mapping UI based on selected child workflow's Start trigger inputFormat
|
||||
{
|
||||
id: 'inputMapping',
|
||||
title: 'Inputs',
|
||||
title: 'Input Mapping',
|
||||
type: 'input-mapping',
|
||||
description:
|
||||
"Map fields defined in the child workflow's Start block to variables/values in this workflow.",
|
||||
|
||||
@@ -23,13 +23,7 @@ import { cn } from '@/lib/core/utils/cn'
|
||||
* ```
|
||||
*/
|
||||
const checkboxVariants = cva(
|
||||
[
|
||||
'peer shrink-0 cursor-pointer rounded-[4px] border transition-colors',
|
||||
'border-[var(--border-1)] bg-transparent',
|
||||
'focus-visible:outline-none',
|
||||
'data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50',
|
||||
'data-[state=checked]:border-[var(--text-primary)] data-[state=checked]:bg-[var(--text-primary)]',
|
||||
].join(' '),
|
||||
'peer shrink-0 rounded-sm border border-[var(--border-1)] bg-[var(--surface-4)] ring-offset-background transition-colors hover:border-[var(--border-muted)] hover:bg-[var(--surface-7)] focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 data-[disabled]:cursor-not-allowed data-[disabled]:opacity-50 data-[state=checked]:border-[var(--text-muted)] data-[state=checked]:bg-[var(--text-muted)] data-[state=checked]:text-white dark:bg-[var(--surface-5)] dark:data-[state=checked]:border-[var(--surface-7)] dark:data-[state=checked]:bg-[var(--surface-7)] dark:data-[state=checked]:text-[var(--text-primary)] dark:hover:border-[var(--surface-7)] dark:hover:bg-[var(--border-1)]',
|
||||
{
|
||||
variants: {
|
||||
size: {
|
||||
@@ -89,7 +83,7 @@ const Checkbox = React.forwardRef<React.ElementRef<typeof CheckboxPrimitive.Root
|
||||
className={cn(checkboxVariants({ size }), className)}
|
||||
{...props}
|
||||
>
|
||||
<CheckboxPrimitive.Indicator className='flex items-center justify-center text-[var(--white)]'>
|
||||
<CheckboxPrimitive.Indicator className={cn('flex items-center justify-center text-current')}>
|
||||
<Check className={cn(checkboxIconVariants({ size }))} />
|
||||
</CheckboxPrimitive.Indicator>
|
||||
</CheckboxPrimitive.Root>
|
||||
|
||||
@@ -24,71 +24,6 @@ function createBlock(id: string, metadataId: string): SerializedBlock {
|
||||
}
|
||||
}
|
||||
|
||||
describe('DAGBuilder disabled subflow validation', () => {
|
||||
it('skips validation for disabled loops with no blocks inside', () => {
|
||||
const workflow: SerializedWorkflow = {
|
||||
version: '1',
|
||||
blocks: [
|
||||
createBlock('start', BlockType.STARTER),
|
||||
{ ...createBlock('loop-block', BlockType.FUNCTION), enabled: false },
|
||||
],
|
||||
connections: [],
|
||||
loops: {
|
||||
'loop-1': {
|
||||
id: 'loop-1',
|
||||
nodes: [], // Empty loop - would normally throw
|
||||
iterations: 3,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const builder = new DAGBuilder()
|
||||
// Should not throw even though loop has no blocks inside
|
||||
expect(() => builder.build(workflow)).not.toThrow()
|
||||
})
|
||||
|
||||
it('skips validation for disabled parallels with no blocks inside', () => {
|
||||
const workflow: SerializedWorkflow = {
|
||||
version: '1',
|
||||
blocks: [createBlock('start', BlockType.STARTER)],
|
||||
connections: [],
|
||||
loops: {},
|
||||
parallels: {
|
||||
'parallel-1': {
|
||||
id: 'parallel-1',
|
||||
nodes: [], // Empty parallel - would normally throw
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const builder = new DAGBuilder()
|
||||
// Should not throw even though parallel has no blocks inside
|
||||
expect(() => builder.build(workflow)).not.toThrow()
|
||||
})
|
||||
|
||||
it('skips validation for loops where all inner blocks are disabled', () => {
|
||||
const workflow: SerializedWorkflow = {
|
||||
version: '1',
|
||||
blocks: [
|
||||
createBlock('start', BlockType.STARTER),
|
||||
{ ...createBlock('inner-block', BlockType.FUNCTION), enabled: false },
|
||||
],
|
||||
connections: [],
|
||||
loops: {
|
||||
'loop-1': {
|
||||
id: 'loop-1',
|
||||
nodes: ['inner-block'], // Has node but it's disabled
|
||||
iterations: 3,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const builder = new DAGBuilder()
|
||||
// Should not throw - loop is effectively disabled since all inner blocks are disabled
|
||||
expect(() => builder.build(workflow)).not.toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
describe('DAGBuilder human-in-the-loop transformation', () => {
|
||||
it('creates trigger nodes and rewires edges for pause blocks', () => {
|
||||
const workflow: SerializedWorkflow = {
|
||||
|
||||
@@ -136,18 +136,17 @@ export class DAGBuilder {
    nodes: string[] | undefined,
    type: 'Loop' | 'Parallel'
  ): void {
    const sentinelStartId =
      type === 'Loop' ? buildSentinelStartId(id) : buildParallelSentinelStartId(id)
    const sentinelStartNode = dag.nodes.get(sentinelStartId)

    if (!sentinelStartNode) return

    if (!nodes || nodes.length === 0) {
      throw new Error(
        `${type} has no blocks inside. Add at least one block to the ${type.toLowerCase()}.`
      )
    }

    const sentinelStartId =
      type === 'Loop' ? buildSentinelStartId(id) : buildParallelSentinelStartId(id)
    const sentinelStartNode = dag.nodes.get(sentinelStartId)
    if (!sentinelStartNode) return

    const hasConnections = Array.from(sentinelStartNode.outgoingEdges.values()).some((edge) =>
      nodes.includes(extractBaseBlockId(edge.target))
    )

File diff suppressed because it is too large
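The EdgeManager hunks below restructure edge activation into separate passes (categorize, deactivate, update incoming edges, then check readiness). A minimal standalone sketch of the edge-key bookkeeping those passes rely on, assuming the `sourceId-targetId-handle` key format described in the hunk's own comments; the class and field names here are illustrative, not the repository's implementation:

// Illustrative sketch only: a pared-down model of deactivated-edge tracking.
type SketchEdge = { source: string; target: string; sourceHandle?: string }

class EdgeDeactivationSketch {
  private deactivatedEdges = new Set<string>()

  private createEdgeKey(source: string, target: string, handle?: string): string {
    // Assumed format, matching the "sourceId-targetId-handle" comment in the diff.
    return `${source}-${target}-${handle ?? 'default'}`
  }

  deactivate(edge: SketchEdge): void {
    this.deactivatedEdges.add(this.createEdgeKey(edge.source, edge.target, edge.sourceHandle))
  }

  /**
   * A node still has an active incoming edge if any edge pointing at it, other
   * than the one being excluded, has not been deactivated. Comparing full edge
   * keys (not just source IDs) is what lets two condition branches from the
   * same source be tracked independently, as the diff's comment notes.
   */
  hasActiveIncomingEdges(incoming: SketchEdge[], excludeEdgeKey: string): boolean {
    return incoming.some((edge) => {
      const key = this.createEdgeKey(edge.source, edge.target, edge.sourceHandle)
      return key !== excludeEdgeKey && !this.deactivatedEdges.has(key)
    })
  }
}
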
@@ -20,13 +20,21 @@ export class EdgeManager {
|
||||
const activatedTargets: string[] = []
|
||||
const edgesToDeactivate: Array<{ target: string; handle?: string }> = []
|
||||
|
||||
for (const [, edge] of node.outgoingEdges) {
|
||||
// First pass: categorize edges as activating or deactivating
|
||||
// Don't modify incomingEdges yet - we need the original state for deactivation checks
|
||||
for (const [edgeId, edge] of node.outgoingEdges) {
|
||||
if (skipBackwardsEdge && this.isBackwardsEdge(edge.sourceHandle)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (!this.shouldActivateEdge(edge, output)) {
|
||||
if (!this.isLoopEdge(edge.sourceHandle)) {
|
||||
const shouldActivate = this.shouldActivateEdge(edge, output)
|
||||
if (!shouldActivate) {
|
||||
const isLoopEdge =
|
||||
edge.sourceHandle === EDGE.LOOP_CONTINUE ||
|
||||
edge.sourceHandle === EDGE.LOOP_CONTINUE_ALT ||
|
||||
edge.sourceHandle === EDGE.LOOP_EXIT
|
||||
|
||||
if (!isLoopEdge) {
|
||||
edgesToDeactivate.push({ target: edge.target, handle: edge.sourceHandle })
|
||||
}
|
||||
continue
|
||||
@@ -35,19 +43,13 @@ export class EdgeManager {
|
||||
activatedTargets.push(edge.target)
|
||||
}
|
||||
|
||||
const cascadeTargets = new Set<string>()
|
||||
// Second pass: process deactivations while incomingEdges is still intact
|
||||
// This ensures hasActiveIncomingEdges can find all potential sources
|
||||
for (const { target, handle } of edgesToDeactivate) {
|
||||
this.deactivateEdgeAndDescendants(node.id, target, handle, cascadeTargets)
|
||||
}
|
||||
|
||||
if (activatedTargets.length === 0) {
|
||||
for (const { target } of edgesToDeactivate) {
|
||||
if (this.isTerminalControlNode(target)) {
|
||||
cascadeTargets.add(target)
|
||||
}
|
||||
}
|
||||
this.deactivateEdgeAndDescendants(node.id, target, handle)
|
||||
}
|
||||
|
||||
// Third pass: update incomingEdges for activated targets
|
||||
for (const targetId of activatedTargets) {
|
||||
const targetNode = this.dag.nodes.get(targetId)
|
||||
if (!targetNode) {
|
||||
@@ -57,25 +59,28 @@ export class EdgeManager {
|
||||
targetNode.incomingEdges.delete(node.id)
|
||||
}
|
||||
|
||||
// Fourth pass: check readiness after all edge processing is complete
|
||||
for (const targetId of activatedTargets) {
|
||||
if (this.isTargetReady(targetId)) {
|
||||
const targetNode = this.dag.nodes.get(targetId)
|
||||
if (targetNode && this.isNodeReady(targetNode)) {
|
||||
readyNodes.push(targetId)
|
||||
}
|
||||
}
|
||||
|
||||
for (const targetId of cascadeTargets) {
|
||||
if (!readyNodes.includes(targetId) && !activatedTargets.includes(targetId)) {
|
||||
if (this.isTargetReady(targetId)) {
|
||||
readyNodes.push(targetId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return readyNodes
|
||||
}
|
||||
|
||||
isNodeReady(node: DAGNode): boolean {
|
||||
return node.incomingEdges.size === 0 || this.countActiveIncomingEdges(node) === 0
|
||||
if (node.incomingEdges.size === 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
const activeIncomingCount = this.countActiveIncomingEdges(node)
|
||||
if (activeIncomingCount > 0) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
restoreIncomingEdge(targetNodeId: string, sourceNodeId: string): void {
|
||||
@@ -94,10 +99,13 @@ export class EdgeManager {
|
||||
|
||||
/**
|
||||
* Clear deactivated edges for a set of nodes (used when restoring loop state for next iteration).
|
||||
* This ensures error/success edges can be re-evaluated on each iteration.
|
||||
*/
|
||||
clearDeactivatedEdgesForNodes(nodeIds: Set<string>): void {
|
||||
const edgesToRemove: string[] = []
|
||||
for (const edgeKey of this.deactivatedEdges) {
|
||||
// Edge key format is "sourceId-targetId-handle"
|
||||
// Check if either source or target is in the nodeIds set
|
||||
for (const nodeId of nodeIds) {
|
||||
if (edgeKey.startsWith(`${nodeId}-`) || edgeKey.includes(`-${nodeId}-`)) {
|
||||
edgesToRemove.push(edgeKey)
|
||||
@@ -110,44 +118,6 @@ export class EdgeManager {
|
||||
}
|
||||
}
|
||||
|
||||
private isTargetReady(targetId: string): boolean {
|
||||
const targetNode = this.dag.nodes.get(targetId)
|
||||
return targetNode ? this.isNodeReady(targetNode) : false
|
||||
}
|
||||
|
||||
private isLoopEdge(handle?: string): boolean {
|
||||
return (
|
||||
handle === EDGE.LOOP_CONTINUE ||
|
||||
handle === EDGE.LOOP_CONTINUE_ALT ||
|
||||
handle === EDGE.LOOP_EXIT
|
||||
)
|
||||
}
|
||||
|
||||
private isControlEdge(handle?: string): boolean {
|
||||
return (
|
||||
handle === EDGE.LOOP_CONTINUE ||
|
||||
handle === EDGE.LOOP_CONTINUE_ALT ||
|
||||
handle === EDGE.LOOP_EXIT ||
|
||||
handle === EDGE.PARALLEL_EXIT
|
||||
)
|
||||
}
|
||||
|
||||
private isBackwardsEdge(sourceHandle?: string): boolean {
|
||||
return sourceHandle === EDGE.LOOP_CONTINUE || sourceHandle === EDGE.LOOP_CONTINUE_ALT
|
||||
}
|
||||
|
||||
private isTerminalControlNode(nodeId: string): boolean {
|
||||
const node = this.dag.nodes.get(nodeId)
|
||||
if (!node || node.outgoingEdges.size === 0) return false
|
||||
|
||||
for (const [, edge] of node.outgoingEdges) {
|
||||
if (!this.isControlEdge(edge.sourceHandle)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
private shouldActivateEdge(edge: DAGEdge, output: NormalizedBlockOutput): boolean {
|
||||
const handle = edge.sourceHandle
|
||||
|
||||
@@ -189,12 +159,14 @@ export class EdgeManager {
|
||||
}
|
||||
}
|
||||
|
||||
private isBackwardsEdge(sourceHandle?: string): boolean {
|
||||
return sourceHandle === EDGE.LOOP_CONTINUE || sourceHandle === EDGE.LOOP_CONTINUE_ALT
|
||||
}
|
||||
|
||||
private deactivateEdgeAndDescendants(
|
||||
sourceId: string,
|
||||
targetId: string,
|
||||
sourceHandle?: string,
|
||||
cascadeTargets?: Set<string>,
|
||||
isCascade = false
|
||||
sourceHandle?: string
|
||||
): void {
|
||||
const edgeKey = this.createEdgeKey(sourceId, targetId, sourceHandle)
|
||||
if (this.deactivatedEdges.has(edgeKey)) {
|
||||
@@ -202,46 +174,38 @@ export class EdgeManager {
|
||||
}
|
||||
|
||||
this.deactivatedEdges.add(edgeKey)
|
||||
|
||||
const targetNode = this.dag.nodes.get(targetId)
|
||||
if (!targetNode) return
|
||||
|
||||
if (isCascade && this.isTerminalControlNode(targetId)) {
|
||||
cascadeTargets?.add(targetId)
|
||||
}
|
||||
|
||||
if (this.hasActiveIncomingEdges(targetNode, edgeKey)) {
|
||||
return
|
||||
}
|
||||
|
||||
for (const [, outgoingEdge] of targetNode.outgoingEdges) {
|
||||
if (!this.isControlEdge(outgoingEdge.sourceHandle)) {
|
||||
this.deactivateEdgeAndDescendants(
|
||||
targetId,
|
||||
outgoingEdge.target,
|
||||
outgoingEdge.sourceHandle,
|
||||
cascadeTargets,
|
||||
true
|
||||
)
|
||||
// Check if target has other active incoming edges
|
||||
// Pass the specific edge key being deactivated, not just source ID,
|
||||
// to handle multiple edges from same source to same target (e.g., condition branches)
|
||||
const hasOtherActiveIncoming = this.hasActiveIncomingEdges(targetNode, edgeKey)
|
||||
if (!hasOtherActiveIncoming) {
|
||||
for (const [_, outgoingEdge] of targetNode.outgoingEdges) {
|
||||
this.deactivateEdgeAndDescendants(targetId, outgoingEdge.target, outgoingEdge.sourceHandle)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a node has any active incoming edges besides the one being excluded.
|
||||
* This properly handles the case where multiple edges from the same source go to
|
||||
* the same target (e.g., multiple condition branches pointing to one block).
|
||||
*/
|
||||
private hasActiveIncomingEdges(node: DAGNode, excludeEdgeKey: string): boolean {
|
||||
for (const incomingSourceId of node.incomingEdges) {
|
||||
const incomingNode = this.dag.nodes.get(incomingSourceId)
|
||||
if (!incomingNode) continue
|
||||
|
||||
for (const [, incomingEdge] of incomingNode.outgoingEdges) {
|
||||
for (const [_, incomingEdge] of incomingNode.outgoingEdges) {
|
||||
if (incomingEdge.target === node.id) {
|
||||
const incomingEdgeKey = this.createEdgeKey(
|
||||
incomingSourceId,
|
||||
node.id,
|
||||
incomingEdge.sourceHandle
|
||||
)
|
||||
// Skip the specific edge being excluded, but check other edges from same source
|
||||
if (incomingEdgeKey === excludeEdgeKey) continue
|
||||
if (!this.deactivatedEdges.has(incomingEdgeKey)) {
|
||||
return true
|
||||
|
||||
@@ -554,413 +554,6 @@ describe('ExecutionEngine', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error handling in execution', () => {
|
||||
it('should fail execution when a single node throws an error', async () => {
|
||||
const startNode = createMockNode('start', 'starter')
|
||||
const errorNode = createMockNode('error-node', 'function')
|
||||
startNode.outgoingEdges.set('edge1', { target: 'error-node' })
|
||||
|
||||
const dag = createMockDAG([startNode, errorNode])
|
||||
const context = createMockContext()
|
||||
const edgeManager = createMockEdgeManager((node) => {
|
||||
if (node.id === 'start') return ['error-node']
|
||||
return []
|
||||
})
|
||||
|
||||
const nodeOrchestrator = {
|
||||
executionCount: 0,
|
||||
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
|
||||
if (nodeId === 'error-node') {
|
||||
throw new Error('Block execution failed')
|
||||
}
|
||||
return { nodeId, output: {}, isFinalOutput: false }
|
||||
}),
|
||||
handleNodeCompletion: vi.fn(),
|
||||
} as unknown as MockNodeOrchestrator
|
||||
|
||||
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
|
||||
|
||||
await expect(engine.run('start')).rejects.toThrow('Block execution failed')
|
||||
})
|
||||
|
||||
it('should stop parallel branches when one branch throws an error', async () => {
|
||||
const startNode = createMockNode('start', 'starter')
|
||||
const parallelNodes = Array.from({ length: 5 }, (_, i) =>
|
||||
createMockNode(`parallel${i}`, 'function')
|
||||
)
|
||||
|
||||
parallelNodes.forEach((_, i) => {
|
||||
startNode.outgoingEdges.set(`edge${i}`, { target: `parallel${i}` })
|
||||
})
|
||||
|
||||
const dag = createMockDAG([startNode, ...parallelNodes])
|
||||
const context = createMockContext()
|
||||
const edgeManager = createMockEdgeManager((node) => {
|
||||
if (node.id === 'start') return parallelNodes.map((_, i) => `parallel${i}`)
|
||||
return []
|
||||
})
|
||||
|
||||
const executedNodes: string[] = []
|
||||
const nodeOrchestrator = {
|
||||
executionCount: 0,
|
||||
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
|
||||
executedNodes.push(nodeId)
|
||||
if (nodeId === 'parallel0') {
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
throw new Error('Parallel branch failed')
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
return { nodeId, output: {}, isFinalOutput: false }
|
||||
}),
|
||||
handleNodeCompletion: vi.fn(),
|
||||
} as unknown as MockNodeOrchestrator
|
||||
|
||||
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
|
||||
|
||||
await expect(engine.run('start')).rejects.toThrow('Parallel branch failed')
|
||||
})
|
||||
|
||||
it('should capture only the first error when multiple parallel branches fail', async () => {
|
||||
const startNode = createMockNode('start', 'starter')
|
||||
const parallelNodes = Array.from({ length: 3 }, (_, i) =>
|
||||
createMockNode(`parallel${i}`, 'function')
|
||||
)
|
||||
|
||||
parallelNodes.forEach((_, i) => {
|
||||
startNode.outgoingEdges.set(`edge${i}`, { target: `parallel${i}` })
|
||||
})
|
||||
|
||||
const dag = createMockDAG([startNode, ...parallelNodes])
|
||||
const context = createMockContext()
|
||||
const edgeManager = createMockEdgeManager((node) => {
|
||||
if (node.id === 'start') return parallelNodes.map((_, i) => `parallel${i}`)
|
||||
return []
|
||||
})
|
||||
|
||||
const nodeOrchestrator = {
|
||||
executionCount: 0,
|
||||
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
|
||||
if (nodeId === 'parallel0') {
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
throw new Error('First error')
|
||||
}
|
||||
if (nodeId === 'parallel1') {
|
||||
await new Promise((resolve) => setTimeout(resolve, 20))
|
||||
throw new Error('Second error')
|
||||
}
|
||||
if (nodeId === 'parallel2') {
|
||||
await new Promise((resolve) => setTimeout(resolve, 30))
|
||||
throw new Error('Third error')
|
||||
}
|
||||
return { nodeId, output: {}, isFinalOutput: false }
|
||||
}),
|
||||
handleNodeCompletion: vi.fn(),
|
||||
} as unknown as MockNodeOrchestrator
|
||||
|
||||
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
|
||||
|
||||
await expect(engine.run('start')).rejects.toThrow('First error')
|
||||
})
|
||||
|
||||
it('should wait for ongoing executions to complete before throwing error', async () => {
|
||||
const startNode = createMockNode('start', 'starter')
|
||||
const fastErrorNode = createMockNode('fast-error', 'function')
|
||||
const slowNode = createMockNode('slow', 'function')
|
||||
|
||||
startNode.outgoingEdges.set('edge1', { target: 'fast-error' })
|
||||
startNode.outgoingEdges.set('edge2', { target: 'slow' })
|
||||
|
||||
const dag = createMockDAG([startNode, fastErrorNode, slowNode])
|
||||
const context = createMockContext()
|
||||
const edgeManager = createMockEdgeManager((node) => {
|
||||
if (node.id === 'start') return ['fast-error', 'slow']
|
||||
return []
|
||||
})
|
||||
|
||||
let slowNodeCompleted = false
|
||||
const nodeOrchestrator = {
|
||||
executionCount: 0,
|
||||
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
|
||||
if (nodeId === 'fast-error') {
|
||||
await new Promise((resolve) => setTimeout(resolve, 10))
|
||||
throw new Error('Fast error')
|
||||
}
|
||||
if (nodeId === 'slow') {
|
||||
await new Promise((resolve) => setTimeout(resolve, 50))
|
||||
slowNodeCompleted = true
|
||||
return { nodeId, output: {}, isFinalOutput: false }
|
||||
}
|
||||
return { nodeId, output: {}, isFinalOutput: false }
|
||||
}),
|
||||
handleNodeCompletion: vi.fn(),
|
||||
} as unknown as MockNodeOrchestrator
|
||||
|
||||
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
|
||||
|
||||
await expect(engine.run('start')).rejects.toThrow('Fast error')
|
||||
|
||||
expect(slowNodeCompleted).toBe(true)
|
||||
})
|
||||
|
||||
it('should not queue new nodes after an error occurs', async () => {
|
||||
const startNode = createMockNode('start', 'starter')
|
||||
const errorNode = createMockNode('error-node', 'function')
|
||||
const afterErrorNode = createMockNode('after-error', 'function')
|
||||
|
||||
startNode.outgoingEdges.set('edge1', { target: 'error-node' })
|
||||
errorNode.outgoingEdges.set('edge2', { target: 'after-error' })
|
||||
|
||||
const dag = createMockDAG([startNode, errorNode, afterErrorNode])
|
||||
const context = createMockContext()
|
||||
|
||||
const queuedNodes: string[] = []
|
||||
const edgeManager = createMockEdgeManager((node) => {
|
||||
if (node.id === 'start') {
|
||||
queuedNodes.push('error-node')
|
||||
return ['error-node']
|
||||
}
|
||||
if (node.id === 'error-node') {
|
||||
queuedNodes.push('after-error')
|
||||
return ['after-error']
|
||||
}
|
||||
return []
|
||||
})
|
||||
|
||||
const executedNodes: string[] = []
|
||||
const nodeOrchestrator = {
|
||||
executionCount: 0,
|
||||
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
|
||||
executedNodes.push(nodeId)
|
||||
if (nodeId === 'error-node') {
|
||||
throw new Error('Node error')
|
||||
}
|
||||
return { nodeId, output: {}, isFinalOutput: false }
|
||||
}),
|
||||
handleNodeCompletion: vi.fn(),
|
||||
} as unknown as MockNodeOrchestrator
|
||||
|
||||
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
|
||||
|
||||
await expect(engine.run('start')).rejects.toThrow('Node error')
|
||||
|
||||
expect(executedNodes).not.toContain('after-error')
|
||||
})
|
||||
|
||||
it('should populate error result with metadata when execution fails', async () => {
|
||||
const startNode = createMockNode('start', 'starter')
|
||||
const errorNode = createMockNode('error-node', 'function')
|
||||
startNode.outgoingEdges.set('edge1', { target: 'error-node' })
|
||||
|
||||
const dag = createMockDAG([startNode, errorNode])
|
||||
const context = createMockContext()
|
||||
context.blockLogs.push({
|
||||
blockId: 'start',
|
||||
blockName: 'Start',
|
||||
blockType: 'starter',
|
||||
startedAt: new Date().toISOString(),
|
||||
endedAt: new Date().toISOString(),
|
||||
durationMs: 10,
|
||||
success: true,
|
||||
})
|
||||
|
||||
const edgeManager = createMockEdgeManager((node) => {
|
||||
if (node.id === 'start') return ['error-node']
|
||||
return []
|
||||
})
|
||||
|
||||
const nodeOrchestrator = {
|
||||
executionCount: 0,
|
||||
executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
|
||||
if (nodeId === 'error-node') {
|
||||
const error = new Error('Execution failed') as any
|
||||
error.executionResult = {
|
||||
success: false,
|
||||
output: { partial: 'data' },
|
||||
logs: context.blockLogs,
|
||||
        metadata: context.metadata,
      }
      throw error
    }
    return { nodeId, output: {}, isFinalOutput: false }
  }),
  handleNodeCompletion: vi.fn(),
} as unknown as MockNodeOrchestrator

const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

try {
  await engine.run('start')
  expect.fail('Should have thrown')
} catch (error: any) {
  expect(error.executionResult).toBeDefined()
  expect(error.executionResult.metadata.endTime).toBeDefined()
  expect(error.executionResult.metadata.duration).toBeDefined()
}
})

it('should prefer cancellation status over error when both occur', async () => {
  const abortController = new AbortController()

  const startNode = createMockNode('start', 'starter')
  const errorNode = createMockNode('error-node', 'function')
  startNode.outgoingEdges.set('edge1', { target: 'error-node' })

  const dag = createMockDAG([startNode, errorNode])
  const context = createMockContext({ abortSignal: abortController.signal })
  const edgeManager = createMockEdgeManager((node) => {
    if (node.id === 'start') return ['error-node']
    return []
  })

  const nodeOrchestrator = {
    executionCount: 0,
    executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
      if (nodeId === 'error-node') {
        abortController.abort()
        throw new Error('Node error')
      }
      return { nodeId, output: {}, isFinalOutput: false }
    }),
    handleNodeCompletion: vi.fn(),
  } as unknown as MockNodeOrchestrator

  const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
  const result = await engine.run('start')

  expect(result.status).toBe('cancelled')
  expect(result.success).toBe(false)
})

it('should stop loop iteration when error occurs in loop body', async () => {
  const loopStartNode = createMockNode('loop-start', 'loop_sentinel')
  loopStartNode.metadata = { isSentinel: true, sentinelType: 'start', loopId: 'loop1' }

  const loopBodyNode = createMockNode('loop-body', 'function')
  loopBodyNode.metadata = { isLoopNode: true, loopId: 'loop1' }

  const loopEndNode = createMockNode('loop-end', 'loop_sentinel')
  loopEndNode.metadata = { isSentinel: true, sentinelType: 'end', loopId: 'loop1' }

  const afterLoopNode = createMockNode('after-loop', 'function')

  loopStartNode.outgoingEdges.set('edge1', { target: 'loop-body' })
  loopBodyNode.outgoingEdges.set('edge2', { target: 'loop-end' })
  loopEndNode.outgoingEdges.set('loop_continue', {
    target: 'loop-start',
    sourceHandle: 'loop_continue',
  })
  loopEndNode.outgoingEdges.set('loop_complete', {
    target: 'after-loop',
    sourceHandle: 'loop_complete',
  })

  const dag = createMockDAG([loopStartNode, loopBodyNode, loopEndNode, afterLoopNode])
  const context = createMockContext()

  let iterationCount = 0
  const edgeManager = createMockEdgeManager((node) => {
    if (node.id === 'loop-start') return ['loop-body']
    if (node.id === 'loop-body') return ['loop-end']
    if (node.id === 'loop-end') {
      iterationCount++
      if (iterationCount < 5) return ['loop-start']
      return ['after-loop']
    }
    return []
  })

  const nodeOrchestrator = {
    executionCount: 0,
    executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
      if (nodeId === 'loop-body' && iterationCount >= 2) {
        throw new Error('Loop body error on iteration 3')
      }
      return { nodeId, output: {}, isFinalOutput: false }
    }),
    handleNodeCompletion: vi.fn(),
  } as unknown as MockNodeOrchestrator

  const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

  await expect(engine.run('loop-start')).rejects.toThrow('Loop body error on iteration 3')

  expect(iterationCount).toBeLessThanOrEqual(3)
})

it('should handle error that is not an Error instance', async () => {
  const startNode = createMockNode('start', 'starter')
  const errorNode = createMockNode('error-node', 'function')
  startNode.outgoingEdges.set('edge1', { target: 'error-node' })

  const dag = createMockDAG([startNode, errorNode])
  const context = createMockContext()
  const edgeManager = createMockEdgeManager((node) => {
    if (node.id === 'start') return ['error-node']
    return []
  })

  const nodeOrchestrator = {
    executionCount: 0,
    executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
      if (nodeId === 'error-node') {
        throw 'String error message'
      }
      return { nodeId, output: {}, isFinalOutput: false }
    }),
    handleNodeCompletion: vi.fn(),
  } as unknown as MockNodeOrchestrator

  const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

  await expect(engine.run('start')).rejects.toThrow('String error message')
})

it('should preserve partial output when error occurs after some blocks complete', async () => {
  const startNode = createMockNode('start', 'starter')
  const successNode = createMockNode('success', 'function')
  const errorNode = createMockNode('error-node', 'function')

  startNode.outgoingEdges.set('edge1', { target: 'success' })
  successNode.outgoingEdges.set('edge2', { target: 'error-node' })

  const dag = createMockDAG([startNode, successNode, errorNode])
  const context = createMockContext()
  const edgeManager = createMockEdgeManager((node) => {
    if (node.id === 'start') return ['success']
    if (node.id === 'success') return ['error-node']
    return []
  })

  const nodeOrchestrator = {
    executionCount: 0,
    executeNode: vi.fn().mockImplementation(async (_ctx: ExecutionContext, nodeId: string) => {
      if (nodeId === 'success') {
        return { nodeId, output: { successData: 'preserved' }, isFinalOutput: false }
      }
      if (nodeId === 'error-node') {
        throw new Error('Late error')
      }
      return { nodeId, output: {}, isFinalOutput: false }
    }),
    handleNodeCompletion: vi.fn(),
  } as unknown as MockNodeOrchestrator

  const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)

  try {
    await engine.run('start')
    expect.fail('Should have thrown')
  } catch (error: any) {
    // Verify the error was thrown
    expect(error.message).toBe('Late error')
    // The partial output should be available in executionResult if attached
    if (error.executionResult) {
      expect(error.executionResult.output).toBeDefined()
    }
  }
})
})

describe('Cancellation flag behavior', () => {
  it('should set cancelledFlag when abort signal fires', async () => {
    const abortController = new AbortController()
@@ -25,8 +25,6 @@ export class ExecutionEngine {
  private pausedBlocks: Map<string, PauseMetadata> = new Map()
  private allowResumeTriggers: boolean
  private cancelledFlag = false
  private errorFlag = false
  private executionError: Error | null = null
  private lastCancellationCheck = 0
  private readonly useRedisCancellation: boolean
  private readonly CANCELLATION_CHECK_INTERVAL_MS = 500
@@ -105,7 +103,7 @@ export class ExecutionEngine {
    this.initializeQueue(triggerBlockId)

    while (this.hasWork()) {
      if ((await this.checkCancellation()) || this.errorFlag) {
      if (await this.checkCancellation()) {
        break
      }
      await this.processQueue()
@@ -115,11 +113,6 @@ export class ExecutionEngine {
      await this.waitForAllExecutions()
    }

    // Rethrow the captured error so it's handled by the catch block
    if (this.errorFlag && this.executionError) {
      throw this.executionError
    }

    if (this.pausedBlocks.size > 0) {
      return this.buildPausedResult(startTime)
    }
@@ -203,17 +196,11 @@ export class ExecutionEngine {
  }

  private trackExecution(promise: Promise<void>): void {
    const trackedPromise = promise
      .catch((error) => {
        if (!this.errorFlag) {
          this.errorFlag = true
          this.executionError = error instanceof Error ? error : new Error(String(error))
        }
      })
      .finally(() => {
        this.executing.delete(trackedPromise)
      })
    this.executing.add(trackedPromise)
    this.executing.add(promise)
    promise.catch(() => {})
    promise.finally(() => {
      this.executing.delete(promise)
    })
  }

  private async waitForAnyExecution(): Promise<void> {
@@ -328,7 +315,7 @@ export class ExecutionEngine {

  private async processQueue(): Promise<void> {
    while (this.readyQueue.length > 0) {
      if ((await this.checkCancellation()) || this.errorFlag) {
      if (await this.checkCancellation()) {
        break
      }
      const nodeId = this.dequeue()
@@ -337,7 +324,7 @@ export class ExecutionEngine {
      this.trackExecution(promise)
    }

    if (this.executing.size > 0 && !this.cancelledFlag && !this.errorFlag) {
    if (this.executing.size > 0 && !this.cancelledFlag) {
      await this.waitForAnyExecution()
    }
  }
@@ -305,7 +305,7 @@ export class AgentBlockHandler implements BlockHandler {
    base.executeFunction = async (callParams: Record<string, any>) => {
      const mergedParams = mergeToolParameters(userProvidedParams, callParams)

      const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
      const { blockData, blockNameMapping } = collectBlockData(ctx)

      const result = await executeTool(
        'function_execute',
@@ -317,7 +317,6 @@ export class AgentBlockHandler implements BlockHandler {
          workflowVariables: ctx.workflowVariables || {},
          blockData,
          blockNameMapping,
          blockOutputSchemas,
          isCustomTool: true,
          _context: {
            workflowId: ctx.workflowId,
@@ -348,8 +347,8 @@ export class AgentBlockHandler implements BlockHandler {
  ): Promise<{ schema: any; code: string; title: string } | null> {
    if (typeof window !== 'undefined') {
      try {
        const { getCustomTool } = await import('@/hooks/queries/custom-tools')
        const tool = getCustomTool(customToolId, ctx.workspaceId)
        const { useCustomToolsStore } = await import('@/stores/custom-tools')
        const tool = useCustomToolsStore.getState().getTool(customToolId)
        if (tool) {
          return {
            schema: tool.schema,
@@ -357,9 +356,9 @@ export class AgentBlockHandler implements BlockHandler {
            title: tool.title,
          }
        }
        logger.warn(`Custom tool not found in cache: ${customToolId}`)
        logger.warn(`Custom tool not found in store: ${customToolId}`)
      } catch (error) {
        logger.error('Error accessing custom tools cache:', { error })
        logger.error('Error accessing custom tools store:', { error })
      }
    }
@@ -26,7 +26,7 @@ export async function evaluateConditionExpression(
  const contextSetup = `const context = ${JSON.stringify(evalContext)};`
  const code = `${contextSetup}\nreturn Boolean(${conditionExpression})`

  const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
  const { blockData, blockNameMapping } = collectBlockData(ctx)

  const result = await executeTool(
    'function_execute',
@@ -37,7 +37,6 @@ export async function evaluateConditionExpression(
      workflowVariables: ctx.workflowVariables || {},
      blockData,
      blockNameMapping,
      blockOutputSchemas,
      _context: {
        workflowId: ctx.workflowId,
        workspaceId: ctx.workspaceId,
@@ -75,12 +75,7 @@ describe('FunctionBlockHandler', () => {
      workflowVariables: {},
      blockData: {},
      blockNameMapping: {},
      blockOutputSchemas: {},
      _context: {
        workflowId: mockContext.workflowId,
        workspaceId: mockContext.workspaceId,
        isDeployedContext: mockContext.isDeployedContext,
      },
      _context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
    }
    const expectedOutput: any = { result: 'Success' }

@@ -89,8 +84,8 @@ describe('FunctionBlockHandler', () => {
    expect(mockExecuteTool).toHaveBeenCalledWith(
      'function_execute',
      expectedToolParams,
      false,
      mockContext
      false, // skipPostProcess
      mockContext // execution context
    )
    expect(result).toEqual(expectedOutput)
  })
@@ -112,12 +107,7 @@ describe('FunctionBlockHandler', () => {
      workflowVariables: {},
      blockData: {},
      blockNameMapping: {},
      blockOutputSchemas: {},
      _context: {
        workflowId: mockContext.workflowId,
        workspaceId: mockContext.workspaceId,
        isDeployedContext: mockContext.isDeployedContext,
      },
      _context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
    }
    const expectedOutput: any = { result: 'Success' }

@@ -126,8 +116,8 @@ describe('FunctionBlockHandler', () => {
    expect(mockExecuteTool).toHaveBeenCalledWith(
      'function_execute',
      expectedToolParams,
      false,
      mockContext
      false, // skipPostProcess
      mockContext // execution context
    )
    expect(result).toEqual(expectedOutput)
  })
@@ -142,12 +132,7 @@ describe('FunctionBlockHandler', () => {
      workflowVariables: {},
      blockData: {},
      blockNameMapping: {},
      blockOutputSchemas: {},
      _context: {
        workflowId: mockContext.workflowId,
        workspaceId: mockContext.workspaceId,
        isDeployedContext: mockContext.isDeployedContext,
      },
      _context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
    }

    await handler.execute(mockContext, mockBlock, inputs)
@@ -23,7 +23,7 @@ export class FunctionBlockHandler implements BlockHandler {
      ? inputs.code.map((c: { content: string }) => c.content).join('\n')
      : inputs.code

    const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
    const { blockData, blockNameMapping } = collectBlockData(ctx)

    const result = await executeTool(
      'function_execute',
@@ -35,7 +35,6 @@ export class FunctionBlockHandler implements BlockHandler {
        workflowVariables: ctx.workflowVariables || {},
        blockData,
        blockNameMapping,
        blockOutputSchemas,
        _context: {
          workflowId: ctx.workflowId,
          workspaceId: ctx.workspaceId,
@@ -17,7 +17,6 @@ import {
} from '@/executor/human-in-the-loop/utils'
import type { BlockHandler, ExecutionContext, PauseMetadata } from '@/executor/types'
import { collectBlockData } from '@/executor/utils/block-data'
import { parseObjectStrings } from '@/executor/utils/json'
import type { SerializedBlock } from '@/serializer/types'
import { executeTool } from '@/tools'

@@ -266,7 +265,7 @@ export class HumanInTheLoopBlockHandler implements BlockHandler {

    if (dataMode === 'structured' && inputs.builderData) {
      const convertedData = this.convertBuilderDataToJson(inputs.builderData)
      return parseObjectStrings(convertedData)
      return this.parseObjectStrings(convertedData)
    }

    return inputs.data || {}
@@ -486,6 +485,29 @@ export class HumanInTheLoopBlockHandler implements BlockHandler {
    )
  }

  private parseObjectStrings(data: any): any {
    if (typeof data === 'string') {
      try {
        const parsed = JSON.parse(data)
        if (typeof parsed === 'object' && parsed !== null) {
          return this.parseObjectStrings(parsed)
        }
        return parsed
      } catch {
        return data
      }
    } else if (Array.isArray(data)) {
      return data.map((item) => this.parseObjectStrings(item))
    } else if (typeof data === 'object' && data !== null) {
      const result: any = {}
      for (const [key, value] of Object.entries(data)) {
        result[key] = this.parseObjectStrings(value)
      }
      return result
    }
    return data
  }

  private parseStatus(status?: string): number {
    if (!status) return HTTP.STATUS.OK
    const parsed = Number(status)
@@ -1,7 +1,6 @@
import { createLogger } from '@sim/logger'
import { BlockType, HTTP, REFERENCE } from '@/executor/constants'
import type { BlockHandler, ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import { parseObjectStrings } from '@/executor/utils/json'
import type { SerializedBlock } from '@/serializer/types'

const logger = createLogger('ResponseBlockHandler')
@@ -74,7 +73,7 @@ export class ResponseBlockHandler implements BlockHandler {

    if (dataMode === 'structured' && inputs.builderData) {
      const convertedData = this.convertBuilderDataToJson(inputs.builderData)
      return parseObjectStrings(convertedData)
      return this.parseObjectStrings(convertedData)
    }

    return inputs.data || {}
@@ -223,6 +222,29 @@ export class ResponseBlockHandler implements BlockHandler {
    )
  }

  private parseObjectStrings(data: any): any {
    if (typeof data === 'string') {
      try {
        const parsed = JSON.parse(data)
        if (typeof parsed === 'object' && parsed !== null) {
          return this.parseObjectStrings(parsed)
        }
        return parsed
      } catch {
        return data
      }
    } else if (Array.isArray(data)) {
      return data.map((item) => this.parseObjectStrings(item))
    } else if (typeof data === 'object' && data !== null) {
      const result: any = {}
      for (const [key, value] of Object.entries(data)) {
        result[key] = this.parseObjectStrings(value)
      }
      return result
    }
    return data
  }

  private parseStatus(status?: string): number {
    if (!status) return HTTP.STATUS.OK
    const parsed = Number(status)
@@ -1,7 +1,7 @@
import '@sim/testing/mocks/executor'

import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { generateRouterPrompt, generateRouterV2Prompt } from '@/blocks/blocks/router'
import { generateRouterPrompt } from '@/blocks/blocks/router'
import { BlockType } from '@/executor/constants'
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
import type { ExecutionContext } from '@/executor/types'
@@ -9,7 +9,6 @@ import { getProviderFromModel } from '@/providers/utils'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'

const mockGenerateRouterPrompt = generateRouterPrompt as Mock
const mockGenerateRouterV2Prompt = generateRouterV2Prompt as Mock
const mockGetProviderFromModel = getProviderFromModel as Mock
const mockFetch = global.fetch as unknown as Mock

@@ -45,7 +44,7 @@ describe('RouterBlockHandler', () => {
      metadata: { id: BlockType.ROUTER, name: 'Test Router' },
      position: { x: 50, y: 50 },
      config: { tool: BlockType.ROUTER, params: {} },
      inputs: { prompt: 'string', model: 'string' },
      inputs: { prompt: 'string', model: 'string' }, // Using ParamType strings
      outputs: {},
      enabled: true,
    }
@@ -73,11 +72,14 @@ describe('RouterBlockHandler', () => {
      workflow: mockWorkflow as SerializedWorkflow,
    }

    // Reset mocks using vi
    vi.clearAllMocks()

    // Default mock implementations
    mockGetProviderFromModel.mockReturnValue('openai')
    mockGenerateRouterPrompt.mockReturnValue('Generated System Prompt')

    // Set up fetch mock to return a successful response
    mockFetch.mockImplementation(() => {
      return Promise.resolve({
        ok: true,
@@ -145,6 +147,7 @@ describe('RouterBlockHandler', () => {
      })
    )

    // Verify the request body contains the expected data
    const fetchCallArgs = mockFetch.mock.calls[0]
    const requestBody = JSON.parse(fetchCallArgs[1].body)
    expect(requestBody).toMatchObject({
@@ -177,6 +180,7 @@ describe('RouterBlockHandler', () => {
    const inputs = { prompt: 'Test' }
    mockContext.workflow!.blocks = [mockBlock, mockTargetBlock2]

    // Expect execute to throw because getTargetBlocks (called internally) will throw
    await expect(handler.execute(mockContext, mockBlock, inputs)).rejects.toThrow(
      'Target block target-block-1 not found'
    )
@@ -186,6 +190,7 @@ describe('RouterBlockHandler', () => {
  it('should throw error if LLM response is not a valid target block ID', async () => {
    const inputs = { prompt: 'Test', apiKey: 'test-api-key' }

    // Override fetch mock to return an invalid block ID
    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
@@ -223,6 +228,7 @@ describe('RouterBlockHandler', () => {
  it('should handle server error responses', async () => {
    const inputs = { prompt: 'Test error handling.', apiKey: 'test-api-key' }

    // Override fetch mock to return an error
    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: false,
@@ -270,12 +276,13 @@ describe('RouterBlockHandler', () => {

    mockGetProviderFromModel.mockReturnValue('vertex')

    // Mock the database query for Vertex credential
    const mockDb = await import('@sim/db')
    const mockAccount = {
      id: 'test-vertex-credential-id',
      accessToken: 'mock-access-token',
      refreshToken: 'mock-refresh-token',
      expiresAt: new Date(Date.now() + 3600000),
      expiresAt: new Date(Date.now() + 3600000), // 1 hour from now
    }
    vi.spyOn(mockDb.db.query.account, 'findFirst').mockResolvedValue(mockAccount as any)

@@ -293,287 +300,3 @@ describe('RouterBlockHandler', () => {
    expect(requestBody.apiKey).toBe('mock-access-token')
  })
})

describe('RouterBlockHandler V2', () => {
  let handler: RouterBlockHandler
  let mockRouterV2Block: SerializedBlock
  let mockContext: ExecutionContext
  let mockWorkflow: Partial<SerializedWorkflow>
  let mockTargetBlock1: SerializedBlock
  let mockTargetBlock2: SerializedBlock

  beforeEach(() => {
    mockTargetBlock1 = {
      id: 'target-block-1',
      metadata: { id: 'agent', name: 'Support Agent' },
      position: { x: 100, y: 100 },
      config: { tool: 'agent', params: {} },
      inputs: {},
      outputs: {},
      enabled: true,
    }
    mockTargetBlock2 = {
      id: 'target-block-2',
      metadata: { id: 'agent', name: 'Sales Agent' },
      position: { x: 100, y: 150 },
      config: { tool: 'agent', params: {} },
      inputs: {},
      outputs: {},
      enabled: true,
    }
    mockRouterV2Block = {
      id: 'router-v2-block-1',
      metadata: { id: BlockType.ROUTER_V2, name: 'Test Router V2' },
      position: { x: 50, y: 50 },
      config: { tool: BlockType.ROUTER_V2, params: {} },
      inputs: {},
      outputs: {},
      enabled: true,
    }
    mockWorkflow = {
      blocks: [mockRouterV2Block, mockTargetBlock1, mockTargetBlock2],
      connections: [
        {
          source: mockRouterV2Block.id,
          target: mockTargetBlock1.id,
          sourceHandle: 'router-route-support',
        },
        {
          source: mockRouterV2Block.id,
          target: mockTargetBlock2.id,
          sourceHandle: 'router-route-sales',
        },
      ],
    }

    handler = new RouterBlockHandler({})

    mockContext = {
      workflowId: 'test-workflow-id',
      blockStates: new Map(),
      blockLogs: [],
      metadata: { duration: 0 },
      environmentVariables: {},
      decisions: { router: new Map(), condition: new Map() },
      loopExecutions: new Map(),
      completedLoops: new Set(),
      executedBlocks: new Set(),
      activeExecutionPath: new Set(),
      workflow: mockWorkflow as SerializedWorkflow,
    }

    vi.clearAllMocks()

    mockGetProviderFromModel.mockReturnValue('openai')
    mockGenerateRouterV2Prompt.mockReturnValue('Generated V2 System Prompt')
  })

  it('should handle router_v2 blocks', () => {
    expect(handler.canHandle(mockRouterV2Block)).toBe(true)
  })

  it('should execute router V2 and return reasoning', async () => {
    const inputs = {
      context: 'I need help with a billing issue',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([
        { id: 'route-support', title: 'Support', value: 'Customer support inquiries' },
        { id: 'route-sales', title: 'Sales', value: 'Sales and pricing questions' },
      ]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({
              route: 'route-support',
              reasoning: 'The user mentioned a billing issue which is a customer support matter.',
            }),
            model: 'gpt-4o',
            tokens: { input: 150, output: 25, total: 175 },
          }),
      })
    })

    const result = await handler.execute(mockContext, mockRouterV2Block, inputs)

    expect(result).toMatchObject({
      context: 'I need help with a billing issue',
      model: 'gpt-4o',
      selectedRoute: 'route-support',
      reasoning: 'The user mentioned a billing issue which is a customer support matter.',
      selectedPath: {
        blockId: 'target-block-1',
        blockType: 'agent',
        blockTitle: 'Support Agent',
      },
    })
  })

  it('should include responseFormat in provider request', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description 1' }]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({ route: 'route-1', reasoning: 'Test reasoning' }),
            model: 'gpt-4o',
            tokens: { input: 100, output: 20, total: 120 },
          }),
      })
    })

    await handler.execute(mockContext, mockRouterV2Block, inputs)

    const fetchCallArgs = mockFetch.mock.calls[0]
    const requestBody = JSON.parse(fetchCallArgs[1].body)

    expect(requestBody.responseFormat).toEqual({
      name: 'router_response',
      schema: {
        type: 'object',
        properties: {
          route: {
            type: 'string',
            description: 'The selected route ID or NO_MATCH',
          },
          reasoning: {
            type: 'string',
            description: 'Brief explanation of why this route was chosen',
          },
        },
        required: ['route', 'reasoning'],
        additionalProperties: false,
      },
      strict: true,
    })
  })

  it('should handle NO_MATCH response with reasoning', async () => {
    const inputs = {
      context: 'Random unrelated query',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Specific topic' }]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({
              route: 'NO_MATCH',
              reasoning: 'The query does not relate to any available route.',
            }),
            model: 'gpt-4o',
            tokens: { input: 100, output: 20, total: 120 },
          }),
      })
    })

    await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
      'Router could not determine a matching route: The query does not relate to any available route.'
    )
  })

  it('should throw error for invalid route ID in response', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description' }]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({ route: 'invalid-route', reasoning: 'Some reasoning' }),
            model: 'gpt-4o',
            tokens: { input: 100, output: 20, total: 120 },
          }),
      })
    })

    await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
      /Router could not determine a valid route/
    )
  })

  it('should handle routes passed as array instead of JSON string', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: [{ id: 'route-1', title: 'Route 1', value: 'Description' }],
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: JSON.stringify({ route: 'route-1', reasoning: 'Matched route 1' }),
            model: 'gpt-4o',
            tokens: { input: 100, output: 20, total: 120 },
          }),
      })
    })

    const result = await handler.execute(mockContext, mockRouterV2Block, inputs)

    expect(result.selectedRoute).toBe('route-1')
    expect(result.reasoning).toBe('Matched route 1')
  })

  it('should throw error when no routes are defined', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: '[]',
    }

    await expect(handler.execute(mockContext, mockRouterV2Block, inputs)).rejects.toThrow(
      'No routes defined for router'
    )
  })

  it('should handle fallback when JSON parsing fails', async () => {
    const inputs = {
      context: 'Test context',
      model: 'gpt-4o',
      apiKey: 'test-api-key',
      routes: JSON.stringify([{ id: 'route-1', title: 'Route 1', value: 'Description' }]),
    }

    mockFetch.mockImplementationOnce(() => {
      return Promise.resolve({
        ok: true,
        json: () =>
          Promise.resolve({
            content: 'route-1',
            model: 'gpt-4o',
            tokens: { input: 100, output: 5, total: 105 },
          }),
      })
    })

    const result = await handler.execute(mockContext, mockRouterV2Block, inputs)

    expect(result.selectedRoute).toBe('route-1')
    expect(result.reasoning).toBe('')
  })
})
@@ -238,25 +238,6 @@ export class RouterBlockHandler implements BlockHandler {
      apiKey: finalApiKey,
      workflowId: ctx.workflowId,
      workspaceId: ctx.workspaceId,
      responseFormat: {
        name: 'router_response',
        schema: {
          type: 'object',
          properties: {
            route: {
              type: 'string',
              description: 'The selected route ID or NO_MATCH',
            },
            reasoning: {
              type: 'string',
              description: 'Brief explanation of why this route was chosen',
            },
          },
          required: ['route', 'reasoning'],
          additionalProperties: false,
        },
        strict: true,
      },
    }

    if (providerId === 'vertex') {
@@ -296,31 +277,16 @@ export class RouterBlockHandler implements BlockHandler {

    const result = await response.json()

    let chosenRouteId: string
    let reasoning = ''

    try {
      const parsedResponse = JSON.parse(result.content)
      chosenRouteId = parsedResponse.route?.trim() || ''
      reasoning = parsedResponse.reasoning || ''
    } catch (_parseError) {
      logger.error('Router response was not valid JSON despite responseFormat', {
        content: result.content,
      })
      chosenRouteId = result.content.trim()
    }
    const chosenRouteId = result.content.trim()

    if (chosenRouteId === 'NO_MATCH' || chosenRouteId.toUpperCase() === 'NO_MATCH') {
      logger.info('Router determined no route matches the context, routing to error path')
      throw new Error(
        reasoning
          ? `Router could not determine a matching route: ${reasoning}`
          : 'Router could not determine a matching route for the given context'
      )
      throw new Error('Router could not determine a matching route for the given context')
    }

    const chosenRoute = routes.find((r) => r.id === chosenRouteId)

    // Throw error if LLM returns invalid route ID - this routes through error path
    if (!chosenRoute) {
      const availableRoutes = routes.map((r) => ({ id: r.id, title: r.title }))
      logger.error(
@@ -332,6 +298,7 @@ export class RouterBlockHandler implements BlockHandler {
      )
    }

    // Find the target block connected to this route's handle
    const connection = ctx.workflow?.connections.find(
      (conn) => conn.source === block.id && conn.sourceHandle === `router-${chosenRoute.id}`
    )
@@ -367,7 +334,6 @@ export class RouterBlockHandler implements BlockHandler {
        total: cost.total,
      },
      selectedRoute: chosenRoute.id,
      reasoning,
      selectedPath: targetBlock
        ? {
            blockId: targetBlock.id,
@@ -387,7 +353,7 @@ export class RouterBlockHandler implements BlockHandler {
  }

  /**
   * Parse routes from input (can be JSON string or array)
   * Parse routes from input (can be JSON string or array).
   */
  private parseRoutes(input: any): RouteDefinition[] {
    try {
@@ -204,21 +204,26 @@ describe('WorkflowBlockHandler', () => {
    })
  })

  it('should throw error for failed child output so BlockExecutor can check error port', () => {
  it('should map failed child output correctly', () => {
    const childResult = {
      success: false,
      error: 'Child workflow failed',
    }

    expect(() =>
      (handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)
    ).toThrow('Error in child workflow "Child Workflow": Child workflow failed')
    const result = (handler as any).mapChildOutputToParent(
      childResult,
      'child-id',
      'Child Workflow',
      100
    )

    try {
      ;(handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)
    } catch (error: any) {
      expect(error.childTraceSpans).toEqual([])
    }
    expect(result).toEqual({
      success: false,
      childWorkflowName: 'Child Workflow',
      result: {},
      error: 'Child workflow failed',
      childTraceSpans: [],
    })
  })

  it('should handle nested response structures', () => {
@@ -144,11 +144,6 @@ export class WorkflowBlockHandler implements BlockHandler {
      const workflowMetadata = workflows[workflowId]
      const childWorkflowName = workflowMetadata?.name || workflowId

      const originalError = error.message || 'Unknown error'
      const wrappedError = new Error(
        `Error in child workflow "${childWorkflowName}": ${originalError}`
      )

      if (error.executionResult?.logs) {
        const executionResult = error.executionResult as ExecutionResult

@@ -164,12 +159,28 @@ export class WorkflowBlockHandler implements BlockHandler {
        )

        logger.info(`Captured ${childTraceSpans.length} child trace spans from failed execution`)
        ;(wrappedError as any).childTraceSpans = childTraceSpans
      } else if (error.childTraceSpans && Array.isArray(error.childTraceSpans)) {
        ;(wrappedError as any).childTraceSpans = error.childTraceSpans

        return {
          success: false,
          childWorkflowName,
          result: {},
          error: error.message || 'Child workflow execution failed',
          childTraceSpans: childTraceSpans,
        } as Record<string, any>
      }

      throw wrappedError
      if (error.childTraceSpans && Array.isArray(error.childTraceSpans)) {
        return {
          success: false,
          childWorkflowName,
          result: {},
          error: error.message || 'Child workflow execution failed',
          childTraceSpans: error.childTraceSpans,
        } as Record<string, any>
      }

      const originalError = error.message || 'Unknown error'
      throw new Error(`Error in child workflow "${childWorkflowName}": ${originalError}`)
    }
  }

@@ -441,13 +452,17 @@ export class WorkflowBlockHandler implements BlockHandler {

    if (!success) {
      logger.warn(`Child workflow ${childWorkflowName} failed`)
      const error = new Error(
        `Error in child workflow "${childWorkflowName}": ${childResult.error || 'Child workflow execution failed'}`
      )
      ;(error as any).childTraceSpans = childTraceSpans || []
      throw error
      // Return failure with child trace spans so they can be displayed
      return {
        success: false,
        childWorkflowName,
        result,
        error: childResult.error || 'Child workflow execution failed',
        childTraceSpans: childTraceSpans || [],
      } as Record<string, any>
    }

    // Success case
    return {
      success: true,
      childWorkflowName,
@@ -1,43 +1,24 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { normalizeName } from '@/executor/constants'
import type { ExecutionContext } from '@/executor/types'
import type { OutputSchema } from '@/executor/utils/block-reference'

export interface BlockDataCollection {
  blockData: Record<string, unknown>
  blockData: Record<string, any>
  blockNameMapping: Record<string, string>
  blockOutputSchemas: Record<string, OutputSchema>
}

export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
  const blockData: Record<string, unknown> = {}
  const blockData: Record<string, any> = {}
  const blockNameMapping: Record<string, string> = {}
  const blockOutputSchemas: Record<string, OutputSchema> = {}

  for (const [id, state] of ctx.blockStates.entries()) {
    if (state.output !== undefined) {
      blockData[id] = state.output
    }

    const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
    if (!workflowBlock) continue

    if (workflowBlock.metadata?.name) {
      blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
    }

    const blockType = workflowBlock.metadata?.id
    if (blockType) {
      const params = workflowBlock.config?.params as Record<string, unknown> | undefined
      const subBlocks = params
        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
        : undefined
      const schema = getBlockOutputs(blockType, subBlocks)
      if (schema && Object.keys(schema).length > 0) {
        blockOutputSchemas[id] = schema
      const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
      if (workflowBlock?.metadata?.name) {
        blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
      }
    }
  }

  return { blockData, blockNameMapping, blockOutputSchemas }
  return { blockData, blockNameMapping }
}
@@ -1,255 +0,0 @@
/**
 * @vitest-environment node
 */
import { describe, expect, it } from 'vitest'
import {
  type BlockReferenceContext,
  InvalidFieldError,
  resolveBlockReference,
} from './block-reference'

describe('resolveBlockReference', () => {
  const createContext = (
    overrides: Partial<BlockReferenceContext> = {}
  ): BlockReferenceContext => ({
    blockNameMapping: { start: 'block-1', agent: 'block-2' },
    blockData: {},
    blockOutputSchemas: {},
    ...overrides,
  })

  describe('block name resolution', () => {
    it('should return undefined when block name does not exist', () => {
      const ctx = createContext()
      const result = resolveBlockReference('unknown', ['field'], ctx)
      expect(result).toBeUndefined()
    })

    it('should normalize block name before lookup', () => {
      const ctx = createContext({
        blockNameMapping: { myblock: 'block-1' },
        blockData: { 'block-1': { value: 'test' } },
      })

      const result = resolveBlockReference('MyBlock', ['value'], ctx)
      expect(result).toEqual({ value: 'test', blockId: 'block-1' })
    })

    it('should handle block names with spaces', () => {
      const ctx = createContext({
        blockNameMapping: { myblock: 'block-1' },
        blockData: { 'block-1': { value: 'test' } },
      })

      const result = resolveBlockReference('My Block', ['value'], ctx)
      expect(result).toEqual({ value: 'test', blockId: 'block-1' })
    })
  })

  describe('field resolution', () => {
    it('should return entire block output when no path specified', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: 'hello', other: 'data' } },
      })

      const result = resolveBlockReference('start', [], ctx)
      expect(result).toEqual({
        value: { input: 'hello', other: 'data' },
        blockId: 'block-1',
      })
    })

    it('should resolve simple field path', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: 'hello' } },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: 'hello', blockId: 'block-1' })
    })

    it('should resolve nested field path', () => {
      const ctx = createContext({
        blockData: { 'block-1': { response: { data: { name: 'test' } } } },
      })

      const result = resolveBlockReference('start', ['response', 'data', 'name'], ctx)
      expect(result).toEqual({ value: 'test', blockId: 'block-1' })
    })

    it('should resolve array index path', () => {
      const ctx = createContext({
        blockData: { 'block-1': { items: ['a', 'b', 'c'] } },
      })

      const result = resolveBlockReference('start', ['items', '1'], ctx)
      expect(result).toEqual({ value: 'b', blockId: 'block-1' })
    })

    it('should return undefined value when field exists but has no value', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: undefined } },
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })

    it('should return null value when field has null', () => {
      const ctx = createContext({
        blockData: { 'block-1': { input: null } },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: null, blockId: 'block-1' })
    })
  })

  describe('schema validation', () => {
    it('should throw InvalidFieldError when field not in schema', () => {
      const ctx = createContext({
        blockData: { 'block-1': { existing: 'value' } },
        blockOutputSchemas: {
          'block-1': {
            input: { type: 'string' },
            conversationId: { type: 'string' },
          },
        },
      })

      expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(InvalidFieldError)
      expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(
        /"invalid" doesn't exist on block "start"/
      )
    })

    it('should include available fields in error message', () => {
      const ctx = createContext({
        blockData: { 'block-1': {} },
        blockOutputSchemas: {
          'block-1': {
            input: { type: 'string' },
            conversationId: { type: 'string' },
            files: { type: 'files' },
          },
        },
      })

      try {
        resolveBlockReference('start', ['typo'], ctx)
        expect.fail('Should have thrown')
      } catch (error) {
        expect(error).toBeInstanceOf(InvalidFieldError)
        const fieldError = error as InvalidFieldError
        expect(fieldError.availableFields).toContain('input')
        expect(fieldError.availableFields).toContain('conversationId')
        expect(fieldError.availableFields).toContain('files')
      }
    })

    it('should allow valid field even when value is undefined', () => {
      const ctx = createContext({
        blockData: { 'block-1': {} },
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })

    it('should validate path when block has no output yet', () => {
      const ctx = createContext({
        blockData: {},
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      expect(() => resolveBlockReference('start', ['invalid'], ctx)).toThrow(InvalidFieldError)
    })

    it('should return undefined for valid field when block has no output', () => {
      const ctx = createContext({
        blockData: {},
        blockOutputSchemas: {
          'block-1': { input: { type: 'string' } },
        },
      })

      const result = resolveBlockReference('start', ['input'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })
  })

  describe('without schema (pass-through mode)', () => {
    it('should return undefined value without throwing when no schema', () => {
      const ctx = createContext({
        blockData: { 'block-1': { existing: 'value' } },
      })

      const result = resolveBlockReference('start', ['missing'], ctx)
      expect(result).toEqual({ value: undefined, blockId: 'block-1' })
    })
  })

  describe('file type handling', () => {
    it('should allow file property access', () => {
      const ctx = createContext({
        blockData: {
          'block-1': {
            files: [{ name: 'test.txt', url: 'http://example.com/file' }],
          },
        },
        blockOutputSchemas: {
          'block-1': { files: { type: 'files' } },
        },
      })

      const result = resolveBlockReference('start', ['files', '0', 'name'], ctx)
      expect(result).toEqual({ value: 'test.txt', blockId: 'block-1' })
    })

    it('should validate file property names', () => {
      const ctx = createContext({
        blockData: { 'block-1': { files: [] } },
        blockOutputSchemas: {
          'block-1': { files: { type: 'files' } },
        },
      })

      expect(() => resolveBlockReference('start', ['files', '0', 'invalid'], ctx)).toThrow(
        InvalidFieldError
      )
    })
  })
})

describe('InvalidFieldError', () => {
  it('should have correct properties', () => {
    const error = new InvalidFieldError('myBlock', 'invalid.path', ['field1', 'field2'])

    expect(error.blockName).toBe('myBlock')
    expect(error.fieldPath).toBe('invalid.path')
    expect(error.availableFields).toEqual(['field1', 'field2'])
    expect(error.name).toBe('InvalidFieldError')
  })

  it('should format message correctly', () => {
    const error = new InvalidFieldError('start', 'typo', ['input', 'files'])

    expect(error.message).toBe(
      '"typo" doesn\'t exist on block "start". Available fields: input, files'
    )
  })

  it('should handle empty available fields', () => {
    const error = new InvalidFieldError('start', 'field', [])

    expect(error.message).toBe('"field" doesn\'t exist on block "start". Available fields: none')
  })
})
@@ -1,210 +0,0 @@
import { USER_FILE_ACCESSIBLE_PROPERTIES } from '@/lib/workflows/types'
import { normalizeName } from '@/executor/constants'
import { navigatePath } from '@/executor/variables/resolvers/reference'

export type OutputSchema = Record<string, { type?: string; description?: string } | unknown>

export interface BlockReferenceContext {
  blockNameMapping: Record<string, string>
  blockData: Record<string, unknown>
  blockOutputSchemas?: Record<string, OutputSchema>
}

export interface BlockReferenceResult {
  value: unknown
  blockId: string
}

export class InvalidFieldError extends Error {
  constructor(
    public readonly blockName: string,
    public readonly fieldPath: string,
    public readonly availableFields: string[]
  ) {
    super(
      `"${fieldPath}" doesn't exist on block "${blockName}". ` +
        `Available fields: ${availableFields.length > 0 ? availableFields.join(', ') : 'none'}`
    )
    this.name = 'InvalidFieldError'
  }
}

function isFileType(value: unknown): boolean {
  if (typeof value !== 'object' || value === null) return false
  const typed = value as { type?: string }
  return typed.type === 'file[]' || typed.type === 'files'
}

function isArrayType(value: unknown): value is { type: 'array'; items?: unknown } {
  if (typeof value !== 'object' || value === null) return false
  return (value as { type?: string }).type === 'array'
}

function getArrayItems(schema: unknown): unknown {
  if (typeof schema !== 'object' || schema === null) return undefined
  return (schema as { items?: unknown }).items
}

function getProperties(schema: unknown): Record<string, unknown> | undefined {
  if (typeof schema !== 'object' || schema === null) return undefined
  const props = (schema as { properties?: unknown }).properties
  return typeof props === 'object' && props !== null
    ? (props as Record<string, unknown>)
    : undefined
}

function lookupField(schema: unknown, fieldName: string): unknown | undefined {
  if (typeof schema !== 'object' || schema === null) return undefined
  const typed = schema as Record<string, unknown>

  if (fieldName in typed) {
    return typed[fieldName]
  }

  const props = getProperties(schema)
  if (props && fieldName in props) {
    return props[fieldName]
  }

  return undefined
}

function isPathInSchema(schema: OutputSchema | undefined, pathParts: string[]): boolean {
  if (!schema || pathParts.length === 0) {
    return true
  }

  let current: unknown = schema

  for (let i = 0; i < pathParts.length; i++) {
    const part = pathParts[i]

    if (current === null || current === undefined) {
      return false
    }

    if (/^\d+$/.test(part)) {
      if (isFileType(current)) {
        const nextPart = pathParts[i + 1]
        return (
          !nextPart ||
          USER_FILE_ACCESSIBLE_PROPERTIES.includes(
            nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
          )
        )
      }
      if (isArrayType(current)) {
        current = getArrayItems(current)
      }
      continue
    }

    const arrayMatch = part.match(/^([^[]+)\[(\d+)\]$/)
    if (arrayMatch) {
      const [, prop] = arrayMatch
      const fieldDef = lookupField(current, prop)
      if (!fieldDef) return false

      if (isFileType(fieldDef)) {
        const nextPart = pathParts[i + 1]
        return (
          !nextPart ||
          USER_FILE_ACCESSIBLE_PROPERTIES.includes(
            nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
          )
        )
      }

      current = isArrayType(fieldDef) ? getArrayItems(fieldDef) : fieldDef
      continue
    }

    if (
      isFileType(current) &&
      USER_FILE_ACCESSIBLE_PROPERTIES.includes(
        part as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
      )
    ) {
      return true
    }

    const fieldDef = lookupField(current, part)
    if (fieldDef !== undefined) {
      if (isFileType(fieldDef)) {
        const nextPart = pathParts[i + 1]
        if (!nextPart) return true
        if (/^\d+$/.test(nextPart)) {
          const afterIndex = pathParts[i + 2]
          return (
            !afterIndex ||
            USER_FILE_ACCESSIBLE_PROPERTIES.includes(
              afterIndex as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
            )
          )
        }
        return USER_FILE_ACCESSIBLE_PROPERTIES.includes(
          nextPart as (typeof USER_FILE_ACCESSIBLE_PROPERTIES)[number]
        )
      }
      current = fieldDef
      continue
    }

    if (isArrayType(current)) {
      const items = getArrayItems(current)
      const itemField = lookupField(items, part)
      if (itemField !== undefined) {
        current = itemField
        continue
      }
    }

    return false
  }

  return true
}

function getSchemaFieldNames(schema: OutputSchema | undefined): string[] {
  if (!schema) return []
  return Object.keys(schema)
}

export function resolveBlockReference(
  blockName: string,
  pathParts: string[],
  context: BlockReferenceContext
): BlockReferenceResult | undefined {
  const normalizedName = normalizeName(blockName)
  const blockId = context.blockNameMapping[normalizedName]

  if (!blockId) {
    return undefined
  }

  const blockOutput = context.blockData[blockId]
  const schema = context.blockOutputSchemas?.[blockId]

  if (blockOutput === undefined) {
    if (schema && pathParts.length > 0) {
      if (!isPathInSchema(schema, pathParts)) {
        throw new InvalidFieldError(blockName, pathParts.join('.'), getSchemaFieldNames(schema))
      }
    }
    return { value: undefined, blockId }
  }

  if (pathParts.length === 0) {
    return { value: blockOutput, blockId }
  }

  const value = navigatePath(blockOutput, pathParts)

  if (value === undefined && schema) {
    if (!isPathInSchema(schema, pathParts)) {
      throw new InvalidFieldError(blockName, pathParts.join('.'), getSchemaFieldNames(schema))
    }
  }

  return { value, blockId }
}
@@ -40,30 +40,3 @@ export function isJSONString(value: string): boolean {
  const trimmed = value.trim()
  return trimmed.startsWith('{') || trimmed.startsWith('[')
}

/**
 * Recursively parses JSON strings within an object or array.
 * Useful for normalizing data that may contain stringified JSON at various levels.
 */
export function parseObjectStrings(data: unknown): unknown {
  if (typeof data === 'string') {
    try {
      const parsed = JSON.parse(data)
      if (typeof parsed === 'object' && parsed !== null) {
        return parseObjectStrings(parsed)
      }
      return parsed
    } catch {
      return data
    }
  } else if (Array.isArray(data)) {
    return data.map((item) => parseObjectStrings(item))
  } else if (typeof data === 'object' && data !== null) {
    const result: Record<string, unknown> = {}
    for (const [key, value] of Object.entries(data)) {
      result[key] = parseObjectStrings(value)
    }
    return result
  }
  return data
}
@@ -6,10 +6,6 @@ import type { ResolutionContext } from './reference'
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.mock('@/lib/workflows/blocks/block-outputs', () => ({
|
||||
getBlockOutputs: vi.fn(() => ({})),
|
||||
}))
|
||||
|
||||
function createTestWorkflow(
|
||||
blocks: Array<{
|
||||
id: string
|
||||
@@ -144,21 +140,16 @@ describe('BlockResolver', () => {
|
||||
expect(resolver.resolve('<source.nonexistent>', ctx)).toBeUndefined()
|
||||
})
|
||||
|
||||
it.concurrent('should throw error for path not in output schema', async () => {
|
||||
const { getBlockOutputs } = await import('@/lib/workflows/blocks/block-outputs')
|
||||
const mockGetBlockOutputs = vi.mocked(getBlockOutputs)
|
||||
const customOutputs = {
|
||||
validField: { type: 'string', description: 'A valid field' },
|
||||
nested: {
|
||||
child: { type: 'number', description: 'Nested child' },
|
||||
},
|
||||
}
|
||||
mockGetBlockOutputs.mockReturnValue(customOutputs as any)
|
||||
|
||||
it.concurrent('should throw error for path not in output schema', () => {
|
||||
const workflow = createTestWorkflow([
|
||||
{
|
||||
id: 'source',
|
||||
outputs: customOutputs,
|
||||
outputs: {
|
||||
validField: { type: 'string', description: 'A valid field' },
|
||||
nested: {
|
||||
child: { type: 'number', description: 'Nested child' },
|
||||
},
|
||||
},
|
||||
},
|
||||
])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
@@ -170,8 +161,6 @@ describe('BlockResolver', () => {
|
||||
/"invalidField" doesn't exist on block "source"/
|
||||
)
|
||||
expect(() => resolver.resolve('<source.invalidField>', ctx)).toThrow(/Available fields:/)
|
||||
|
||||
mockGetBlockOutputs.mockReturnValue({})
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for path in schema but missing in data', () => {
|
||||
@@ -309,6 +298,45 @@ describe('BlockResolver', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('tryParseJSON', () => {
|
||||
it.concurrent('should parse valid JSON object string', () => {
|
||||
const resolver = new BlockResolver(createTestWorkflow())
|
||||
expect(resolver.tryParseJSON('{"key": "value"}')).toEqual({ key: 'value' })
|
||||
})
|
||||
|
||||
it.concurrent('should parse valid JSON array string', () => {
|
||||
const resolver = new BlockResolver(createTestWorkflow())
|
||||
expect(resolver.tryParseJSON('[1, 2, 3]')).toEqual([1, 2, 3])
|
||||
})
|
||||
|
||||
it.concurrent('should return original value for non-string input', () => {
|
||||
const resolver = new BlockResolver(createTestWorkflow())
|
||||
const obj = { key: 'value' }
|
||||
expect(resolver.tryParseJSON(obj)).toBe(obj)
|
||||
expect(resolver.tryParseJSON(123)).toBe(123)
|
||||
expect(resolver.tryParseJSON(null)).toBe(null)
|
||||
})
|
||||
|
||||
it.concurrent('should return original string for non-JSON strings', () => {
|
||||
const resolver = new BlockResolver(createTestWorkflow())
|
||||
expect(resolver.tryParseJSON('plain text')).toBe('plain text')
|
||||
expect(resolver.tryParseJSON('123')).toBe('123')
|
||||
expect(resolver.tryParseJSON('')).toBe('')
|
||||
})
|
||||
|
||||
it.concurrent('should return original string for invalid JSON', () => {
|
||||
const resolver = new BlockResolver(createTestWorkflow())
|
||||
expect(resolver.tryParseJSON('{invalid json}')).toBe('{invalid json}')
|
||||
expect(resolver.tryParseJSON('[1, 2,')).toBe('[1, 2,')
|
||||
})
|
||||
|
||||
it.concurrent('should handle whitespace around JSON', () => {
|
||||
const resolver = new BlockResolver(createTestWorkflow())
|
||||
expect(resolver.tryParseJSON(' {"key": "value"} ')).toEqual({ key: 'value' })
|
||||
expect(resolver.tryParseJSON('\n[1, 2]\n')).toEqual([1, 2])
|
||||
})
|
||||
})
|
||||
|
||||
describe('Response block backwards compatibility', () => {
|
||||
it.concurrent('should resolve new format: <responseBlock.data>', () => {
|
||||
const workflow = createTestWorkflow([
|
||||
|
@@ -1,15 +1,10 @@
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { USER_FILE_ACCESSIBLE_PROPERTIES } from '@/lib/workflows/types'
import {
isReference,
normalizeName,
parseReferencePath,
SPECIAL_REFERENCE_PREFIXES,
} from '@/executor/constants'
import {
InvalidFieldError,
type OutputSchema,
resolveBlockReference,
} from '@/executor/utils/block-reference'
import {
navigatePath,
type ResolutionContext,
@@ -18,6 +13,123 @@ import {
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
import { getTool } from '@/tools/utils'

function isPathInOutputSchema(
outputs: Record<string, any> | undefined,
pathParts: string[]
): boolean {
if (!outputs || pathParts.length === 0) {
return true
}

const isFileArrayType = (value: any): boolean =>
value?.type === 'file[]' || value?.type === 'files'

let current: any = outputs
for (let i = 0; i < pathParts.length; i++) {
const part = pathParts[i]

const arrayMatch = part.match(/^([^[]+)\[(\d+)\]$/)
if (arrayMatch) {
const [, prop] = arrayMatch
let fieldDef: any

if (prop in current) {
fieldDef = current[prop]
} else if (current.properties && prop in current.properties) {
fieldDef = current.properties[prop]
} else if (current.type === 'array' && current.items) {
if (current.items.properties && prop in current.items.properties) {
fieldDef = current.items.properties[prop]
} else if (prop in current.items) {
fieldDef = current.items[prop]
}
}

if (!fieldDef) {
return false
}

if (isFileArrayType(fieldDef)) {
if (i + 1 < pathParts.length) {
return USER_FILE_ACCESSIBLE_PROPERTIES.includes(pathParts[i + 1] as any)
}
return true
}

if (fieldDef.type === 'array' && fieldDef.items) {
current = fieldDef.items
continue
}

current = fieldDef
continue
}

if (/^\d+$/.test(part)) {
if (isFileArrayType(current)) {
if (i + 1 < pathParts.length) {
const nextPart = pathParts[i + 1]
return USER_FILE_ACCESSIBLE_PROPERTIES.includes(nextPart as any)
}
return true
}
continue
}

if (current === null || current === undefined) {
return false
}

if (part in current) {
const nextCurrent = current[part]
if (nextCurrent?.type === 'file[]' && i + 1 < pathParts.length) {
const nextPart = pathParts[i + 1]
if (/^\d+$/.test(nextPart) && i + 2 < pathParts.length) {
const propertyPart = pathParts[i + 2]
return USER_FILE_ACCESSIBLE_PROPERTIES.includes(propertyPart as any)
}
}
current = nextCurrent
continue
}

if (current.properties && part in current.properties) {
current = current.properties[part]
continue
}

if (current.type === 'array' && current.items) {
if (current.items.properties && part in current.items.properties) {
current = current.items.properties[part]
continue
}
if (part in current.items) {
current = current.items[part]
continue
}
}

if (isFileArrayType(current) && USER_FILE_ACCESSIBLE_PROPERTIES.includes(part as any)) {
return true
}

if ('type' in current && typeof current.type === 'string') {
if (!current.properties && !current.items) {
return false
}
}

return false
}

return true
}

function getSchemaFieldNames(outputs: Record<string, any> | undefined): string[] {
if (!outputs) return []
return Object.keys(outputs)
}
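isPathInOutputSchema above walks a block's declared output schema segment by segment and only accepts reference paths the schema can satisfy. A small illustration of the intended behaviour under a made-up schema (the schema literal and the expected values in comments are assumptions derived from the function as shown; USER_FILE_ACCESSIBLE_PROPERTIES is assumed to include 'url'):

// Hypothetical schema, for illustration only.
const exampleOutputs = {
  content: { type: 'string', description: 'Raw text output' },
  files: { type: 'file[]', description: 'Attached files' },
  rows: { type: 'array', items: { properties: { id: { type: 'string' } } } },
}

console.log(isPathInOutputSchema(exampleOutputs, ['content']))           // true  - declared field
console.log(isPathInOutputSchema(exampleOutputs, ['rows', '0', 'id']))   // true  - array item property
console.log(isPathInOutputSchema(exampleOutputs, ['files', '0', 'url'])) // true  - allowed file property
console.log(isPathInOutputSchema(exampleOutputs, ['missing']))           // false - not in the schema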
export class BlockResolver implements Resolver {
private nameToBlockId: Map<string, string>
private blockById: Map<string, SerializedBlock>
@@ -57,94 +169,77 @@ export class BlockResolver implements Resolver {
return undefined
}

const block = this.blockById.get(blockId)!
const block = this.blockById.get(blockId)
const output = this.getBlockOutput(blockId, context)

const blockData: Record<string, unknown> = {}
const blockOutputSchemas: Record<string, OutputSchema> = {}

if (output !== undefined) {
blockData[blockId] = output
}

const blockType = block.metadata?.id
const params = block.config?.params as Record<string, unknown> | undefined
const subBlocks = params
? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
: undefined
const toolId = block.config?.tool
const toolConfig = toolId ? getTool(toolId) : undefined
const outputSchema =
toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)

if (outputSchema && Object.keys(outputSchema).length > 0) {
blockOutputSchemas[blockId] = outputSchema
}

try {
const result = resolveBlockReference(blockName, pathParts, {
blockNameMapping: Object.fromEntries(this.nameToBlockId),
blockData,
blockOutputSchemas,
})!

if (result.value !== undefined) {
return result.value
}

return this.handleBackwardsCompat(block, output, pathParts)
} catch (error) {
if (error instanceof InvalidFieldError) {
const fallback = this.handleBackwardsCompat(block, output, pathParts)
if (fallback !== undefined) {
return fallback
}
throw new Error(error.message)
}
throw error
}
}

private handleBackwardsCompat(
block: SerializedBlock,
output: unknown,
pathParts: string[]
): unknown {
if (output === undefined || pathParts.length === 0) {
if (output === undefined) {
return undefined
}
if (pathParts.length === 0) {
return output
}

// Try the original path first
let result = navigatePath(output, pathParts)

// If successful, return it immediately
if (result !== undefined) {
return result
}

// Response block backwards compatibility:
// Old: <responseBlock.response.data> -> New: <responseBlock.data>
// Only apply fallback if:
// 1. Block type is 'response'
// 2. Path starts with 'response.'
// 3. Output doesn't have a 'response' key (confirming it's the new format)
if (
block.metadata?.id === 'response' &&
block?.metadata?.id === 'response' &&
pathParts[0] === 'response' &&
(output as Record<string, unknown>)?.response === undefined
output?.response === undefined
) {
const adjustedPathParts = pathParts.slice(1)
if (adjustedPathParts.length === 0) {
return output
}
const fallbackResult = navigatePath(output, adjustedPathParts)
if (fallbackResult !== undefined) {
return fallbackResult
result = navigatePath(output, adjustedPathParts)
if (result !== undefined) {
return result
}
}

// Workflow block backwards compatibility:
// Old: <workflowBlock.result.response.data> -> New: <workflowBlock.result.data>
// Only apply fallback if:
// 1. Block type is 'workflow' or 'workflow_input'
// 2. Path starts with 'result.response.'
// 3. output.result.response doesn't exist (confirming child used new format)
const isWorkflowBlock =
block.metadata?.id === 'workflow' || block.metadata?.id === 'workflow_input'
const outputRecord = output as Record<string, Record<string, unknown> | undefined>
block?.metadata?.id === 'workflow' || block?.metadata?.id === 'workflow_input'
if (
isWorkflowBlock &&
pathParts[0] === 'result' &&
pathParts[1] === 'response' &&
outputRecord?.result?.response === undefined
output?.result?.response === undefined
) {
const adjustedPathParts = ['result', ...pathParts.slice(2)]
const fallbackResult = navigatePath(output, adjustedPathParts)
if (fallbackResult !== undefined) {
return fallbackResult
result = navigatePath(output, adjustedPathParts)
if (result !== undefined) {
return result
}
}

const toolId = block?.config?.tool
const toolConfig = toolId ? getTool(toolId) : undefined
const outputSchema = toolConfig?.outputs ?? block?.outputs
const schemaFields = getSchemaFieldNames(outputSchema)
if (schemaFields.length > 0 && !isPathInOutputSchema(outputSchema, pathParts)) {
throw new Error(
`"${pathParts.join('.')}" doesn't exist on block "${blockName}". ` +
`Available fields: ${schemaFields.join(', ')}`
)
}

return undefined
}
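The two fallback branches above rewrite legacy reference paths onto the new output shape before giving up. A worked example of the mapping they implement (the block output value is hypothetical, not from the repository):

// Hypothetical response-block output in the new format (no top-level `response` key).
const output = { data: { status: 200 }, headers: {} }

// A legacy reference <api.response.data> arrives as pathParts = ['response', 'data'].
// navigatePath(output, ['response', 'data']) comes back undefined, so the fallback
// strips the leading 'response' segment and retries:
navigatePath(output, ['data']) // => { status: 200 }

// Likewise for workflow blocks, ['result', 'response', 'data'] is retried as ['result', 'data'].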
@@ -241,4 +336,21 @@ export class BlockResolver implements Resolver {
}
return String(value)
}

tryParseJSON(value: any): any {
if (typeof value !== 'string') {
return value
}

const trimmed = value.trim()
if (trimmed.length > 0 && (trimmed.startsWith('{') || trimmed.startsWith('['))) {
try {
return JSON.parse(trimmed)
} catch {
return value
}
}

return value
}
}
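One way the new method can be combined with the path helper from the next hunk, purely as a sketch (the workflow value and the stringified output are hypothetical; this call site does not appear in the diff):

// Given some SerializedWorkflow `workflow`, read a field out of a stringified output.
const resolver = new BlockResolver(workflow)
const parsed = resolver.tryParseJSON('{"user": {"name": "Ada"}}')
console.log(navigatePath(parsed, ['user', 'name'])) // 'Ada'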
@@ -27,28 +27,23 @@ export function navigatePath(obj: any, path: string[]): any {
return undefined
}

const arrayMatch = part.match(/^([^[]+)(\[.+)$/)
// Handle array indexing like "items[0]" or just numeric indices
const arrayMatch = part.match(/^([^[]+)\[(\d+)\](.*)$/)
if (arrayMatch) {
const [, prop, bracketsPart] = arrayMatch
// Handle complex array access like "items[0]"
const [, prop, index] = arrayMatch
current = current[prop]
if (current === undefined || current === null) {
return undefined
}

const indices = bracketsPart.match(/\[(\d+)\]/g)
if (indices) {
for (const indexMatch of indices) {
if (current === null || current === undefined) {
return undefined
}
const idx = Number.parseInt(indexMatch.slice(1, -1), 10)
current = Array.isArray(current) ? current[idx] : undefined
}
}
const idx = Number.parseInt(index, 10)
current = Array.isArray(current) ? current[idx] : undefined
} else if (/^\d+$/.test(part)) {
// Handle plain numeric index
const index = Number.parseInt(part, 10)
current = Array.isArray(current) ? current[index] : undefined
} else {
// Handle regular property access
current = current[part]
}
}
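Both sides of this hunk accept segments of the form items[0] as well as bare numeric segments; one side additionally walks chained indices such as items[0][1]. A brief illustration with made-up data (expected values in comments follow from the code shown above):

// Hypothetical data, for illustration only.
const output = { items: [{ name: 'first' }, { name: 'second' }], tags: ['a', 'b'] }

console.log(navigatePath(output, ['items[1]', 'name'])) // 'second'  (property + index in one segment)
console.log(navigatePath(output, ['tags', '0']))        // 'a'       (bare numeric segment)
console.log(navigatePath(output, ['missing', 'x']))     // undefined (dead end returns undefined)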
@@ -1,9 +1,10 @@
'use client'

import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import { useCallback, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import type { AllTagSlot } from '@/lib/knowledge/constants'
import { knowledgeKeys, useTagDefinitionsQuery } from '@/hooks/queries/knowledge'

const logger = createLogger('useKnowledgeBaseTagDefinitions')

export interface TagDefinition {
id: string
@@ -16,23 +17,54 @@ export interface TagDefinition {

/**
* Hook for fetching KB-scoped tag definitions (for filtering/selection)
* Uses React Query as single source of truth
* @param knowledgeBaseId - The knowledge base ID
*/
export function useKnowledgeBaseTagDefinitions(knowledgeBaseId: string | null) {
const queryClient = useQueryClient()
const query = useTagDefinitionsQuery(knowledgeBaseId)
const [tagDefinitions, setTagDefinitions] = useState<TagDefinition[]>([])
const [isLoading, setIsLoading] = useState(false)
const [error, setError] = useState<string | null>(null)

const fetchTagDefinitions = useCallback(async () => {
if (!knowledgeBaseId) return
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.tagDefinitions(knowledgeBaseId),
})
}, [queryClient, knowledgeBaseId])
if (!knowledgeBaseId) {
setTagDefinitions([])
return
}

setIsLoading(true)
setError(null)

try {
const response = await fetch(`/api/knowledge/${knowledgeBaseId}/tag-definitions`)

if (!response.ok) {
throw new Error(`Failed to fetch tag definitions: ${response.statusText}`)
}

const data = await response.json()

if (data.success && Array.isArray(data.data)) {
setTagDefinitions(data.data)
} else {
throw new Error('Invalid response format')
}
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'
logger.error('Error fetching tag definitions:', err)
setError(errorMessage)
setTagDefinitions([])
} finally {
setIsLoading(false)
}
}, [knowledgeBaseId])

useEffect(() => {
fetchTagDefinitions()
}, [fetchTagDefinitions])

return {
tagDefinitions: (query.data ?? []) as TagDefinition[],
isLoading: query.isLoading,
error: query.error instanceof Error ? query.error.message : null,
tagDefinitions,
isLoading,
error,
fetchTagDefinitions,
}
}
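Either variant of the hook exposes the same surface (tagDefinitions, isLoading, error, fetchTagDefinitions), so a consumer might look like the sketch below. The TagFilter component and its props are hypothetical, and displayName is assumed from the TagDefinitionInput shape elsewhere in this diff.

// Hypothetical consumer, not part of the diff.
function TagFilter({ knowledgeBaseId }: { knowledgeBaseId: string }) {
  const { tagDefinitions, isLoading, error } = useKnowledgeBaseTagDefinitions(knowledgeBaseId)

  if (isLoading) return <span>Loading tags...</span>
  if (error) return <span>{error}</span>

  return (
    <ul>
      {tagDefinitions.map((tag) => (
        <li key={tag.id}>{tag.displayName}</li>
      ))}
    </ul>
  )
}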
@@ -1,4 +1,4 @@
import { useCallback, useMemo } from 'react'
import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import type { ChunkData, DocumentData, KnowledgeBaseData } from '@/lib/knowledge/types'
import {
@@ -67,17 +67,12 @@ export function useKnowledgeBaseDocuments(
sortBy?: string
sortOrder?: string
enabled?: boolean
refetchInterval?:
| number
| false
| ((data: KnowledgeDocumentsResponse | undefined) => number | false)
enabledFilter?: 'all' | 'enabled' | 'disabled'
refetchInterval?: number | false
}
) {
const queryClient = useQueryClient()
const requestLimit = options?.limit ?? DEFAULT_PAGE_SIZE
const requestOffset = options?.offset ?? 0
const enabledFilter = options?.enabledFilter ?? 'all'
const paramsKey = serializeDocumentParams({
knowledgeBaseId,
limit: requestLimit,
@@ -85,19 +80,8 @@ export function useKnowledgeBaseDocuments(
search: options?.search,
sortBy: options?.sortBy,
sortOrder: options?.sortOrder,
enabledFilter,
})

const refetchIntervalFn = useMemo(() => {
if (typeof options?.refetchInterval === 'function') {
const userFn = options.refetchInterval
return (query: { state: { data?: KnowledgeDocumentsResponse } }) => {
return userFn(query.state.data)
}
}
return options?.refetchInterval
}, [options?.refetchInterval])

const query = useKnowledgeDocumentsQuery(
{
knowledgeBaseId,
@@ -106,11 +90,10 @@ export function useKnowledgeBaseDocuments(
search: options?.search,
sortBy: options?.sortBy,
sortOrder: options?.sortOrder,
enabledFilter,
},
{
enabled: (options?.enabled ?? true) && Boolean(knowledgeBaseId),
refetchInterval: refetchIntervalFn,
refetchInterval: options?.refetchInterval,
}
)

@@ -122,14 +105,6 @@ export function useKnowledgeBaseDocuments(
hasMore: false,
}

const hasProcessingDocs = useMemo(
() =>
documents.some(
(doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
),
[documents]
)

const refreshDocuments = useCallback(async () => {
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.documents(knowledgeBaseId, paramsKey),
@@ -161,7 +136,6 @@ export function useKnowledgeBaseDocuments(
isFetching: query.isFetching,
isPlaceholderData: query.isPlaceholderData,
error: query.error instanceof Error ? query.error.message : null,
hasProcessingDocuments: hasProcessingDocs,
refreshDocuments,
updateDocument,
}
@@ -259,8 +233,8 @@ export function useDocumentChunks(
const hasPrevPage = currentPage > 1

const goToPage = useCallback(
(newPage: number): boolean => {
return newPage >= 1 && newPage <= totalPages
async (newPage: number) => {
if (newPage < 1 || newPage > totalPages) return
},
[totalPages]
)
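One side of this hunk widens refetchInterval so callers can derive the polling cadence from the last response. A rough sketch of that option, to be read inside a React component (the field names on KnowledgeDocumentsResponse and the 5000ms cadence are assumptions, not from the repository):

// Hypothetical: poll while any document is still processing, otherwise stop polling.
useKnowledgeBaseDocuments(knowledgeBaseId, {
  refetchInterval: (data) =>
    data?.documents?.some((doc: any) => doc.processingStatus === 'processing') ? 5000 : false,
})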
@@ -1,15 +1,10 @@
'use client'

import { useCallback } from 'react'
import { useQueryClient } from '@tanstack/react-query'
import { useCallback, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import type { AllTagSlot } from '@/lib/knowledge/constants'
import {
type DocumentTagDefinitionInput,
knowledgeKeys,
useDeleteDocumentTagDefinitions,
useDocumentTagDefinitionsQuery,
useSaveDocumentTagDefinitions,
} from '@/hooks/queries/knowledge'

const logger = createLogger('useTagDefinitions')

export interface TagDefinition {
id: string
@@ -24,30 +19,57 @@ export interface TagDefinitionInput {
tagSlot: AllTagSlot
displayName: string
fieldType: string
// Optional: for editing existing definitions
_originalDisplayName?: string
}

/**
* Hook for managing document-scoped tag definitions
* Uses React Query as single source of truth
* Hook for managing KB-scoped tag definitions
* @param knowledgeBaseId - The knowledge base ID
* @param documentId - The document ID (required for API calls)
*/
export function useTagDefinitions(
knowledgeBaseId: string | null,
documentId: string | null = null
) {
const queryClient = useQueryClient()
const query = useDocumentTagDefinitionsQuery(knowledgeBaseId, documentId)
const { mutateAsync: saveTagDefinitionsMutation } = useSaveDocumentTagDefinitions()
const { mutateAsync: deleteTagDefinitionsMutation } = useDeleteDocumentTagDefinitions()

const tagDefinitions = (query.data ?? []) as TagDefinition[]
const [tagDefinitions, setTagDefinitions] = useState<TagDefinition[]>([])
const [isLoading, setIsLoading] = useState(false)
const [error, setError] = useState<string | null>(null)

const fetchTagDefinitions = useCallback(async () => {
if (!knowledgeBaseId || !documentId) return
await queryClient.invalidateQueries({
queryKey: knowledgeKeys.documentTagDefinitions(knowledgeBaseId, documentId),
})
}, [queryClient, knowledgeBaseId, documentId])
if (!knowledgeBaseId || !documentId) {
setTagDefinitions([])
return
}

setIsLoading(true)
setError(null)

try {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`
)

if (!response.ok) {
throw new Error(`Failed to fetch tag definitions: ${response.statusText}`)
}

const data = await response.json()

if (data.success && Array.isArray(data.data)) {
setTagDefinitions(data.data)
} else {
throw new Error('Invalid response format')
}
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Unknown error occurred'
logger.error('Error fetching tag definitions:', err)
setError(errorMessage)
setTagDefinitions([])
} finally {
setIsLoading(false)
}
}, [knowledgeBaseId, documentId])

const saveTagDefinitions = useCallback(
async (definitions: TagDefinitionInput[]) => {
@@ -55,13 +77,43 @@ export function useTagDefinitions(
throw new Error('Knowledge base ID and document ID are required')
}

return saveTagDefinitionsMutation({
knowledgeBaseId,
documentId,
definitions: definitions as DocumentTagDefinitionInput[],
})
// Simple validation
const validDefinitions = (definitions || []).filter(
(def) => def?.tagSlot && def.displayName && def.displayName.trim()
)

try {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ definitions: validDefinitions }),
}
)

if (!response.ok) {
throw new Error(`Failed to save tag definitions: ${response.statusText}`)
}

const data = await response.json()

if (!data.success) {
throw new Error(data.error || 'Failed to save tag definitions')
}

// Refresh the definitions after saving
await fetchTagDefinitions()

return data.data
} catch (err) {
logger.error('Error saving tag definitions:', err)
throw err
}
},
[knowledgeBaseId, documentId, saveTagDefinitionsMutation]
[knowledgeBaseId, documentId, fetchTagDefinitions]
)

const deleteTagDefinitions = useCallback(async () => {
@@ -69,11 +121,25 @@ export function useTagDefinitions(
throw new Error('Knowledge base ID and document ID are required')
}

return deleteTagDefinitionsMutation({
knowledgeBaseId,
documentId,
})
}, [knowledgeBaseId, documentId, deleteTagDefinitionsMutation])
try {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/tag-definitions`,
{
method: 'DELETE',
}
)

if (!response.ok) {
throw new Error(`Failed to delete tag definitions: ${response.statusText}`)
}

// Refresh the definitions after deleting
await fetchTagDefinitions()
} catch (err) {
logger.error('Error deleting tag definitions:', err)
throw err
}
}, [knowledgeBaseId, documentId, fetchTagDefinitions])

const getTagLabel = useCallback(
(tagSlot: string): string => {
@@ -90,10 +156,15 @@ export function useTagDefinitions(
[tagDefinitions]
)

// Auto-fetch on mount and when dependencies change
useEffect(() => {
fetchTagDefinitions()
}, [fetchTagDefinitions])

return {
tagDefinitions,
isLoading: query.isLoading,
error: query.error instanceof Error ? query.error.message : null,
isLoading,
error,
fetchTagDefinitions,
saveTagDefinitions,
deleteTagDefinitions,
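In both variants the hook hands back saveTagDefinitions and deleteTagDefinitions, so a caller could look like the sketch below. The TagEditor component, the 'tag1' slot value, and the 'text' field type are made-up illustrations, not values from the repository.

// Hypothetical caller, not part of the diff.
function TagEditor({ knowledgeBaseId, documentId }: { knowledgeBaseId: string; documentId: string }) {
  const { saveTagDefinitions, deleteTagDefinitions } = useTagDefinitions(knowledgeBaseId, documentId)

  const onSave = async () => {
    await saveTagDefinitions([
      { tagSlot: 'tag1' as AllTagSlot, displayName: 'Department', fieldType: 'text' },
    ])
  }

  return (
    <div>
      <button onClick={onSave}>Save tags</button>
      <button onClick={() => deleteTagDefinitions()}>Clear tags</button>
    </div>
  )
}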
@@ -1,34 +1,11 @@
import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { getQueryClient } from '@/app/_shell/providers/query-provider'
import type { CustomToolDefinition, CustomToolSchema } from '@/stores/custom-tools'
import { useCustomToolsStore } from '@/stores/custom-tools'

const logger = createLogger('CustomToolsQueries')
const API_ENDPOINT = '/api/tools/custom'

export interface CustomToolSchema {
type: string
function: {
name: string
description?: string
parameters: {
type: string
properties: Record<string, unknown>
required?: string[]
}
}
}

export interface CustomToolDefinition {
id: string
workspaceId: string | null
userId: string | null
title: string
schema: CustomToolSchema
code: string
createdAt: string
updatedAt?: string
}

/**
* Query key factories for custom tools queries
*/
@@ -87,39 +64,8 @@ function normalizeCustomTool(tool: ApiCustomTool, workspaceId: string): CustomTo
}
}

/**
* Extract workspaceId from the current URL path
* Expected format: /workspace/{workspaceId}/...
*/
function getWorkspaceIdFromUrl(): string | null {
if (typeof window === 'undefined') return null
const match = window.location.pathname.match(/^\/workspace\/([^/]+)/)
return match?.[1] ?? null
}

/**
* Get all custom tools from the query cache (for non-React code)
* If workspaceId is not provided, extracts it from the current URL
*/
export function getCustomTools(workspaceId?: string): CustomToolDefinition[] {
if (typeof window === 'undefined') return []
const wsId = workspaceId ?? getWorkspaceIdFromUrl()
if (!wsId) return []
const queryClient = getQueryClient()
return queryClient.getQueryData<CustomToolDefinition[]>(customToolsKeys.list(wsId)) ?? []
}

/**
* Get a specific custom tool from the query cache by ID or title (for non-React code)
* Custom tools are referenced by title in the system (custom_${title}), so title lookup is required.
* If workspaceId is not provided, extracts it from the current URL
*/
export function getCustomTool(
identifier: string,
workspaceId?: string
): CustomToolDefinition | undefined {
const tools = getCustomTools(workspaceId)
return tools.find((tool) => tool.id === identifier || tool.title === identifier)
function syncCustomToolsToStore(tools: CustomToolDefinition[]) {
useCustomToolsStore.getState().setTools(tools)
}

/**
@@ -188,13 +134,19 @@ async function fetchCustomTools(workspaceId: string): Promise<CustomToolDefiniti
* Hook to fetch custom tools
*/
export function useCustomTools(workspaceId: string) {
return useQuery<CustomToolDefinition[]>({
const query = useQuery<CustomToolDefinition[]>({
queryKey: customToolsKeys.list(workspaceId),
queryFn: () => fetchCustomTools(workspaceId),
enabled: !!workspaceId,
staleTime: 60 * 1000, // 1 minute - tools don't change frequently
placeholderData: keepPreviousData,
})

if (query.data) {
syncCustomToolsToStore(query.data)
}

return query
}
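The cache helpers shown on one side of this hunk (getCustomTools and getCustomTool) are meant for non-React code, with the workspace id falling back to the current URL. A hedged sketch of how they would be called (the tool title and the workspace id are made-up values):

// Hypothetical non-React call site; custom tools are referenced as custom_${title},
// so lookup by title is supported alongside lookup by id.
const tool = getCustomTool('Send Slack Message') // workspaceId inferred from the URL
const allTools = getCustomTools('workspace_123') // explicit, made-up workspace id

if (tool) {
  console.log(tool.id, tool.title, tool.schema.function.name)
}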

/**
Some files were not shown because too many files have changed in this diff.