Compare commits

..

1 Commit

212 changed files with 7120 additions and 30031 deletions

View File

@@ -4678,349 +4678,3 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
width='400'
height='400'
viewBox='50 40 300 320'
fill='none'
xmlns='http://www.w3.org/2000/svg'
>
<path
fillRule='evenodd'
clipRule='evenodd'
d='M85.3434 70.7805H314.657V240.307L226.44 329.219H85.3434V70.7805ZM107.796 93.2319H292.205V204.487H206.493V306.767H107.801L107.796 93.2319Z'
fill='#FFFFFF'
/>
</svg>
)
}
export function PulseIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
width='24'
height='24'
viewBox='0 6 24 24'
fill='none'
xmlns='http://www.w3.org/2000/svg'
>
<path
d='M0 6.63667C0 6.28505 0.284685 6 0.635863 6H1.54133C1.89251 6 2.17719 6.28505 2.17719 6.63667V7.54329C2.17719 7.89492 1.89251 8.17997 1.54133 8.17997H0.635863C0.284686 8.17997 0 7.89492 0 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 6.63667C3.11318 6.28505 3.39787 6 3.74905 6H4.65452C5.00569 6 5.29038 6.28505 5.29038 6.63667V7.54329C5.29038 7.89492 5.00569 8.17997 4.65452 8.17997H3.74905C3.39787 8.17997 3.11318 7.89492 3.11318 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 6.63667C6.22637 6.28505 6.51105 6 6.86223 6H7.7677C8.11888 6 8.40356 6.28505 8.40356 6.63667V7.54329C8.40356 7.89492 8.11888 8.17997 7.7677 8.17997H6.86223C6.51105 8.17997 6.22637 7.89492 6.22637 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 6.63667C9.33955 6.28505 9.62424 6 9.97541 6H10.8809C11.2321 6 11.5167 6.28505 11.5167 6.63667V7.54329C11.5167 7.89492 11.2321 8.17997 10.8809 8.17997H9.97541C9.62424 8.17997 9.33955 7.89492 9.33955 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 6.63667C12.4527 6.28505 12.7374 6 13.0886 6H13.9941C14.3452 6 14.6299 6.28505 14.6299 6.63667V7.54329C14.6299 7.89492 14.3452 8.17997 13.9941 8.17997H13.0886C12.7374 8.17997 12.4527 7.89492 12.4527 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 6.63667C15.5659 6.28505 15.8506 6 16.2018 6H17.1073C17.4584 6 17.7431 6.28505 17.7431 6.63667V7.54329C17.7431 7.89492 17.4584 8.17997 17.1073 8.17997H16.2018C15.8506 8.17997 15.5659 7.89492 15.5659 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 6.63667C18.6791 6.28505 18.9638 6 19.315 6H20.2204C20.5716 6 20.8563 6.28505 20.8563 6.63667V7.54329C20.8563 7.89492 20.5716 8.17997 20.2204 8.17997H19.315C18.9638 8.17997 18.6791 7.89492 18.6791 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 6.63667C21.7923 6.28505 22.077 6 22.4282 6H23.3336C23.6848 6 23.9695 6.28505 23.9695 6.63667V7.54329C23.9695 7.89492 23.6848 8.17997 23.3336 8.17997H22.4282C22.077 8.17997 21.7923 7.89492 21.7923 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 9.75382C0 9.4022 0.284685 9.11715 0.635863 9.11715H1.54133C1.89251 9.11715 2.17719 9.4022 2.17719 9.75382V10.6604C2.17719 11.0121 1.89251 11.2971 1.54133 11.2971H0.635863C0.284686 11.2971 0 11.0121 0 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 9.75382C3.11318 9.4022 3.39787 9.11715 3.74905 9.11715H4.65452C5.00569 9.11715 5.29038 9.4022 5.29038 9.75382V10.6604C5.29038 11.0121 5.00569 11.2971 4.65452 11.2971H3.74905C3.39787 11.2971 3.11318 11.0121 3.11318 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 9.75382C6.22637 9.4022 6.51105 9.11715 6.86223 9.11715H7.7677C8.11888 9.11715 8.40356 9.4022 8.40356 9.75382V10.6604C8.40356 11.0121 8.11888 11.2971 7.7677 11.2971H6.86223C6.51105 11.2971 6.22637 11.0121 6.22637 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 9.75382C9.33955 9.4022 9.62424 9.11715 9.97541 9.11715H10.8809C11.2321 9.11715 11.5167 9.4022 11.5167 9.75382V10.6604C11.5167 11.0121 11.2321 11.2971 10.8809 11.2971H9.97541C9.62424 11.2971 9.33955 11.0121 9.33955 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 9.75382C12.4527 9.4022 12.7374 9.11715 13.0886 9.11715H13.9941C14.3452 9.11715 14.6299 9.4022 14.6299 9.75382V10.6604C14.6299 11.0121 14.3452 11.2971 13.9941 11.2971H13.0886C12.7374 11.2971 12.4527 11.0121 12.4527 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 9.75382C15.5659 9.4022 15.8506 9.11715 16.2018 9.11715H17.1073C17.4584 9.11715 17.7431 9.4022 17.7431 9.75382V10.6604C17.7431 11.0121 17.4584 11.2971 17.1073 11.2971H16.2018C15.8506 11.2971 15.5659 11.0121 15.5659 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 9.75382C18.6791 9.4022 18.9638 9.11715 19.315 9.11715H20.2204C20.5716 9.11715 20.8563 9.4022 20.8563 9.75382V10.6604C20.8563 11.0121 20.5716 11.2971 20.2204 11.2971H19.315C18.9638 11.2971 18.6791 11.0121 18.6791 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 9.75382C21.7923 9.4022 22.077 9.11715 22.4282 9.11715H23.3336C23.6848 9.11715 23.9695 9.4022 23.9695 9.75382V10.6604C23.9695 11.0121 23.6848 11.2971 23.3336 11.2971H22.4282C22.077 11.2971 21.7923 11.0121 21.7923 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 12.871C0 12.5193 0.284685 12.2343 0.635863 12.2343H1.54133C1.89251 12.2343 2.17719 12.5193 2.17719 12.871V13.7776C2.17719 14.1292 1.89251 14.4143 1.54133 14.4143H0.635863C0.284686 14.4143 0 14.1292 0 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 12.871C3.11318 12.5193 3.39787 12.2343 3.74905 12.2343H4.65452C5.00569 12.2343 5.29038 12.5193 5.29038 12.871V13.7776C5.29038 14.1292 5.00569 14.4143 4.65452 14.4143H3.74905C3.39787 14.4143 3.11318 14.1292 3.11318 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 12.871C6.22637 12.5193 6.51105 12.2343 6.86223 12.2343H7.7677C8.11888 12.2343 8.40356 12.5193 8.40356 12.871V13.7776C8.40356 14.1292 8.11888 14.4143 7.7677 14.4143H6.86223C6.51105 14.4143 6.22637 14.1292 6.22637 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 12.871C9.33955 12.5193 9.62424 12.2343 9.97541 12.2343H10.8809C11.2321 12.2343 11.5167 12.5193 11.5167 12.871V13.7776C11.5167 14.1292 11.2321 14.4143 10.8809 14.4143H9.97541C9.62424 14.4143 9.33955 14.1292 9.33955 13.7776V12.871Z'
fill='#0E7BC9'
/>
<path
d='M12.4527 12.871C12.4527 12.5193 12.7374 12.2343 13.0886 12.2343H13.9941C14.3452 12.2343 14.6299 12.5193 14.6299 12.871V13.7776C14.6299 14.1292 14.3452 14.4143 13.9941 14.4143H13.0886C12.7374 14.4143 12.4527 14.1292 12.4527 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 12.871C15.5659 12.5193 15.8506 12.2343 16.2018 12.2343H17.1073C17.4584 12.2343 17.7431 12.5193 17.7431 12.871V13.7776C17.7431 14.1292 17.4584 14.4143 17.1073 14.4143H16.2018C15.8506 14.4143 15.5659 14.1292 15.5659 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 12.871C18.6791 12.5193 18.9638 12.2343 19.315 12.2343H20.2204C20.5716 12.2343 20.8563 12.5193 20.8563 12.871V13.7776C20.8563 14.1292 20.5716 14.4143 20.2204 14.4143H19.315C18.9638 14.4143 18.6791 14.1292 18.6791 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 12.871C21.7923 12.5193 22.077 12.2343 22.4282 12.2343H23.3336C23.6848 12.2343 23.9695 12.5193 23.9695 12.871V13.7776C23.9695 14.1292 23.6848 14.4143 23.3336 14.4143H22.4282C22.077 14.4143 21.7923 14.1292 21.7923 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 15.9881C0 15.6365 0.284685 15.3514 0.635863 15.3514H1.54133C1.89251 15.3514 2.17719 15.6365 2.17719 15.9881V16.8947C2.17719 17.2464 1.89251 17.5314 1.54133 17.5314H0.635863C0.284686 17.5314 0 17.2464 0 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 15.9881C3.11318 15.6365 3.39787 15.3514 3.74905 15.3514H4.65452C5.00569 15.3514 5.29038 15.6365 5.29038 15.9881V16.8947C5.29038 17.2464 5.00569 17.5314 4.65452 17.5314H3.74905C3.39787 17.5314 3.11318 17.2464 3.11318 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 15.9881C6.22637 15.6365 6.51105 15.3514 6.86223 15.3514H7.7677C8.11888 15.3514 8.40356 15.6365 8.40356 15.9881V16.8947C8.40356 17.2464 8.11888 17.5314 7.7677 17.5314H6.86223C6.51105 17.5314 6.22637 17.2464 6.22637 16.8947V15.9881Z'
fill='#0E7BC9'
/>
<path
d='M9.33955 15.9881C9.33955 15.6365 9.62424 15.3514 9.97541 15.3514H10.8809C11.2321 15.3514 11.5167 15.6365 11.5167 15.9881V16.8947C11.5167 17.2464 11.2321 17.5314 10.8809 17.5314H9.97541C9.62424 17.5314 9.33955 17.2464 9.33955 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 15.9881C12.4527 15.6365 12.7374 15.3514 13.0886 15.3514H13.9941C14.3452 15.3514 14.6299 15.6365 14.6299 15.9881V16.8947C14.6299 17.2464 14.3452 17.5314 13.9941 17.5314H13.0886C12.7374 17.5314 12.4527 17.2464 12.4527 16.8947V15.9881Z'
fill='#0E7BC9'
/>
<path
d='M15.5659 15.9881C15.5659 15.6365 15.8506 15.3514 16.2018 15.3514H17.1073C17.4584 15.3514 17.7431 15.6365 17.7431 15.9881V16.8947C17.7431 17.2464 17.4584 17.5314 17.1073 17.5314H16.2018C15.8506 17.5314 15.5659 17.2464 15.5659 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 15.9881C18.6791 15.6365 18.9638 15.3514 19.315 15.3514H20.2204C20.5716 15.3514 20.8563 15.6365 20.8563 15.9881V16.8947C20.8563 17.2464 20.5716 17.5314 20.2204 17.5314H19.315C18.9638 17.5314 18.6791 17.2464 18.6791 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 15.9881C21.7923 15.6365 22.077 15.3514 22.4282 15.3514H23.3336C23.6848 15.3514 23.9695 15.6365 23.9695 15.9881V16.8947C23.9695 17.2464 23.6848 17.5314 23.3336 17.5314H22.4282C22.077 17.5314 21.7923 17.2464 21.7923 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 19.1053C0 18.7536 0.284685 18.4686 0.635863 18.4686H1.54133C1.89251 18.4686 2.17719 18.7536 2.17719 19.1053V20.0119C2.17719 20.3635 1.89251 20.6486 1.54133 20.6486H0.635863C0.284686 20.6486 0 20.3635 0 20.0119V19.1053Z'
fill='#0E7BC9'
/>
<path
d='M3.11318 19.1053C3.11318 18.7536 3.39787 18.4686 3.74905 18.4686H4.65452C5.00569 18.4686 5.29038 18.7536 5.29038 19.1053V20.0119C5.29038 20.3635 5.00569 20.6486 4.65452 20.6486H3.74905C3.39787 20.6486 3.11318 20.3635 3.11318 20.0119V19.1053Z'
fill='#0E7BC9'
/>
<path
d='M6.22637 19.1053C6.22637 18.7536 6.51105 18.4686 6.86223 18.4686H7.7677C8.11888 18.4686 8.40356 18.7536 8.40356 19.1053V20.0119C8.40356 20.3635 8.11888 20.6486 7.7677 20.6486H6.86223C6.51105 20.6486 6.22637 20.3635 6.22637 20.0119V19.1053Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 19.1053C9.33955 18.7536 9.62424 18.4686 9.97541 18.4686H10.8809C11.2321 18.4686 11.5167 18.7536 11.5167 19.1053V20.0119C11.5167 20.3635 11.2321 20.6486 10.8809 20.6486H9.97541C9.62424 20.6486 9.33955 20.3635 9.33955 20.0119V19.1053Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 19.1053C12.4527 18.7536 12.7374 18.4686 13.0886 18.4686H13.9941C14.3452 18.4686 14.6299 18.7536 14.6299 19.1053V20.0119C14.6299 20.3635 14.3452 20.6486 13.9941 20.6486H13.0886C12.7374 20.6486 12.4527 20.3635 12.4527 20.0119V19.1053Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 19.1053C15.5659 18.7536 15.8506 18.4686 16.2018 18.4686H17.1073C17.4584 18.4686 17.7431 18.7536 17.7431 19.1053V20.0119C17.7431 20.3635 17.4584 20.6486 17.1073 20.6486H16.2018C15.8506 20.6486 15.5659 20.3635 15.5659 20.0119V19.1053Z'
fill='#0E7BC9'
/>
<path
d='M18.6791 19.1053C18.6791 18.7536 18.9638 18.4686 19.315 18.4686H20.2204C20.5716 18.4686 20.8563 18.7536 20.8563 19.1053V20.0119C20.8563 20.3635 20.5716 20.6486 20.2204 20.6486H19.315C18.9638 20.6486 18.6791 20.3635 18.6791 20.0119V19.1053Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 19.1053C21.7923 18.7536 22.077 18.4686 22.4282 18.4686H23.3336C23.6848 18.4686 23.9695 18.7536 23.9695 19.1053V20.0119C23.9695 20.3635 23.6848 20.6486 23.3336 20.6486H22.4282C22.077 20.6486 21.7923 20.3635 21.7923 20.0119V19.1053Z'
fill='#0E7BC9'
/>
<path
d='M0 22.2224C0 21.8708 0.284685 21.5857 0.635863 21.5857H1.54133C1.89251 21.5857 2.17719 21.8708 2.17719 22.2224V23.129C2.17719 23.4807 1.89251 23.7657 1.54133 23.7657H0.635863C0.284686 23.7657 0 23.4807 0 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 22.2224C3.11318 21.8708 3.39787 21.5857 3.74905 21.5857H4.65452C5.00569 21.5857 5.29038 21.8708 5.29038 22.2224V23.129C5.29038 23.4807 5.00569 23.7657 4.65452 23.7657H3.74905C3.39787 23.7657 3.11318 23.4807 3.11318 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 22.2224C6.22637 21.8708 6.51105 21.5857 6.86223 21.5857H7.7677C8.11888 21.5857 8.40356 21.8708 8.40356 22.2224V23.129C8.40356 23.4807 8.11888 23.7657 7.7677 23.7657H6.86223C6.51105 23.7657 6.22637 23.4807 6.22637 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 22.2224C9.33955 21.8708 9.62424 21.5857 9.97541 21.5857H10.8809C11.2321 21.5857 11.5167 21.8708 11.5167 22.2224V23.129C11.5167 23.4807 11.2321 23.7657 10.8809 23.7657H9.97541C9.62424 23.7657 9.33955 23.4807 9.33955 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 22.2224C12.4527 21.8708 12.7374 21.5857 13.0886 21.5857H13.9941C14.3452 21.5857 14.6299 21.8708 14.6299 22.2224V23.129C14.6299 23.4807 14.3452 23.7657 13.9941 23.7657H13.0886C12.7374 23.7657 12.4527 23.4807 12.4527 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 22.2224C15.5659 21.8708 15.8506 21.5857 16.2018 21.5857H17.1073C17.4584 21.5857 17.7431 21.8708 17.7431 22.2224V23.129C17.7431 23.4807 17.4584 23.7657 17.1073 23.7657H16.2018C15.8506 23.7657 15.5659 23.4807 15.5659 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 22.2224C18.6791 21.8708 18.9638 21.5857 19.315 21.5857H20.2204C20.5716 21.5857 20.8563 21.8708 20.8563 22.2224V23.129C20.8563 23.4807 20.5716 23.7657 20.2204 23.7657H19.315C18.9638 23.7657 18.6791 23.4807 18.6791 23.129V22.2224Z'
fill='#0E7BC9'
/>
<path
d='M21.7923 22.2224C21.7923 21.8708 22.077 21.5857 22.4282 21.5857H23.3336C23.6848 21.5857 23.9695 21.8708 23.9695 22.2224V23.129C23.9695 23.4807 23.6848 23.7657 23.3336 23.7657H22.4282C22.077 23.7657 21.7923 23.4807 21.7923 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 25.3396C0 24.9879 0.284685 24.7029 0.635863 24.7029H1.54133C1.89251 24.7029 2.17719 24.9879 2.17719 25.3396V26.2462C2.17719 26.5978 1.89251 26.8829 1.54133 26.8829H0.635863C0.284686 26.8829 0 26.5978 0 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 25.3396C3.11318 24.9879 3.39787 24.7029 3.74905 24.7029H4.65452C5.00569 24.7029 5.29038 24.9879 5.29038 25.3396V26.2462C5.29038 26.5978 5.00569 26.8829 4.65452 26.8829H3.74905C3.39787 26.8829 3.11318 26.5978 3.11318 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 25.3396C6.22637 24.9879 6.51105 24.7029 6.86223 24.7029H7.7677C8.11888 24.7029 8.40356 24.9879 8.40356 25.3396V26.2462C8.40356 26.5978 8.11888 26.8829 7.7677 26.8829H6.86223C6.51105 26.8829 6.22637 26.5978 6.22637 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 25.3396C9.33955 24.9879 9.62424 24.7029 9.97541 24.7029H10.8809C11.2321 24.7029 11.5167 24.9879 11.5167 25.3396V26.2462C11.5167 26.5978 11.2321 26.8829 10.8809 26.8829H9.97541C9.62424 26.8829 9.33955 26.5978 9.33955 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 25.3396C12.4527 24.9879 12.7374 24.7029 13.0886 24.7029H13.9941C14.3452 24.7029 14.6299 24.9879 14.6299 25.3396V26.2462C14.6299 26.5978 14.3452 26.8829 13.9941 26.8829H13.0886C12.7374 26.8829 12.4527 26.5978 12.4527 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 25.3396C15.5659 24.9879 15.8506 24.7029 16.2018 24.7029H17.1073C17.4584 24.7029 17.7431 24.9879 17.7431 25.3396V26.2462C17.7431 26.5978 17.4584 26.8829 17.1073 26.8829H16.2018C15.8506 26.8829 15.5659 26.5978 15.5659 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 25.3396C18.6791 24.9879 18.9638 24.7029 19.315 24.7029H20.2204C20.5716 24.7029 20.8563 24.9879 20.8563 25.3396V26.2462C20.8563 26.5978 20.5716 26.8829 20.2204 26.8829H19.315C18.9638 26.8829 18.6791 26.5978 18.6791 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 25.3396C21.7923 24.9879 22.077 24.7029 22.4282 24.7029H23.3336C23.6848 24.7029 23.9695 24.9879 23.9695 25.3396V26.2462C23.9695 26.5978 23.6848 26.8829 23.3336 26.8829H22.4282C22.077 26.8829 21.7923 26.5978 21.7923 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 28.4567C0 28.1051 0.284685 27.82 0.635863 27.82H1.54133C1.89251 27.82 2.17719 28.1051 2.17719 28.4567V29.3633C2.17719 29.715 1.89251 30 1.54133 30H0.635863C0.284686 30 0 29.715 0 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 28.4567C3.11318 28.1051 3.39787 27.82 3.74905 27.82H4.65452C5.00569 27.82 5.29038 28.1051 5.29038 28.4567V29.3633C5.29038 29.715 5.00569 30 4.65452 30H3.74905C3.39787 30 3.11318 29.715 3.11318 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 28.4567C6.22637 28.1051 6.51105 27.82 6.86223 27.82H7.7677C8.11888 27.82 8.40356 28.1051 8.40356 28.4567V29.3633C8.40356 29.715 8.11888 30 7.7677 30H6.86223C6.51105 30 6.22637 29.715 6.22637 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 28.4567C9.33955 28.1051 9.62424 27.82 9.97541 27.82H10.8809C11.2321 27.82 11.5167 28.1051 11.5167 28.4567V29.3633C11.5167 29.715 11.2321 30 10.8809 30H9.97541C9.62424 30 9.33955 29.715 9.33955 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 28.4567C12.4527 28.1051 12.7374 27.82 13.0886 27.82H13.9941C14.3452 27.82 14.6299 28.1051 14.6299 28.4567V29.3633C14.6299 29.715 14.3452 30 13.9941 30H13.0886C12.7374 30 12.4527 29.715 12.4527 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 28.4567C15.5659 28.1051 15.8506 27.82 16.2018 27.82H17.1073C17.4584 27.82 17.7431 28.1051 17.7431 28.4567V29.3633C17.7431 29.715 17.4584 30 17.1073 30H16.2018C15.8506 30 15.5659 29.715 15.5659 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 28.4567C18.6791 28.1051 18.9638 27.82 19.315 27.82H20.2204C20.5716 27.82 20.8563 28.1051 20.8563 28.4567V29.3633C20.8563 29.715 20.5716 30 20.2204 30H19.315C18.9638 30 18.6791 29.715 18.6791 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 28.4567C21.7923 28.1051 22.077 27.82 22.4282 27.82H23.3336C23.6848 27.82 23.9695 28.1051 23.9695 28.4567V29.3633C23.9695 29.715 23.6848 30 23.3336 30H22.4282C22.077 30 21.7923 29.715 21.7923 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
</svg>
)
}

View File

@@ -84,11 +84,9 @@ import {
PolymarketIcon,
PostgresIcon,
PosthogIcon,
PulseIcon,
QdrantIcon,
RDSIcon,
RedditIcon,
ReductoIcon,
ResendIcon,
S3Icon,
SalesforceIcon,
@@ -210,11 +208,9 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
polymarket: PolymarketIcon,
postgresql: PostgresIcon,
posthog: PosthogIcon,
pulse: PulseIcon,
qdrant: QdrantIcon,
rds: RDSIcon,
reddit: RedditIcon,
reducto: ReductoIcon,
resend: ResendIcon,
s3: S3Icon,
salesforce: SalesforceIcon,

View File

@@ -79,11 +79,9 @@
"polymarket",
"postgresql",
"posthog",
"pulse",
"qdrant",
"rds",
"reddit",
"reducto",
"resend",
"s3",
"salesforce",

View File

@@ -1,72 +0,0 @@
---
title: Pulse
description: Extract text from documents using Pulse OCR
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="pulse"
color="#E0E0E0"
/>
{/* MANUAL-CONTENT-START:intro */}
The [Pulse](https://www.pulseapi.com/) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
With Pulse, you can:
- **Extract text from documents**: Quickly convert scanned PDFs, images, and Office documents to usable text, markdown, or JSON.
- **Process documents by URL or upload**: Simply provide a file URL or use upload to extract text from local documents or remote resources.
- **Flexible output formats**: Choose between markdown, plain text, or JSON representations of the extracted content for downstream processing.
- **Selective page processing**: Specify a range of pages to process, reducing processing time and cost when you only need part of a document.
- **Figure and table extraction**: Optionally extract figures and tables, with automatic caption and description generation to provide added context.
- **Get processing insights**: Receive detailed metadata on each job, including file type, page count, processing time, and more.
- **Integration-ready responses**: Incorporate extracted content into research, workflow automation, or data analysis pipelines.
Ideal for automating tedious document review, enabling content summarization, research, and more, Pulse Parser brings real-world documents into the digital workflow era.
If you need accurate, scalable, and developer-friendly document parsing capabilities—across formats, languages, and layouts—Pulse empowers your agents to read the world.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via URL or upload.
## Tools
### `pulse_parser`
Parse documents (PDF, images, Office docs) using Pulse OCR API
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `filePath` | string | Yes | URL to a document to be processed |
| `fileUpload` | object | No | File upload data from file-upload component |
| `pages` | string | No | Page range to process \(1-indexed, e.g., "1-2,5"\) |
| `extractFigure` | boolean | No | Enable figure extraction from the document |
| `figureDescription` | boolean | No | Generate descriptions/captions for extracted figures |
| `returnHtml` | boolean | No | Include HTML in the response |
| `chunking` | string | No | Chunking strategies \(comma-separated: semantic, header, page, recursive\) |
| `chunkSize` | number | No | Maximum characters per chunk when chunking is enabled |
| `apiKey` | string | Yes | Pulse API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `markdown` | string | Extracted content in markdown format |
| `page_count` | number | Number of pages in the document |
| `job_id` | string | Unique job identifier |
| `bounding_boxes` | json | Bounding box layout information |
| `extraction_url` | string | URL for extraction results \(for large documents\) |
| `html` | string | HTML content if requested |
| `structured_output` | json | Structured output if schema was provided |
| `chunks` | json | Chunked content if chunking was enabled |
| `figures` | json | Extracted figures if figure extraction was enabled |
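As a rough illustration (not part of the original Pulse doc), the input table above corresponds to a payload shaped roughly as follows; the field names come from the table, while the values and the `pulseParserInput` variable name are illustrative assumptions:
```ts
// Hypothetical pulse_parser input sketch based on the parameter table above.
// Field names come from the table; values and the variable name are illustrative.
const pulseParserInput = {
  filePath: 'https://example.com/report.pdf', // required: URL of the document to process
  pages: '1-3,5',                             // optional: 1-indexed page range
  extractFigure: true,                        // optional: extract figures from the document
  figureDescription: true,                    // optional: generate captions/descriptions for figures
  returnHtml: false,                          // optional: include HTML in the response
  chunking: 'semantic,page',                  // optional: comma-separated chunking strategies
  chunkSize: 2000,                            // optional: max characters per chunk when chunking
  apiKey: process.env.PULSE_API_KEY ?? '',    // required: Pulse API key
}
```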

View File

@@ -1,63 +0,0 @@
---
title: Reducto
description: Extract text from PDF documents
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="reducto"
color="#5c0c5c"
/>
{/* MANUAL-CONTENT-START:intro */}
The [Reducto](https://reducto.ai/) tool enables fast and accurate extraction of text and data from PDF documents via OCR (Optical Character Recognition). Reducto is designed for agent workflows, making it easy to process uploaded or linked PDFs and transform their contents into ready-to-use information.
With the Reducto tool, you can:
- **Extract text and tables from PDFs**: Quickly convert scanned or digital PDFs to text, markdown, or structured JSON.
- **Parse PDFs from uploads or URLs**: Process documents either by uploading a PDF or specifying a direct URL.
- **Customize output formatting**: Choose your preferred output format—markdown, plain text, or JSON—and specify table formats as markdown or HTML.
- **Select specific pages**: Optionally extract content from particular pages to optimize processing and focus on what matters.
- **Receive detailed processing metadata**: Alongside extracted content, get job details, processing times, source file info, page counts, and OCR usage stats for audit and automation.
Whether you're automating workflow steps, extracting business-critical information, or unlocking archival documents for search and analysis, Reducto's OCR parser gives you structured, actionable data from even the most complex PDFs.
Looking for reliable and scalable PDF parsing? Reducto is optimized for developer and agent use—providing accuracy, speed, and flexibility for modern document understanding.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Reducto Parse into the workflow. It can extract text from uploaded PDF documents or from a URL.
## Tools
### `reducto_parser`
Parse PDF documents using Reducto OCR API
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `filePath` | string | Yes | URL to a PDF document to be processed |
| `fileUpload` | object | No | File upload data from file-upload component |
| `pages` | array | No | Specific pages to process \(1-indexed page numbers\) |
| `tableOutputFormat` | string | No | Table output format \(html or markdown\). Defaults to markdown. |
| `apiKey` | string | Yes | Reducto API key \(REDUCTO_API_KEY\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `job_id` | string | Unique identifier for the processing job |
| `duration` | number | Processing time in seconds |
| `usage` | json | Resource consumption data |
| `result` | json | Parsed document content with chunks and blocks |
| `pdf_url` | string | Storage URL of converted PDF |
| `studio_link` | string | Link to Reducto studio interface |
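As a rough illustration (not part of the original Reducto doc), the input table above maps to a payload along these lines; field names come from the table, while the values and the `reductoParserInput` variable name are illustrative assumptions:
```ts
// Hypothetical reducto_parser input sketch based on the parameter table above.
// Field names come from the table; values and the variable name are illustrative.
const reductoParserInput = {
  filePath: 'https://example.com/invoice.pdf', // required: URL of the PDF to process
  pages: [1, 2],                               // optional: 1-indexed page numbers
  tableOutputFormat: 'markdown',               // optional: 'html' or 'markdown' (defaults to markdown)
  apiKey: process.env.REDUCTO_API_KEY ?? '',   // required: Reducto API key (REDUCTO_API_KEY)
}
```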

View File

@@ -1,11 +0,0 @@
'use client'
import { Tooltip } from '@/components/emcn'
interface TooltipProviderProps {
children: React.ReactNode
}
export function TooltipProvider({ children }: TooltipProviderProps) {
return <Tooltip.Provider>{children}</Tooltip.Provider>
}

View File

@@ -58,25 +58,6 @@
pointer-events: none !important;
}
/**
* Workflow canvas cursor styles
* Override React Flow's default selection cursor based on canvas mode
*/
.workflow-container.canvas-mode-cursor .react-flow__pane,
.workflow-container.canvas-mode-cursor .react-flow__selectionpane {
cursor: default !important;
}
.workflow-container.canvas-mode-hand .react-flow__pane,
.workflow-container.canvas-mode-hand .react-flow__selectionpane {
cursor: grab !important;
}
.workflow-container.canvas-mode-hand .react-flow__pane:active,
.workflow-container.canvas-mode-hand .react-flow__selectionpane:active {
cursor: grabbing !important;
}
/**
* Selected node ring indicator
* Uses a pseudo-element overlay to match the original behavior (absolute inset-0 z-40)
@@ -676,20 +657,6 @@ input[type="search"]::-ms-clear {
}
}
/**
* Notification toast enter animation
*/
@keyframes notification-enter {
from {
opacity: 0;
transform: translateX(-16px);
}
to {
opacity: 1;
transform: translateX(var(--stack-offset, 0px));
}
}
/**
* @deprecated
* Legacy globals (light/dark) kept for backward-compat with old classes.

File diff suppressed because it is too large.

View File

@@ -3,60 +3,13 @@
*
* @vitest-environment node
*/
import {
createMockRequest,
mockConsoleLogger,
mockCryptoUuid,
mockDrizzleOrm,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'
vi.mock('@/lib/core/utils/urls', () => ({
getBaseUrl: vi.fn(() => 'https://app.example.com'),
}))
/** Setup auth API mocks for testing authentication routes */
function setupAuthApiMocks(
options: {
operations?: {
forgetPassword?: { success?: boolean; error?: string }
resetPassword?: { success?: boolean; error?: string }
}
} = {}
) {
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
mockConsoleLogger()
mockDrizzleOrm()
const { operations = {} } = options
const defaultOperations = {
forgetPassword: { success: true, error: 'Forget password error', ...operations.forgetPassword },
resetPassword: { success: true, error: 'Reset password error', ...operations.resetPassword },
}
const createAuthMethod = (config: { success?: boolean; error?: string }) => {
return vi.fn().mockImplementation(() => {
if (config.success) {
return Promise.resolve()
}
return Promise.reject(new Error(config.error))
})
}
vi.doMock('@/lib/auth', () => ({
auth: {
api: {
forgetPassword: createAuthMethod(defaultOperations.forgetPassword),
resetPassword: createAuthMethod(defaultOperations.resetPassword),
},
},
}))
}
describe('Forget Password API Route', () => {
beforeEach(() => {
vi.resetModules()

View File

@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'
describe('OAuth Connections API Route', () => {
const mockGetSession = vi.fn()

View File

@@ -4,9 +4,9 @@
* @vitest-environment node
*/
import { createMockLogger } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger } from '@/app/api/__test-utils__/utils'
describe('OAuth Credentials API Route', () => {
const mockGetSession = vi.fn()

View File

@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'
describe('OAuth Disconnect API Route', () => {
const mockGetSession = vi.fn()

View File

@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'
describe('OAuth Token API Routes', () => {
const mockGetUserId = vi.fn()

View File

@@ -3,55 +3,8 @@
*
* @vitest-environment node
*/
import {
createMockRequest,
mockConsoleLogger,
mockCryptoUuid,
mockDrizzleOrm,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/** Setup auth API mocks for testing authentication routes */
function setupAuthApiMocks(
options: {
operations?: {
forgetPassword?: { success?: boolean; error?: string }
resetPassword?: { success?: boolean; error?: string }
}
} = {}
) {
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
mockConsoleLogger()
mockDrizzleOrm()
const { operations = {} } = options
const defaultOperations = {
forgetPassword: { success: true, error: 'Forget password error', ...operations.forgetPassword },
resetPassword: { success: true, error: 'Reset password error', ...operations.resetPassword },
}
const createAuthMethod = (config: { success?: boolean; error?: string }) => {
return vi.fn().mockImplementation(() => {
if (config.success) {
return Promise.resolve()
}
return Promise.reject(new Error(config.error))
})
}
vi.doMock('@/lib/auth', () => ({
auth: {
api: {
forgetPassword: createAuthMethod(defaultOperations.forgetPassword),
resetPassword: createAuthMethod(defaultOperations.resetPassword),
},
},
}))
}
import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'
describe('Reset Password API Route', () => {
beforeEach(() => {

View File

@@ -5,34 +5,7 @@
*/
import { loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/**
* Creates a mock NextRequest with cookies support for testing.
*/
function createMockNextRequest(
method = 'GET',
body?: unknown,
headers: Record<string, string> = {},
url = 'http://localhost:3000/api/test'
): any {
const headersObj = new Headers({
'Content-Type': 'application/json',
...headers,
})
return {
method,
headers: headersObj,
cookies: {
get: vi.fn().mockReturnValue(undefined),
},
json:
body !== undefined
? vi.fn().mockResolvedValue(body)
: vi.fn().mockRejectedValue(new Error('No body')),
url,
}
}
import { createMockRequest } from '@/app/api/__test-utils__/utils'
const createMockStream = () => {
return new ReadableStream({
@@ -98,15 +71,10 @@ vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn().mockReturnValue('test-request-id'),
}))
vi.mock('@/lib/core/security/encryption', () => ({
decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'test-password' }),
}))
describe('Chat Identifier API Route', () => {
const mockAddCorsHeaders = vi.fn().mockImplementation((response) => response)
const mockValidateChatAuth = vi.fn().mockResolvedValue({ authorized: true })
const mockSetChatAuthCookie = vi.fn()
const mockValidateAuthToken = vi.fn().mockReturnValue(false)
const mockChatResult = [
{
@@ -146,16 +114,11 @@ describe('Chat Identifier API Route', () => {
beforeEach(() => {
vi.resetModules()
vi.doMock('@/lib/core/security/deployment', () => ({
addCorsHeaders: mockAddCorsHeaders,
validateAuthToken: mockValidateAuthToken,
setDeploymentAuthCookie: vi.fn(),
isEmailAllowed: vi.fn().mockReturnValue(false),
}))
vi.doMock('@/app/api/chat/utils', () => ({
addCorsHeaders: mockAddCorsHeaders,
validateChatAuth: mockValidateChatAuth,
setChatAuthCookie: mockSetChatAuthCookie,
validateAuthToken: vi.fn().mockReturnValue(true),
}))
// Mock logger - use loggerMock from @sim/testing
@@ -212,7 +175,7 @@ describe('Chat Identifier API Route', () => {
describe('GET endpoint', () => {
it('should return chat info for a valid identifier', async () => {
const req = createMockNextRequest('GET')
const req = createMockRequest('GET')
const params = Promise.resolve({ identifier: 'test-chat' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -243,7 +206,7 @@ describe('Chat Identifier API Route', () => {
}
})
const req = createMockNextRequest('GET')
const req = createMockRequest('GET')
const params = Promise.resolve({ identifier: 'nonexistent' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -277,7 +240,7 @@ describe('Chat Identifier API Route', () => {
}
})
const req = createMockNextRequest('GET')
const req = createMockRequest('GET')
const params = Promise.resolve({ identifier: 'inactive-chat' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -298,7 +261,7 @@ describe('Chat Identifier API Route', () => {
error: 'auth_required_password',
}))
const req = createMockNextRequest('GET')
const req = createMockRequest('GET')
const params = Promise.resolve({ identifier: 'password-protected-chat' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -319,7 +282,7 @@ describe('Chat Identifier API Route', () => {
describe('POST endpoint', () => {
it('should handle authentication requests without input', async () => {
const req = createMockNextRequest('POST', { password: 'test-password' })
const req = createMockRequest('POST', { password: 'test-password' })
const params = Promise.resolve({ identifier: 'password-protected-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -335,7 +298,7 @@ describe('Chat Identifier API Route', () => {
})
it('should return 400 for requests without input', async () => {
const req = createMockNextRequest('POST', {})
const req = createMockRequest('POST', {})
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -356,7 +319,7 @@ describe('Chat Identifier API Route', () => {
error: 'Authentication required',
}))
const req = createMockNextRequest('POST', { input: 'Hello' })
const req = createMockRequest('POST', { input: 'Hello' })
const params = Promise.resolve({ identifier: 'protected-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -387,7 +350,7 @@ describe('Chat Identifier API Route', () => {
},
})
const req = createMockNextRequest('POST', { input: 'Hello' })
const req = createMockRequest('POST', { input: 'Hello' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -406,10 +369,7 @@ describe('Chat Identifier API Route', () => {
})
it('should return streaming response for valid chat messages', async () => {
const req = createMockNextRequest('POST', {
input: 'Hello world',
conversationId: 'conv-123',
})
const req = createMockRequest('POST', { input: 'Hello world', conversationId: 'conv-123' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -441,7 +401,7 @@ describe('Chat Identifier API Route', () => {
}, 10000)
it('should handle streaming response body correctly', async () => {
const req = createMockNextRequest('POST', { input: 'Hello world' })
const req = createMockRequest('POST', { input: 'Hello world' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -471,7 +431,7 @@ describe('Chat Identifier API Route', () => {
throw new Error('Execution failed')
})
const req = createMockNextRequest('POST', { input: 'Trigger error' })
const req = createMockRequest('POST', { input: 'Trigger error' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -510,7 +470,7 @@ describe('Chat Identifier API Route', () => {
})
it('should pass conversationId to streaming execution when provided', async () => {
const req = createMockNextRequest('POST', {
const req = createMockRequest('POST', {
input: 'Hello world',
conversationId: 'test-conversation-123',
})
@@ -532,7 +492,7 @@ describe('Chat Identifier API Route', () => {
})
it('should handle missing conversationId gracefully', async () => {
const req = createMockNextRequest('POST', { input: 'Hello world' })
const req = createMockRequest('POST', { input: 'Hello world' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')

View File

@@ -3,9 +3,9 @@
*
* @vitest-environment node
*/
import { mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@/app/api/__test-utils__/utils'
describe('Copilot API Keys API Route', () => {
const mockFetch = vi.fn()

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Chat Delete API Route', () => {
const mockDelete = vi.fn()

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Chat Update Messages API Route', () => {
const mockSelect = vi.fn()

View File

@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
import { mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { mockCryptoUuid, setupCommonApiMocks } from '@/app/api/__test-utils__/utils'
describe('Copilot Chats List API Route', () => {
const mockSelect = vi.fn()

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Checkpoints Revert API Route', () => {
const mockSelect = vi.fn()

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Checkpoints API Route', () => {
const mockSelect = vi.fn()

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Confirm API Route', () => {
const mockRedisExists = vi.fn()

View File

@@ -3,9 +3,13 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Feedback API Route', () => {
const mockInsert = vi.fn()

View File

@@ -3,9 +3,13 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Stats API Route', () => {
const mockFetch = vi.fn()

View File

@@ -1,87 +1,5 @@
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/** Setup file API mocks for file delete tests */
function setupFileApiMocks(
options: {
authenticated?: boolean
storageProvider?: 's3' | 'blob' | 'local'
cloudEnabled?: boolean
} = {}
) {
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth()
if (authenticated) {
authMocks.setAuthenticated()
} else {
authMocks.setUnauthenticated()
}
vi.doMock('@/lib/auth/hybrid', () => ({
checkHybridAuth: vi.fn().mockResolvedValue({
success: authenticated,
userId: authenticated ? 'test-user-id' : undefined,
error: authenticated ? undefined : 'Unauthorized',
}),
}))
vi.doMock('@/app/api/files/authorization', () => ({
verifyFileAccess: vi.fn().mockResolvedValue(true),
verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
}))
const uploadFileMock = vi.fn().mockResolvedValue({
path: '/api/files/serve/test-key.txt',
key: 'test-key.txt',
name: 'test.txt',
size: 100,
type: 'text/plain',
})
const downloadFileMock = vi.fn().mockResolvedValue(Buffer.from('test content'))
const deleteFileMock = vi.fn().mockResolvedValue(undefined)
const hasCloudStorageMock = vi.fn().mockReturnValue(cloudEnabled)
vi.doMock('@/lib/uploads', () => ({
getStorageProvider: vi.fn().mockReturnValue(storageProvider),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
StorageService: {
uploadFile: uploadFileMock,
downloadFile: downloadFileMock,
deleteFile: deleteFileMock,
hasCloudStorage: hasCloudStorageMock,
},
uploadFile: uploadFileMock,
downloadFile: downloadFileMock,
deleteFile: deleteFileMock,
hasCloudStorage: hasCloudStorageMock,
}))
vi.doMock('@/lib/uploads/core/storage-service', () => ({
uploadFile: uploadFileMock,
downloadFile: downloadFileMock,
deleteFile: deleteFileMock,
hasCloudStorage: hasCloudStorageMock,
}))
vi.doMock('fs/promises', () => ({
unlink: vi.fn().mockResolvedValue(undefined),
access: vi.fn().mockResolvedValue(undefined),
stat: vi.fn().mockResolvedValue({ isFile: () => true }),
}))
return { auth: authMocks }
}
import { createMockRequest, setupFileApiMocks } from '@/app/api/__test-utils__/utils'
describe('File Delete API Route', () => {
beforeEach(() => {

View File

@@ -1,59 +1,12 @@
import path from 'path'
import { NextRequest } from 'next/server'
/**
* Tests for file parse API route
*
* @vitest-environment node
*/
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
function setupFileApiMocks(
options: {
authenticated?: boolean
storageProvider?: 's3' | 'blob' | 'local'
cloudEnabled?: boolean
} = {}
) {
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth()
if (authenticated) {
authMocks.setAuthenticated()
} else {
authMocks.setUnauthenticated()
}
vi.doMock('@/lib/auth/hybrid', () => ({
checkHybridAuth: vi.fn().mockResolvedValue({
success: authenticated,
userId: authenticated ? 'test-user-id' : undefined,
error: authenticated ? undefined : 'Unauthorized',
}),
}))
vi.doMock('@/app/api/files/authorization', () => ({
verifyFileAccess: vi.fn().mockResolvedValue(true),
verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
}))
vi.doMock('@/lib/uploads', () => ({
getStorageProvider: vi.fn().mockReturnValue(storageProvider),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
}))
return { auth: authMocks }
}
import { createMockRequest, setupFileApiMocks } from '@/app/api/__test-utils__/utils'
const mockJoin = vi.fn((...args: string[]): string => {
if (args[0] === '/test/uploads') {

View File

@@ -1,6 +1,6 @@
import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'
/**
* Tests for file presigned API route
@@ -8,106 +8,6 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
* @vitest-environment node
*/
function setupFileApiMocks(
options: {
authenticated?: boolean
storageProvider?: 's3' | 'blob' | 'local'
cloudEnabled?: boolean
} = {}
) {
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth()
if (authenticated) {
authMocks.setAuthenticated()
} else {
authMocks.setUnauthenticated()
}
vi.doMock('@/lib/auth/hybrid', () => ({
checkHybridAuth: vi.fn().mockResolvedValue({
success: authenticated,
userId: authenticated ? 'test-user-id' : undefined,
error: authenticated ? undefined : 'Unauthorized',
}),
}))
vi.doMock('@/app/api/files/authorization', () => ({
verifyFileAccess: vi.fn().mockResolvedValue(true),
verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
}))
const useBlobStorage = storageProvider === 'blob' && cloudEnabled
const useS3Storage = storageProvider === 's3' && cloudEnabled
vi.doMock('@/lib/uploads/config', () => ({
USE_BLOB_STORAGE: useBlobStorage,
USE_S3_STORAGE: useS3Storage,
UPLOAD_DIR: '/uploads',
getStorageConfig: vi.fn().mockReturnValue(
useBlobStorage
? {
accountName: 'testaccount',
accountKey: 'testkey',
connectionString: 'testconnection',
containerName: 'testcontainer',
}
: {
bucket: 'test-bucket',
region: 'us-east-1',
}
),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
getStorageProvider: vi
.fn()
.mockReturnValue(
storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 'S3' : 'Local'
),
}))
const mockGeneratePresignedUploadUrl = vi.fn().mockImplementation(async (opts) => {
const timestamp = Date.now()
const safeFileName = opts.fileName.replace(/[^a-zA-Z0-9.-]/g, '_')
const key = `${opts.context}/${timestamp}-ik3a6w4-${safeFileName}`
return {
url: 'https://example.com/presigned-url',
key,
}
})
vi.doMock('@/lib/uploads/core/storage-service', () => ({
hasCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
generatePresignedUploadUrl: mockGeneratePresignedUploadUrl,
generatePresignedDownloadUrl: vi.fn().mockResolvedValue('https://example.com/presigned-url'),
}))
vi.doMock('@/lib/uploads/utils/validation', () => ({
validateFileType: vi.fn().mockReturnValue(null),
}))
vi.doMock('@/lib/uploads', () => ({
CopilotFiles: {
generateCopilotUploadUrl: vi.fn().mockResolvedValue({
url: 'https://example.com/presigned-url',
key: 'copilot/test-key.txt',
}),
isImageFileType: vi.fn().mockReturnValue(true),
},
getStorageProvider: vi
.fn()
.mockReturnValue(
storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 'S3' : 'Local'
),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
}))
return { auth: authMocks }
}
describe('/api/files/presigned', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -310,7 +210,7 @@ describe('/api/files/presigned', () => {
const data = await response.json()
expect(response.status).toBe(200)
expect(data.fileInfo.key).toMatch(/^knowledge-base\/.*knowledge-doc\.pdf$/)
expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
expect(data.directUploadSupported).toBe(true)
})

View File

@@ -1,49 +1,11 @@
import { NextRequest } from 'next/server'
/**
* Tests for file serve API route
*
* @vitest-environment node
*/
import {
defaultMockUser,
mockAuth,
mockCryptoUuid,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
function setupApiTestMocks(
options: {
authenticated?: boolean
user?: { id: string; email: string }
withFileSystem?: boolean
withUploadUtils?: boolean
} = {}
) {
const { authenticated = true, user = defaultMockUser, withFileSystem = false } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth(user)
if (authenticated) {
authMocks.setAuthenticated(user)
} else {
authMocks.setUnauthenticated()
}
if (withFileSystem) {
vi.doMock('fs/promises', () => ({
readFile: vi.fn().mockResolvedValue(Buffer.from('test content')),
access: vi.fn().mockResolvedValue(undefined),
stat: vi.fn().mockResolvedValue({ isFile: () => true, size: 100 }),
}))
}
return { auth: authMocks }
}
import { setupApiTestMocks } from '@/app/api/__test-utils__/utils'
describe('File Serve API Route', () => {
beforeEach(() => {
@@ -69,17 +31,6 @@ describe('File Serve API Route', () => {
existsSync: vi.fn().mockReturnValue(true),
}))
vi.doMock('@/lib/uploads', () => ({
CopilotFiles: {
downloadCopilotFile: vi.fn(),
},
isUsingCloudStorage: vi.fn().mockReturnValue(false),
}))
vi.doMock('@/lib/uploads/utils/file-utils', () => ({
inferContextFromKey: vi.fn().mockReturnValue('workspace'),
}))
vi.doMock('@/app/api/files/utils', () => ({
FileNotFoundError: class FileNotFoundError extends Error {
constructor(message: string) {
@@ -175,17 +126,6 @@ describe('File Serve API Route', () => {
verifyFileAccess: vi.fn().mockResolvedValue(true),
}))
vi.doMock('@/lib/uploads', () => ({
CopilotFiles: {
downloadCopilotFile: vi.fn(),
},
isUsingCloudStorage: vi.fn().mockReturnValue(false),
}))
vi.doMock('@/lib/uploads/utils/file-utils', () => ({
inferContextFromKey: vi.fn().mockReturnValue('workspace'),
}))
const req = new NextRequest(
'http://localhost:3000/api/files/serve/workspace/test-workspace-id/nested-path-file.txt'
)

View File

@@ -1,76 +1,11 @@
import { NextRequest } from 'next/server'
/**
* Tests for file upload API route
*
* @vitest-environment node
*/
import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
function setupFileApiMocks(
options: {
authenticated?: boolean
storageProvider?: 's3' | 'blob' | 'local'
cloudEnabled?: boolean
} = {}
) {
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth()
if (authenticated) {
authMocks.setAuthenticated()
} else {
authMocks.setUnauthenticated()
}
vi.doMock('@/lib/auth/hybrid', () => ({
checkHybridAuth: vi.fn().mockResolvedValue({
success: authenticated,
userId: authenticated ? 'test-user-id' : undefined,
error: authenticated ? undefined : 'Unauthorized',
}),
}))
vi.doMock('@/app/api/files/authorization', () => ({
verifyFileAccess: vi.fn().mockResolvedValue(true),
verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
verifyKBFileAccess: vi.fn().mockResolvedValue(true),
verifyCopilotFileAccess: vi.fn().mockResolvedValue(true),
}))
vi.doMock('@/lib/uploads/contexts/workspace', () => ({
uploadWorkspaceFile: vi.fn().mockResolvedValue({
id: 'test-file-id',
name: 'test.txt',
url: '/api/files/serve/workspace/test-workspace-id/test-file.txt',
size: 100,
type: 'text/plain',
key: 'workspace/test-workspace-id/1234567890-test.txt',
uploadedAt: new Date().toISOString(),
expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(),
}),
}))
const uploadFileMock = vi.fn().mockResolvedValue({
path: '/api/files/serve/test-key.txt',
key: 'test-key.txt',
name: 'test.txt',
size: 100,
type: 'text/plain',
})
vi.doMock('@/lib/uploads', () => ({
getStorageProvider: vi.fn().mockReturnValue(storageProvider),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
uploadFile: uploadFileMock,
}))
return { auth: authMocks }
}
import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'
describe('File Upload API Route', () => {
const createMockFormData = (files: File[], context = 'workspace'): FormData => {

View File

@@ -3,24 +3,15 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
type CapturedFolderValues,
createMockRequest,
type MockUser,
mockAuth,
mockConsoleLogger,
mockLogger,
setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/** Type for captured folder values in tests */
interface CapturedFolderValues {
name?: string
color?: string
parentId?: string | null
isExpanded?: boolean
sortOrder?: number
updatedAt?: Date
}
} from '@/app/api/__test-utils__/utils'
interface FolderDbMockOptions {
folderLookupResult?: any
@@ -30,8 +21,6 @@ interface FolderDbMockOptions {
}
describe('Individual Folder API Route', () => {
let mockLogger: ReturnType<typeof mockConsoleLogger>
const TEST_USER: MockUser = {
id: 'user-123',
email: 'test@example.com',
@@ -50,8 +39,7 @@ describe('Individual Folder API Route', () => {
updatedAt: new Date('2024-01-01T00:00:00Z'),
}
let mockAuthenticatedUser: (user?: MockUser) => void
let mockUnauthenticated: () => void
const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth(TEST_USER)
const mockGetUserEntityPermissions = vi.fn()
function createFolderDbMock(options: FolderDbMockOptions = {}) {
@@ -122,10 +110,6 @@ describe('Individual Folder API Route', () => {
vi.resetModules()
vi.clearAllMocks()
setupCommonApiMocks()
mockLogger = mockConsoleLogger()
const auth = mockAuth(TEST_USER)
mockAuthenticatedUser = auth.mockAuthenticatedUser
mockUnauthenticated = auth.mockUnauthenticated
mockGetUserEntityPermissions.mockResolvedValue('admin')

View File

@@ -3,46 +3,17 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
interface CapturedFolderValues {
name?: string
color?: string
parentId?: string | null
isExpanded?: boolean
sortOrder?: number
updatedAt?: Date
}
function createMockTransaction(mockData: {
selectData?: Array<{ id: string; [key: string]: unknown }>
insertResult?: Array<{ id: string; [key: string]: unknown }>
}) {
const { selectData = [], insertResult = [] } = mockData
return vi.fn().mockImplementation(async (callback: (tx: unknown) => Promise<unknown>) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue(selectData),
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue(insertResult),
}),
}),
}
return await callback(tx)
})
}
import {
type CapturedFolderValues,
createMockRequest,
createMockTransaction,
mockAuth,
mockLogger,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Folders API Route', () => {
let mockLogger: ReturnType<typeof mockConsoleLogger>
const mockFolders = [
{
id: 'folder-1',
@@ -70,8 +41,7 @@ describe('Folders API Route', () => {
},
]
let mockAuthenticatedUser: () => void
let mockUnauthenticated: () => void
const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth()
const mockUUID = 'mock-uuid-12345678-90ab-cdef-1234-567890abcdef'
const mockSelect = vi.fn()
@@ -93,10 +63,6 @@ describe('Folders API Route', () => {
})
setupCommonApiMocks()
mockLogger = mockConsoleLogger()
const auth = mockAuth()
mockAuthenticatedUser = auth.mockAuthenticatedUser
mockUnauthenticated = auth.mockUnauthenticated
mockSelect.mockReturnValue({ from: mockFrom })
mockFrom.mockReturnValue({ where: mockWhere })

View File

@@ -9,7 +9,6 @@ import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deploymen
import { generateRequestId } from '@/lib/core/utils/request'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -35,17 +34,22 @@ async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
// Find the start block (starter or start_trigger type)
const startBlock = blocks.find(
(block) =>
block.type === 'starter' || block.type === 'start_trigger' || block.type === 'input_trigger'
(block) => block.type === 'starter' || block.type === 'start_trigger'
)
if (!startBlock) {
return []
}
// Extract inputFormat from subBlocks
const subBlocks = startBlock.subBlocks as Record<string, any> | null
return normalizeInputFormatValue(subBlocks?.inputFormat?.value)
if (!subBlocks?.inputFormat?.value) {
return []
}
return Array.isArray(subBlocks.inputFormat.value) ? subBlocks.inputFormat.value : []
} catch (error) {
logger.error('Error fetching workflow input schema:', error)
return []

View File

@@ -3,9 +3,10 @@
*
* @vitest-environment node
*/
import { createMockRequest, loggerMock } from '@sim/testing'
import { loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest } from '@/app/api/__test-utils__/utils'
vi.mock('@/lib/execution/isolated-vm', () => ({
executeInIsolatedVM: vi.fn().mockImplementation(async (req) => {

View File

@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
mockKnowledgeSchemas()

View File

@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
mockKnowledgeSchemas()

View File

@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
mockKnowledgeSchemas()
mockDrizzleOrm()

View File

@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
mockKnowledgeSchemas()
mockDrizzleOrm()

View File

@@ -5,13 +5,13 @@
*
* @vitest-environment node
*/
import { createEnvMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createEnvMock,
createMockRequest,
mockConsoleLogger,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
vi.mock('drizzle-orm', () => ({
and: vi.fn().mockImplementation((...args) => ({ and: args })),

View File

@@ -4,8 +4,6 @@ import {
invitation,
member,
organization,
permissionGroup,
permissionGroupMember,
permissions,
subscription as subscriptionTable,
user,
@@ -19,7 +17,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -385,47 +382,6 @@ export async function PUT(
// Don't fail the whole invitation acceptance due to this
}
// Auto-assign to permission group if one has autoAddNewMembers enabled
try {
const hasAccessControl = await hasAccessControlAccess(session.user.id)
if (hasAccessControl) {
const [autoAddGroup] = await tx
.select({ id: permissionGroup.id, name: permissionGroup.name })
.from(permissionGroup)
.where(
and(
eq(permissionGroup.organizationId, organizationId),
eq(permissionGroup.autoAddNewMembers, true)
)
)
.limit(1)
if (autoAddGroup) {
await tx.insert(permissionGroupMember).values({
id: randomUUID(),
permissionGroupId: autoAddGroup.id,
userId: session.user.id,
assignedBy: null,
assignedAt: new Date(),
})
logger.info('Auto-assigned new member to permission group', {
userId: session.user.id,
organizationId,
permissionGroupId: autoAddGroup.id,
permissionGroupName: autoAddGroup.name,
})
}
}
} catch (error) {
logger.error('Failed to auto-assign user to permission group', {
userId: session.user.id,
organizationId,
error,
})
// Don't fail the whole invitation acceptance due to this
}
const linkedWorkspaceInvitations = await tx
.select()
.from(workspaceInvitation)

View File

@@ -25,19 +25,12 @@ const configSchema = z.object({
disableMcpTools: z.boolean().optional(),
disableCustomTools: z.boolean().optional(),
hideTemplates: z.boolean().optional(),
disableInvitations: z.boolean().optional(),
hideDeployApi: z.boolean().optional(),
hideDeployMcp: z.boolean().optional(),
hideDeployA2a: z.boolean().optional(),
hideDeployChatbot: z.boolean().optional(),
hideDeployTemplate: z.boolean().optional(),
})
const updateSchema = z.object({
name: z.string().trim().min(1).max(100).optional(),
description: z.string().max(500).nullable().optional(),
config: configSchema.optional(),
autoAddNewMembers: z.boolean().optional(),
})
async function getPermissionGroupWithAccess(groupId: string, userId: string) {
@@ -51,7 +44,6 @@ async function getPermissionGroupWithAccess(groupId: string, userId: string) {
createdBy: permissionGroup.createdBy,
createdAt: permissionGroup.createdAt,
updatedAt: permissionGroup.updatedAt,
autoAddNewMembers: permissionGroup.autoAddNewMembers,
})
.from(permissionGroup)
.where(eq(permissionGroup.id, groupId))
@@ -148,27 +140,11 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
? { ...currentConfig, ...updates.config }
: currentConfig
// If setting autoAddNewMembers to true, unset it on other groups in the org first
if (updates.autoAddNewMembers === true) {
await db
.update(permissionGroup)
.set({ autoAddNewMembers: false, updatedAt: new Date() })
.where(
and(
eq(permissionGroup.organizationId, result.group.organizationId),
eq(permissionGroup.autoAddNewMembers, true)
)
)
}
await db
.update(permissionGroup)
.set({
...(updates.name !== undefined && { name: updates.name }),
...(updates.description !== undefined && { description: updates.description }),
...(updates.autoAddNewMembers !== undefined && {
autoAddNewMembers: updates.autoAddNewMembers,
}),
config: newConfig,
updatedAt: new Date(),
})

View File

@@ -26,12 +26,6 @@ const configSchema = z.object({
disableMcpTools: z.boolean().optional(),
disableCustomTools: z.boolean().optional(),
hideTemplates: z.boolean().optional(),
disableInvitations: z.boolean().optional(),
hideDeployApi: z.boolean().optional(),
hideDeployMcp: z.boolean().optional(),
hideDeployA2a: z.boolean().optional(),
hideDeployChatbot: z.boolean().optional(),
hideDeployTemplate: z.boolean().optional(),
})
const createSchema = z.object({
@@ -39,7 +33,6 @@ const createSchema = z.object({
name: z.string().trim().min(1).max(100),
description: z.string().max(500).optional(),
config: configSchema.optional(),
autoAddNewMembers: z.boolean().optional(),
})
export async function GET(req: Request) {
@@ -75,7 +68,6 @@ export async function GET(req: Request) {
createdBy: permissionGroup.createdBy,
createdAt: permissionGroup.createdAt,
updatedAt: permissionGroup.updatedAt,
autoAddNewMembers: permissionGroup.autoAddNewMembers,
creatorName: user.name,
creatorEmail: user.email,
})
@@ -119,8 +111,7 @@ export async function POST(req: Request) {
}
const body = await req.json()
const { organizationId, name, description, config, autoAddNewMembers } =
createSchema.parse(body)
const { organizationId, name, description, config } = createSchema.parse(body)
const membership = await db
.select({ id: member.id, role: member.role })
@@ -163,19 +154,6 @@ export async function POST(req: Request) {
...config,
}
// If autoAddNewMembers is true, unset it on any existing groups first
if (autoAddNewMembers) {
await db
.update(permissionGroup)
.set({ autoAddNewMembers: false, updatedAt: new Date() })
.where(
and(
eq(permissionGroup.organizationId, organizationId),
eq(permissionGroup.autoAddNewMembers, true)
)
)
}
const now = new Date()
const newGroup = {
id: crypto.randomUUID(),
@@ -186,7 +164,6 @@ export async function POST(req: Request) {
createdBy: session.user.id,
createdAt: now,
updatedAt: now,
autoAddNewMembers: autoAddNewMembers || false,
}
await db.insert(permissionGroup).values(newGroup)
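// A hedged sketch of a request body accepted by the trimmed createSchema above, now that
// autoAddNewMembers is gone (all values illustrative).
const createGroupBodyExample = {
  organizationId: 'org-123',
  name: 'Engineering',
  description: 'Default group for engineers',
  config: { hideTemplates: true, disableCustomTools: false },
}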

View File

@@ -3,9 +3,10 @@
*
* @vitest-environment node
*/
import { createMockRequest, loggerMock } from '@sim/testing'
import { loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest } from '@/app/api/__test-utils__/utils'
describe('Custom Tools API Routes', () => {
const sampleTools = [
@@ -363,7 +364,7 @@ describe('Custom Tools API Routes', () => {
})
it('should reject requests missing tool ID', async () => {
const req = new NextRequest('http://localhost:3000/api/tools/custom')
const req = createMockRequest('DELETE')
const { DELETE } = await import('@/app/api/tools/custom/route')

View File

@@ -1,169 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
export const dynamic = 'force-dynamic'
const logger = createLogger('PulseParseAPI')
const PulseParseSchema = z.object({
apiKey: z.string().min(1, 'API key is required'),
filePath: z.string().min(1, 'File path is required'),
pages: z.string().optional(),
extractFigure: z.boolean().optional(),
figureDescription: z.boolean().optional(),
returnHtml: z.boolean().optional(),
chunking: z.string().optional(),
chunkSize: z.number().optional(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized Pulse parse attempt`, {
error: authResult.error || 'Missing userId',
})
return NextResponse.json(
{
success: false,
error: authResult.error || 'Unauthorized',
},
{ status: 401 }
)
}
const userId = authResult.userId
const body = await request.json()
const validatedData = PulseParseSchema.parse(body)
logger.info(`[${requestId}] Pulse parse request`, {
filePath: validatedData.filePath,
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
userId,
})
let fileUrl = validatedData.filePath
if (validatedData.filePath?.includes('/api/files/serve/')) {
try {
const storageKey = extractStorageKey(validatedData.filePath)
const context = inferContextFromKey(storageKey)
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
if (!hasAccess) {
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
userId,
key: storageKey,
context,
})
return NextResponse.json(
{
success: false,
error: 'File not found',
},
{ status: 404 }
)
}
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
} catch (error) {
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
return NextResponse.json(
{
success: false,
error: 'Failed to generate file access URL',
},
{ status: 500 }
)
}
} else if (validatedData.filePath?.startsWith('/')) {
const baseUrl = getBaseUrl()
fileUrl = `${baseUrl}${validatedData.filePath}`
}
const formData = new FormData()
formData.append('file_url', fileUrl)
if (validatedData.pages) {
formData.append('pages', validatedData.pages)
}
if (validatedData.extractFigure !== undefined) {
formData.append('extract_figure', String(validatedData.extractFigure))
}
if (validatedData.figureDescription !== undefined) {
formData.append('figure_description', String(validatedData.figureDescription))
}
if (validatedData.returnHtml !== undefined) {
formData.append('return_html', String(validatedData.returnHtml))
}
if (validatedData.chunking) {
formData.append('chunking', validatedData.chunking)
}
if (validatedData.chunkSize !== undefined) {
formData.append('chunk_size', String(validatedData.chunkSize))
}
const pulseResponse = await fetch('https://api.runpulse.com/extract', {
method: 'POST',
headers: {
'x-api-key': validatedData.apiKey,
},
body: formData,
})
if (!pulseResponse.ok) {
const errorText = await pulseResponse.text()
logger.error(`[${requestId}] Pulse API error:`, errorText)
return NextResponse.json(
{
success: false,
error: `Pulse API error: ${pulseResponse.statusText}`,
},
{ status: pulseResponse.status }
)
}
const pulseData = await pulseResponse.json()
logger.info(`[${requestId}] Pulse parse successful`)
return NextResponse.json({
success: true,
output: pulseData,
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{
success: false,
error: 'Invalid request data',
details: error.errors,
},
{ status: 400 }
)
}
logger.error(`[${requestId}] Error in Pulse parse:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Internal server error',
},
{ status: 500 }
)
}
}
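// A hedged usage sketch of this handler (the route path and env var name are assumptions,
// not shown in this diff): posting a parse request for a workspace file served from
// /api/files/serve/, which the route swaps for a short-lived presigned URL before
// forwarding the request to https://api.runpulse.com/extract.
const pulseRes = await fetch('/api/tools/pulse/parse', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    apiKey: process.env.PULSE_API_KEY ?? '', // sent to Pulse as the x-api-key header
    filePath: '/api/files/serve/example-key.pdf',
    returnHtml: true, // optional flags are forwarded as form fields
  }),
})
const { success, output } = await pulseRes.json()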

View File

@@ -1,167 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
export const dynamic = 'force-dynamic'
const logger = createLogger('ReductoParseAPI')
const ReductoParseSchema = z.object({
apiKey: z.string().min(1, 'API key is required'),
filePath: z.string().min(1, 'File path is required'),
pages: z.array(z.number()).optional(),
tableOutputFormat: z.enum(['html', 'md']).optional(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized Reducto parse attempt`, {
error: authResult.error || 'Missing userId',
})
return NextResponse.json(
{
success: false,
error: authResult.error || 'Unauthorized',
},
{ status: 401 }
)
}
const userId = authResult.userId
const body = await request.json()
const validatedData = ReductoParseSchema.parse(body)
logger.info(`[${requestId}] Reducto parse request`, {
filePath: validatedData.filePath,
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
userId,
})
let fileUrl = validatedData.filePath
if (validatedData.filePath?.includes('/api/files/serve/')) {
try {
const storageKey = extractStorageKey(validatedData.filePath)
const context = inferContextFromKey(storageKey)
const hasAccess = await verifyFileAccess(
storageKey,
userId,
undefined, // customConfig
context, // context
false // isLocal
)
if (!hasAccess) {
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
userId,
key: storageKey,
context,
})
return NextResponse.json(
{
success: false,
error: 'File not found',
},
{ status: 404 }
)
}
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
} catch (error) {
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
return NextResponse.json(
{
success: false,
error: 'Failed to generate file access URL',
},
{ status: 500 }
)
}
} else if (validatedData.filePath?.startsWith('/')) {
const baseUrl = getBaseUrl()
fileUrl = `${baseUrl}${validatedData.filePath}`
}
const reductoBody: Record<string, unknown> = {
input: fileUrl,
}
if (validatedData.pages && validatedData.pages.length > 0) {
reductoBody.settings = {
page_range: validatedData.pages,
}
}
if (validatedData.tableOutputFormat) {
reductoBody.formatting = {
table_output_format: validatedData.tableOutputFormat,
}
}
const reductoResponse = await fetch('https://platform.reducto.ai/parse', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Accept: 'application/json',
Authorization: `Bearer ${validatedData.apiKey}`,
},
body: JSON.stringify(reductoBody),
})
if (!reductoResponse.ok) {
const errorText = await reductoResponse.text()
logger.error(`[${requestId}] Reducto API error:`, errorText)
return NextResponse.json(
{
success: false,
error: `Reducto API error: ${reductoResponse.statusText}`,
},
{ status: reductoResponse.status }
)
}
const reductoData = await reductoResponse.json()
logger.info(`[${requestId}] Reducto parse successful`)
return NextResponse.json({
success: true,
output: reductoData,
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{
success: false,
error: 'Invalid request data',
details: error.errors,
},
{ status: 400 }
)
}
logger.error(`[${requestId}] Error in Reducto parse:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Internal server error',
},
{ status: 500 }
)
}
}
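// A minimal sketch of the body this route builds for https://platform.reducto.ai/parse
// when both optional fields are supplied (values illustrative).
const reductoBodyExample = {
  input: 'https://example.com/presigned-or-public-file-url',
  settings: { page_range: [1, 2, 3] }, // only set when pages is a non-empty array
  formatting: { table_output_format: 'md' }, // 'html' | 'md'
}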

View File

@@ -27,11 +27,10 @@ const SettingsSchema = z.object({
superUserModeEnabled: z.boolean().optional(),
errorNotificationsEnabled: z.boolean().optional(),
snapToGridSize: z.number().min(0).max(50).optional(),
showActionBar: z.boolean().optional(),
})
const defaultSettings = {
theme: 'dark',
theme: 'system',
autoConnect: true,
telemetryEnabled: true,
emailPreferences: {},
@@ -40,7 +39,6 @@ const defaultSettings = {
superUserModeEnabled: false,
errorNotificationsEnabled: true,
snapToGridSize: 0,
showActionBar: true,
}
export async function GET() {
@@ -75,7 +73,6 @@ export async function GET() {
superUserModeEnabled: userSettings.superUserModeEnabled ?? true,
errorNotificationsEnabled: userSettings.errorNotificationsEnabled ?? true,
snapToGridSize: userSettings.snapToGridSize ?? 0,
showActionBar: userSettings.showActionBar ?? true,
},
},
{ status: 200 }

View File

@@ -1,8 +1,6 @@
import { db, workflow } from '@sim/db'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { generateRequestId } from '@/lib/core/utils/request'
import { cleanupWebhooksForWorkflow } from '@/lib/webhooks/deploy'
import {
deployWorkflow,
loadWorkflowFromNormalizedTables,
@@ -82,11 +80,10 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
export const DELETE = withAdminAuthParams<RouteParams>(async (request, context) => {
const { id: workflowId } = await context.params
const requestId = generateRequestId()
try {
const [workflowRecord] = await db
.select()
.select({ id: workflow.id })
.from(workflow)
.where(eq(workflow.id, workflowId))
.limit(1)
@@ -95,13 +92,6 @@ export const DELETE = withAdminAuthParams<RouteParams>(async (request, context)
return notFoundResponse('Workflow')
}
// Clean up external webhook subscriptions before undeploying
await cleanupWebhooksForWorkflow(
workflowId,
workflowRecord as Record<string, unknown>,
requestId
)
const result = await undeployWorkflow({ workflowId })
if (!result.success) {
return internalErrorResponse(result.error || 'Failed to undeploy workflow')

View File

@@ -7,11 +7,6 @@ import { getSession } from '@/lib/auth'
import { validateInteger } from '@/lib/core/security/input-validation'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import {
cleanupExternalWebhook,
createExternalWebhookSubscription,
shouldRecreateExternalWebhookSubscription,
} from '@/lib/webhooks/provider-subscriptions'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WebhookAPI')
@@ -182,46 +177,6 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
const existingProviderConfig =
(webhookData.webhook.providerConfig as Record<string, unknown>) || {}
let nextProviderConfig =
providerConfig !== undefined &&
resolvedProviderConfig &&
typeof resolvedProviderConfig === 'object'
? (resolvedProviderConfig as Record<string, unknown>)
: existingProviderConfig
const nextProvider = (provider ?? webhookData.webhook.provider) as string
if (
providerConfig !== undefined &&
shouldRecreateExternalWebhookSubscription({
previousProvider: webhookData.webhook.provider as string,
nextProvider,
previousConfig: existingProviderConfig,
nextConfig: nextProviderConfig,
})
) {
await cleanupExternalWebhook(
{ ...webhookData.webhook, providerConfig: existingProviderConfig },
webhookData.workflow,
requestId
)
const result = await createExternalWebhookSubscription(
request,
{
...webhookData.webhook,
provider: nextProvider,
providerConfig: nextProviderConfig,
},
webhookData.workflow,
session.user.id,
requestId
)
nextProviderConfig = result.updatedProviderConfig as Record<string, unknown>
}
logger.debug(`[${requestId}] Updating webhook properties`, {
hasPathUpdate: path !== undefined,
hasProviderUpdate: provider !== undefined,
@@ -233,16 +188,16 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
// Merge providerConfig to preserve credential-related fields
let finalProviderConfig = webhooks[0].webhook.providerConfig
if (providerConfig !== undefined) {
const existingConfig = existingProviderConfig
const existingConfig = (webhooks[0].webhook.providerConfig as Record<string, unknown>) || {}
finalProviderConfig = {
...nextProviderConfig,
...resolvedProviderConfig,
credentialId: existingConfig.credentialId,
credentialSetId: existingConfig.credentialSetId,
userId: existingConfig.userId,
historyId: existingConfig.historyId,
lastCheckedTimestamp: existingConfig.lastCheckedTimestamp,
setupCompleted: existingConfig.setupCompleted,
externalId: nextProviderConfig.externalId ?? existingConfig.externalId,
externalId: existingConfig.externalId,
}
}
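// A hedged before/after illustration of the merge above (values illustrative): user-supplied
// fields from the PATCH body replace editable config, while credential and sync-state fields
// from the stored webhook always win.
const storedConfigExample = { credentialId: 'cred-1', externalId: 'ext-1', token: 'old-token' }
const patchConfigExample = { requireAuth: true, token: 'new-token' }
const mergedConfigExample = {
  ...patchConfigExample,
  credentialId: storedConfigExample.credentialId,
  externalId: storedConfigExample.externalId,
}
// -> { requireAuth: true, token: 'new-token', credentialId: 'cred-1', externalId: 'ext-1' }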

View File

@@ -7,8 +7,9 @@ import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { createExternalWebhookSubscription } from '@/lib/webhooks/provider-subscriptions'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
const logger = createLogger('WebhooksAPI')
@@ -256,7 +257,7 @@ export async function POST(request: NextRequest) {
const finalProviderConfig = providerConfig || {}
const { resolveEnvVarsInObject } = await import('@/lib/webhooks/env-resolver')
let resolvedProviderConfig = await resolveEnvVarsInObject(
const resolvedProviderConfig = await resolveEnvVarsInObject(
finalProviderConfig,
userId,
workflowRecord.workspaceId || undefined
@@ -413,33 +414,149 @@ export async function POST(request: NextRequest) {
}
// --- End Credential Set Handling ---
// Create external subscriptions before saving to DB to prevent orphaned records
let externalSubscriptionId: string | undefined
let externalSubscriptionCreated = false
const createTempWebhookData = (providerConfigOverride = resolvedProviderConfig) => ({
const createTempWebhookData = () => ({
id: targetWebhookId || nanoid(),
path: finalPath,
provider,
providerConfig: providerConfigOverride,
providerConfig: resolvedProviderConfig,
})
try {
const result = await createExternalWebhookSubscription(
request,
createTempWebhookData(),
workflowRecord,
userId,
requestId
)
resolvedProviderConfig = result.updatedProviderConfig as Record<string, unknown>
externalSubscriptionCreated = result.externalSubscriptionCreated
} catch (err) {
logger.error(`[${requestId}] Error creating external webhook subscription`, err)
return NextResponse.json(
{
error: 'Failed to create external webhook subscription',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
if (provider === 'airtable') {
logger.info(`[${requestId}] Creating Airtable subscription before saving to database`)
try {
externalSubscriptionId = await createAirtableWebhookSubscription(
request,
userId,
createTempWebhookData(),
requestId
)
if (externalSubscriptionId) {
resolvedProviderConfig.externalId = externalSubscriptionId
externalSubscriptionCreated = true
}
} catch (err) {
logger.error(`[${requestId}] Error creating Airtable webhook subscription`, err)
return NextResponse.json(
{
error: 'Failed to create webhook in Airtable',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'calendly') {
logger.info(`[${requestId}] Creating Calendly subscription before saving to database`)
try {
externalSubscriptionId = await createCalendlyWebhookSubscription(
request,
userId,
createTempWebhookData(),
requestId
)
if (externalSubscriptionId) {
resolvedProviderConfig.externalId = externalSubscriptionId
externalSubscriptionCreated = true
}
} catch (err) {
logger.error(`[${requestId}] Error creating Calendly webhook subscription`, err)
return NextResponse.json(
{
error: 'Failed to create webhook in Calendly',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'microsoft-teams') {
const { createTeamsSubscription } = await import('@/lib/webhooks/provider-subscriptions')
logger.info(`[${requestId}] Creating Teams subscription before saving to database`)
try {
await createTeamsSubscription(request, createTempWebhookData(), workflowRecord, requestId)
externalSubscriptionCreated = true
} catch (err) {
logger.error(`[${requestId}] Error creating Teams subscription`, err)
return NextResponse.json(
{
error: 'Failed to create Teams subscription',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'telegram') {
const { createTelegramWebhook } = await import('@/lib/webhooks/provider-subscriptions')
logger.info(`[${requestId}] Creating Telegram webhook before saving to database`)
try {
await createTelegramWebhook(request, createTempWebhookData(), requestId)
externalSubscriptionCreated = true
} catch (err) {
logger.error(`[${requestId}] Error creating Telegram webhook`, err)
return NextResponse.json(
{
error: 'Failed to create Telegram webhook',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'webflow') {
logger.info(`[${requestId}] Creating Webflow subscription before saving to database`)
try {
externalSubscriptionId = await createWebflowWebhookSubscription(
request,
userId,
createTempWebhookData(),
requestId
)
if (externalSubscriptionId) {
resolvedProviderConfig.externalId = externalSubscriptionId
externalSubscriptionCreated = true
}
} catch (err) {
logger.error(`[${requestId}] Error creating Webflow webhook subscription`, err)
return NextResponse.json(
{
error: 'Failed to create webhook in Webflow',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'typeform') {
const { createTypeformWebhook } = await import('@/lib/webhooks/provider-subscriptions')
logger.info(`[${requestId}] Creating Typeform webhook before saving to database`)
try {
const usedTag = await createTypeformWebhook(request, createTempWebhookData(), requestId)
if (!resolvedProviderConfig.webhookTag) {
resolvedProviderConfig.webhookTag = usedTag
logger.info(`[${requestId}] Stored auto-generated webhook tag: ${usedTag}`)
}
externalSubscriptionCreated = true
} catch (err) {
logger.error(`[${requestId}] Error creating Typeform webhook`, err)
return NextResponse.json(
{
error: 'Failed to create webhook in Typeform',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
// Now save to database (only if subscription succeeded or provider doesn't need external subscription)
@@ -500,11 +617,7 @@ export async function POST(request: NextRequest) {
logger.error(`[${requestId}] DB save failed, cleaning up external subscription`, dbError)
try {
const { cleanupExternalWebhook } = await import('@/lib/webhooks/provider-subscriptions')
await cleanupExternalWebhook(
createTempWebhookData(resolvedProviderConfig),
workflowRecord,
requestId
)
await cleanupExternalWebhook(createTempWebhookData(), workflowRecord, requestId)
} catch (cleanupError) {
logger.error(
`[${requestId}] Failed to cleanup external subscription after DB save failure`,
@@ -628,6 +741,110 @@ export async function POST(request: NextRequest) {
}
// --- End RSS specific logic ---
if (savedWebhook && provider === 'grain') {
logger.info(`[${requestId}] Grain provider detected. Creating Grain webhook subscription.`)
try {
const grainResult = await createGrainWebhookSubscription(
request,
{
id: savedWebhook.id,
path: savedWebhook.path,
providerConfig: savedWebhook.providerConfig,
},
requestId
)
if (grainResult) {
// Update the webhook record with the external Grain hook ID and event types for filtering
const updatedConfig = {
...(savedWebhook.providerConfig as Record<string, any>),
externalId: grainResult.id,
eventTypes: grainResult.eventTypes,
}
await db
.update(webhook)
.set({
providerConfig: updatedConfig,
updatedAt: new Date(),
})
.where(eq(webhook.id, savedWebhook.id))
savedWebhook.providerConfig = updatedConfig
logger.info(`[${requestId}] Successfully created Grain webhook`, {
grainHookId: grainResult.id,
eventTypes: grainResult.eventTypes,
webhookId: savedWebhook.id,
})
}
} catch (err) {
logger.error(
`[${requestId}] Error creating Grain webhook subscription, rolling back webhook`,
err
)
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
return NextResponse.json(
{
error: 'Failed to create webhook in Grain',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
// --- End Grain specific logic ---
// --- Lemlist specific logic ---
if (savedWebhook && provider === 'lemlist') {
logger.info(
`[${requestId}] Lemlist provider detected. Creating Lemlist webhook subscription.`
)
try {
const lemlistResult = await createLemlistWebhookSubscription(
{
id: savedWebhook.id,
path: savedWebhook.path,
providerConfig: savedWebhook.providerConfig,
},
requestId
)
if (lemlistResult) {
// Update the webhook record with the external Lemlist hook ID
const updatedConfig = {
...(savedWebhook.providerConfig as Record<string, any>),
externalId: lemlistResult.id,
}
await db
.update(webhook)
.set({
providerConfig: updatedConfig,
updatedAt: new Date(),
})
.where(eq(webhook.id, savedWebhook.id))
savedWebhook.providerConfig = updatedConfig
logger.info(`[${requestId}] Successfully created Lemlist webhook`, {
lemlistHookId: lemlistResult.id,
webhookId: savedWebhook.id,
})
}
} catch (err) {
logger.error(
`[${requestId}] Error creating Lemlist webhook subscription, rolling back webhook`,
err
)
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
return NextResponse.json(
{
error: 'Failed to create webhook in Lemlist',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
// --- End Lemlist specific logic ---
if (!targetWebhookId && savedWebhook) {
try {
PlatformEvents.webhookCreated({
@@ -651,3 +868,616 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
// Helper function to create the webhook subscription in Airtable
async function createAirtableWebhookSubscription(
request: NextRequest,
userId: string,
webhookData: any,
requestId: string
): Promise<string | undefined> {
try {
const { path, providerConfig } = webhookData
const { baseId, tableId, includeCellValuesInFieldIds } = providerConfig || {}
if (!baseId || !tableId) {
logger.warn(`[${requestId}] Missing baseId or tableId for Airtable webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error(
'Base ID and Table ID are required to create Airtable webhook. Please provide valid Airtable base and table IDs.'
)
}
const accessToken = await getOAuthToken(userId, 'airtable')
if (!accessToken) {
logger.warn(
`[${requestId}] Could not retrieve Airtable access token for user ${userId}. Cannot create webhook in Airtable.`
)
throw new Error(
'Airtable account connection required. Please connect your Airtable account in the trigger configuration and try again.'
)
}
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
const airtableApiUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`
const specification: any = {
options: {
filters: {
dataTypes: ['tableData'], // Watch table data changes
recordChangeScope: tableId, // Watch only the specified table
},
},
}
// Conditionally add the 'includes' field based on the config
if (includeCellValuesInFieldIds === 'all') {
specification.options.includes = {
includeCellValuesInFieldIds: 'all',
}
}
const requestBody: any = {
notificationUrl: notificationUrl,
specification: specification,
}
const airtableResponse = await fetch(airtableApiUrl, {
method: 'POST',
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
})
// Airtable often returns 200 OK even when the body contains an error, so check the payload
const responseBody = await airtableResponse.json()
if (!airtableResponse.ok || responseBody.error) {
const errorMessage =
responseBody.error?.message || responseBody.error || 'Unknown Airtable API error'
const errorType = responseBody.error?.type
logger.error(
`[${requestId}] Failed to create webhook in Airtable for webhook ${webhookData.id}. Status: ${airtableResponse.status}`,
{ type: errorType, message: errorMessage, response: responseBody }
)
let userFriendlyMessage = 'Failed to create webhook subscription in Airtable'
if (airtableResponse.status === 404) {
userFriendlyMessage =
'Airtable base or table not found. Please verify that the Base ID and Table ID are correct and that you have access to them.'
} else if (errorMessage && errorMessage !== 'Unknown Airtable API error') {
userFriendlyMessage = `Airtable error: ${errorMessage}`
}
throw new Error(userFriendlyMessage)
}
logger.info(
`[${requestId}] Successfully created webhook in Airtable for webhook ${webhookData.id}.`,
{
airtableWebhookId: responseBody.id,
}
)
return responseBody.id
} catch (error: any) {
logger.error(
`[${requestId}] Exception during Airtable webhook creation for webhook ${webhookData.id}.`,
{
message: error.message,
stack: error.stack,
}
)
// Re-throw the error so it can be caught by the outer try-catch
throw error
}
}
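// A sketch of the payload the Airtable helper above sends to
// POST https://api.airtable.com/v0/bases/{baseId}/webhooks (IDs illustrative).
const airtableWebhookBodyExample = {
  notificationUrl: 'https://example.com/api/webhooks/trigger/my-path',
  specification: {
    options: {
      filters: {
        dataTypes: ['tableData'], // watch table data changes only
        recordChangeScope: 'tblXXXXXXXXXXXXXX', // the configured tableId
      },
      // included only when includeCellValuesInFieldIds === 'all'
      includes: { includeCellValuesInFieldIds: 'all' },
    },
  },
}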
// Helper function to create the webhook subscription in Calendly
async function createCalendlyWebhookSubscription(
request: NextRequest,
userId: string,
webhookData: any,
requestId: string
): Promise<string | undefined> {
try {
const { path, providerConfig } = webhookData
const { apiKey, organization, triggerId } = providerConfig || {}
if (!apiKey) {
logger.warn(`[${requestId}] Missing apiKey for Calendly webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error(
'Personal Access Token is required to create Calendly webhook. Please provide your Calendly Personal Access Token.'
)
}
if (!organization) {
logger.warn(`[${requestId}] Missing organization URI for Calendly webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error(
'Organization URI is required to create Calendly webhook. Please provide your Organization URI from the "Get Current User" operation.'
)
}
if (!triggerId) {
logger.warn(`[${requestId}] Missing triggerId for Calendly webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error('Trigger ID is required to create Calendly webhook')
}
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
// Map trigger IDs to Calendly event types
const eventTypeMap: Record<string, string[]> = {
calendly_invitee_created: ['invitee.created'],
calendly_invitee_canceled: ['invitee.canceled'],
calendly_routing_form_submitted: ['routing_form_submission.created'],
calendly_webhook: ['invitee.created', 'invitee.canceled', 'routing_form_submission.created'],
}
const events = eventTypeMap[triggerId] || ['invitee.created']
const calendlyApiUrl = 'https://api.calendly.com/webhook_subscriptions'
const requestBody = {
url: notificationUrl,
events,
organization,
scope: 'organization',
}
const calendlyResponse = await fetch(calendlyApiUrl, {
method: 'POST',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
})
if (!calendlyResponse.ok) {
const errorBody = await calendlyResponse.json().catch(() => ({}))
const errorMessage = errorBody.message || errorBody.title || 'Unknown Calendly API error'
logger.error(
`[${requestId}] Failed to create webhook in Calendly for webhook ${webhookData.id}. Status: ${calendlyResponse.status}`,
{ response: errorBody }
)
let userFriendlyMessage = 'Failed to create webhook subscription in Calendly'
if (calendlyResponse.status === 401) {
userFriendlyMessage =
'Calendly authentication failed. Please verify your Personal Access Token is correct.'
} else if (calendlyResponse.status === 403) {
userFriendlyMessage =
'Calendly access denied. Please ensure you have appropriate permissions and a paid Calendly subscription.'
} else if (calendlyResponse.status === 404) {
userFriendlyMessage =
'Calendly organization not found. Please verify the Organization URI is correct.'
} else if (errorMessage && errorMessage !== 'Unknown Calendly API error') {
userFriendlyMessage = `Calendly error: ${errorMessage}`
}
throw new Error(userFriendlyMessage)
}
const responseBody = await calendlyResponse.json()
const webhookUri = responseBody.resource?.uri
if (!webhookUri) {
logger.error(
`[${requestId}] Calendly webhook created but no webhook URI returned for webhook ${webhookData.id}`,
{ response: responseBody }
)
throw new Error('Calendly webhook creation succeeded but no webhook URI was returned')
}
// Extract the webhook ID from the URI (e.g., https://api.calendly.com/webhook_subscriptions/WEBHOOK_ID)
const webhookId = webhookUri.split('/').pop()
if (!webhookId) {
logger.error(`[${requestId}] Could not extract webhook ID from Calendly URI: ${webhookUri}`, {
response: responseBody,
})
throw new Error('Failed to extract webhook ID from Calendly response')
}
logger.info(
`[${requestId}] Successfully created webhook in Calendly for webhook ${webhookData.id}.`,
{
calendlyWebhookUri: webhookUri,
calendlyWebhookId: webhookId,
}
)
return webhookId
} catch (error: any) {
logger.error(
`[${requestId}] Exception during Calendly webhook creation for webhook ${webhookData.id}.`,
{
message: error.message,
stack: error.stack,
}
)
// Re-throw the error so it can be caught by the outer try-catch
throw error
}
}
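// A small illustration of the response handling above: Calendly returns the subscription
// under resource.uri, and the trailing path segment is stored as the external ID.
const calendlyResponseExample = {
  resource: { uri: 'https://api.calendly.com/webhook_subscriptions/WEBHOOK_ID' },
}
const calendlyWebhookIdExample = calendlyResponseExample.resource.uri.split('/').pop() // 'WEBHOOK_ID'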
// Helper function to create the webhook subscription in Webflow
async function createWebflowWebhookSubscription(
request: NextRequest,
userId: string,
webhookData: any,
requestId: string
): Promise<string | undefined> {
try {
const { path, providerConfig } = webhookData
const { siteId, triggerId, collectionId, formId } = providerConfig || {}
if (!siteId) {
logger.warn(`[${requestId}] Missing siteId for Webflow webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error('Site ID is required to create Webflow webhook')
}
if (!triggerId) {
logger.warn(`[${requestId}] Missing triggerId for Webflow webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error('Trigger type is required to create Webflow webhook')
}
const accessToken = await getOAuthToken(userId, 'webflow')
if (!accessToken) {
logger.warn(
`[${requestId}] Could not retrieve Webflow access token for user ${userId}. Cannot create webhook in Webflow.`
)
throw new Error(
'Webflow account connection required. Please connect your Webflow account in the trigger configuration and try again.'
)
}
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
// Map trigger IDs to Webflow trigger types
const triggerTypeMap: Record<string, string> = {
webflow_collection_item_created: 'collection_item_created',
webflow_collection_item_changed: 'collection_item_changed',
webflow_collection_item_deleted: 'collection_item_deleted',
webflow_form_submission: 'form_submission',
}
const webflowTriggerType = triggerTypeMap[triggerId]
if (!webflowTriggerType) {
logger.warn(`[${requestId}] Invalid triggerId for Webflow: ${triggerId}`, {
webhookId: webhookData.id,
})
throw new Error(`Invalid Webflow trigger type: ${triggerId}`)
}
const webflowApiUrl = `https://api.webflow.com/v2/sites/${siteId}/webhooks`
const requestBody: any = {
triggerType: webflowTriggerType,
url: notificationUrl,
}
// Add filter for collection-based triggers
if (collectionId && webflowTriggerType.startsWith('collection_item_')) {
requestBody.filter = {
resource_type: 'collection',
resource_id: collectionId,
}
}
// Add filter for form submissions
if (formId && webflowTriggerType === 'form_submission') {
requestBody.filter = {
resource_type: 'form',
resource_id: formId,
}
}
const webflowResponse = await fetch(webflowApiUrl, {
method: 'POST',
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json',
accept: 'application/json',
},
body: JSON.stringify(requestBody),
})
const responseBody = await webflowResponse.json()
if (!webflowResponse.ok || responseBody.error) {
const errorMessage = responseBody.message || responseBody.error || 'Unknown Webflow API error'
logger.error(
`[${requestId}] Failed to create webhook in Webflow for webhook ${webhookData.id}. Status: ${webflowResponse.status}`,
{ message: errorMessage, response: responseBody }
)
throw new Error(errorMessage)
}
logger.info(
`[${requestId}] Successfully created webhook in Webflow for webhook ${webhookData.id}.`,
{
webflowWebhookId: responseBody.id || responseBody._id,
}
)
return responseBody.id || responseBody._id
} catch (error: any) {
logger.error(
`[${requestId}] Exception during Webflow webhook creation for webhook ${webhookData.id}.`,
{
message: error.message,
stack: error.stack,
}
)
throw error
}
}
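// A sketch of the two filter shapes the Webflow helper above attaches (IDs illustrative):
// collection_item_* triggers scope to a collection, form_submission scopes to a form.
const webflowCollectionBodyExample = {
  triggerType: 'collection_item_created',
  url: 'https://example.com/api/webhooks/trigger/my-path',
  filter: { resource_type: 'collection', resource_id: 'collection-id' },
}
const webflowFormBodyExample = {
  triggerType: 'form_submission',
  url: 'https://example.com/api/webhooks/trigger/my-path',
  filter: { resource_type: 'form', resource_id: 'form-id' },
}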
// Helper function to create the webhook subscription in Grain
async function createGrainWebhookSubscription(
request: NextRequest,
webhookData: any,
requestId: string
): Promise<{ id: string; eventTypes: string[] } | undefined> {
try {
const { path, providerConfig } = webhookData
const { apiKey, triggerId, includeHighlights, includeParticipants, includeAiSummary } =
providerConfig || {}
if (!apiKey) {
logger.warn(`[${requestId}] Missing apiKey for Grain webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error(
'Grain API Key is required. Please provide your Grain Personal Access Token in the trigger configuration.'
)
}
// Map trigger IDs to Grain API hook_type (only 2 options: recording_added, upload_status)
const hookTypeMap: Record<string, string> = {
grain_webhook: 'recording_added',
grain_recording_created: 'recording_added',
grain_recording_updated: 'recording_added',
grain_highlight_created: 'recording_added',
grain_highlight_updated: 'recording_added',
grain_story_created: 'recording_added',
grain_upload_status: 'upload_status',
}
const eventTypeMap: Record<string, string[]> = {
grain_webhook: [],
grain_recording_created: ['recording_added'],
grain_recording_updated: ['recording_updated'],
grain_highlight_created: ['highlight_created'],
grain_highlight_updated: ['highlight_updated'],
grain_story_created: ['story_created'],
grain_upload_status: ['upload_status'],
}
const hookType = hookTypeMap[triggerId] ?? 'recording_added'
const eventTypes = eventTypeMap[triggerId] ?? []
if (!hookTypeMap[triggerId]) {
logger.warn(
`[${requestId}] Unknown triggerId for Grain: ${triggerId}, defaulting to recording_added`,
{
webhookId: webhookData.id,
}
)
}
logger.info(`[${requestId}] Creating Grain webhook`, {
triggerId,
hookType,
eventTypes,
webhookId: webhookData.id,
})
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
const grainApiUrl = 'https://api.grain.com/_/public-api/v2/hooks/create'
const requestBody: Record<string, any> = {
hook_url: notificationUrl,
hook_type: hookType,
}
// Build include object based on configuration
const include: Record<string, boolean> = {}
if (includeHighlights) {
include.highlights = true
}
if (includeParticipants) {
include.participants = true
}
if (includeAiSummary) {
include.ai_summary = true
}
if (Object.keys(include).length > 0) {
requestBody.include = include
}
const grainResponse = await fetch(grainApiUrl, {
method: 'POST',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
'Public-Api-Version': '2025-10-31',
},
body: JSON.stringify(requestBody),
})
const responseBody = await grainResponse.json()
if (!grainResponse.ok || responseBody.error || responseBody.errors) {
logger.warn(`[${requestId}] Grain response body:`, responseBody)
const errorMessage =
responseBody.errors?.detail ||
responseBody.error?.message ||
responseBody.error ||
responseBody.message ||
'Unknown Grain API error'
logger.error(
`[${requestId}] Failed to create webhook in Grain for webhook ${webhookData.id}. Status: ${grainResponse.status}`,
{ message: errorMessage, response: responseBody }
)
let userFriendlyMessage = 'Failed to create webhook subscription in Grain'
if (grainResponse.status === 401) {
userFriendlyMessage =
'Invalid Grain API Key. Please verify your Personal Access Token is correct.'
} else if (grainResponse.status === 403) {
userFriendlyMessage =
'Access denied. Please ensure your Grain API Key has appropriate permissions.'
} else if (errorMessage && errorMessage !== 'Unknown Grain API error') {
userFriendlyMessage = `Grain error: ${errorMessage}`
}
throw new Error(userFriendlyMessage)
}
logger.info(
`[${requestId}] Successfully created webhook in Grain for webhook ${webhookData.id}.`,
{
grainWebhookId: responseBody.id,
eventTypes,
}
)
return { id: responseBody.id, eventTypes }
} catch (error: any) {
logger.error(
`[${requestId}] Exception during Grain webhook creation for webhook ${webhookData.id}.`,
{
message: error.message,
stack: error.stack,
}
)
throw error
}
}
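// A sketch of the body the Grain helper above sends to
// https://api.grain.com/_/public-api/v2/hooks/create when every include flag is enabled.
const grainHookBodyExample = {
  hook_url: 'https://example.com/api/webhooks/trigger/my-path',
  hook_type: 'recording_added', // 'upload_status' for the upload-status trigger
  include: { highlights: true, participants: true, ai_summary: true },
}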
// Helper function to create the webhook subscription in Lemlist
async function createLemlistWebhookSubscription(
webhookData: any,
requestId: string
): Promise<{ id: string } | undefined> {
try {
const { path, providerConfig } = webhookData
const { apiKey, triggerId, campaignId } = providerConfig || {}
if (!apiKey) {
logger.warn(`[${requestId}] Missing apiKey for Lemlist webhook creation.`, {
webhookId: webhookData.id,
})
throw new Error(
'Lemlist API Key is required. Please provide your Lemlist API Key in the trigger configuration.'
)
}
// Map trigger IDs to Lemlist event types
const eventTypeMap: Record<string, string | undefined> = {
lemlist_email_replied: 'emailsReplied',
lemlist_linkedin_replied: 'linkedinReplied',
lemlist_interested: 'interested',
lemlist_not_interested: 'notInterested',
lemlist_email_opened: 'emailsOpened',
lemlist_email_clicked: 'emailsClicked',
lemlist_email_bounced: 'emailsBounced',
lemlist_email_sent: 'emailsSent',
lemlist_webhook: undefined, // Generic webhook - no type filter
}
const eventType = eventTypeMap[triggerId]
logger.info(`[${requestId}] Creating Lemlist webhook`, {
triggerId,
eventType,
hasCampaignId: !!campaignId,
webhookId: webhookData.id,
})
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
const lemlistApiUrl = 'https://api.lemlist.com/api/hooks'
// Build request body
const requestBody: Record<string, any> = {
targetUrl: notificationUrl,
}
// Add event type if specified (omit for generic webhook to receive all events)
if (eventType) {
requestBody.type = eventType
}
// Add campaign filter if specified
if (campaignId) {
requestBody.campaignId = campaignId
}
// Lemlist uses Basic Auth with an empty username and the API key as the password
const authString = Buffer.from(`:${apiKey}`).toString('base64')
const lemlistResponse = await fetch(lemlistApiUrl, {
method: 'POST',
headers: {
Authorization: `Basic ${authString}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(requestBody),
})
const responseBody = await lemlistResponse.json()
if (!lemlistResponse.ok || responseBody.error) {
const errorMessage = responseBody.message || responseBody.error || 'Unknown Lemlist API error'
logger.error(
`[${requestId}] Failed to create webhook in Lemlist for webhook ${webhookData.id}. Status: ${lemlistResponse.status}`,
{ message: errorMessage, response: responseBody }
)
let userFriendlyMessage = 'Failed to create webhook subscription in Lemlist'
if (lemlistResponse.status === 401) {
userFriendlyMessage = 'Invalid Lemlist API Key. Please verify your API Key is correct.'
} else if (lemlistResponse.status === 403) {
userFriendlyMessage =
'Access denied. Please ensure your Lemlist API Key has appropriate permissions.'
} else if (errorMessage && errorMessage !== 'Unknown Lemlist API error') {
userFriendlyMessage = `Lemlist error: ${errorMessage}`
}
throw new Error(userFriendlyMessage)
}
logger.info(
`[${requestId}] Successfully created webhook in Lemlist for webhook ${webhookData.id}.`,
{
lemlistWebhookId: responseBody._id,
}
)
return { id: responseBody._id }
} catch (error: any) {
logger.error(
`[${requestId}] Exception during Lemlist webhook creation for webhook ${webhookData.id}.`,
{
message: error.message,
stack: error.stack,
}
)
throw error
}
}
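// An illustration of the auth scheme used above: Basic Auth with an empty username and the
// API key as the password (key value is a placeholder).
const lemlistAuthHeaderExample = `Basic ${Buffer.from(':my-lemlist-api-key').toString('base64')}`
// -> 'Basic Om15LWxlbWxpc3QtYXBpLWtleQ=='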

View File

@@ -3,92 +3,15 @@
*
* @vitest-environment node
*/
import { createMockRequest, loggerMock } from '@sim/testing'
import { loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/** Mock execution dependencies for webhook tests */
function mockExecutionDependencies() {
vi.mock('@/lib/core/security/encryption', () => ({
decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'decrypted-value' }),
}))
vi.mock('@/lib/logs/execution/trace-spans/trace-spans', () => ({
buildTraceSpans: vi.fn().mockReturnValue({ traceSpans: [], totalDuration: 100 }),
}))
vi.mock('@/lib/workflows/utils', () => ({
updateWorkflowRunCounts: vi.fn().mockResolvedValue(undefined),
}))
vi.mock('@/serializer', () => ({
Serializer: vi.fn().mockImplementation(() => ({
serializeWorkflow: vi.fn().mockReturnValue({
version: '1.0',
blocks: [
{
id: 'starter-id',
metadata: { id: 'starter', name: 'Start' },
config: {},
inputs: {},
outputs: {},
position: { x: 100, y: 100 },
enabled: true,
},
{
id: 'agent-id',
metadata: { id: 'agent', name: 'Agent 1' },
config: {},
inputs: {},
outputs: {},
position: { x: 634, y: -167 },
enabled: true,
},
],
edges: [
{
id: 'edge-1',
source: 'starter-id',
target: 'agent-id',
sourceHandle: 'source',
targetHandle: 'target',
},
],
loops: {},
parallels: {},
}),
})),
}))
}
/** Mock Trigger.dev SDK */
function mockTriggerDevSdk() {
vi.mock('@trigger.dev/sdk', () => ({
tasks: { trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }) },
task: vi.fn().mockReturnValue({}),
}))
}
/**
* Test data store - isolated per test via beforeEach reset
* This replaces the global mutable state pattern with local test data
*/
const testData = {
webhooks: [] as Array<{
id: string
provider: string
path: string
isActive: boolean
providerConfig?: Record<string, unknown>
workflowId: string
rateLimitCount?: number
rateLimitPeriod?: number
}>,
workflows: [] as Array<{
id: string
userId: string
workspaceId?: string
}>,
}
import {
createMockRequest,
globalMockData,
mockExecutionDependencies,
mockTriggerDevSdk,
} from '@/app/api/__test-utils__/utils'
const {
generateRequestHashMock,
@@ -236,8 +159,8 @@ vi.mock('@/lib/workflows/persistence/utils', () => ({
vi.mock('@/lib/webhooks/processor', () => ({
findAllWebhooksForPath: vi.fn().mockImplementation(async (options: { path: string }) => {
// Filter webhooks by path from testData
const matchingWebhooks = testData.webhooks.filter(
// Filter webhooks by path from globalMockData
const matchingWebhooks = globalMockData.webhooks.filter(
(wh) => wh.path === options.path && wh.isActive
)
@@ -247,7 +170,7 @@ vi.mock('@/lib/webhooks/processor', () => ({
// Return array of {webhook, workflow} objects
return matchingWebhooks.map((wh) => {
const matchingWorkflow = testData.workflows.find((w) => w.id === wh.workflowId) || {
const matchingWorkflow = globalMockData.workflows.find((w) => w.id === wh.workflowId) || {
id: wh.workflowId || 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -360,15 +283,14 @@ describe('Webhook Trigger API Route', () => {
beforeEach(() => {
vi.clearAllMocks()
// Reset test data arrays
testData.webhooks.length = 0
testData.workflows.length = 0
globalMockData.webhooks.length = 0
globalMockData.workflows.length = 0
globalMockData.schedules.length = 0
mockExecutionDependencies()
mockTriggerDevSdk()
// Set up default workflow for tests
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -404,7 +326,7 @@ describe('Webhook Trigger API Route', () => {
describe('Generic Webhook Authentication', () => {
it('should process generic webhook without authentication', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -414,7 +336,7 @@ describe('Webhook Trigger API Route', () => {
rateLimitCount: 100,
rateLimitPeriod: 60,
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -432,7 +354,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should authenticate with Bearer token when no custom header is configured', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -440,7 +362,7 @@ describe('Webhook Trigger API Route', () => {
providerConfig: { requireAuth: true, token: 'test-token-123' },
workflowId: 'test-workflow-id',
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -459,7 +381,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should authenticate with custom header when configured', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -471,7 +393,7 @@ describe('Webhook Trigger API Route', () => {
},
workflowId: 'test-workflow-id',
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -490,7 +412,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should handle case insensitive Bearer token authentication', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -498,7 +420,7 @@ describe('Webhook Trigger API Route', () => {
providerConfig: { requireAuth: true, token: 'case-test-token' },
workflowId: 'test-workflow-id',
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -532,7 +454,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should handle case insensitive custom header authentication', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -544,7 +466,7 @@ describe('Webhook Trigger API Route', () => {
},
workflowId: 'test-workflow-id',
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -573,7 +495,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject wrong Bearer token', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -597,7 +519,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject wrong custom header token', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -625,7 +547,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject missing authentication when required', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -645,7 +567,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject Bearer token when custom header is configured', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -673,7 +595,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject wrong custom header name', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -701,7 +623,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject when auth is required but no token is configured', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -709,7 +631,7 @@ describe('Webhook Trigger API Route', () => {
providerConfig: { requireAuth: true },
workflowId: 'test-workflow-id',
})
testData.workflows.push({ id: 'test-workflow-id', userId: 'test-user-id' })
globalMockData.workflows.push({ id: 'test-workflow-id', userId: 'test-user-id' })
const headers = {
'Content-Type': 'application/json',

View File

@@ -4,7 +4,6 @@ import { and, desc, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import { cleanupWebhooksForWorkflow, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
import {
deployWorkflow,
loadWorkflowFromNormalizedTables,
@@ -131,22 +130,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
}
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
request,
workflowId: id,
workflow: workflowData,
userId: actorUserId,
blocks: normalizedData.blocks,
requestId,
})
if (!triggerSaveResult.success) {
return createErrorResponse(
triggerSaveResult.error?.message || 'Failed to save trigger configuration',
triggerSaveResult.error?.status || 500
)
}
const deployResult = await deployWorkflow({
workflowId: id,
deployedBy: actorUserId,
@@ -219,18 +202,11 @@ export async function DELETE(
try {
logger.debug(`[${requestId}] Undeploying workflow: ${id}`)
const { error, workflow: workflowData } = await validateWorkflowPermissions(
id,
requestId,
'admin'
)
const { error } = await validateWorkflowPermissions(id, requestId, 'admin')
if (error) {
return createErrorResponse(error.message, error.status)
}
// Clean up external webhook subscriptions before undeploying
await cleanupWebhooksForWorkflow(id, workflowData as Record<string, unknown>, requestId)
const result = await undeployWorkflow({ workflowId: id })
if (!result.success) {
return createErrorResponse(result.error || 'Failed to undeploy workflow', 500)

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { webhook, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
@@ -13,6 +13,7 @@ import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validat
import { getWorkflowAccessContext } from '@/lib/workflows/utils'
import type { BlockState } from '@/stores/workflows/workflow/types'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { getTrigger } from '@/triggers'
const logger = createLogger('WorkflowStateAPI')
@@ -202,6 +203,8 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
)
}
await syncWorkflowWebhooks(workflowId, workflowState.blocks)
// Extract and persist custom tools to database
try {
const workspaceId = workflowData.workspaceId
@@ -287,3 +290,213 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
function getSubBlockValue<T = unknown>(block: BlockState, subBlockId: string): T | undefined {
const value = block.subBlocks?.[subBlockId]?.value
if (value === undefined || value === null) {
return undefined
}
return value as T
}
async function syncWorkflowWebhooks(
workflowId: string,
blocks: Record<string, any>
): Promise<void> {
await syncBlockResources(workflowId, blocks, {
resourceName: 'webhook',
subBlockId: 'webhookId',
buildMetadata: buildWebhookMetadata,
applyMetadata: upsertWebhookRecord,
})
}
interface WebhookMetadata {
triggerPath: string
provider: string | null
providerConfig: Record<string, any>
}
const CREDENTIAL_SET_PREFIX = 'credentialSet:'
function buildWebhookMetadata(block: BlockState): WebhookMetadata | null {
const triggerId =
getSubBlockValue<string>(block, 'triggerId') ||
getSubBlockValue<string>(block, 'selectedTriggerId')
const triggerConfig = getSubBlockValue<Record<string, any>>(block, 'triggerConfig') || {}
const triggerCredentials = getSubBlockValue<string>(block, 'triggerCredentials')
const triggerPath = getSubBlockValue<string>(block, 'triggerPath') || block.id
const triggerDef = triggerId ? getTrigger(triggerId) : undefined
const provider = triggerDef?.provider || null
// Handle credential sets vs individual credentials
const isCredentialSet = triggerCredentials?.startsWith(CREDENTIAL_SET_PREFIX)
const credentialSetId = isCredentialSet
? triggerCredentials!.slice(CREDENTIAL_SET_PREFIX.length)
: undefined
const credentialId = isCredentialSet ? undefined : triggerCredentials
const providerConfig = {
...(typeof triggerConfig === 'object' ? triggerConfig : {}),
...(credentialId ? { credentialId } : {}),
...(credentialSetId ? { credentialSetId } : {}),
...(triggerId ? { triggerId } : {}),
}
return {
triggerPath,
provider,
providerConfig,
}
}
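A purely illustrative aside on the prefix handling above (the ids are placeholders, not values from this change): a `credentialSet:`-prefixed selection yields a credential set id, while any other non-empty value is carried through as an individual credential id.
// Illustrative sketch only — 'cs_123' and 'cred_456' are invented ids.
const fromSet = 'credentialSet:cs_123'
const credentialSetId = fromSet.startsWith(CREDENTIAL_SET_PREFIX)
  ? fromSet.slice(CREDENTIAL_SET_PREFIX.length) // 'cs_123' -> providerConfig.credentialSetId
  : undefined
const fromSingle = 'cred_456' // no prefix, so it flows through as providerConfig.credentialId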
async function upsertWebhookRecord(
workflowId: string,
block: BlockState,
webhookId: string,
metadata: WebhookMetadata
): Promise<void> {
const providerConfig = metadata.providerConfig as Record<string, unknown>
const credentialSetId = providerConfig?.credentialSetId as string | undefined
// For credential sets, delegate to the sync function which handles fan-out
if (credentialSetId && metadata.provider) {
const { syncWebhooksForCredentialSet } = await import('@/lib/webhooks/utils.server')
const { getProviderIdFromServiceId } = await import('@/lib/oauth')
const oauthProviderId = getProviderIdFromServiceId(metadata.provider)
const requestId = crypto.randomUUID().slice(0, 8)
// Extract base config (without credential-specific fields)
const {
credentialId: _cId,
credentialSetId: _csId,
userId: _uId,
...baseConfig
} = providerConfig
try {
await syncWebhooksForCredentialSet({
workflowId,
blockId: block.id,
provider: metadata.provider,
basePath: metadata.triggerPath,
credentialSetId,
oauthProviderId,
providerConfig: baseConfig as Record<string, any>,
requestId,
})
logger.info('Synced credential set webhooks during workflow save', {
workflowId,
blockId: block.id,
credentialSetId,
})
} catch (error) {
logger.error('Failed to sync credential set webhooks during workflow save', {
workflowId,
blockId: block.id,
credentialSetId,
error,
})
}
return
}
// For individual credentials, use the existing single webhook logic
const [existing] = await db.select().from(webhook).where(eq(webhook.id, webhookId)).limit(1)
if (existing) {
const needsUpdate =
existing.blockId !== block.id ||
existing.workflowId !== workflowId ||
existing.path !== metadata.triggerPath
if (needsUpdate) {
await db
.update(webhook)
.set({
workflowId,
blockId: block.id,
path: metadata.triggerPath,
provider: metadata.provider || existing.provider,
providerConfig: Object.keys(metadata.providerConfig).length
? metadata.providerConfig
: existing.providerConfig,
isActive: true,
updatedAt: new Date(),
})
.where(eq(webhook.id, webhookId))
}
return
}
await db.insert(webhook).values({
id: webhookId,
workflowId,
blockId: block.id,
path: metadata.triggerPath,
provider: metadata.provider,
providerConfig: metadata.providerConfig,
credentialSetId: null,
isActive: true,
createdAt: new Date(),
updatedAt: new Date(),
})
logger.info('Recreated missing webhook after workflow save', {
workflowId,
blockId: block.id,
webhookId,
})
}
interface BlockResourceSyncConfig<T> {
resourceName: string
subBlockId: string
buildMetadata: (block: BlockState, resourceId: string) => T | null
applyMetadata: (
workflowId: string,
block: BlockState,
resourceId: string,
metadata: T
) => Promise<void>
}
async function syncBlockResources<T>(
workflowId: string,
blocks: Record<string, any>,
config: BlockResourceSyncConfig<T>
): Promise<void> {
const blockEntries = Object.values(blocks || {}).filter(Boolean) as BlockState[]
if (blockEntries.length === 0) return
for (const block of blockEntries) {
const resourceId = getSubBlockValue<string>(block, config.subBlockId)
if (!resourceId) continue
const metadata = config.buildMetadata(block, resourceId)
if (!metadata) {
logger.warn(`Skipping ${config.resourceName} sync due to invalid configuration`, {
workflowId,
blockId: block.id,
resourceId,
resourceName: config.resourceName,
})
continue
}
try {
await config.applyMetadata(workflowId, block, resourceId, metadata)
} catch (error) {
logger.error(`Failed to sync ${config.resourceName}`, {
workflowId,
blockId: block.id,
resourceId,
resourceName: config.resourceName,
error,
})
}
}
}
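Because syncBlockResources is generic over its metadata type, the same traversal could in principle back other block-bound resources. A minimal sketch under that assumption — the 'scheduleId' sub-block, ScheduleMetadata shape, and persistence step below are hypothetical, not part of this change:
// Hypothetical reuse of syncBlockResources; 'scheduleId' and ScheduleMetadata are illustrative.
interface ScheduleMetadata {
  cron: string
  timezone: string
}
function buildScheduleMetadata(block: BlockState): ScheduleMetadata | null {
  const cron = getSubBlockValue<string>(block, 'cronExpression')
  if (!cron) return null
  return { cron, timezone: getSubBlockValue<string>(block, 'timezone') || 'UTC' }
}
async function syncWorkflowSchedules(workflowId: string, blocks: Record<string, any>): Promise<void> {
  await syncBlockResources(workflowId, blocks, {
    resourceName: 'schedule',
    subBlockId: 'scheduleId',
    buildMetadata: buildScheduleMetadata,
    applyMetadata: async (wfId, block, scheduleId, metadata) => {
      // Persistence is left abstract in this sketch; a real implementation would upsert a row here.
      logger.info('Would upsert schedule', { wfId, blockId: block.id, scheduleId, metadata })
    },
  })
}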

View File

@@ -4,29 +4,29 @@
*
* @vitest-environment node
*/
import {
databaseMock,
defaultMockUser,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockDatabase,
mockAuth,
mockCryptoUuid,
mockUser,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Workflow Variables API Route', () => {
let authMocks: ReturnType<typeof mockAuth>
let databaseMocks: ReturnType<typeof createMockDatabase>
const mockGetWorkflowAccessContext = vi.fn()
beforeEach(() => {
vi.resetModules()
setupCommonApiMocks()
mockCryptoUuid('mock-request-id-12345678')
authMocks = mockAuth(defaultMockUser)
authMocks = mockAuth(mockUser)
mockGetWorkflowAccessContext.mockReset()
vi.doMock('@sim/db', () => databaseMock)
vi.doMock('@/lib/workflows/utils', () => ({
getWorkflowAccessContext: mockGetWorkflowAccessContext,
}))
@@ -203,6 +203,10 @@ describe('Workflow Variables API Route', () => {
isWorkspaceOwner: false,
})
databaseMocks = createMockDatabase({
update: { results: [{}] },
})
const variables = {
'var-1': {
id: 'var-1',

View File

@@ -1,5 +1,5 @@
import { createMockRequest, mockAuth, mockConsoleLogger } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, mockAuth, mockConsoleLogger } from '@/app/api/__test-utils__/utils'
describe('Workspace Invitations API Route', () => {
const mockWorkspace = { id: 'workspace-1', name: 'Test Workspace' }

View File

@@ -12,7 +12,6 @@ import { HydrationErrorHandler } from '@/app/_shell/hydration-error-handler'
import { QueryProvider } from '@/app/_shell/providers/query-provider'
import { SessionProvider } from '@/app/_shell/providers/session-provider'
import { ThemeProvider } from '@/app/_shell/providers/theme-provider'
import { TooltipProvider } from '@/app/_shell/providers/tooltip-provider'
import { season } from '@/app/_styles/fonts/season/season'
export const viewport: Viewport = {
@@ -209,9 +208,7 @@ export default function RootLayout({ children }: { children: React.ReactNode })
<ThemeProvider>
<QueryProvider>
<SessionProvider>
<TooltipProvider>
<BrandedLayout>{children}</BrandedLayout>
</TooltipProvider>
<BrandedLayout>{children}</BrandedLayout>
</SessionProvider>
</QueryProvider>
</ThemeProvider>

View File

@@ -21,15 +21,12 @@ import {
Combobox,
Connections,
Copy,
Cursor,
DatePicker,
DocumentAttachment,
Duplicate,
Expand,
Eye,
FolderCode,
FolderPlus,
Hand,
HexSimple,
Input,
Key as KeyIcon,
@@ -994,14 +991,11 @@ export default function PlaygroundPage() {
{ Icon: ChevronDown, name: 'ChevronDown' },
{ Icon: Connections, name: 'Connections' },
{ Icon: Copy, name: 'Copy' },
{ Icon: Cursor, name: 'Cursor' },
{ Icon: DocumentAttachment, name: 'DocumentAttachment' },
{ Icon: Duplicate, name: 'Duplicate' },
{ Icon: Expand, name: 'Expand' },
{ Icon: Eye, name: 'Eye' },
{ Icon: FolderCode, name: 'FolderCode' },
{ Icon: FolderPlus, name: 'FolderPlus' },
{ Icon: Hand, name: 'Hand' },
{ Icon: HexSimple, name: 'HexSimple' },
{ Icon: KeyIcon, name: 'Key' },
{ Icon: Layout, name: 'Layout' },

View File

@@ -1,12 +1,15 @@
'use client'
import { Tooltip } from '@/components/emcn'
import { season } from '@/app/_styles/fonts/season/season'
export default function TemplatesLayoutClient({ children }: { children: React.ReactNode }) {
return (
<div className={`${season.variable} relative flex min-h-screen flex-col font-season`}>
<div className='-z-50 pointer-events-none fixed inset-0 bg-white' />
{children}
</div>
<Tooltip.Provider delayDuration={600} skipDelayDuration={0}>
<div className={`${season.variable} relative flex min-h-screen flex-col font-season`}>
<div className='-z-50 pointer-events-none fixed inset-0 bg-white' />
{children}
</div>
</Tooltip.Provider>
)
}

View File

@@ -1,5 +1,6 @@
'use client'
import { Tooltip } from '@/components/emcn'
import { GlobalCommandsProvider } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { ProviderModelsLoader } from '@/app/workspace/[workspaceId]/providers/provider-models-loader'
import { SettingsLoader } from '@/app/workspace/[workspaceId]/providers/settings-loader'
@@ -12,14 +13,16 @@ export default function WorkspaceLayout({ children }: { children: React.ReactNod
<SettingsLoader />
<ProviderModelsLoader />
<GlobalCommandsProvider>
<div className='flex h-screen w-full bg-[var(--bg)]'>
<WorkspacePermissionsProvider>
<div className='shrink-0' suppressHydrationWarning>
<Sidebar />
</div>
{children}
</WorkspacePermissionsProvider>
</div>
<Tooltip.Provider delayDuration={600} skipDelayDuration={0}>
<div className='flex h-screen w-full bg-[var(--bg)]'>
<WorkspacePermissionsProvider>
<div className='shrink-0' suppressHydrationWarning>
<Sidebar />
</div>
{children}
</WorkspacePermissionsProvider>
</div>
</Tooltip.Provider>
</GlobalCommandsProvider>
</>
)

View File

@@ -19,7 +19,6 @@ export type CommandId =
| 'clear-terminal-console'
| 'focus-toolbar-search'
| 'clear-notifications'
| 'fit-to-view'
/**
* Static metadata for a global command.
@@ -105,11 +104,6 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
shortcut: 'Mod+E',
allowInEditable: false,
},
'fit-to-view': {
id: 'fit-to-view',
shortcut: 'Mod+Shift+F',
allowInEditable: false,
},
}
/**

View File

@@ -1,2 +0,0 @@
export type { BlockInfo, BlockMenuProps } from './block-menu'
export { BlockMenu } from './block-menu'

View File

@@ -1,2 +0,0 @@
export type { CanvasMenuProps } from './canvas-menu'
export { CanvasMenu } from './canvas-menu'

View File

@@ -20,7 +20,6 @@ import {
PopoverItem,
PopoverScrollArea,
PopoverTrigger,
Tooltip,
Trash,
} from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
@@ -30,7 +29,7 @@ import {
extractPathFromOutputId,
parseOutputContentSafely,
} from '@/lib/core/utils/response-format'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
import { StartBlockPath, TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { START_BLOCK_RESERVED_FIELDS } from '@/lib/workflows/types'
import {
@@ -94,9 +93,6 @@ interface ProcessedAttachment {
dataUrl: string
}
/** Timeout for FileReader operations in milliseconds */
const FILE_READ_TIMEOUT_MS = 60000
/**
* Reads files and converts them to data URLs for image display
* @param chatFiles - Array of chat files to process
@@ -110,37 +106,8 @@ const processFileAttachments = async (chatFiles: ChatFile[]): Promise<ProcessedA
try {
dataUrl = await new Promise<string>((resolve, reject) => {
const reader = new FileReader()
let settled = false
const timeoutId = setTimeout(() => {
if (!settled) {
settled = true
reader.abort()
reject(new Error(`File read timed out after ${FILE_READ_TIMEOUT_MS}ms`))
}
}, FILE_READ_TIMEOUT_MS)
reader.onload = () => {
if (!settled) {
settled = true
clearTimeout(timeoutId)
resolve(reader.result as string)
}
}
reader.onerror = () => {
if (!settled) {
settled = true
clearTimeout(timeoutId)
reject(reader.error)
}
}
reader.onabort = () => {
if (!settled) {
settled = true
clearTimeout(timeoutId)
reject(new Error('File read aborted'))
}
}
reader.onload = () => resolve(reader.result as string)
reader.onerror = reject
reader.readAsDataURL(file.file)
})
} catch (error) {
@@ -234,6 +201,7 @@ export function Chat() {
const triggerWorkflowUpdate = useWorkflowStore((state) => state.triggerUpdate)
const setSubBlockValue = useSubBlockStore((state) => state.setValue)
// Chat state (UI and messages from unified store)
const {
isChatOpen,
chatPosition,
@@ -261,16 +229,19 @@ export function Chat() {
const { data: session } = useSession()
const { addToQueue } = useOperationQueue()
// Local state
const [chatMessage, setChatMessage] = useState('')
const [promptHistory, setPromptHistory] = useState<string[]>([])
const [historyIndex, setHistoryIndex] = useState(-1)
const [moreMenuOpen, setMoreMenuOpen] = useState(false)
// Refs
const inputRef = useRef<HTMLInputElement>(null)
const timeoutRef = useRef<NodeJS.Timeout | null>(null)
const streamReaderRef = useRef<ReadableStreamDefaultReader<Uint8Array> | null>(null)
const preventZoomRef = usePreventZoom()
// File upload hook
const {
chatFiles,
uploadErrors,
@@ -285,38 +256,6 @@ export function Chat() {
handleDrop,
} = useChatFileUpload()
const filePreviewUrls = useRef<Map<string, string>>(new Map())
const getFilePreviewUrl = useCallback((file: ChatFile): string | null => {
if (!file.type.startsWith('image/')) return null
const existing = filePreviewUrls.current.get(file.id)
if (existing) return existing
const url = URL.createObjectURL(file.file)
filePreviewUrls.current.set(file.id, url)
return url
}, [])
useEffect(() => {
const currentFileIds = new Set(chatFiles.map((f) => f.id))
const urlMap = filePreviewUrls.current
for (const [fileId, url] of urlMap.entries()) {
if (!currentFileIds.has(fileId)) {
URL.revokeObjectURL(url)
urlMap.delete(fileId)
}
}
return () => {
for (const url of urlMap.values()) {
URL.revokeObjectURL(url)
}
urlMap.clear()
}
}, [chatFiles])
/**
* Resolves the unified start block for chat execution, if available.
*/
@@ -382,11 +321,13 @@ export function Chat() {
const shouldShowConfigureStartInputsButton =
Boolean(startBlockId) && missingStartReservedFields.length > 0
// Get actual position (default if not set)
const actualPosition = useMemo(
() => getChatPosition(chatPosition, chatWidth, chatHeight),
[chatPosition, chatWidth, chatHeight]
)
// Drag hook
const { handleMouseDown } = useFloatDrag({
position: actualPosition,
width: chatWidth,
@@ -394,6 +335,7 @@ export function Chat() {
onPositionChange: setChatPosition,
})
// Boundary sync hook - keeps chat within bounds when layout changes
useFloatBoundarySync({
isOpen: isChatOpen,
position: actualPosition,
@@ -402,6 +344,7 @@ export function Chat() {
onPositionChange: setChatPosition,
})
// Resize hook - enables resizing from all edges and corners
const {
cursor: resizeCursor,
handleMouseMove: handleResizeMouseMove,
@@ -415,11 +358,13 @@ export function Chat() {
onDimensionsChange: setChatDimensions,
})
// Get output entries from console
const outputEntries = useMemo(() => {
if (!activeWorkflowId) return []
return entries.filter((entry) => entry.workflowId === activeWorkflowId && entry.output)
}, [entries, activeWorkflowId])
// Get filtered messages for current workflow
const workflowMessages = useMemo(() => {
if (!activeWorkflowId) return []
return messages
@@ -427,11 +372,14 @@ export function Chat() {
.sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime())
}, [messages, activeWorkflowId])
// Check if any message is currently streaming
const isStreaming = useMemo(() => {
// Match copilot semantics: only treat as streaming if the LAST message is streaming
const lastMessage = workflowMessages[workflowMessages.length - 1]
return Boolean(lastMessage?.isStreaming)
}, [workflowMessages])
// Map chat messages to copilot message format (type -> role) for scroll hook
const messagesForScrollHook = useMemo(() => {
return workflowMessages.map((msg) => ({
...msg,
@@ -439,6 +387,8 @@ export function Chat() {
}))
}, [workflowMessages])
// Scroll management hook - reuse copilot's implementation
// Use immediate scroll behavior to keep the view pinned to the bottom during streaming
const { scrollAreaRef, scrollToBottom } = useScrollManagement(
messagesForScrollHook,
isStreaming,
@@ -447,6 +397,7 @@ export function Chat() {
}
)
// Memoize user messages for performance
const userMessages = useMemo(() => {
return workflowMessages
.filter((msg) => msg.type === 'user')
@@ -454,6 +405,7 @@ export function Chat() {
.filter((content): content is string => typeof content === 'string')
}, [workflowMessages])
// Update prompt history when workflow changes
useEffect(() => {
if (!activeWorkflowId) {
setPromptHistory([])
@@ -466,7 +418,7 @@ export function Chat() {
}, [activeWorkflowId, userMessages])
/**
* Auto-scroll to bottom when messages load and chat is open
* Auto-scroll to bottom when messages load
*/
useEffect(() => {
if (workflowMessages.length > 0 && isChatOpen) {
@@ -474,6 +426,7 @@ export function Chat() {
}
}, [workflowMessages.length, scrollToBottom, isChatOpen])
// Get selected workflow outputs (deduplicated)
const selectedOutputs = useMemo(() => {
if (!activeWorkflowId) return []
const selected = selectedWorkflowOutputs[activeWorkflowId]
@@ -494,6 +447,7 @@ export function Chat() {
}, delay)
}, [])
// Cleanup on unmount
useEffect(() => {
return () => {
timeoutRef.current && clearTimeout(timeoutRef.current)
@@ -501,6 +455,7 @@ export function Chat() {
}
}, [])
// React to execution cancellation from run button
useEffect(() => {
if (!isExecuting && isStreaming) {
const lastMessage = workflowMessages[workflowMessages.length - 1]
@@ -544,6 +499,7 @@ export function Chat() {
const chunk = decoder.decode(value, { stream: true })
buffer += chunk
// Process only complete SSE messages; keep any partial trailing data in buffer
const separatorIndex = buffer.lastIndexOf('\n\n')
if (separatorIndex === -1) {
continue
@@ -593,6 +549,7 @@ export function Chat() {
}
finalizeMessageStream(responseMessageId)
} finally {
// Only clear ref if it's still our reader (prevents clobbering a new stream)
if (streamReaderRef.current === reader) {
streamReaderRef.current = null
}
@@ -912,7 +869,7 @@ export function Chat() {
<div className='flex flex-shrink-0 items-center gap-[8px]'>
{/* More menu with actions */}
<Popover variant='default' size='sm' open={moreMenuOpen} onOpenChange={setMoreMenuOpen}>
<Popover variant='default' open={moreMenuOpen} onOpenChange={setMoreMenuOpen}>
<PopoverTrigger asChild>
<Button
variant='ghost'
@@ -1021,7 +978,8 @@ export function Chat() {
{chatFiles.length > 0 && (
<div className='mt-[4px] flex gap-[6px] overflow-x-auto [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden'>
{chatFiles.map((file) => {
const previewUrl = getFilePreviewUrl(file)
const isImage = file.type.startsWith('image/')
const previewUrl = isImage ? URL.createObjectURL(file.file) : null
return (
<div
@@ -1038,6 +996,7 @@ export function Chat() {
src={previewUrl}
alt={file.name}
className='h-full w-full object-cover'
onLoad={() => URL.revokeObjectURL(previewUrl)}
/>
) : (
<div className='min-w-0 flex-1'>
@@ -1083,21 +1042,17 @@ export function Chat() {
{/* Buttons positioned absolutely on the right */}
<div className='-translate-y-1/2 absolute top-1/2 right-[2px] flex items-center gap-[10px]'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Badge
onClick={() => document.getElementById('floating-chat-file-input')?.click()}
className={cn(
'!bg-transparent !border-0 cursor-pointer rounded-[6px] p-[0px]',
(!activeWorkflowId || isExecuting || chatFiles.length >= 15) &&
'cursor-not-allowed opacity-50'
)}
>
<Paperclip className='!h-3.5 !w-3.5' />
</Badge>
</Tooltip.Trigger>
<Tooltip.Content>Attach file</Tooltip.Content>
</Tooltip.Root>
<Badge
onClick={() => document.getElementById('floating-chat-file-input')?.click()}
title='Attach file'
className={cn(
'!bg-transparent !border-0 cursor-pointer rounded-[6px] p-[0px]',
(!activeWorkflowId || isExecuting || chatFiles.length >= 15) &&
'cursor-not-allowed opacity-50'
)}
>
<Paperclip className='!h-3.5 !w-3.5' />
</Badge>
{isStreaming ? (
<Button

View File

@@ -113,17 +113,16 @@ export function ChatMessage({ message }: ChatMessageProps) {
{message.attachments && message.attachments.length > 0 && (
<div className='mb-2 flex flex-wrap gap-[6px]'>
{message.attachments.map((attachment) => {
const isImage = attachment.type.startsWith('image/')
const hasValidDataUrl =
attachment.dataUrl?.trim() && attachment.dataUrl.startsWith('data:')
// Only treat as displayable image if we have both image type AND valid data URL
const canDisplayAsImage = attachment.type.startsWith('image/') && hasValidDataUrl
return (
<div
key={attachment.id}
className={`group relative flex-shrink-0 overflow-hidden rounded-[6px] bg-[var(--surface-2)] ${
hasValidDataUrl ? 'cursor-pointer' : ''
} ${canDisplayAsImage ? 'h-[40px] w-[40px]' : 'flex min-w-[80px] max-w-[120px] items-center justify-center px-[8px] py-[2px]'}`}
} ${isImage ? 'h-[40px] w-[40px]' : 'flex min-w-[80px] max-w-[120px] items-center justify-center px-[8px] py-[2px]'}`}
onClick={(e) => {
if (hasValidDataUrl) {
e.preventDefault()
@@ -132,7 +131,7 @@ export function ChatMessage({ message }: ChatMessageProps) {
}
}}
>
{canDisplayAsImage ? (
{isImage && hasValidDataUrl ? (
<img
src={attachment.dataUrl}
alt={attachment.name}

View File

@@ -331,16 +331,13 @@ export function OutputSelect({
return (
<Combobox
size='sm'
className='!w-fit !py-[2px] min-w-[100px] rounded-[6px] px-[9px]'
className='!w-fit !py-[2px] [&>svg]:!ml-[4px] [&>svg]:!h-3 [&>svg]:!w-3 [&>span]:!text-[var(--text-secondary)] min-w-[100px] rounded-[6px] bg-transparent px-[9px] hover:bg-[var(--surface-5)] dark:hover:border-[var(--surface-6)] dark:hover:bg-transparent [&>span]:text-center'
groups={comboboxGroups}
options={[]}
multiSelect
multiSelectValues={normalizedSelectedValues}
onMultiSelectChange={onOutputSelect}
placeholder={selectedDisplayText}
overlayContent={
<span className='truncate text-[var(--text-primary)]'>{selectedDisplayText}</span>
}
disabled={disabled || workflowOutputs.length === 0}
align={align}
maxHeight={maxHeight}

View File

@@ -24,11 +24,10 @@ export function useChatFileUpload() {
/**
* Validate and add files
* Uses functional state update to avoid stale closure issues with rapid file additions
*/
const addFiles = useCallback((files: File[]) => {
setChatFiles((currentFiles) => {
const remainingSlots = Math.max(0, MAX_FILES - currentFiles.length)
const addFiles = useCallback(
(files: File[]) => {
const remainingSlots = Math.max(0, MAX_FILES - chatFiles.length)
const candidateFiles = files.slice(0, remainingSlots)
const errors: string[] = []
const validNewFiles: ChatFile[] = []
@@ -40,14 +39,11 @@ export function useChatFileUpload() {
continue
}
// Check for duplicates against current files and newly added valid files
const isDuplicateInCurrent = currentFiles.some(
// Check for duplicates
const isDuplicate = chatFiles.some(
(existingFile) => existingFile.name === file.name && existingFile.size === file.size
)
const isDuplicateInNew = validNewFiles.some(
(newFile) => newFile.name === file.name && newFile.size === file.size
)
if (isDuplicateInCurrent || isDuplicateInNew) {
if (isDuplicate) {
errors.push(`${file.name} already added`)
continue
}
@@ -61,20 +57,20 @@ export function useChatFileUpload() {
})
}
// Update errors outside the state setter to avoid nested state updates
if (errors.length > 0) {
// Use setTimeout to avoid state update during render
setTimeout(() => setUploadErrors(errors), 0)
} else if (validNewFiles.length > 0) {
setTimeout(() => setUploadErrors([]), 0)
setUploadErrors(errors)
}
if (validNewFiles.length > 0) {
return [...currentFiles, ...validNewFiles]
setChatFiles([...chatFiles, ...validNewFiles])
// Clear errors when files are successfully added
if (errors.length === 0) {
setUploadErrors([])
}
}
return currentFiles
})
}, [])
},
[chatFiles]
)
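For reference, the stale-closure concern mentioned in the removed comment comes down to where the updater reads the current file list from. A minimal sketch, not taken from this codebase:
// Illustrative only — 'newFile' stands in for a validated ChatFile.
setChatFiles((current) => [...current, newFile]) // functional: always sees the latest list
setChatFiles([...chatFiles, newFile]) // direct: 'chatFiles' was captured when the callback was
// created, so two additions before a re-render can both start from the same stale array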
/**
* Remove a file

View File

@@ -1,6 +1,5 @@
'use client'
import type { RefObject } from 'react'
import {
Popover,
PopoverAnchor,
@@ -8,48 +7,14 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
/**
* Block information for context menu actions
*/
export interface BlockInfo {
id: string
type: string
enabled: boolean
horizontalHandles: boolean
parentId?: string
parentType?: string
}
/**
* Props for BlockMenu component
*/
export interface BlockMenuProps {
isOpen: boolean
position: { x: number; y: number }
menuRef: RefObject<HTMLDivElement | null>
onClose: () => void
selectedBlocks: BlockInfo[]
onCopy: () => void
onPaste: () => void
onDuplicate: () => void
onDelete: () => void
onToggleEnabled: () => void
onToggleHandles: () => void
onRemoveFromSubflow: () => void
onOpenEditor: () => void
onRename: () => void
hasClipboard?: boolean
showRemoveFromSubflow?: boolean
disableEdit?: boolean
}
import type { BlockContextMenuProps } from './types'
/**
* Context menu for workflow block(s).
* Displays block-specific actions in a popover at right-click position.
* Supports multi-selection - actions apply to all selected blocks.
*/
export function BlockMenu({
export function BlockContextMenu({
isOpen,
position,
menuRef,
@@ -67,7 +32,7 @@ export function BlockMenu({
hasClipboard = false,
showRemoveFromSubflow = false,
disableEdit = false,
}: BlockMenuProps) {
}: BlockContextMenuProps) {
const isSingleBlock = selectedBlocks.length === 1
const allEnabled = selectedBlocks.every((b) => b.enabled)

View File

@@ -0,0 +1,8 @@
export { BlockContextMenu } from './block-context-menu'
export { PaneContextMenu } from './pane-context-menu'
export type {
BlockContextMenuProps,
ContextMenuBlockInfo,
ContextMenuPosition,
PaneContextMenuProps,
} from './types'

View File

@@ -1,6 +1,5 @@
'use client'
import type { RefObject } from 'react'
import {
Popover,
PopoverAnchor,
@@ -8,40 +7,13 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
import type { PaneContextMenuProps } from './types'
/**
* Props for CanvasMenu component
*/
export interface CanvasMenuProps {
isOpen: boolean
position: { x: number; y: number }
menuRef: RefObject<HTMLDivElement | null>
onClose: () => void
onUndo: () => void
onRedo: () => void
onPaste: () => void
onAddBlock: () => void
onAutoLayout: () => void
onFitToView: () => void
onOpenLogs: () => void
onToggleVariables: () => void
onToggleChat: () => void
onInvite: () => void
isVariablesOpen?: boolean
isChatOpen?: boolean
hasClipboard?: boolean
disableEdit?: boolean
disableAdmin?: boolean
canUndo?: boolean
canRedo?: boolean
isInvitationsDisabled?: boolean
}
/**
* Context menu for workflow canvas.
* Context menu for workflow canvas pane.
* Displays canvas-level actions when right-clicking empty space.
*/
export function CanvasMenu({
export function PaneContextMenu({
isOpen,
position,
menuRef,
@@ -51,7 +23,6 @@ export function CanvasMenu({
onPaste,
onAddBlock,
onAutoLayout,
onFitToView,
onOpenLogs,
onToggleVariables,
onToggleChat,
@@ -64,7 +35,7 @@ export function CanvasMenu({
canUndo = false,
canRedo = false,
isInvitationsDisabled = false,
}: CanvasMenuProps) {
}: PaneContextMenuProps) {
return (
<Popover
open={isOpen}
@@ -142,14 +113,6 @@ export function CanvasMenu({
<span>Auto-layout</span>
<span className='ml-auto opacity-70 group-hover:opacity-100'>L</span>
</PopoverItem>
<PopoverItem
onClick={() => {
onFitToView()
onClose()
}}
>
Fit to View
</PopoverItem>
{/* Navigation actions */}
<PopoverDivider />

View File

@@ -0,0 +1,99 @@
import type { RefObject } from 'react'
/**
* Position for context menu placement
*/
export interface ContextMenuPosition {
x: number
y: number
}
/**
* Block information passed to context menu for action handling
*/
export interface ContextMenuBlockInfo {
/** Block ID */
id: string
/** Block type (e.g., 'agent', 'function', 'loop') */
type: string
/** Whether block is enabled */
enabled: boolean
/** Whether block uses horizontal handles */
horizontalHandles: boolean
/** Parent subflow ID if nested in loop/parallel */
parentId?: string
/** Parent type ('loop' | 'parallel') if nested */
parentType?: string
}
/**
* Props for BlockContextMenu component
*/
export interface BlockContextMenuProps {
/** Whether the context menu is open */
isOpen: boolean
/** Position of the context menu */
position: ContextMenuPosition
/** Ref for the menu element (for click-outside detection) */
menuRef: RefObject<HTMLDivElement | null>
/** Callback when menu should close */
onClose: () => void
/** Selected block(s) info */
selectedBlocks: ContextMenuBlockInfo[]
/** Callbacks for menu actions */
onCopy: () => void
onPaste: () => void
onDuplicate: () => void
onDelete: () => void
onToggleEnabled: () => void
onToggleHandles: () => void
onRemoveFromSubflow: () => void
onOpenEditor: () => void
onRename: () => void
/** Whether clipboard has content for pasting */
hasClipboard?: boolean
/** Whether remove from subflow option should be shown */
showRemoveFromSubflow?: boolean
/** Whether edit actions are disabled (no permission) */
disableEdit?: boolean
}
/**
* Props for PaneContextMenu component
*/
export interface PaneContextMenuProps {
/** Whether the context menu is open */
isOpen: boolean
/** Position of the context menu */
position: ContextMenuPosition
/** Ref for the menu element */
menuRef: RefObject<HTMLDivElement | null>
/** Callback when menu should close */
onClose: () => void
/** Callbacks for menu actions */
onUndo: () => void
onRedo: () => void
onPaste: () => void
onAddBlock: () => void
onAutoLayout: () => void
onOpenLogs: () => void
onToggleVariables: () => void
onToggleChat: () => void
onInvite: () => void
/** Whether the variables panel is currently open */
isVariablesOpen?: boolean
/** Whether the chat panel is currently open */
isChatOpen?: boolean
/** Whether clipboard has content for pasting */
hasClipboard?: boolean
/** Whether edit actions are disabled (no permission) */
disableEdit?: boolean
/** Whether admin actions are disabled (no admin permission) */
disableAdmin?: boolean
/** Whether undo is available */
canUndo?: boolean
/** Whether redo is available */
canRedo?: boolean
/** Whether invitations are disabled (feature flag or permission group) */
isInvitationsDisabled?: boolean
}

View File

@@ -2,6 +2,7 @@
import { memo, useMemo } from 'react'
import { useViewport } from 'reactflow'
import { useSession } from '@/lib/auth/auth-client'
import { getUserColor } from '@/lib/workspaces/colors'
import { usePreventZoom } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useSocket } from '@/app/workspace/providers/socket-provider'
@@ -19,31 +20,30 @@ interface CursorRenderData {
}
const CursorsComponent = () => {
const { presenceUsers, currentSocketId } = useSocket()
const { presenceUsers } = useSocket()
const viewport = useViewport()
const session = useSession()
const currentUserId = session.data?.user?.id
const preventZoomRef = usePreventZoom()
const cursors = useMemo<CursorRenderData[]>(() => {
return presenceUsers
.filter((user): user is typeof user & { cursor: CursorPoint } => Boolean(user.cursor))
.filter((user) => user.socketId !== currentSocketId)
.filter((user) => user.userId !== currentUserId)
.map((user) => ({
id: user.socketId,
name: user.userName?.trim() || 'Collaborator',
cursor: user.cursor,
color: getUserColor(user.userId),
}))
}, [currentSocketId, presenceUsers])
}, [currentUserId, presenceUsers])
if (!cursors.length) {
return null
}
return (
<div
ref={preventZoomRef}
className='pointer-events-none absolute inset-0 z-[5] select-none overflow-hidden'
>
<div ref={preventZoomRef} className='pointer-events-none absolute inset-0 z-30 select-none'>
{cursors.map(({ id, name, cursor, color }) => {
const x = cursor.x * viewport.zoom + viewport.x
const y = cursor.y * viewport.zoom + viewport.y
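The projection above is the usual flow-to-screen transform; a quick worked example with made-up numbers: a cursor at flow position (100, 50), zoom 2, and a viewport panned to (-20, 10) renders at screen position (180, 110).
// Made-up values for illustration.
const viewport = { x: -20, y: 10, zoom: 2 }
const cursor = { x: 100, y: 50 }
const x = cursor.x * viewport.zoom + viewport.x // 100 * 2 + (-20) = 180
const y = cursor.y * viewport.zoom + viewport.y // 50 * 2 + 10 = 110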

View File

@@ -4,7 +4,6 @@ import clsx from 'clsx'
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { createCommand } from '@/app/workspace/[workspaceId]/utils/commands-utils'
import { usePreventZoom } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useNotificationStore } from '@/stores/notifications'
import { useCopilotStore, usePanelStore } from '@/stores/panel'
import { useTerminalStore } from '@/stores/terminal'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
@@ -13,8 +12,6 @@ import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('DiffControls')
const NOTIFICATION_WIDTH = 240
const NOTIFICATION_GAP = 16
export const DiffControls = memo(function DiffControls() {
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
@@ -48,12 +45,6 @@ export const DiffControls = memo(function DiffControls() {
useCallback((state) => ({ activeWorkflowId: state.activeWorkflowId }), [])
)
const allNotifications = useNotificationStore((state) => state.notifications)
const hasVisibleNotifications = useMemo(() => {
if (!activeWorkflowId) return false
return allNotifications.some((n) => !n.workflowId || n.workflowId === activeWorkflowId)
}, [allNotifications, activeWorkflowId])
const createCheckpoint = useCallback(async () => {
if (!activeWorkflowId || !currentChat?.id) {
logger.warn('Cannot create checkpoint: missing workflowId or chatId', {
@@ -304,15 +295,16 @@ export const DiffControls = memo(function DiffControls() {
const isResizing = isTerminalResizing || isPanelResizing
const notificationOffset = hasVisibleNotifications ? NOTIFICATION_WIDTH + NOTIFICATION_GAP : 0
return (
<div
ref={preventZoomRef}
className={clsx('fixed z-30', !isResizing && 'transition-[bottom] duration-100 ease-out')}
className={clsx(
'fixed z-30',
!isResizing && 'transition-[bottom,right] duration-100 ease-out'
)}
style={{
bottom: 'calc(var(--terminal-height) + 16px)',
right: `calc(var(--panel-width) + 16px + ${notificationOffset}px)`,
right: 'calc(var(--panel-width) + 16px)',
}}
>
<div

View File

@@ -1,5 +1,3 @@
export { BlockMenu } from './block-menu'
export { CanvasMenu } from './canvas-menu'
export { CommandList } from './command-list/command-list'
export { Cursors } from './cursors/cursors'
export { DiffControls } from './diff-controls/diff-controls'
@@ -10,5 +8,4 @@ export { SubflowNodeComponent } from './subflows/subflow-node'
export { Terminal } from './terminal/terminal'
export { WandPromptBar } from './wand-prompt-bar/wand-prompt-bar'
export { WorkflowBlock } from './workflow-block/workflow-block'
export { WorkflowControls } from './workflow-controls'
export { WorkflowEdge } from './workflow-edge/workflow-edge'

View File

@@ -4,13 +4,13 @@ import type { NodeProps } from 'reactflow'
import remarkGfm from 'remark-gfm'
import { cn } from '@/lib/core/utils/cn'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { ActionBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/action-bar/action-bar'
import { useBlockVisual } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import {
BLOCK_DIMENSIONS,
useBlockDimensions,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-block-dimensions'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { ActionBar } from '../workflow-block/components'
import type { WorkflowBlockProps } from '../workflow-block/types'
interface NoteBlockNodeData extends WorkflowBlockProps {}

View File

@@ -11,7 +11,7 @@ import {
openCopilotWithMessage,
useNotificationStore,
} from '@/stores/notifications'
import { usePanelStore } from '@/stores/panel'
import { useSidebarStore } from '@/stores/sidebar/store'
import { useTerminalStore } from '@/stores/terminal'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -19,9 +19,9 @@ const logger = createLogger('Notifications')
const MAX_VISIBLE_NOTIFICATIONS = 4
/**
* Notifications display component.
* Positioned in the bottom-right workspace area, reactive to panel width and terminal height.
* Shows both global notifications and workflow-specific notifications.
* Notifications display component
* Positioned in the bottom-left workspace area, reactive to sidebar width and terminal height
* Shows both global notifications and workflow-specific notifications
*/
export const Notifications = memo(function Notifications() {
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
@@ -37,7 +37,7 @@ export const Notifications = memo(function Notifications() {
.slice(0, MAX_VISIBLE_NOTIFICATIONS)
}, [allNotifications, activeWorkflowId])
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
const isPanelResizing = usePanelStore((state) => state.isResizing)
const isSidebarResizing = useSidebarStore((state) => state.isResizing)
/**
* Executes a notification action and handles side effects.
@@ -105,19 +105,15 @@ export const Notifications = memo(function Notifications() {
return null
}
const isResizing = isTerminalResizing || isPanelResizing
const isResizing = isTerminalResizing || isSidebarResizing
return (
<div
ref={preventZoomRef}
className={clsx(
'fixed z-30 flex flex-col items-start',
!isResizing && 'transition-[bottom,right] duration-100 ease-out'
'fixed bottom-[calc(var(--terminal-height)+16px)] left-[calc(var(--sidebar-width)+16px)] z-30 flex flex-col items-start',
!isResizing && 'transition-[bottom,left] duration-100 ease-out'
)}
style={{
bottom: 'calc(var(--terminal-height) + 16px)',
right: 'calc(var(--panel-width) + 16px)',
}}
>
{[...visibleNotifications].reverse().map((notification, index, stacked) => {
const depth = stacked.length - index - 1
@@ -127,13 +123,8 @@ export const Notifications = memo(function Notifications() {
return (
<div
key={notification.id}
style={
{
'--stack-offset': `${xOffset}px`,
animation: 'notification-enter 200ms ease-out forwards',
} as React.CSSProperties
}
className={`relative h-[80px] w-[240px] overflow-hidden rounded-[4px] border bg-[var(--surface-2)] ${
style={{ transform: `translateX(${xOffset}px)` }}
className={`relative h-[80px] w-[240px] overflow-hidden rounded-[4px] border bg-[var(--surface-2)] transition-transform duration-200 ${
index > 0 ? '-mt-[80px]' : ''
}`}
>

View File

@@ -22,7 +22,7 @@ import {
import { Skeleton } from '@/components/ui'
import type { AgentAuthentication, AgentCapabilities } from '@/lib/a2a/types'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
import { StartBlockPath, TriggerUtils } from '@/lib/workflows/triggers/triggers'
import {
useA2AAgentByWorkflow,

View File

@@ -3,12 +3,16 @@
import { useState } from 'react'
import { Check, Clipboard } from 'lucide-react'
import {
Badge,
Button,
ButtonGroup,
ButtonGroupItem,
Code,
Combobox,
Label,
Popover,
PopoverContent,
PopoverItem,
PopoverTrigger,
Tooltip,
} from '@/components/emcn'
import { Skeleton } from '@/components/ui'
@@ -598,19 +602,48 @@ console.log(limits);`
<span>{copied.async ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
<Combobox
size='sm'
className='!w-fit !py-[2px] min-w-[100px] rounded-[6px] px-[9px]'
options={[
{ label: 'Execute Job', value: 'execute' },
{ label: 'Check Status', value: 'status' },
{ label: 'Rate Limits', value: 'rate-limits' },
]}
value={asyncExampleType}
onChange={(value) => setAsyncExampleType(value as AsyncExampleType)}
align='end'
dropdownWidth={160}
/>
<Popover>
<PopoverTrigger asChild>
<div className='min-w-0 max-w-full'>
<Badge
variant='outline'
className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
>
<span className='whitespace-nowrap text-[12px]'>
{getAsyncExampleTitle()}
</span>
</Badge>
</div>
</PopoverTrigger>
<PopoverContent
side='bottom'
align='end'
sideOffset={4}
maxHeight={300}
maxWidth={300}
minWidth={160}
border
>
<PopoverItem
active={asyncExampleType === 'execute'}
onClick={() => setAsyncExampleType('execute')}
>
Execute Job
</PopoverItem>
<PopoverItem
active={asyncExampleType === 'status'}
onClick={() => setAsyncExampleType('status')}
>
Check Status
</PopoverItem>
<PopoverItem
active={asyncExampleType === 'rate-limits'}
onClick={() => setAsyncExampleType('rate-limits')}
>
Rate Limits
</PopoverItem>
</PopoverContent>
</Popover>
</div>
</div>
<Code.Viewer

View File

@@ -2,11 +2,11 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Maximize2 } from 'lucide-react'
import {
Button,
ButtonGroup,
ButtonGroupItem,
Expand,
Label,
Modal,
ModalBody,
@@ -222,7 +222,7 @@ export function GeneralDeploy({
onClick={() => setShowExpandedPreview(true)}
className='absolute right-[8px] bottom-[8px] z-10 h-[28px] w-[28px] cursor-pointer border border-[var(--border)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
>
<Expand className='h-[14px] w-[14px]' />
<Maximize2 className='h-[14px] w-[14px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>See preview</Tooltip.Content>
@@ -334,6 +334,7 @@ export function GeneralDeploy({
}}
onPaneClick={() => setExpandedSelectedBlockId(null)}
selectedBlockId={expandedSelectedBlockId}
lightweight
/>
</div>
{expandedSelectedBlockId && workflowToShow.blocks?.[expandedSelectedBlockId] && (

View File

@@ -14,7 +14,7 @@ import {
} from '@/components/emcn'
import { Skeleton } from '@/components/ui'
import { generateToolInputSchema, sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
import { isValidStartBlockType } from '@/lib/workflows/triggers/trigger-utils'
import type { InputFormatField } from '@/lib/workflows/types'
import {

View File

@@ -23,7 +23,6 @@ import { CreateApiKeyModal } from '@/app/workspace/[workspaceId]/w/components/si
import { startsWithUuid } from '@/executor/constants'
import { useApiKeys } from '@/hooks/queries/api-keys'
import { useWorkspaceSettings } from '@/hooks/queries/workspace'
import { usePermissionConfig } from '@/hooks/use-permission-config'
import { useSettingsModalStore } from '@/stores/modals/settings/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -114,12 +113,16 @@ export function DeployModal({
const [existingChat, setExistingChat] = useState<ExistingChat | null>(null)
const [isLoadingChat, setIsLoadingChat] = useState(false)
const [formSubmitting, setFormSubmitting] = useState(false)
const [formExists, setFormExists] = useState(false)
const [isFormValid, setIsFormValid] = useState(false)
const [chatSuccess, setChatSuccess] = useState(false)
const [formSuccess, setFormSuccess] = useState(false)
const [isCreateKeyModalOpen, setIsCreateKeyModalOpen] = useState(false)
const userPermissions = useUserPermissionsContext()
const canManageWorkspaceKeys = userPermissions.canAdmin
const { config: permissionConfig } = usePermissionConfig()
const { data: apiKeysData, isLoading: isLoadingKeys } = useApiKeys(workflowWorkspaceId || '')
const { data: workspaceSettingsData, isLoading: isLoadingSettings } = useWorkspaceSettings(
workflowWorkspaceId || ''
@@ -186,7 +189,6 @@ export function DeployModal({
useEffect(() => {
if (open && workflowId) {
setActiveTab('general')
setApiDeployError(null)
fetchChatDeploymentInfo()
}
}, [open, workflowId, fetchChatDeploymentInfo])
@@ -505,7 +507,6 @@ export function DeployModal({
const handleCloseModal = () => {
setIsSubmitting(false)
setChatSubmitting(false)
setApiDeployError(null)
onOpenChange(false)
}
@@ -515,6 +516,12 @@ export function DeployModal({
setTimeout(() => setChatSuccess(false), 2000)
}
const handleFormDeployed = async () => {
await handlePostDeploymentUpdate()
setFormSuccess(true)
setTimeout(() => setFormSuccess(false), 2000)
}
const handlePostDeploymentUpdate = async () => {
if (!workflowId) return
@@ -623,6 +630,17 @@ export function DeployModal({
deleteTrigger?.click()
}, [])
const handleFormFormSubmit = useCallback(() => {
const form = document.getElementById('form-deploy-form') as HTMLFormElement
form?.requestSubmit()
}, [])
const handleFormDelete = useCallback(() => {
const form = document.getElementById('form-deploy-form')
const deleteTrigger = form?.querySelector('[data-delete-trigger]') as HTMLButtonElement
deleteTrigger?.click()
}, [])
return (
<>
<Modal open={open} onOpenChange={handleCloseModal}>
@@ -636,31 +654,15 @@ export function DeployModal({
>
<ModalTabsList activeValue={activeTab}>
<ModalTabsTrigger value='general'>General</ModalTabsTrigger>
{!permissionConfig.hideDeployApi && (
<ModalTabsTrigger value='api'>API</ModalTabsTrigger>
)}
{!permissionConfig.hideDeployMcp && (
<ModalTabsTrigger value='mcp'>MCP</ModalTabsTrigger>
)}
{!permissionConfig.hideDeployA2a && (
<ModalTabsTrigger value='a2a'>A2A</ModalTabsTrigger>
)}
{!permissionConfig.hideDeployChatbot && (
<ModalTabsTrigger value='chat'>Chat</ModalTabsTrigger>
)}
<ModalTabsTrigger value='api'>API</ModalTabsTrigger>
<ModalTabsTrigger value='mcp'>MCP</ModalTabsTrigger>
<ModalTabsTrigger value='a2a'>A2A</ModalTabsTrigger>
<ModalTabsTrigger value='chat'>Chat</ModalTabsTrigger>
{/* <ModalTabsTrigger value='form'>Form</ModalTabsTrigger> */}
{!permissionConfig.hideDeployTemplate && (
<ModalTabsTrigger value='template'>Template</ModalTabsTrigger>
)}
<ModalTabsTrigger value='template'>Template</ModalTabsTrigger>
</ModalTabsList>
<ModalBody className='min-h-0 flex-1'>
{apiDeployError && (
<div className='mb-3 rounded-[4px] border border-destructive/30 bg-destructive/10 p-3 text-destructive text-sm'>
<div className='font-semibold'>Deployment Error</div>
<div>{apiDeployError}</div>
</div>
)}
<ModalTabsContent value='general'>
<GeneralDeploy
workflowId={workflowId}

View File

@@ -33,4 +33,5 @@ export { Table } from './table/table'
export { Text } from './text/text'
export { TimeInput } from './time-input/time-input'
export { ToolInput } from './tool-input/tool-input'
export { TriggerSave } from './trigger-save/trigger-save'
export { VariablesInput } from './variables-input/variables-input'

View File

@@ -1,4 +1,4 @@
import { useMemo, useRef, useState } from 'react'
import { useEffect, useMemo, useRef, useState } from 'react'
import { Badge, Input } from '@/components/emcn'
import { Label } from '@/components/ui/label'
import { cn } from '@/lib/core/utils/cn'
@@ -7,7 +7,39 @@ import { TagDropdown } from '@/app/workspace/[workspaceId]/w/[workflowId]/compon
import { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { useWorkflowInputFields } from '@/hooks/queries/workflows'
/**
* Represents a field in the input format configuration
*/
interface InputFormatField {
name: string
type?: string
}
/**
* Represents an input trigger block structure
*/
interface InputTriggerBlock {
type: 'input_trigger' | 'start_trigger'
subBlocks?: {
inputFormat?: { value?: InputFormatField[] }
}
}
/**
* Represents a legacy starter block structure
*/
interface StarterBlockLegacy {
type: 'starter'
subBlocks?: {
inputFormat?: { value?: InputFormatField[] }
}
config?: {
params?: {
inputFormat?: InputFormatField[]
}
}
}
/**
* Props for the InputMappingField component
@@ -38,6 +70,73 @@ interface InputMappingProps {
disabled?: boolean
}
/**
* Type guard to check if a value is an InputTriggerBlock
* @param value - The value to check
* @returns True if the value is an InputTriggerBlock
*/
function isInputTriggerBlock(value: unknown): value is InputTriggerBlock {
  if (!value || typeof value !== 'object') return false
  const type = (value as { type?: unknown }).type
  return type === 'input_trigger' || type === 'start_trigger'
}
/**
* Type guard to check if a value is a StarterBlockLegacy
* @param value - The value to check
* @returns True if the value is a StarterBlockLegacy
*/
function isStarterBlock(value: unknown): value is StarterBlockLegacy {
return !!value && typeof value === 'object' && (value as { type?: unknown }).type === 'starter'
}
/**
* Type guard to check if a value is an InputFormatField
* @param value - The value to check
* @returns True if the value is an InputFormatField
*/
function isInputFormatField(value: unknown): value is InputFormatField {
if (typeof value !== 'object' || value === null) return false
if (!('name' in value)) return false
const { name, type } = value as { name: unknown; type?: unknown }
if (typeof name !== 'string' || name.trim() === '') return false
if (type !== undefined && typeof type !== 'string') return false
return true
}
/**
* Extracts input format fields from workflow blocks
* @param blocks - The workflow blocks to extract from
* @returns Array of input format fields or null if not found
*/
function extractInputFormatFields(blocks: Record<string, unknown>): InputFormatField[] | null {
const triggerEntry = Object.entries(blocks).find(([, b]) => isInputTriggerBlock(b))
if (triggerEntry && isInputTriggerBlock(triggerEntry[1])) {
const inputFormat = triggerEntry[1].subBlocks?.inputFormat?.value
if (Array.isArray(inputFormat)) {
return (inputFormat as unknown[])
.filter(isInputFormatField)
.map((f) => ({ name: f.name, type: f.type }))
}
}
const starterEntry = Object.entries(blocks).find(([, b]) => isStarterBlock(b))
if (starterEntry && isStarterBlock(starterEntry[1])) {
const starter = starterEntry[1]
const subBlockFormat = starter.subBlocks?.inputFormat?.value
const legacyParamsFormat = starter.config?.params?.inputFormat
const chosen = Array.isArray(subBlockFormat) ? subBlockFormat : legacyParamsFormat
if (Array.isArray(chosen)) {
return (chosen as unknown[])
.filter(isInputFormatField)
.map((f) => ({ name: f.name, type: f.type }))
}
}
return null
}
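A small worked example of what this helper returns (block id and field names invented for illustration):
// Illustrative input — not data from this repository.
const exampleBlocks: Record<string, unknown> = {
  'block-1': {
    type: 'input_trigger',
    subBlocks: { inputFormat: { value: [{ name: 'email', type: 'string' }, { name: 42 }] } },
  },
}
// extractInputFormatFields(exampleBlocks) -> [{ name: 'email', type: 'string' }]
// The second entry is dropped because isInputFormatField requires a non-empty string name.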
/**
* InputMapping component displays and manages input field mappings for workflow execution
* @param props - The component props
@@ -69,10 +168,62 @@ export function InputMapping({
const inputRefs = useRef<Map<string, HTMLInputElement>>(new Map())
const overlayRefs = useRef<Map<string, HTMLDivElement>>(new Map())
const workflowId = typeof selectedWorkflowId === 'string' ? selectedWorkflowId : undefined
const { data: childInputFields = [], isLoading } = useWorkflowInputFields(workflowId)
const [childInputFields, setChildInputFields] = useState<InputFormatField[]>([])
const [isLoading, setIsLoading] = useState(false)
const [collapsedFields, setCollapsedFields] = useState<Record<string, boolean>>({})
useEffect(() => {
let isMounted = true
const controller = new AbortController()
async function fetchChildSchema() {
if (!selectedWorkflowId) {
if (isMounted) {
setChildInputFields([])
setIsLoading(false)
}
return
}
try {
if (isMounted) setIsLoading(true)
const res = await fetch(`/api/workflows/${selectedWorkflowId}`, {
signal: controller.signal,
})
if (!res.ok) {
if (isMounted) {
setChildInputFields([])
setIsLoading(false)
}
return
}
const { data } = await res.json()
const blocks = (data?.state?.blocks as Record<string, unknown>) || {}
const fields = extractInputFormatFields(blocks)
if (isMounted) {
setChildInputFields(fields || [])
setIsLoading(false)
}
} catch (error) {
if (isMounted) {
setChildInputFields([])
setIsLoading(false)
}
}
}
fetchChildSchema()
return () => {
isMounted = false
controller.abort()
}
}, [selectedWorkflowId])
const valueObj: Record<string, string> = useMemo(() => {
if (isPreview && previewValue && typeof previewValue === 'object') {
return previewValue as Record<string, string>

View File

@@ -1,6 +1,7 @@
import type React from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQuery, useQueryClient } from '@tanstack/react-query'
import { Loader2, WrenchIcon, XIcon } from 'lucide-react'
import { useParams } from 'next/navigation'
import {
@@ -60,7 +61,7 @@ import {
useCustomTools,
} from '@/hooks/queries/custom-tools'
import { useForceRefreshMcpTools, useMcpServers, useStoredMcpTools } from '@/hooks/queries/mcp'
import { useWorkflowInputFields, useWorkflows } from '@/hooks/queries/workflows'
import { useWorkflows } from '@/hooks/queries/workflows'
import { usePermissionConfig } from '@/hooks/use-permission-config'
import { getProviderFromModel, supportsToolUsageControl } from '@/providers/utils'
import { useSettingsModalStore } from '@/stores/modals/settings/store'
@@ -644,7 +645,56 @@ function WorkflowInputMapperSyncWrapper({
disabled: boolean
workflowId: string
}) {
const { data: inputFields = [], isLoading } = useWorkflowInputFields(workflowId)
const { data: workflowData, isLoading } = useQuery({
queryKey: ['workflow-input-fields', workflowId],
queryFn: async () => {
const response = await fetch(`/api/workflows/${workflowId}`)
if (!response.ok) throw new Error('Failed to fetch workflow')
const { data } = await response.json()
return data
},
enabled: Boolean(workflowId),
staleTime: 60 * 1000,
})
const inputFields = useMemo(() => {
if (!workflowData?.state?.blocks) return []
const blocks = workflowData.state.blocks as Record<string, any>
const triggerEntry = Object.entries(blocks).find(
([, block]) =>
block.type === 'start_trigger' || block.type === 'input_trigger' || block.type === 'starter'
)
if (!triggerEntry) return []
const triggerBlock = triggerEntry[1]
const inputFormat = triggerBlock.subBlocks?.inputFormat?.value
if (Array.isArray(inputFormat)) {
return inputFormat
.filter((field: any) => field.name && typeof field.name === 'string')
.map((field: any) => ({
name: field.name,
type: field.type || 'string',
}))
}
const legacyFormat = triggerBlock.config?.params?.inputFormat
if (Array.isArray(legacyFormat)) {
return legacyFormat
.filter((field: any) => field.name && typeof field.name === 'string')
.map((field: any) => ({
name: field.name,
type: field.type || 'string',
}))
}
return []
}, [workflowData])
const parsedValue = useMemo(() => {
try {
@@ -893,8 +943,9 @@ export function ToolInput({
const params = useParams()
const workspaceId = params.workspaceId as string
const workflowId = params.workflowId as string
const queryClient = useQueryClient()
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
const [open, setOpen] = useState(false)
const [_, setOpen] = useState(false)
const [customToolModalOpen, setCustomToolModalOpen] = useState(false)
const [editingToolIndex, setEditingToolIndex] = useState<number | null>(null)
const [draggedIndex, setDraggedIndex] = useState<number | null>(null)
@@ -2378,7 +2429,14 @@ export function ToolInput({
})()}
{(tool.type === 'workflow' || tool.type === 'workflow_input') &&
tool.params?.workflowId && (
<WorkflowToolDeployBadge workflowId={tool.params.workflowId} />
<WorkflowToolDeployBadge
workflowId={tool.params.workflowId}
onDeploySuccess={() => {
queryClient.invalidateQueries({
queryKey: ['workflow-input-fields', tool.params?.workflowId],
})
}}
/>
)}
</div>
<div className='flex flex-shrink-0 items-center gap-[8px]'>

View File

@@ -0,0 +1,348 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import {
Button,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
} from '@/components/emcn/components'
import { Trash } from '@/components/emcn/icons/trash'
import { cn } from '@/lib/core/utils/cn'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useTriggerConfigAggregation } from '@/hooks/use-trigger-config-aggregation'
import { useWebhookManagement } from '@/hooks/use-webhook-management'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getTrigger, isTriggerValid } from '@/triggers'
import { SYSTEM_SUBBLOCK_IDS } from '@/triggers/constants'
const logger = createLogger('TriggerSave')
interface TriggerSaveProps {
blockId: string
subBlockId: string
triggerId?: string
isPreview?: boolean
disabled?: boolean
}
type SaveStatus = 'idle' | 'saving' | 'saved' | 'error'
export function TriggerSave({
blockId,
subBlockId,
triggerId,
isPreview = false,
disabled = false,
}: TriggerSaveProps) {
const [saveStatus, setSaveStatus] = useState<SaveStatus>('idle')
const [errorMessage, setErrorMessage] = useState<string | null>(null)
const [deleteStatus, setDeleteStatus] = useState<'idle' | 'deleting'>('idle')
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const effectiveTriggerId = useMemo(() => {
if (triggerId && isTriggerValid(triggerId)) {
return triggerId
}
const selectedTriggerId = useSubBlockStore.getState().getValue(blockId, 'selectedTriggerId')
if (typeof selectedTriggerId === 'string' && isTriggerValid(selectedTriggerId)) {
return selectedTriggerId
}
return triggerId
}, [blockId, triggerId])
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
const { webhookId, saveConfig, deleteConfig, isLoading } = useWebhookManagement({
blockId,
triggerId: effectiveTriggerId,
isPreview,
useWebhookUrl: true, // to store the webhook url in the store
})
const triggerConfig = useSubBlockStore((state) => state.getValue(blockId, 'triggerConfig'))
const triggerCredentials = useSubBlockStore((state) =>
state.getValue(blockId, 'triggerCredentials')
)
const triggerDef =
effectiveTriggerId && isTriggerValid(effectiveTriggerId) ? getTrigger(effectiveTriggerId) : null
const validateRequiredFields = useCallback(
(
configToCheck: Record<string, any> | null | undefined
): { valid: boolean; missingFields: string[] } => {
if (!triggerDef) {
return { valid: true, missingFields: [] }
}
const missingFields: string[] = []
triggerDef.subBlocks
.filter(
(sb) => sb.required && sb.mode === 'trigger' && !SYSTEM_SUBBLOCK_IDS.includes(sb.id)
)
.forEach((subBlock) => {
if (subBlock.id === 'triggerCredentials') {
if (!triggerCredentials) {
missingFields.push(subBlock.title || 'Credentials')
}
} else {
const value = configToCheck?.[subBlock.id]
if (value === undefined || value === null || value === '') {
missingFields.push(subBlock.title || subBlock.id)
}
}
})
return {
valid: missingFields.length === 0,
missingFields,
}
},
[triggerDef, triggerCredentials]
)
const requiredSubBlockIds = useMemo(() => {
if (!triggerDef) return []
return triggerDef.subBlocks
.filter((sb) => sb.required && sb.mode === 'trigger' && !SYSTEM_SUBBLOCK_IDS.includes(sb.id))
.map((sb) => sb.id)
}, [triggerDef])
const subscribedSubBlockValues = useSubBlockStore(
useCallback(
(state) => {
if (!triggerDef) return {}
const values: Record<string, any> = {}
requiredSubBlockIds.forEach((subBlockId) => {
const value = state.getValue(blockId, subBlockId)
if (value !== null && value !== undefined && value !== '') {
values[subBlockId] = value
}
})
return values
},
[blockId, triggerDef, requiredSubBlockIds]
)
)
const previousValuesRef = useRef<Record<string, any>>({})
const validationTimeoutRef = useRef<NodeJS.Timeout | null>(null)
useEffect(() => {
if (saveStatus !== 'error' || !triggerDef) {
previousValuesRef.current = subscribedSubBlockValues
return
}
const hasChanges = Object.keys(subscribedSubBlockValues).some(
(key) =>
previousValuesRef.current[key] !== (subscribedSubBlockValues as Record<string, any>)[key]
)
if (!hasChanges) {
return
}
if (validationTimeoutRef.current) {
clearTimeout(validationTimeoutRef.current)
}
validationTimeoutRef.current = setTimeout(() => {
const aggregatedConfig = useTriggerConfigAggregation(blockId, effectiveTriggerId)
if (aggregatedConfig) {
useSubBlockStore.getState().setValue(blockId, 'triggerConfig', aggregatedConfig)
}
const validation = validateRequiredFields(aggregatedConfig)
if (validation.valid) {
setErrorMessage(null)
setSaveStatus('idle')
logger.debug('Error cleared after validation passed', {
blockId,
triggerId: effectiveTriggerId,
})
} else {
setErrorMessage(`Missing required fields: ${validation.missingFields.join(', ')}`)
logger.debug('Error message updated', {
blockId,
triggerId: effectiveTriggerId,
missingFields: validation.missingFields,
})
}
previousValuesRef.current = subscribedSubBlockValues
}, 300)
return () => {
if (validationTimeoutRef.current) {
clearTimeout(validationTimeoutRef.current)
}
}
}, [
blockId,
effectiveTriggerId,
triggerDef,
subscribedSubBlockValues,
saveStatus,
validateRequiredFields,
])
const handleSave = async () => {
if (isPreview || disabled) return
setSaveStatus('saving')
setErrorMessage(null)
try {
const aggregatedConfig = useTriggerConfigAggregation(blockId, effectiveTriggerId)
if (aggregatedConfig) {
useSubBlockStore.getState().setValue(blockId, 'triggerConfig', aggregatedConfig)
logger.debug('Stored aggregated trigger config', {
blockId,
triggerId: effectiveTriggerId,
aggregatedConfig,
})
}
const validation = validateRequiredFields(aggregatedConfig)
if (!validation.valid) {
setErrorMessage(`Missing required fields: ${validation.missingFields.join(', ')}`)
setSaveStatus('error')
return
}
const success = await saveConfig()
if (!success) {
throw new Error('Save config returned false')
}
setSaveStatus('saved')
setErrorMessage(null)
const savedWebhookId = useSubBlockStore.getState().getValue(blockId, 'webhookId')
const savedTriggerPath = useSubBlockStore.getState().getValue(blockId, 'triggerPath')
const savedTriggerId = useSubBlockStore.getState().getValue(blockId, 'triggerId')
const savedTriggerConfig = useSubBlockStore.getState().getValue(blockId, 'triggerConfig')
collaborativeSetSubblockValue(blockId, 'webhookId', savedWebhookId)
collaborativeSetSubblockValue(blockId, 'triggerPath', savedTriggerPath)
collaborativeSetSubblockValue(blockId, 'triggerId', savedTriggerId)
collaborativeSetSubblockValue(blockId, 'triggerConfig', savedTriggerConfig)
setTimeout(() => {
setSaveStatus('idle')
}, 2000)
logger.info('Trigger configuration saved successfully', {
blockId,
triggerId: effectiveTriggerId,
hasWebhookId: !!webhookId,
})
} catch (error: any) {
setSaveStatus('error')
setErrorMessage(error.message || 'An error occurred while saving.')
logger.error('Error saving trigger configuration', { error })
}
}
const handleDeleteClick = () => {
if (isPreview || disabled || !webhookId) return
setShowDeleteDialog(true)
}
const handleDeleteConfirm = async () => {
setShowDeleteDialog(false)
setDeleteStatus('deleting')
setErrorMessage(null)
try {
const success = await deleteConfig()
if (success) {
setDeleteStatus('idle')
setSaveStatus('idle')
setErrorMessage(null)
collaborativeSetSubblockValue(blockId, 'triggerPath', '')
collaborativeSetSubblockValue(blockId, 'webhookId', null)
collaborativeSetSubblockValue(blockId, 'triggerConfig', null)
logger.info('Trigger configuration deleted successfully', {
blockId,
triggerId: effectiveTriggerId,
})
} else {
setDeleteStatus('idle')
setErrorMessage('Failed to delete trigger configuration.')
logger.error('Failed to delete trigger configuration')
}
} catch (error: any) {
setDeleteStatus('idle')
setErrorMessage(error.message || 'An error occurred while deleting.')
logger.error('Error deleting trigger configuration', { error })
}
}
if (isPreview) {
return null
}
const isProcessing = saveStatus === 'saving' || deleteStatus === 'deleting' || isLoading
return (
<div id={`${blockId}-${subBlockId}`}>
<div className='flex gap-2'>
<Button
variant='default'
onClick={handleSave}
disabled={disabled || isProcessing}
className={cn(
'flex-1',
saveStatus === 'saved' && '!bg-green-600 !text-white hover:!bg-green-700',
saveStatus === 'error' && '!bg-red-600 !text-white hover:!bg-red-700'
)}
>
{saveStatus === 'saving' && 'Saving...'}
{saveStatus === 'saved' && 'Saved'}
{saveStatus === 'error' && 'Error'}
{saveStatus === 'idle' && (webhookId ? 'Update Configuration' : 'Save Configuration')}
</Button>
{webhookId && (
<Button variant='default' onClick={handleDeleteClick} disabled={disabled || isProcessing}>
<Trash className='h-[14px] w-[14px]' />
</Button>
)}
</div>
{errorMessage && <p className='mt-2 text-[12px] text-[var(--text-error)]'>{errorMessage}</p>}
<Modal open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
<ModalContent size='sm'>
<ModalHeader>Delete Trigger</ModalHeader>
<ModalBody>
<p className='text-[12px] text-[var(--text-secondary)]'>
Are you sure you want to delete this trigger configuration? This will remove the
webhook and stop all incoming triggers.{' '}
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
</p>
</ModalBody>
<ModalFooter>
<Button variant='active' onClick={() => setShowDeleteDialog(false)}>
Cancel
</Button>
<Button variant='destructive' onClick={handleDeleteConfirm}>
Delete
</Button>
</ModalFooter>
</ModalContent>
</Modal>
</div>
)
}

View File

@@ -39,6 +39,7 @@ import {
Text,
TimeInput,
ToolInput,
TriggerSave,
VariablesInput,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
@@ -866,6 +867,17 @@ function SubBlockComponent({
}
/>
)
case 'trigger-save':
return (
<TriggerSave
blockId={blockId}
subBlockId={config.id}
triggerId={config.triggerId}
isPreview={isPreview}
disabled={disabled}
/>
)
case 'messages-input':
return (
<MessagesInput

View File

@@ -2,3 +2,4 @@ export { Copilot } from './copilot/copilot'
export { Deploy } from './deploy/deploy'
export { Editor } from './editor/editor'
export { Toolbar } from './toolbar/toolbar'
export { WorkflowControls } from './workflow-controls/workflow-controls'

View File

@@ -327,14 +327,12 @@ export const Toolbar = forwardRef<ToolbarRef, ToolbarProps>(function Toolbar(
/**
* Handle search input blur.
*
* If the search query is empty, deactivate search mode to show the search icon again.
* If there's a query, keep search mode active so ArrowUp/Down navigation continues
* to work after focus moves into the triggers/blocks list (e.g. when initiated via Mod+F).
* We intentionally keep search mode active after blur so that ArrowUp/Down
* navigation continues to work after the first move from the search input
* into the triggers/blocks list (e.g. when initiated via Mod+F).
*/
const handleSearchBlur = () => {
if (!searchQuery.trim()) {
setIsSearchActive(false)
}
// No-op by design
}
/**

View File

@@ -0,0 +1,51 @@
'use client'
import { Button, Redo, Undo } from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useUndoRedoStore } from '@/stores/undo-redo'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
/**
* Workflow controls component that provides undo/redo functionality.
* Styled to align with the panel tab buttons.
*/
export function WorkflowControls() {
const { undo, redo } = useCollaborativeWorkflow()
const { activeWorkflowId } = useWorkflowRegistry()
const { data: session } = useSession()
const userId = session?.user?.id || 'unknown'
const stacks = useUndoRedoStore((s) => s.stacks)
const undoRedoSizes = (() => {
const key = activeWorkflowId && userId ? `${activeWorkflowId}:${userId}` : ''
const stack = (key && stacks[key]) || { undo: [], redo: [] }
return { undoSize: stack.undo.length, redoSize: stack.redo.length }
})()
const canUndo = undoRedoSizes.undoSize > 0
const canRedo = undoRedoSizes.redoSize > 0
return (
<div className='flex gap-[2px]'>
<Button
className='h-[28px] rounded-[6px] rounded-r-none border border-transparent px-[6px] py-[5px] hover:border-[var(--border-1)] hover:bg-[var(--surface-5)]'
onClick={undo}
variant={canUndo ? 'active' : 'ghost'}
disabled={!canUndo}
title='Undo (Cmd+Z)'
>
<Undo className='h-[12px] w-[12px]' />
</Button>
<Button
className='h-[28px] rounded-[6px] rounded-l-none border border-transparent px-[6px] py-[5px] hover:border-[var(--border-1)] hover:bg-[var(--surface-5)]'
onClick={redo}
variant={canRedo ? 'active' : 'ghost'}
disabled={!canRedo}
title='Redo (Cmd+Shift+Z)'
>
<Redo className='h-[12px] w-[12px]' />
</Button>
</div>
)
}
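// A sketch of the stack shape the reads above assume (not the store's actual type
// definition): stacks maps `${activeWorkflowId}:${userId}` to that user's undo and
// redo entries for the workflow, and the entry lengths drive the button states.
type UndoRedoStacksSketch = Record<string, { undo: unknown[]; redo: unknown[] }>
// e.g. stacks['wf-1:user-1'] = { undo: [opA, opB], redo: [] } -> canUndo true, canRedo false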

View File

@@ -495,6 +495,9 @@ export function Panel() {
Editor
</Button>
</div>
{/* Workflow Controls (Undo/Redo) */}
{/* <WorkflowControls /> */}
</div>
{/* Tab Content - Keep all tabs mounted but hidden to preserve state */}

View File

@@ -1,12 +1,12 @@
import { memo, useMemo, useRef } from 'react'
import { RepeatIcon, SplitIcon } from 'lucide-react'
import { Handle, type NodeProps, Position, useReactFlow } from 'reactflow'
import { Button, Trash } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
import { type DiffStatus, hasDiffStatus } from '@/lib/workflows/diff/types'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { ActionBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/action-bar/action-bar'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { usePanelEditorStore } from '@/stores/panel'
/**
@@ -18,16 +18,11 @@ import { usePanelEditorStore } from '@/stores/panel'
const SubflowNodeStyles: React.FC = () => {
return (
<style jsx global>{`
/* Z-index management for subflow nodes - default behind blocks */
/* Z-index management for subflow nodes */
.workflow-container .react-flow__node-subflowNode {
z-index: -1 !important;
}
/* Selected subflows appear above other subflows but below blocks (z-21) */
.workflow-container .react-flow__node-subflowNode:has([data-subflow-selected='true']) {
z-index: 10 !important;
}
/* Drag-over states */
.loop-node-drag-over,
.parallel-node-drag-over {
@@ -68,8 +63,8 @@ export interface SubflowNodeData {
*/
export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeData>) => {
const { getNodes } = useReactFlow()
const { collaborativeBatchRemoveBlocks } = useCollaborativeWorkflow()
const blockRef = useRef<HTMLDivElement>(null)
const userPermissions = useUserPermissionsContext()
const currentWorkflow = useCurrentWorkflow()
const currentBlock = currentWorkflow.getBlockById(id)
@@ -85,8 +80,6 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
const currentBlockId = usePanelEditorStore((state) => state.currentBlockId)
const isFocused = currentBlockId === id
const isPreviewSelected = data?.isPreviewSelected || false
/**
* Calculate the nesting level of this subflow node based on its parent hierarchy.
* Used to apply appropriate styling for nested containers.
@@ -132,6 +125,8 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
return { top: `${HANDLE_POSITIONS.DEFAULT_Y_OFFSET}px`, transform: 'translateY(-50%)' }
}
const isPreviewSelected = data?.isPreviewSelected || false
/**
* Determine the ring styling based on subflow state priority:
* 1. Focused (selected in editor) or preview selected - blue ring
@@ -153,7 +148,7 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
ref={blockRef}
onClick={() => setCurrentBlockId(id)}
className={cn(
'workflow-drag-handle relative cursor-grab select-none rounded-[8px] border border-[var(--border-1)] [&:active]:cursor-grabbing',
'relative cursor-pointer select-none rounded-[8px] border border-[var(--border-1)]',
'transition-block-bg transition-ring',
'z-[20]'
)}
@@ -167,17 +162,15 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
data-node-id={id}
data-type='subflowNode'
data-nesting-level={nestingLevel}
data-subflow-selected={isFocused || isPreviewSelected}
>
{!isPreview && (
<ActionBar blockId={id} blockType={data.kind} disabled={!userPermissions.canEdit} />
)}
{/* Header Section */}
<div
className={cn(
'flex items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'
'workflow-drag-handle flex cursor-grab items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px] [&:active]:cursor-grabbing'
)}
onMouseDown={(e) => {
e.stopPropagation()
}}
>
<div className='flex min-w-0 flex-1 items-center gap-[10px]'>
<div
@@ -190,6 +183,18 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
{blockName}
</span>
</div>
{!isPreview && (
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
collaborativeBatchRemoveBlocks([id])
}}
className='h-[14px] w-[14px] p-0 opacity-0 transition-opacity duration-100 group-hover:opacity-100'
>
<Trash className='h-[14px] w-[14px]' />
</Button>
)}
</div>
{!isPreview && (

View File

@@ -11,16 +11,6 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const DEFAULT_DUPLICATE_OFFSET = { x: 50, y: 50 }
const ACTION_BUTTON_STYLES = [
'h-[23px] w-[23px] rounded-[8px] p-0',
'border border-[var(--border)] bg-[var(--surface-5)]',
'text-[var(--text-secondary)]',
'hover:border-transparent hover:bg-[var(--brand-secondary)] hover:!text-[var(--text-inverse)]',
'dark:border-transparent dark:bg-[var(--surface-7)] dark:hover:bg-[var(--brand-secondary)]',
].join(' ')
const ICON_SIZE = 'h-[11px] w-[11px]'
/**
* Props for the ActionBar component
*/
@@ -100,7 +90,6 @@ export const ActionBar = memo(
const isStartBlock = blockType === 'starter' || blockType === 'start_trigger'
const isResponseBlock = blockType === 'response'
const isNoteBlock = blockType === 'note'
const isSubflowBlock = blockType === 'loop' || blockType === 'parallel'
/**
* Get appropriate tooltip message based on disabled state
@@ -121,12 +110,10 @@ export const ActionBar = memo(
'-top-[46px] absolute right-0',
'flex flex-row items-center',
'opacity-0 transition-opacity duration-200 group-hover:opacity-100',
'gap-[5px] rounded-[10px] p-[5px]',
'border border-[var(--border)] bg-[var(--surface-2)]',
'dark:border-transparent dark:bg-[var(--surface-4)]'
'gap-[5px] rounded-[10px] bg-[var(--surface-4)] p-[5px]'
)}
>
{!isNoteBlock && !isSubflowBlock && (
{!isNoteBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
@@ -137,10 +124,14 @@ export const ActionBar = memo(
collaborativeBatchToggleBlockEnabled([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
disabled={disabled}
>
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
{isEnabled ? (
<Circle className='h-[11px] w-[11px]' />
) : (
<CircleOff className='h-[11px] w-[11px]' />
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
@@ -149,7 +140,7 @@ export const ActionBar = memo(
</Tooltip.Root>
)}
{!isStartBlock && !isResponseBlock && !isSubflowBlock && (
{!isStartBlock && !isResponseBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
@@ -160,17 +151,17 @@ export const ActionBar = memo(
handleDuplicateBlock()
}
}}
className={ACTION_BUTTON_STYLES}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
disabled={disabled}
>
<Copy className={ICON_SIZE} />
<Copy className='h-[11px] w-[11px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>{getTooltipMessage('Duplicate Block')}</Tooltip.Content>
</Tooltip.Root>
)}
{!isNoteBlock && !isSubflowBlock && (
{!isNoteBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
@@ -181,13 +172,13 @@ export const ActionBar = memo(
collaborativeBatchToggleBlockHandles([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
disabled={disabled}
>
{horizontalHandles ? (
<ArrowLeftRight className={ICON_SIZE} />
<ArrowLeftRight className='h-[11px] w-[11px]' />
) : (
<ArrowUpDown className={ICON_SIZE} />
<ArrowUpDown className='h-[11px] w-[11px]' />
)}
</Button>
</Tooltip.Trigger>
@@ -210,10 +201,10 @@ export const ActionBar = memo(
)
}
}}
className={ACTION_BUTTON_STYLES}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
disabled={disabled || !userPermissions.canEdit}
>
<LogOut className={ICON_SIZE} />
<LogOut className='h-[11px] w-[11px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>{getTooltipMessage('Remove from Subflow')}</Tooltip.Content>
@@ -230,10 +221,10 @@ export const ActionBar = memo(
collaborativeBatchRemoveBlocks([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
disabled={disabled}
>
<Trash2 className={ICON_SIZE} />
<Trash2 className='h-[11px] w-[11px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>{getTooltipMessage('Delete Block')}</Tooltip.Content>

View File

@@ -0,0 +1,23 @@
import { useBlockConnections } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/hooks/use-block-connections'
interface ConnectionsProps {
blockId: string
}
/**
* Displays incoming connections at the bottom left of the workflow block
*/
export function Connections({ blockId }: ConnectionsProps) {
const { incomingConnections, hasIncomingConnections } = useBlockConnections(blockId)
if (!hasIncomingConnections) return null
const connectionCount = incomingConnections.length
const connectionText = `${connectionCount} ${connectionCount === 1 ? 'connection' : 'connections'}`
return (
<div className='pointer-events-none absolute top-full left-0 ml-[8px] flex items-center gap-[8px] pt-[8px] opacity-0 transition-opacity group-hover:opacity-100'>
<span className='text-[12px] text-[var(--text-tertiary)]'>{connectionText}</span>
</div>
)
}

View File

@@ -0,0 +1,2 @@
export { ActionBar } from './action-bar/action-bar'
export { Connections } from './connections/connections'

View File

@@ -1,5 +1,6 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { useParams } from 'next/navigation'
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
import { Badge, Tooltip } from '@/components/emcn'
@@ -9,7 +10,10 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
import { createMcpToolId } from '@/lib/mcp/utils'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { ActionBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/action-bar/action-bar'
import {
ActionBar,
Connections,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components'
import {
useBlockProperties,
useChildWorkflow,
@@ -525,6 +529,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
const params = useParams()
const currentWorkflowId = params.workflowId as string
const workspaceId = params.workspaceId as string
const queryClient = useQueryClient()
const {
currentWorkflow,
@@ -597,6 +602,10 @@ export const WorkflowBlock = memo(function WorkflowBlock({
responseData.apiKey || ''
)
refetchDeployment()
// Invalidate the workflow schema cache so new config is loaded immediately
queryClient.invalidateQueries({
queryKey: ['workflow-input-fields', workflowId],
})
} else {
logger.error('Failed to deploy workflow')
}
@@ -606,7 +615,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
setIsDeploying(false)
}
},
[isDeploying, setDeploymentStatus, refetchDeployment]
[isDeploying, setDeploymentStatus, refetchDeployment, queryClient]
)
const currentStoreBlock = currentWorkflow.getBlockById(id)
@@ -918,7 +927,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
ref={contentRef}
onClick={handleClick}
className={cn(
'workflow-drag-handle relative z-[20] w-[250px] cursor-grab select-none rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)] [&:active]:cursor-grabbing'
'relative z-[20] w-[250px] cursor-default select-none rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)]'
)}
>
{isPending && (
@@ -931,6 +940,8 @@ export const WorkflowBlock = memo(function WorkflowBlock({
<ActionBar blockId={id} blockType={type} disabled={!userPermissions.canEdit} />
)}
{shouldShowDefaultHandles && <Connections blockId={id} />}
{shouldShowDefaultHandles && (
<Handle
type='target'
@@ -952,9 +963,12 @@ export const WorkflowBlock = memo(function WorkflowBlock({
<div
className={cn(
'flex items-center justify-between p-[8px]',
'workflow-drag-handle flex cursor-grab items-center justify-between p-[8px] [&:active]:cursor-grabbing',
hasContentBelowHeader && 'border-[var(--border-1)] border-b'
)}
onMouseDown={(e) => {
e.stopPropagation()
}}
>
<div className='relative z-10 flex min-w-0 flex-1 items-center gap-[10px]'>
<div

View File

@@ -1 +0,0 @@
export { WorkflowControls } from './workflow-controls'

View File

@@ -1,225 +0,0 @@
'use client'
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import clsx from 'clsx'
import { Scan } from 'lucide-react'
import { useReactFlow } from 'reactflow'
import {
Button,
ChevronDown,
Cursor,
Hand,
Popover,
PopoverAnchor,
PopoverContent,
PopoverItem,
PopoverTrigger,
Redo,
Tooltip,
Undo,
} from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { createCommand } from '@/app/workspace/[workspaceId]/utils/commands-utils'
import { useUpdateGeneralSetting } from '@/hooks/queries/general-settings'
import { useCanvasViewport } from '@/hooks/use-canvas-viewport'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useCanvasModeStore } from '@/stores/canvas-mode'
import { useGeneralStore } from '@/stores/settings/general'
import { useTerminalStore } from '@/stores/terminal'
import { useUndoRedoStore } from '@/stores/undo-redo'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
const logger = createLogger('WorkflowControls')
export function WorkflowControls() {
const reactFlowInstance = useReactFlow()
const { fitViewToBounds } = useCanvasViewport(reactFlowInstance)
const { mode, setMode } = useCanvasModeStore()
const { undo, redo } = useCollaborativeWorkflow()
const showWorkflowControls = useGeneralStore((s) => s.showActionBar)
const updateSetting = useUpdateGeneralSetting()
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
const { activeWorkflowId } = useWorkflowRegistry()
const { data: session } = useSession()
const userId = session?.user?.id || 'unknown'
const stacks = useUndoRedoStore((s) => s.stacks)
const key = activeWorkflowId && userId ? `${activeWorkflowId}:${userId}` : ''
const stack = (key && stacks[key]) || { undo: [], redo: [] }
const canUndo = stack.undo.length > 0
const canRedo = stack.redo.length > 0
const handleFitToView = useCallback(() => {
fitViewToBounds({ padding: 0.1, duration: 300 })
}, [fitViewToBounds])
useRegisterGlobalCommands([
createCommand({
id: 'fit-to-view',
handler: handleFitToView,
}),
])
const [contextMenu, setContextMenu] = useState<{ x: number; y: number } | null>(null)
const [isCanvasModeOpen, setIsCanvasModeOpen] = useState(false)
const menuRef = useRef<HTMLDivElement>(null)
const handleContextMenu = (e: React.MouseEvent) => {
e.preventDefault()
setContextMenu({ x: e.clientX, y: e.clientY })
}
const handleHide = async () => {
try {
await updateSetting.mutateAsync({ key: 'showActionBar', value: false })
} catch (error) {
logger.error('Failed to hide workflow controls', error)
} finally {
setContextMenu(null)
}
}
if (!showWorkflowControls) {
return null
}
return (
<>
<div
className={clsx(
'fixed z-10 flex h-[36px] items-center gap-[2px] rounded-[8px] border border-[var(--border)] bg-[var(--surface-1)] p-[4px]',
!isTerminalResizing && 'transition-[bottom] duration-100 ease-out'
)}
style={{
bottom: 'calc(var(--terminal-height) + 16px)',
left: 'calc(var(--sidebar-width) + 16px)',
}}
onContextMenu={handleContextMenu}
>
{/* Canvas Mode Selector */}
<Popover
open={isCanvasModeOpen}
onOpenChange={setIsCanvasModeOpen}
variant='secondary'
size='sm'
>
<Tooltip.Root>
<PopoverTrigger asChild>
<div className='flex cursor-pointer items-center gap-[4px]'>
<Tooltip.Trigger asChild>
<Button className='h-[28px] w-[28px] rounded-[6px] p-0' variant='active'>
{mode === 'hand' ? (
<Hand className='h-[14px] w-[14px]' />
) : (
<Cursor className='h-[14px] w-[14px]' />
)}
</Button>
</Tooltip.Trigger>
<Button className='-m-[4px] !p-[6px] group' variant='ghost'>
<ChevronDown
className={`h-[8px] w-[10px] text-[var(--text-muted)] transition-transform duration-100 group-hover:text-[var(--text-secondary)] ${isCanvasModeOpen ? 'rotate-180' : ''}`}
/>
</Button>
</div>
</PopoverTrigger>
<Tooltip.Content side='top'>{mode === 'hand' ? 'Mover' : 'Pointer'}</Tooltip.Content>
</Tooltip.Root>
<PopoverContent side='top' sideOffset={8} maxWidth={100} minWidth={100}>
<PopoverItem
onClick={() => {
setMode('hand')
setIsCanvasModeOpen(false)
}}
>
<Hand className='h-3 w-3' />
<span>Mover</span>
</PopoverItem>
<PopoverItem
onClick={() => {
setMode('cursor')
setIsCanvasModeOpen(false)
}}
>
<Cursor className='h-3 w-3' />
<span>Pointer</span>
</PopoverItem>
</PopoverContent>
</Popover>
<div className='mx-[4px] h-[20px] w-[1px] bg-[var(--border)]' />
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
className='h-[28px] w-[28px] rounded-[6px] p-0 hover:bg-[var(--surface-5)]'
onClick={undo}
disabled={!canUndo}
>
<Undo className='h-[16px] w-[16px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<Tooltip.Shortcut keys='⌘Z'>Undo</Tooltip.Shortcut>
</Tooltip.Content>
</Tooltip.Root>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
className='h-[28px] w-[28px] rounded-[6px] p-0 hover:bg-[var(--surface-5)]'
onClick={redo}
disabled={!canRedo}
>
<Redo className='h-[16px] w-[16px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<Tooltip.Shortcut keys='⌘⇧Z'>Redo</Tooltip.Shortcut>
</Tooltip.Content>
</Tooltip.Root>
<div className='mx-[4px] h-[20px] w-[1px] bg-[var(--border)]' />
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
className='h-[28px] w-[28px] rounded-[6px] p-0 hover:bg-[var(--surface-5)]'
onClick={handleFitToView}
>
<Scan className='h-[16px] w-[16px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<Tooltip.Shortcut keys='⌘⇧F'>Fit to View</Tooltip.Shortcut>
</Tooltip.Content>
</Tooltip.Root>
</div>
<Popover
open={contextMenu !== null}
onOpenChange={(open) => !open && setContextMenu(null)}
variant='secondary'
size='sm'
colorScheme='inverted'
>
<PopoverAnchor
style={{
position: 'fixed',
left: `${contextMenu?.x ?? 0}px`,
top: `${contextMenu?.y ?? 0}px`,
width: '1px',
height: '1px',
}}
/>
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
<PopoverItem onClick={handleHide}>Hide canvas controls</PopoverItem>
</PopoverContent>
</Popover>
</>
)
}

View File

@@ -3,7 +3,6 @@ import { createLogger } from '@sim/logger'
import { useReactFlow } from 'reactflow'
import type { AutoLayoutOptions } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils'
import { applyAutoLayoutAndUpdateStore as applyAutoLayoutStandalone } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils'
import { useCanvasViewport } from '@/hooks/use-canvas-viewport'
export type { AutoLayoutOptions }
@@ -17,8 +16,7 @@ const logger = createLogger('useAutoLayout')
* Note: This hook requires a ReactFlowProvider ancestor.
*/
export function useAutoLayout(workflowId: string | null) {
const reactFlowInstance = useReactFlow()
const { fitViewToBounds } = useCanvasViewport(reactFlowInstance)
const { fitView } = useReactFlow()
const applyAutoLayoutAndUpdateStore = useCallback(
async (options: AutoLayoutOptions = {}) => {
@@ -40,7 +38,7 @@ export function useAutoLayout(workflowId: string | null) {
if (result.success) {
logger.info('Auto layout completed successfully')
requestAnimationFrame(() => {
fitViewToBounds({ padding: 0.15, duration: 600 })
fitView({ padding: 0.8, duration: 600 })
})
} else {
logger.error('Auto layout failed:', result.error)
@@ -54,7 +52,7 @@ export function useAutoLayout(workflowId: string | null) {
error: error instanceof Error ? error.message : 'Unknown error',
}
}
}, [applyAutoLayoutAndUpdateStore, fitViewToBounds])
}, [applyAutoLayoutAndUpdateStore, fitView])
return {
applyAutoLayoutAndUpdateStore,

View File

@@ -1,28 +1,36 @@
import { useCallback, useEffect, useRef, useState } from 'react'
import type { Node } from 'reactflow'
import type { BlockState } from '@/stores/workflows/workflow/types'
import type { BlockInfo } from '../components/block-menu'
import type { ContextMenuBlockInfo, ContextMenuPosition } from '../components/context-menu/types'
type MenuType = 'block' | 'pane' | null
interface UseCanvasContextMenuProps {
/** Current blocks from workflow store */
blocks: Record<string, BlockState>
/** Function to get nodes from ReactFlow */
getNodes: () => Node[]
}
/**
* Hook for managing workflow canvas context menus.
* Handles right-click events, menu state, click-outside detection, and block info extraction.
*
* Handles:
* - Right-click event handling for blocks and pane
* - Menu open/close state for both menu types
* - Click-outside detection to close menus
* - Selected block info extraction for multi-selection support
*/
export function useCanvasContextMenu({ blocks, getNodes }: UseCanvasContextMenuProps) {
const [activeMenu, setActiveMenu] = useState<MenuType>(null)
const [position, setPosition] = useState({ x: 0, y: 0 })
const [selectedBlocks, setSelectedBlocks] = useState<BlockInfo[]>([])
const [position, setPosition] = useState<ContextMenuPosition>({ x: 0, y: 0 })
const [selectedBlocks, setSelectedBlocks] = useState<ContextMenuBlockInfo[]>([])
const menuRef = useRef<HTMLDivElement>(null)
/** Converts nodes to block info for context menu */
const nodesToBlockInfos = useCallback(
(nodes: Node[]): BlockInfo[] =>
(nodes: Node[]): ContextMenuBlockInfo[] =>
nodes.map((n) => {
const block = blocks[n.id]
const parentId = block?.data?.parentId
@@ -39,6 +47,9 @@ export function useCanvasContextMenu({ blocks, getNodes }: UseCanvasContextMenuP
[blocks]
)
/**
* Handle right-click on a node (block)
*/
const handleNodeContextMenu = useCallback(
(event: React.MouseEvent, node: Node) => {
event.preventDefault()
@@ -54,6 +65,9 @@ export function useCanvasContextMenu({ blocks, getNodes }: UseCanvasContextMenuP
[getNodes, nodesToBlockInfos]
)
/**
* Handle right-click on the pane (empty canvas area)
*/
const handlePaneContextMenu = useCallback((event: React.MouseEvent) => {
event.preventDefault()
event.stopPropagation()
@@ -63,6 +77,9 @@ export function useCanvasContextMenu({ blocks, getNodes }: UseCanvasContextMenuP
setActiveMenu('pane')
}, [])
/**
* Handle right-click on a selection (multiple selected nodes)
*/
const handleSelectionContextMenu = useCallback(
(event: React.MouseEvent) => {
event.preventDefault()
@@ -77,10 +94,16 @@ export function useCanvasContextMenu({ blocks, getNodes }: UseCanvasContextMenuP
[getNodes, nodesToBlockInfos]
)
/**
* Close the active context menu
*/
const closeMenu = useCallback(() => {
setActiveMenu(null)
}, [])
/**
* Handle clicks outside the menu to close it
*/
useEffect(() => {
if (!activeMenu) return
@@ -100,6 +123,9 @@ export function useCanvasContextMenu({ blocks, getNodes }: UseCanvasContextMenuP
}
}, [activeMenu, closeMenu])
/**
* Close menu on scroll or zoom to prevent menu from being positioned incorrectly
*/
useEffect(() => {
if (!activeMenu) return
@@ -113,14 +139,23 @@ export function useCanvasContextMenu({ blocks, getNodes }: UseCanvasContextMenuP
}, [activeMenu, closeMenu])
return {
/** Whether the block context menu is open */
isBlockMenuOpen: activeMenu === 'block',
/** Whether the pane context menu is open */
isPaneMenuOpen: activeMenu === 'pane',
/** Position for the context menu */
position,
/** Ref for the menu element */
menuRef,
/** Selected blocks info for multi-selection actions */
selectedBlocks,
/** Handler for ReactFlow onNodeContextMenu */
handleNodeContextMenu,
/** Handler for ReactFlow onPaneContextMenu */
handlePaneContextMenu,
/** Handler for ReactFlow onSelectionContextMenu */
handleSelectionContextMenu,
/** Close the active context menu */
closeMenu,
}
}
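// A minimal wiring sketch (an assumed consumer, not code from this commit): the
// hook's handlers map directly onto ReactFlow's context-menu props. The relative
// import path, the empty nodes/edges arrays, and the omitted menu components are
// placeholders.
import ReactFlow, { useReactFlow } from 'reactflow'
import type { BlockState } from '@/stores/workflows/workflow/types'
import { useCanvasContextMenu } from './use-canvas-context-menu'

function CanvasContextMenuExample({ blocks }: { blocks: Record<string, BlockState> }) {
  const { getNodes } = useReactFlow()
  const { handleNodeContextMenu, handlePaneContextMenu, handleSelectionContextMenu, closeMenu } =
    useCanvasContextMenu({ blocks, getNodes })

  return (
    <ReactFlow
      nodes={[]}
      edges={[]}
      onNodeContextMenu={handleNodeContextMenu}
      onPaneContextMenu={handlePaneContextMenu}
      onSelectionContextMenu={handleSelectionContextMenu}
      onPaneClick={closeMenu}
    />
  )
}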

View File

@@ -1124,9 +1124,7 @@ function BlockDetailsSidebarContent({
const visibleSubBlocks = blockConfig.subBlocks.filter((subBlock) => {
if (subBlock.hidden || subBlock.hideFromPreview) return false
// Only filter out trigger-mode subblocks for non-trigger blocks
// Trigger-only blocks (category 'triggers') should display their trigger subblocks
if (subBlock.mode === 'trigger' && blockConfig.category !== 'triggers') return false
if (subBlock.mode === 'trigger') return false
if (subBlock.condition) {
return evaluateCondition(subBlock.condition, subBlockValues)
}
@@ -1156,6 +1154,11 @@ function BlockDetailsSidebarContent({
<span className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
{block.name || blockConfig.name}
</span>
{block.enabled === false && (
<Badge variant='red' size='sm'>
Disabled
</Badge>
)}
{onClose && (
<Button variant='ghost' className='!p-[4px] flex-shrink-0' onClick={onClose}>
<X className='h-[14px] w-[14px]' />

View File

@@ -46,7 +46,7 @@ function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>
return blockConfig.subBlocks.filter((subBlock) => {
if (subBlock.hidden) return false
if (subBlock.hideFromPreview) return false
if (subBlock.mode === 'trigger' && blockConfig.category !== 'triggers') return false
if (subBlock.mode === 'trigger') return false
if (subBlock.mode === 'advanced') return false
return true
})

View File

@@ -181,18 +181,17 @@ interface FitViewOnChangeProps {
*/
function FitViewOnChange({ nodeIds, fitPadding }: FitViewOnChangeProps) {
const { fitView } = useReactFlow()
const lastNodeIdsRef = useRef<string | null>(null)
const hasFittedRef = useRef(false)
useEffect(() => {
if (!nodeIds.length) return
const shouldFit = lastNodeIdsRef.current !== nodeIds
if (!shouldFit) return
lastNodeIdsRef.current = nodeIds
const timeoutId = setTimeout(() => {
fitView({ padding: fitPadding, duration: 200 })
}, 50)
return () => clearTimeout(timeoutId)
if (nodeIds.length > 0 && !hasFittedRef.current) {
hasFittedRef.current = true
// Small delay to ensure nodes are rendered before fitting
const timeoutId = setTimeout(() => {
fitView({ padding: fitPadding, duration: 200 })
}, 50)
return () => clearTimeout(timeoutId)
}
}, [nodeIds, fitPadding, fitView])
return null

Some files were not shown because too many files have changed in this diff.