Compare commits

..

9 Commits

Author SHA1 Message Date
Siddharth Ganesan
23fdbbfea9 Fix redeploy 2026-01-15 15:53:26 -08:00
Siddharth Ganesan
59df90ab0c Fix bugs 2026-01-15 15:43:26 -08:00
Siddharth Ganesan
a10f32dfa5 Fix get block options text 2026-01-15 15:12:08 -08:00
Siddharth Ganesan
72384f190d Fix thinking text 2026-01-15 15:10:42 -08:00
Siddharth Ganesan
e63fd8c482 Fix thinking tags 2026-01-15 15:06:01 -08:00
Siddharth Ganesan
080ab94165 Cleanup 2026-01-15 15:02:14 -08:00
Siddharth Ganesan
69309ecf5f Clean up autosend and continue options and enable mention menu 2026-01-15 11:58:50 -08:00
Siddharth Ganesan
2bc181d3a6 Fix block id edit, slash commands at end, thinking tag resolution, add continue button 2026-01-15 11:48:29 -08:00
Siddharth Ganesan
5db5c1c7d6 Fix edit workflow returning bad state 2026-01-15 10:55:40 -08:00
253 changed files with 8165 additions and 30773 deletions

View File

@@ -4678,349 +4678,3 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
/**
 * Reducto logo icon: a white, angular "R"-like glyph with a clipped corner,
 * drawn as a single even-odd path.
 *
 * Renders a 400x400 SVG cropped via viewBox '50 40 300 320'. All standard
 * SVG props are forwarded, but note that `{...props}` is spread BEFORE the
 * explicit attributes, so the hard-coded width/height/viewBox/fill take
 * precedence over caller-supplied values — size this icon via CSS/className.
 * NOTE(review): fill is hard-coded to #FFFFFF, which assumes a dark
 * background; confirm before reusing on light surfaces.
 */
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
width='400'
height='400'
viewBox='50 40 300 320'
fill='none'
xmlns='http://www.w3.org/2000/svg'
>
<path
fillRule='evenodd'
clipRule='evenodd'
d='M85.3434 70.7805H314.657V240.307L226.44 329.219H85.3434V70.7805ZM107.796 93.2319H292.205V204.487H206.493V306.767H107.801L107.796 93.2319Z'
fill='#FFFFFF'
/>
</svg>
)
}
/**
 * Pulse logo icon: an 8-column by 8-row grid of small rounded-square
 * "pixels". Most cells are faint (#030712 at 10% opacity); a scattered
 * handful are solid blue (#0E7BC9), forming the brand mark.
 *
 * Renders a 24x24 SVG with viewBox '0 6 24 24'. All standard SVG props are
 * forwarded, but `{...props}` is spread BEFORE the explicit attributes, so
 * the hard-coded width/height/viewBox take precedence over caller-supplied
 * values — size this icon via CSS/className.
 *
 * Cell geometry: columns start at x = 0, 3.11318, 6.22637, ... (pitch
 * ~3.113); rows start at y = 6.63667, 9.75382, ... (pitch ~3.117); each
 * cell is ~2.18 x 0.91... rounded rect emitted as an absolute path, which is
 * why the markup below is long and repetitive (generated from the design
 * asset — edit the source asset, not these paths, if the mark changes).
 */
export function PulseIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
width='24'
height='24'
viewBox='0 6 24 24'
fill='none'
xmlns='http://www.w3.org/2000/svg'
>
<path
d='M0 6.63667C0 6.28505 0.284685 6 0.635863 6H1.54133C1.89251 6 2.17719 6.28505 2.17719 6.63667V7.54329C2.17719 7.89492 1.89251 8.17997 1.54133 8.17997H0.635863C0.284686 8.17997 0 7.89492 0 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 6.63667C3.11318 6.28505 3.39787 6 3.74905 6H4.65452C5.00569 6 5.29038 6.28505 5.29038 6.63667V7.54329C5.29038 7.89492 5.00569 8.17997 4.65452 8.17997H3.74905C3.39787 8.17997 3.11318 7.89492 3.11318 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 6.63667C6.22637 6.28505 6.51105 6 6.86223 6H7.7677C8.11888 6 8.40356 6.28505 8.40356 6.63667V7.54329C8.40356 7.89492 8.11888 8.17997 7.7677 8.17997H6.86223C6.51105 8.17997 6.22637 7.89492 6.22637 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 6.63667C9.33955 6.28505 9.62424 6 9.97541 6H10.8809C11.2321 6 11.5167 6.28505 11.5167 6.63667V7.54329C11.5167 7.89492 11.2321 8.17997 10.8809 8.17997H9.97541C9.62424 8.17997 9.33955 7.89492 9.33955 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 6.63667C12.4527 6.28505 12.7374 6 13.0886 6H13.9941C14.3452 6 14.6299 6.28505 14.6299 6.63667V7.54329C14.6299 7.89492 14.3452 8.17997 13.9941 8.17997H13.0886C12.7374 8.17997 12.4527 7.89492 12.4527 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 6.63667C15.5659 6.28505 15.8506 6 16.2018 6H17.1073C17.4584 6 17.7431 6.28505 17.7431 6.63667V7.54329C17.7431 7.89492 17.4584 8.17997 17.1073 8.17997H16.2018C15.8506 8.17997 15.5659 7.89492 15.5659 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 6.63667C18.6791 6.28505 18.9638 6 19.315 6H20.2204C20.5716 6 20.8563 6.28505 20.8563 6.63667V7.54329C20.8563 7.89492 20.5716 8.17997 20.2204 8.17997H19.315C18.9638 8.17997 18.6791 7.89492 18.6791 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 6.63667C21.7923 6.28505 22.077 6 22.4282 6H23.3336C23.6848 6 23.9695 6.28505 23.9695 6.63667V7.54329C23.9695 7.89492 23.6848 8.17997 23.3336 8.17997H22.4282C22.077 8.17997 21.7923 7.89492 21.7923 7.54329V6.63667Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 9.75382C0 9.4022 0.284685 9.11715 0.635863 9.11715H1.54133C1.89251 9.11715 2.17719 9.4022 2.17719 9.75382V10.6604C2.17719 11.0121 1.89251 11.2971 1.54133 11.2971H0.635863C0.284686 11.2971 0 11.0121 0 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 9.75382C3.11318 9.4022 3.39787 9.11715 3.74905 9.11715H4.65452C5.00569 9.11715 5.29038 9.4022 5.29038 9.75382V10.6604C5.29038 11.0121 5.00569 11.2971 4.65452 11.2971H3.74905C3.39787 11.2971 3.11318 11.0121 3.11318 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 9.75382C6.22637 9.4022 6.51105 9.11715 6.86223 9.11715H7.7677C8.11888 9.11715 8.40356 9.4022 8.40356 9.75382V10.6604C8.40356 11.0121 8.11888 11.2971 7.7677 11.2971H6.86223C6.51105 11.2971 6.22637 11.0121 6.22637 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 9.75382C9.33955 9.4022 9.62424 9.11715 9.97541 9.11715H10.8809C11.2321 9.11715 11.5167 9.4022 11.5167 9.75382V10.6604C11.5167 11.0121 11.2321 11.2971 10.8809 11.2971H9.97541C9.62424 11.2971 9.33955 11.0121 9.33955 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 9.75382C12.4527 9.4022 12.7374 9.11715 13.0886 9.11715H13.9941C14.3452 9.11715 14.6299 9.4022 14.6299 9.75382V10.6604C14.6299 11.0121 14.3452 11.2971 13.9941 11.2971H13.0886C12.7374 11.2971 12.4527 11.0121 12.4527 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 9.75382C15.5659 9.4022 15.8506 9.11715 16.2018 9.11715H17.1073C17.4584 9.11715 17.7431 9.4022 17.7431 9.75382V10.6604C17.7431 11.0121 17.4584 11.2971 17.1073 11.2971H16.2018C15.8506 11.2971 15.5659 11.0121 15.5659 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 9.75382C18.6791 9.4022 18.9638 9.11715 19.315 9.11715H20.2204C20.5716 9.11715 20.8563 9.4022 20.8563 9.75382V10.6604C20.8563 11.0121 20.5716 11.2971 20.2204 11.2971H19.315C18.9638 11.2971 18.6791 11.0121 18.6791 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 9.75382C21.7923 9.4022 22.077 9.11715 22.4282 9.11715H23.3336C23.6848 9.11715 23.9695 9.4022 23.9695 9.75382V10.6604C23.9695 11.0121 23.6848 11.2971 23.3336 11.2971H22.4282C22.077 11.2971 21.7923 11.0121 21.7923 10.6604V9.75382Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 12.871C0 12.5193 0.284685 12.2343 0.635863 12.2343H1.54133C1.89251 12.2343 2.17719 12.5193 2.17719 12.871V13.7776C2.17719 14.1292 1.89251 14.4143 1.54133 14.4143H0.635863C0.284686 14.4143 0 14.1292 0 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 12.871C3.11318 12.5193 3.39787 12.2343 3.74905 12.2343H4.65452C5.00569 12.2343 5.29038 12.5193 5.29038 12.871V13.7776C5.29038 14.1292 5.00569 14.4143 4.65452 14.4143H3.74905C3.39787 14.4143 3.11318 14.1292 3.11318 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 12.871C6.22637 12.5193 6.51105 12.2343 6.86223 12.2343H7.7677C8.11888 12.2343 8.40356 12.5193 8.40356 12.871V13.7776C8.40356 14.1292 8.11888 14.4143 7.7677 14.4143H6.86223C6.51105 14.4143 6.22637 14.1292 6.22637 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
{/* blue (accent) cell */}
<path
d='M9.33955 12.871C9.33955 12.5193 9.62424 12.2343 9.97541 12.2343H10.8809C11.2321 12.2343 11.5167 12.5193 11.5167 12.871V13.7776C11.5167 14.1292 11.2321 14.4143 10.8809 14.4143H9.97541C9.62424 14.4143 9.33955 14.1292 9.33955 13.7776V12.871Z'
fill='#0E7BC9'
/>
<path
d='M12.4527 12.871C12.4527 12.5193 12.7374 12.2343 13.0886 12.2343H13.9941C14.3452 12.2343 14.6299 12.5193 14.6299 12.871V13.7776C14.6299 14.1292 14.3452 14.4143 13.9941 14.4143H13.0886C12.7374 14.4143 12.4527 14.1292 12.4527 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 12.871C15.5659 12.5193 15.8506 12.2343 16.2018 12.2343H17.1073C17.4584 12.2343 17.7431 12.5193 17.7431 12.871V13.7776C17.7431 14.1292 17.4584 14.4143 17.1073 14.4143H16.2018C15.8506 14.4143 15.5659 14.1292 15.5659 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 12.871C18.6791 12.5193 18.9638 12.2343 19.315 12.2343H20.2204C20.5716 12.2343 20.8563 12.5193 20.8563 12.871V13.7776C20.8563 14.1292 20.5716 14.4143 20.2204 14.4143H19.315C18.9638 14.4143 18.6791 14.1292 18.6791 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 12.871C21.7923 12.5193 22.077 12.2343 22.4282 12.2343H23.3336C23.6848 12.2343 23.9695 12.5193 23.9695 12.871V13.7776C23.9695 14.1292 23.6848 14.4143 23.3336 14.4143H22.4282C22.077 14.4143 21.7923 14.1292 21.7923 13.7776V12.871Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 15.9881C0 15.6365 0.284685 15.3514 0.635863 15.3514H1.54133C1.89251 15.3514 2.17719 15.6365 2.17719 15.9881V16.8947C2.17719 17.2464 1.89251 17.5314 1.54133 17.5314H0.635863C0.284686 17.5314 0 17.2464 0 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 15.9881C3.11318 15.6365 3.39787 15.3514 3.74905 15.3514H4.65452C5.00569 15.3514 5.29038 15.6365 5.29038 15.9881V16.8947C5.29038 17.2464 5.00569 17.5314 4.65452 17.5314H3.74905C3.39787 17.5314 3.11318 17.2464 3.11318 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
{/* blue (accent) cell */}
<path
d='M6.22637 15.9881C6.22637 15.6365 6.51105 15.3514 6.86223 15.3514H7.7677C8.11888 15.3514 8.40356 15.6365 8.40356 15.9881V16.8947C8.40356 17.2464 8.11888 17.5314 7.7677 17.5314H6.86223C6.51105 17.5314 6.22637 17.2464 6.22637 16.8947V15.9881Z'
fill='#0E7BC9'
/>
<path
d='M9.33955 15.9881C9.33955 15.6365 9.62424 15.3514 9.97541 15.3514H10.8809C11.2321 15.3514 11.5167 15.6365 11.5167 15.9881V16.8947C11.5167 17.2464 11.2321 17.5314 10.8809 17.5314H9.97541C9.62424 17.5314 9.33955 17.2464 9.33955 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
{/* blue (accent) cell */}
<path
d='M12.4527 15.9881C12.4527 15.6365 12.7374 15.3514 13.0886 15.3514H13.9941C14.3452 15.3514 14.6299 15.6365 14.6299 15.9881V16.8947C14.6299 17.2464 14.3452 17.5314 13.9941 17.5314H13.0886C12.7374 17.5314 12.4527 17.2464 12.4527 16.8947V15.9881Z'
fill='#0E7BC9'
/>
<path
d='M15.5659 15.9881C15.5659 15.6365 15.8506 15.3514 16.2018 15.3514H17.1073C17.4584 15.3514 17.7431 15.6365 17.7431 15.9881V16.8947C17.7431 17.2464 17.4584 17.5314 17.1073 17.5314H16.2018C15.8506 17.5314 15.5659 17.2464 15.5659 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 15.9881C18.6791 15.6365 18.9638 15.3514 19.315 15.3514H20.2204C20.5716 15.3514 20.8563 15.6365 20.8563 15.9881V16.8947C20.8563 17.2464 20.5716 17.5314 20.2204 17.5314H19.315C18.9638 17.5314 18.6791 17.2464 18.6791 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 15.9881C21.7923 15.6365 22.077 15.3514 22.4282 15.3514H23.3336C23.6848 15.3514 23.9695 15.6365 23.9695 15.9881V16.8947C23.9695 17.2464 23.6848 17.5314 23.3336 17.5314H22.4282C22.077 17.5314 21.7923 17.2464 21.7923 16.8947V15.9881Z'
fill='#030712'
fillOpacity='0.1'
/>
{/* blue (accent) cell */}
<path
d='M0 19.1053C0 18.7536 0.284685 18.4686 0.635863 18.4686H1.54133C1.89251 18.4686 2.17719 18.7536 2.17719 19.1053V20.0119C2.17719 20.3635 1.89251 20.6486 1.54133 20.6486H0.635863C0.284686 20.6486 0 20.3635 0 20.0119V19.1053Z'
fill='#0E7BC9'
/>
{/* blue (accent) cell */}
<path
d='M3.11318 19.1053C3.11318 18.7536 3.39787 18.4686 3.74905 18.4686H4.65452C5.00569 18.4686 5.29038 18.7536 5.29038 19.1053V20.0119C5.29038 20.3635 5.00569 20.6486 4.65452 20.6486H3.74905C3.39787 20.6486 3.11318 20.3635 3.11318 20.0119V19.1053Z'
fill='#0E7BC9'
/>
<path
d='M6.22637 19.1053C6.22637 18.7536 6.51105 18.4686 6.86223 18.4686H7.7677C8.11888 18.4686 8.40356 18.7536 8.40356 19.1053V20.0119C8.40356 20.3635 8.11888 20.6486 7.7677 20.6486H6.86223C6.51105 20.6486 6.22637 20.3635 6.22637 20.0119V19.1053Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 19.1053C9.33955 18.7536 9.62424 18.4686 9.97541 18.4686H10.8809C11.2321 18.4686 11.5167 18.7536 11.5167 19.1053V20.0119C11.5167 20.3635 11.2321 20.6486 10.8809 20.6486H9.97541C9.62424 20.6486 9.33955 20.3635 9.33955 20.0119V19.1053Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 19.1053C12.4527 18.7536 12.7374 18.4686 13.0886 18.4686H13.9941C14.3452 18.4686 14.6299 18.7536 14.6299 19.1053V20.0119C14.6299 20.3635 14.3452 20.6486 13.9941 20.6486H13.0886C12.7374 20.6486 12.4527 20.3635 12.4527 20.0119V19.1053Z'
fill='#030712'
fillOpacity='0.1'
/>
{/* blue (accent) cell */}
<path
d='M15.5659 19.1053C15.5659 18.7536 15.8506 18.4686 16.2018 18.4686H17.1073C17.4584 18.4686 17.7431 18.7536 17.7431 19.1053V20.0119C17.7431 20.3635 17.4584 20.6486 17.1073 20.6486H16.2018C15.8506 20.6486 15.5659 20.3635 15.5659 20.0119V19.1053Z'
fill='#0E7BC9'
/>
<path
d='M18.6791 19.1053C18.6791 18.7536 18.9638 18.4686 19.315 18.4686H20.2204C20.5716 18.4686 20.8563 18.7536 20.8563 19.1053V20.0119C20.8563 20.3635 20.5716 20.6486 20.2204 20.6486H19.315C18.9638 20.6486 18.6791 20.3635 18.6791 20.0119V19.1053Z'
fill='#030712'
fillOpacity='0.1'
/>
{/* blue (accent) cell */}
<path
d='M21.7923 19.1053C21.7923 18.7536 22.077 18.4686 22.4282 18.4686H23.3336C23.6848 18.4686 23.9695 18.7536 23.9695 19.1053V20.0119C23.9695 20.3635 23.6848 20.6486 23.3336 20.6486H22.4282C22.077 20.6486 21.7923 20.3635 21.7923 20.0119V19.1053Z'
fill='#0E7BC9'
/>
<path
d='M0 22.2224C0 21.8708 0.284685 21.5857 0.635863 21.5857H1.54133C1.89251 21.5857 2.17719 21.8708 2.17719 22.2224V23.129C2.17719 23.4807 1.89251 23.7657 1.54133 23.7657H0.635863C0.284686 23.7657 0 23.4807 0 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 22.2224C3.11318 21.8708 3.39787 21.5857 3.74905 21.5857H4.65452C5.00569 21.5857 5.29038 21.8708 5.29038 22.2224V23.129C5.29038 23.4807 5.00569 23.7657 4.65452 23.7657H3.74905C3.39787 23.7657 3.11318 23.4807 3.11318 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 22.2224C6.22637 21.8708 6.51105 21.5857 6.86223 21.5857H7.7677C8.11888 21.5857 8.40356 21.8708 8.40356 22.2224V23.129C8.40356 23.4807 8.11888 23.7657 7.7677 23.7657H6.86223C6.51105 23.7657 6.22637 23.4807 6.22637 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 22.2224C9.33955 21.8708 9.62424 21.5857 9.97541 21.5857H10.8809C11.2321 21.5857 11.5167 21.8708 11.5167 22.2224V23.129C11.5167 23.4807 11.2321 23.7657 10.8809 23.7657H9.97541C9.62424 23.7657 9.33955 23.4807 9.33955 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 22.2224C12.4527 21.8708 12.7374 21.5857 13.0886 21.5857H13.9941C14.3452 21.5857 14.6299 21.8708 14.6299 22.2224V23.129C14.6299 23.4807 14.3452 23.7657 13.9941 23.7657H13.0886C12.7374 23.7657 12.4527 23.4807 12.4527 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 22.2224C15.5659 21.8708 15.8506 21.5857 16.2018 21.5857H17.1073C17.4584 21.5857 17.7431 21.8708 17.7431 22.2224V23.129C17.7431 23.4807 17.4584 23.7657 17.1073 23.7657H16.2018C15.8506 23.7657 15.5659 23.4807 15.5659 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
{/* blue (accent) cell */}
<path
d='M18.6791 22.2224C18.6791 21.8708 18.9638 21.5857 19.315 21.5857H20.2204C20.5716 21.5857 20.8563 21.8708 20.8563 22.2224V23.129C20.8563 23.4807 20.5716 23.7657 20.2204 23.7657H19.315C18.9638 23.7657 18.6791 23.4807 18.6791 23.129V22.2224Z'
fill='#0E7BC9'
/>
<path
d='M21.7923 22.2224C21.7923 21.8708 22.077 21.5857 22.4282 21.5857H23.3336C23.6848 21.5857 23.9695 21.8708 23.9695 22.2224V23.129C23.9695 23.4807 23.6848 23.7657 23.3336 23.7657H22.4282C22.077 23.7657 21.7923 23.4807 21.7923 23.129V22.2224Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 25.3396C0 24.9879 0.284685 24.7029 0.635863 24.7029H1.54133C1.89251 24.7029 2.17719 24.9879 2.17719 25.3396V26.2462C2.17719 26.5978 1.89251 26.8829 1.54133 26.8829H0.635863C0.284686 26.8829 0 26.5978 0 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 25.3396C3.11318 24.9879 3.39787 24.7029 3.74905 24.7029H4.65452C5.00569 24.7029 5.29038 24.9879 5.29038 25.3396V26.2462C5.29038 26.5978 5.00569 26.8829 4.65452 26.8829H3.74905C3.39787 26.8829 3.11318 26.5978 3.11318 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 25.3396C6.22637 24.9879 6.51105 24.7029 6.86223 24.7029H7.7677C8.11888 24.7029 8.40356 24.9879 8.40356 25.3396V26.2462C8.40356 26.5978 8.11888 26.8829 7.7677 26.8829H6.86223C6.51105 26.8829 6.22637 26.5978 6.22637 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 25.3396C9.33955 24.9879 9.62424 24.7029 9.97541 24.7029H10.8809C11.2321 24.7029 11.5167 24.9879 11.5167 25.3396V26.2462C11.5167 26.5978 11.2321 26.8829 10.8809 26.8829H9.97541C9.62424 26.8829 9.33955 26.5978 9.33955 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 25.3396C12.4527 24.9879 12.7374 24.7029 13.0886 24.7029H13.9941C14.3452 24.7029 14.6299 24.9879 14.6299 25.3396V26.2462C14.6299 26.5978 14.3452 26.8829 13.9941 26.8829H13.0886C12.7374 26.8829 12.4527 26.5978 12.4527 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 25.3396C15.5659 24.9879 15.8506 24.7029 16.2018 24.7029H17.1073C17.4584 24.7029 17.7431 24.9879 17.7431 25.3396V26.2462C17.7431 26.5978 17.4584 26.8829 17.1073 26.8829H16.2018C15.8506 26.8829 15.5659 26.5978 15.5659 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 25.3396C18.6791 24.9879 18.9638 24.7029 19.315 24.7029H20.2204C20.5716 24.7029 20.8563 24.9879 20.8563 25.3396V26.2462C20.8563 26.5978 20.5716 26.8829 20.2204 26.8829H19.315C18.9638 26.8829 18.6791 26.5978 18.6791 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 25.3396C21.7923 24.9879 22.077 24.7029 22.4282 24.7029H23.3336C23.6848 24.7029 23.9695 24.9879 23.9695 25.3396V26.2462C23.9695 26.5978 23.6848 26.8829 23.3336 26.8829H22.4282C22.077 26.8829 21.7923 26.5978 21.7923 26.2462V25.3396Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M0 28.4567C0 28.1051 0.284685 27.82 0.635863 27.82H1.54133C1.89251 27.82 2.17719 28.1051 2.17719 28.4567V29.3633C2.17719 29.715 1.89251 30 1.54133 30H0.635863C0.284686 30 0 29.715 0 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M3.11318 28.4567C3.11318 28.1051 3.39787 27.82 3.74905 27.82H4.65452C5.00569 27.82 5.29038 28.1051 5.29038 28.4567V29.3633C5.29038 29.715 5.00569 30 4.65452 30H3.74905C3.39787 30 3.11318 29.715 3.11318 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M6.22637 28.4567C6.22637 28.1051 6.51105 27.82 6.86223 27.82H7.7677C8.11888 27.82 8.40356 28.1051 8.40356 28.4567V29.3633C8.40356 29.715 8.11888 30 7.7677 30H6.86223C6.51105 30 6.22637 29.715 6.22637 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M9.33955 28.4567C9.33955 28.1051 9.62424 27.82 9.97541 27.82H10.8809C11.2321 27.82 11.5167 28.1051 11.5167 28.4567V29.3633C11.5167 29.715 11.2321 30 10.8809 30H9.97541C9.62424 30 9.33955 29.715 9.33955 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M12.4527 28.4567C12.4527 28.1051 12.7374 27.82 13.0886 27.82H13.9941C14.3452 27.82 14.6299 28.1051 14.6299 28.4567V29.3633C14.6299 29.715 14.3452 30 13.9941 30H13.0886C12.7374 30 12.4527 29.715 12.4527 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M15.5659 28.4567C15.5659 28.1051 15.8506 27.82 16.2018 27.82H17.1073C17.4584 27.82 17.7431 28.1051 17.7431 28.4567V29.3633C17.7431 29.715 17.4584 30 17.1073 30H16.2018C15.8506 30 15.5659 29.715 15.5659 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M18.6791 28.4567C18.6791 28.1051 18.9638 27.82 19.315 27.82H20.2204C20.5716 27.82 20.8563 28.1051 20.8563 28.4567V29.3633C20.8563 29.715 20.5716 30 20.2204 30H19.315C18.9638 30 18.6791 29.715 18.6791 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
<path
d='M21.7923 28.4567C21.7923 28.1051 22.077 27.82 22.4282 27.82H23.3336C23.6848 27.82 23.9695 28.1051 23.9695 28.4567V29.3633C23.9695 29.715 23.6848 30 23.3336 30H22.4282C22.077 30 21.7923 29.715 21.7923 29.3633V28.4567Z'
fill='#030712'
fillOpacity='0.1'
/>
</svg>
)
}

View File

@@ -84,11 +84,9 @@ import {
PolymarketIcon,
PostgresIcon,
PosthogIcon,
PulseIcon,
QdrantIcon,
RDSIcon,
RedditIcon,
ReductoIcon,
ResendIcon,
S3Icon,
SalesforceIcon,
@@ -210,11 +208,9 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
polymarket: PolymarketIcon,
postgresql: PostgresIcon,
posthog: PosthogIcon,
pulse: PulseIcon,
qdrant: QdrantIcon,
rds: RDSIcon,
reddit: RedditIcon,
reducto: ReductoIcon,
resend: ResendIcon,
s3: S3Icon,
salesforce: SalesforceIcon,

View File

@@ -79,11 +79,9 @@
"polymarket",
"postgresql",
"posthog",
"pulse",
"qdrant",
"rds",
"reddit",
"reducto",
"resend",
"s3",
"salesforce",

View File

@@ -1,72 +0,0 @@
---
title: Pulse
description: Extract text from documents using Pulse OCR
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="pulse"
color="#E0E0E0"
/>
{/* MANUAL-CONTENT-START:intro */}
The [Pulse](https://www.pulseapi.com/) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
With Pulse, you can:
- **Extract text from documents**: Quickly convert scanned PDFs, images, and Office documents to usable text, markdown, or JSON.
- **Process documents by URL or upload**: Simply provide a file URL or use upload to extract text from local documents or remote resources.
- **Flexible output formats**: Choose between markdown, plain text, or JSON representations of the extracted content for downstream processing.
- **Selective page processing**: Specify a range of pages to process, reducing processing time and cost when you only need part of a document.
- **Figure and table extraction**: Optionally extract figures and tables, with automatic caption and description generation for added context.
- **Get processing insights**: Receive detailed metadata on each job, including file type, page count, processing time, and more.
- **Integration-ready responses**: Incorporate extracted content into research, workflow automation, or data analysis pipelines.
Ideal for automating tedious document review, enabling content summarization, research, and more, Pulse Parser brings real-world documents into the digital workflow era.
If you need accurate, scalable, and developer-friendly document parsing capabilities—across formats, languages, and layouts—Pulse empowers your agents to read the world.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via URL or upload.
## Tools
### `pulse_parser`
Parse documents (PDF, images, Office docs) using Pulse OCR API
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `filePath` | string | Yes | URL to a document to be processed |
| `fileUpload` | object | No | File upload data from file-upload component |
| `pages` | string | No | Page range to process \(1-indexed, e.g., "1-2,5"\) |
| `extractFigure` | boolean | No | Enable figure extraction from the document |
| `figureDescription` | boolean | No | Generate descriptions/captions for extracted figures |
| `returnHtml` | boolean | No | Include HTML in the response |
| `chunking` | string | No | Chunking strategies \(comma-separated: semantic, header, page, recursive\) |
| `chunkSize` | number | No | Maximum characters per chunk when chunking is enabled |
| `apiKey` | string | Yes | Pulse API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `markdown` | string | Extracted content in markdown format |
| `page_count` | number | Number of pages in the document |
| `job_id` | string | Unique job identifier |
| `bounding_boxes` | json | Bounding box layout information |
| `extraction_url` | string | URL for extraction results \(for large documents\) |
| `html` | string | HTML content if requested |
| `structured_output` | json | Structured output if schema was provided |
| `chunks` | json | Chunked content if chunking was enabled |
| `figures` | json | Extracted figures if figure extraction was enabled |

View File

@@ -1,63 +0,0 @@
---
title: Reducto
description: Extract text from PDF documents
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="reducto"
color="#5c0c5c"
/>
{/* MANUAL-CONTENT-START:intro */}
The [Reducto](https://reducto.ai/) tool enables fast and accurate extraction of text and data from PDF documents via OCR (Optical Character Recognition). Reducto is designed for agent workflows, making it easy to process uploaded or linked PDFs and transform their contents into ready-to-use information.
With the Reducto tool, you can:
- **Extract text and tables from PDFs**: Quickly convert scanned or digital PDFs to text, markdown, or structured JSON.
- **Parse PDFs from uploads or URLs**: Process documents either by uploading a PDF or specifying a direct URL.
- **Customize output formatting**: Choose your preferred output format—markdown, plain text, or JSON—and specify table formats as markdown or HTML.
- **Select specific pages**: Optionally extract content from particular pages to optimize processing and focus on what matters.
- **Receive detailed processing metadata**: Alongside extracted content, get job details, processing times, source file info, page counts, and OCR usage stats for audit and automation.
Whether you're automating workflow steps, extracting business-critical information, or unlocking archival documents for search and analysis, Reducto's OCR parser gives you structured, actionable data from even the most complex PDFs.
Looking for reliable and scalable PDF parsing? Reducto is optimized for developer and agent use—providing accuracy, speed, and flexibility for modern document understanding.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF documents, or from a URL.
## Tools
### `reducto_parser`
Parse PDF documents using Reducto OCR API
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `filePath` | string | Yes | URL to a PDF document to be processed |
| `fileUpload` | object | No | File upload data from file-upload component |
| `pages` | array | No | Specific pages to process \(1-indexed page numbers\) |
| `tableOutputFormat` | string | No | Table output format \(html or markdown\). Defaults to markdown. |
| `apiKey` | string | Yes | Reducto API key \(REDUCTO_API_KEY\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `job_id` | string | Unique identifier for the processing job |
| `duration` | number | Processing time in seconds |
| `usage` | json | Resource consumption data |
| `result` | json | Parsed document content with chunks and blocks |
| `pdf_url` | string | Storage URL of converted PDF |
| `studio_link` | string | Link to Reducto studio interface |

View File

@@ -1,11 +0,0 @@
'use client'
import { Tooltip } from '@/components/emcn'
/** Props for {@link TooltipProvider}. */
interface TooltipProviderProps {
// Subtree that gains access to tooltip context.
children: React.ReactNode
}
/**
 * Thin wrapper around the design-system `Tooltip.Provider` so consumers can
 * import the provider from one place. Adds no props or behavior of its own;
 * children are passed straight through.
 */
export function TooltipProvider({ children }: TooltipProviderProps) {
return <Tooltip.Provider>{children}</Tooltip.Provider>
}

View File

@@ -11,7 +11,7 @@
*/
:root {
--sidebar-width: 232px; /* SIDEBAR_WIDTH.DEFAULT */
--panel-width: 320px; /* PANEL_WIDTH.DEFAULT */
--panel-width: 290px; /* PANEL_WIDTH.DEFAULT */
--toolbar-triggers-height: 300px; /* TOOLBAR_TRIGGERS_HEIGHT.DEFAULT */
--editor-connections-height: 172px; /* EDITOR_CONNECTIONS_HEIGHT.DEFAULT */
--terminal-height: 155px; /* TERMINAL_HEIGHT.DEFAULT */
@@ -59,22 +59,21 @@
}
/**
* Workflow canvas cursor styles
* Override React Flow's default selection cursor based on canvas mode
* Selected node ring indicator
* Uses a pseudo-element overlay to match the original behavior (absolute inset-0 z-40)
*/
.workflow-container.canvas-mode-cursor .react-flow__pane,
.workflow-container.canvas-mode-cursor .react-flow__selectionpane {
cursor: default !important;
.react-flow__node.selected > div > div {
position: relative;
}
.workflow-container.canvas-mode-hand .react-flow__pane,
.workflow-container.canvas-mode-hand .react-flow__selectionpane {
cursor: grab !important;
}
.workflow-container.canvas-mode-hand .react-flow__pane:active,
.workflow-container.canvas-mode-hand .react-flow__selectionpane:active {
cursor: grabbing !important;
.react-flow__node.selected > div > div::after {
content: "";
position: absolute;
inset: 0;
z-index: 40;
border-radius: 8px;
box-shadow: 0 0 0 1.75px var(--brand-secondary);
pointer-events: none;
}
/**
@@ -558,6 +557,32 @@ input[type="search"]::-ms-clear {
transition-duration: 300ms;
}
.streaming-effect {
@apply relative overflow-hidden;
}
.streaming-effect::after {
content: "";
@apply pointer-events-none absolute left-0 top-0 h-full w-full;
background: linear-gradient(
90deg,
rgba(128, 128, 128, 0) 0%,
rgba(128, 128, 128, 0.1) 50%,
rgba(128, 128, 128, 0) 100%
);
animation: code-shimmer 1.5s infinite;
z-index: 10;
}
.dark .streaming-effect::after {
background: linear-gradient(
90deg,
rgba(180, 180, 180, 0) 0%,
rgba(180, 180, 180, 0.1) 50%,
rgba(180, 180, 180, 0) 100%
);
}
.loading-placeholder::placeholder {
animation: placeholder-pulse 1.5s ease-in-out infinite;
}
@@ -632,20 +657,6 @@ input[type="search"]::-ms-clear {
}
}
/**
* Notification toast enter animation
*/
@keyframes notification-enter {
from {
opacity: 0;
transform: translateX(-16px);
}
to {
opacity: 1;
transform: translateX(var(--stack-offset, 0px));
}
}
/**
* @deprecated
* Legacy globals (light/dark) kept for backward-compat with old classes.

File diff suppressed because it is too large Load Diff

View File

@@ -3,60 +3,13 @@
*
* @vitest-environment node
*/
import {
createMockRequest,
mockConsoleLogger,
mockCryptoUuid,
mockDrizzleOrm,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'
vi.mock('@/lib/core/utils/urls', () => ({
getBaseUrl: vi.fn(() => 'https://app.example.com'),
}))
/**
 * Setup auth API mocks for testing authentication routes.
 *
 * Installs the common API mocks (uuid, crypto.randomUUID, console logger,
 * drizzle ORM) and then `vi.doMock`s '@/lib/auth' with `forgetPassword` /
 * `resetPassword` methods whose success/failure is driven by `options`.
 *
 * @param options.operations - per-operation overrides; for each operation,
 *   `success: false` makes the mocked method reject with `new Error(error)`,
 *   otherwise it resolves with undefined. Defaults: both succeed, with
 *   fallback error messages used only when a failure is requested without
 *   a custom message.
 *
 * NOTE: `vi.doMock` is not hoisted, so tests must `vi.resetModules()` and
 * re-import the route after calling this for the mock to take effect.
 */
function setupAuthApiMocks(
options: {
operations?: {
forgetPassword?: { success?: boolean; error?: string }
resetPassword?: { success?: boolean; error?: string }
}
} = {}
) {
// Baseline mocks shared by all API-route tests.
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
mockConsoleLogger()
mockDrizzleOrm()
const { operations = {} } = options
// Caller overrides are spread over the defaults per operation.
const defaultOperations = {
forgetPassword: { success: true, error: 'Forget password error', ...operations.forgetPassword },
resetPassword: { success: true, error: 'Reset password error', ...operations.resetPassword },
}
// Builds a vi.fn that resolves on success and rejects with the configured
// error message otherwise.
const createAuthMethod = (config: { success?: boolean; error?: string }) => {
return vi.fn().mockImplementation(() => {
if (config.success) {
return Promise.resolve()
}
return Promise.reject(new Error(config.error))
})
}
vi.doMock('@/lib/auth', () => ({
auth: {
api: {
forgetPassword: createAuthMethod(defaultOperations.forgetPassword),
resetPassword: createAuthMethod(defaultOperations.resetPassword),
},
},
}))
}
describe('Forget Password API Route', () => {
beforeEach(() => {
vi.resetModules()

View File

@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'
describe('OAuth Connections API Route', () => {
const mockGetSession = vi.fn()

View File

@@ -4,9 +4,9 @@
* @vitest-environment node
*/
import { createMockLogger } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger } from '@/app/api/__test-utils__/utils'
describe('OAuth Credentials API Route', () => {
const mockGetSession = vi.fn()

View File

@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'
describe('OAuth Disconnect API Route', () => {
const mockGetSession = vi.fn()

View File

@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
import { createMockLogger, createMockRequest } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockLogger, createMockRequest } from '@/app/api/__test-utils__/utils'
describe('OAuth Token API Routes', () => {
const mockGetUserId = vi.fn()

View File

@@ -3,55 +3,8 @@
*
* @vitest-environment node
*/
import {
createMockRequest,
mockConsoleLogger,
mockCryptoUuid,
mockDrizzleOrm,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/** Setup auth API mocks for testing authentication routes */
function setupAuthApiMocks(
options: {
operations?: {
forgetPassword?: { success?: boolean; error?: string }
resetPassword?: { success?: boolean; error?: string }
}
} = {}
) {
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
mockConsoleLogger()
mockDrizzleOrm()
const { operations = {} } = options
const defaultOperations = {
forgetPassword: { success: true, error: 'Forget password error', ...operations.forgetPassword },
resetPassword: { success: true, error: 'Reset password error', ...operations.resetPassword },
}
const createAuthMethod = (config: { success?: boolean; error?: string }) => {
return vi.fn().mockImplementation(() => {
if (config.success) {
return Promise.resolve()
}
return Promise.reject(new Error(config.error))
})
}
vi.doMock('@/lib/auth', () => ({
auth: {
api: {
forgetPassword: createAuthMethod(defaultOperations.forgetPassword),
resetPassword: createAuthMethod(defaultOperations.resetPassword),
},
},
}))
}
import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'
describe('Reset Password API Route', () => {
beforeEach(() => {

View File

@@ -5,34 +5,7 @@
*/
import { loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/**
* Creates a mock NextRequest with cookies support for testing.
*/
function createMockNextRequest(
method = 'GET',
body?: unknown,
headers: Record<string, string> = {},
url = 'http://localhost:3000/api/test'
): any {
const headersObj = new Headers({
'Content-Type': 'application/json',
...headers,
})
return {
method,
headers: headersObj,
cookies: {
get: vi.fn().mockReturnValue(undefined),
},
json:
body !== undefined
? vi.fn().mockResolvedValue(body)
: vi.fn().mockRejectedValue(new Error('No body')),
url,
}
}
import { createMockRequest } from '@/app/api/__test-utils__/utils'
const createMockStream = () => {
return new ReadableStream({
@@ -98,15 +71,10 @@ vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn().mockReturnValue('test-request-id'),
}))
vi.mock('@/lib/core/security/encryption', () => ({
decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'test-password' }),
}))
describe('Chat Identifier API Route', () => {
const mockAddCorsHeaders = vi.fn().mockImplementation((response) => response)
const mockValidateChatAuth = vi.fn().mockResolvedValue({ authorized: true })
const mockSetChatAuthCookie = vi.fn()
const mockValidateAuthToken = vi.fn().mockReturnValue(false)
const mockChatResult = [
{
@@ -146,16 +114,11 @@ describe('Chat Identifier API Route', () => {
beforeEach(() => {
vi.resetModules()
vi.doMock('@/lib/core/security/deployment', () => ({
addCorsHeaders: mockAddCorsHeaders,
validateAuthToken: mockValidateAuthToken,
setDeploymentAuthCookie: vi.fn(),
isEmailAllowed: vi.fn().mockReturnValue(false),
}))
vi.doMock('@/app/api/chat/utils', () => ({
addCorsHeaders: mockAddCorsHeaders,
validateChatAuth: mockValidateChatAuth,
setChatAuthCookie: mockSetChatAuthCookie,
validateAuthToken: vi.fn().mockReturnValue(true),
}))
// Mock logger - use loggerMock from @sim/testing
@@ -212,7 +175,7 @@ describe('Chat Identifier API Route', () => {
describe('GET endpoint', () => {
it('should return chat info for a valid identifier', async () => {
const req = createMockNextRequest('GET')
const req = createMockRequest('GET')
const params = Promise.resolve({ identifier: 'test-chat' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -243,7 +206,7 @@ describe('Chat Identifier API Route', () => {
}
})
const req = createMockNextRequest('GET')
const req = createMockRequest('GET')
const params = Promise.resolve({ identifier: 'nonexistent' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -277,7 +240,7 @@ describe('Chat Identifier API Route', () => {
}
})
const req = createMockNextRequest('GET')
const req = createMockRequest('GET')
const params = Promise.resolve({ identifier: 'inactive-chat' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -298,7 +261,7 @@ describe('Chat Identifier API Route', () => {
error: 'auth_required_password',
}))
const req = createMockNextRequest('GET')
const req = createMockRequest('GET')
const params = Promise.resolve({ identifier: 'password-protected-chat' })
const { GET } = await import('@/app/api/chat/[identifier]/route')
@@ -319,7 +282,7 @@ describe('Chat Identifier API Route', () => {
describe('POST endpoint', () => {
it('should handle authentication requests without input', async () => {
const req = createMockNextRequest('POST', { password: 'test-password' })
const req = createMockRequest('POST', { password: 'test-password' })
const params = Promise.resolve({ identifier: 'password-protected-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -335,7 +298,7 @@ describe('Chat Identifier API Route', () => {
})
it('should return 400 for requests without input', async () => {
const req = createMockNextRequest('POST', {})
const req = createMockRequest('POST', {})
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -356,7 +319,7 @@ describe('Chat Identifier API Route', () => {
error: 'Authentication required',
}))
const req = createMockNextRequest('POST', { input: 'Hello' })
const req = createMockRequest('POST', { input: 'Hello' })
const params = Promise.resolve({ identifier: 'protected-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -387,7 +350,7 @@ describe('Chat Identifier API Route', () => {
},
})
const req = createMockNextRequest('POST', { input: 'Hello' })
const req = createMockRequest('POST', { input: 'Hello' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -406,10 +369,7 @@ describe('Chat Identifier API Route', () => {
})
it('should return streaming response for valid chat messages', async () => {
const req = createMockNextRequest('POST', {
input: 'Hello world',
conversationId: 'conv-123',
})
const req = createMockRequest('POST', { input: 'Hello world', conversationId: 'conv-123' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -441,7 +401,7 @@ describe('Chat Identifier API Route', () => {
}, 10000)
it('should handle streaming response body correctly', async () => {
const req = createMockNextRequest('POST', { input: 'Hello world' })
const req = createMockRequest('POST', { input: 'Hello world' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -471,7 +431,7 @@ describe('Chat Identifier API Route', () => {
throw new Error('Execution failed')
})
const req = createMockNextRequest('POST', { input: 'Trigger error' })
const req = createMockRequest('POST', { input: 'Trigger error' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')
@@ -510,7 +470,7 @@ describe('Chat Identifier API Route', () => {
})
it('should pass conversationId to streaming execution when provided', async () => {
const req = createMockNextRequest('POST', {
const req = createMockRequest('POST', {
input: 'Hello world',
conversationId: 'test-conversation-123',
})
@@ -532,7 +492,7 @@ describe('Chat Identifier API Route', () => {
})
it('should handle missing conversationId gracefully', async () => {
const req = createMockNextRequest('POST', { input: 'Hello world' })
const req = createMockRequest('POST', { input: 'Hello world' })
const params = Promise.resolve({ identifier: 'test-chat' })
const { POST } = await import('@/app/api/chat/[identifier]/route')

View File

@@ -3,9 +3,9 @@
*
* @vitest-environment node
*/
import { mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@/app/api/__test-utils__/utils'
describe('Copilot API Keys API Route', () => {
const mockFetch = vi.fn()

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Chat Delete API Route', () => {
const mockDelete = vi.fn()

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateChatTitle } from '@/lib/copilot/chat-title'
import { getCopilotModel } from '@/lib/copilot/config'
import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import { SIM_AGENT_API_URL_DEFAULT, SIM_AGENT_VERSION } from '@/lib/copilot/constants'
import {
authenticateCopilotRequestSessionOnly,
@@ -40,34 +41,8 @@ const ChatMessageSchema = z.object({
userMessageId: z.string().optional(), // ID from frontend for the user message
chatId: z.string().optional(),
workflowId: z.string().min(1, 'Workflow ID is required'),
model: z
.enum([
'gpt-5-fast',
'gpt-5',
'gpt-5-medium',
'gpt-5-high',
'gpt-5.1-fast',
'gpt-5.1',
'gpt-5.1-medium',
'gpt-5.1-high',
'gpt-5-codex',
'gpt-5.1-codex',
'gpt-5.2',
'gpt-5.2-codex',
'gpt-5.2-pro',
'gpt-4o',
'gpt-4.1',
'o3',
'claude-4-sonnet',
'claude-4.5-haiku',
'claude-4.5-sonnet',
'claude-4.5-opus',
'claude-4.1-opus',
'gemini-3-pro',
])
.optional()
.default('claude-4.5-opus'),
mode: z.enum(['ask', 'agent', 'plan']).optional().default('agent'),
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
prefetch: z.boolean().optional(),
createNewChat: z.boolean().optional().default(false),
stream: z.boolean().optional().default(true),
@@ -295,7 +270,8 @@ export async function POST(req: NextRequest) {
}
const defaults = getCopilotModel('chat')
const modelToUse = env.COPILOT_MODEL || defaults.model
const selectedModel = model || defaults.model
const envModel = env.COPILOT_MODEL || defaults.model
let providerConfig: CopilotProviderConfig | undefined
const providerEnv = env.COPILOT_PROVIDER as any
@@ -304,7 +280,7 @@ export async function POST(req: NextRequest) {
if (providerEnv === 'azure-openai') {
providerConfig = {
provider: 'azure-openai',
model: modelToUse,
model: envModel,
apiKey: env.AZURE_OPENAI_API_KEY,
apiVersion: 'preview',
endpoint: env.AZURE_OPENAI_ENDPOINT,
@@ -312,7 +288,7 @@ export async function POST(req: NextRequest) {
} else if (providerEnv === 'vertex') {
providerConfig = {
provider: 'vertex',
model: modelToUse,
model: envModel,
apiKey: env.COPILOT_API_KEY,
vertexProject: env.VERTEX_PROJECT,
vertexLocation: env.VERTEX_LOCATION,
@@ -320,12 +296,15 @@ export async function POST(req: NextRequest) {
} else {
providerConfig = {
provider: providerEnv,
model: modelToUse,
model: selectedModel,
apiKey: env.COPILOT_API_KEY,
}
}
}
const effectiveMode = mode === 'agent' ? 'build' : mode
const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
// Determine conversationId to use for this request
const effectiveConversationId =
(currentChat?.conversationId as string | undefined) || conversationId
@@ -345,7 +324,7 @@ export async function POST(req: NextRequest) {
}
} | null = null
if (mode === 'agent') {
if (effectiveMode === 'build') {
// Build base tools (executed locally, not deferred)
// Include function_execute for code execution capability
baseTools = [
@@ -452,8 +431,8 @@ export async function POST(req: NextRequest) {
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
model: model,
mode: mode,
model: selectedModel,
mode: transportMode,
messageId: userMessageIdToUse,
version: SIM_AGENT_VERSION,
...(providerConfig ? { provider: providerConfig } : {}),
@@ -477,7 +456,7 @@ export async function POST(req: NextRequest) {
hasConversationId: !!effectiveConversationId,
hasFileAttachments: processedFileContents.length > 0,
messageLength: message.length,
mode,
mode: effectiveMode,
hasTools: integrationTools.length > 0,
toolCount: integrationTools.length,
hasBaseTools: baseTools.length > 0,

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Chat Update Messages API Route', () => {
const mockSelect = vi.fn()

View File

@@ -11,6 +11,7 @@ import {
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { COPILOT_MODES } from '@/lib/copilot/models'
const logger = createLogger('CopilotChatUpdateAPI')
@@ -45,7 +46,7 @@ const UpdateMessagesSchema = z.object({
planArtifact: z.string().nullable().optional(),
config: z
.object({
mode: z.enum(['ask', 'build', 'plan']).optional(),
mode: z.enum(COPILOT_MODES).optional(),
model: z.string().optional(),
})
.nullable()

View File

@@ -3,8 +3,8 @@
*
* @vitest-environment node
*/
import { mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { mockCryptoUuid, setupCommonApiMocks } from '@/app/api/__test-utils__/utils'
describe('Copilot Chats List API Route', () => {
const mockSelect = vi.fn()

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Checkpoints Revert API Route', () => {
const mockSelect = vi.fn()

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Checkpoints API Route', () => {
const mockSelect = vi.fn()

View File

@@ -3,9 +3,14 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Confirm API Route', () => {
const mockRedisExists = vi.fn()

View File

@@ -3,9 +3,13 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Feedback API Route', () => {
const mockInsert = vi.fn()

View File

@@ -3,9 +3,13 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockCryptoUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockCryptoUuid,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Copilot Stats API Route', () => {
const mockFetch = vi.fn()

View File

@@ -2,12 +2,13 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import type { CopilotModelId } from '@/lib/copilot/models'
import { db } from '@/../../packages/db'
import { settings } from '@/../../packages/db/schema'
const logger = createLogger('CopilotUserModelsAPI')
const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
'gpt-4o': false,
'gpt-4.1': false,
'gpt-5-fast': false,
@@ -28,7 +29,7 @@ const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
'claude-4.5-haiku': true,
'claude-4.5-sonnet': true,
'claude-4.5-opus': true,
// 'claude-4.1-opus': true,
'claude-4.1-opus': false,
'gemini-3-pro': true,
}
@@ -54,7 +55,9 @@ export async function GET(request: NextRequest) {
const mergedModels = { ...DEFAULT_ENABLED_MODELS }
for (const [modelId, enabled] of Object.entries(userModelsMap)) {
mergedModels[modelId] = enabled
if (modelId in mergedModels) {
mergedModels[modelId as CopilotModelId] = enabled
}
}
const hasNewModels = Object.keys(DEFAULT_ENABLED_MODELS).some(

View File

@@ -1,87 +1,5 @@
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/** Setup file API mocks for file delete tests */
function setupFileApiMocks(
options: {
authenticated?: boolean
storageProvider?: 's3' | 'blob' | 'local'
cloudEnabled?: boolean
} = {}
) {
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth()
if (authenticated) {
authMocks.setAuthenticated()
} else {
authMocks.setUnauthenticated()
}
vi.doMock('@/lib/auth/hybrid', () => ({
checkHybridAuth: vi.fn().mockResolvedValue({
success: authenticated,
userId: authenticated ? 'test-user-id' : undefined,
error: authenticated ? undefined : 'Unauthorized',
}),
}))
vi.doMock('@/app/api/files/authorization', () => ({
verifyFileAccess: vi.fn().mockResolvedValue(true),
verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
}))
const uploadFileMock = vi.fn().mockResolvedValue({
path: '/api/files/serve/test-key.txt',
key: 'test-key.txt',
name: 'test.txt',
size: 100,
type: 'text/plain',
})
const downloadFileMock = vi.fn().mockResolvedValue(Buffer.from('test content'))
const deleteFileMock = vi.fn().mockResolvedValue(undefined)
const hasCloudStorageMock = vi.fn().mockReturnValue(cloudEnabled)
vi.doMock('@/lib/uploads', () => ({
getStorageProvider: vi.fn().mockReturnValue(storageProvider),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
StorageService: {
uploadFile: uploadFileMock,
downloadFile: downloadFileMock,
deleteFile: deleteFileMock,
hasCloudStorage: hasCloudStorageMock,
},
uploadFile: uploadFileMock,
downloadFile: downloadFileMock,
deleteFile: deleteFileMock,
hasCloudStorage: hasCloudStorageMock,
}))
vi.doMock('@/lib/uploads/core/storage-service', () => ({
uploadFile: uploadFileMock,
downloadFile: downloadFileMock,
deleteFile: deleteFileMock,
hasCloudStorage: hasCloudStorageMock,
}))
vi.doMock('fs/promises', () => ({
unlink: vi.fn().mockResolvedValue(undefined),
access: vi.fn().mockResolvedValue(undefined),
stat: vi.fn().mockResolvedValue({ isFile: () => true }),
}))
return { auth: authMocks }
}
import { createMockRequest, setupFileApiMocks } from '@/app/api/__test-utils__/utils'
describe('File Delete API Route', () => {
beforeEach(() => {

View File

@@ -1,59 +1,12 @@
import path from 'path'
import { NextRequest } from 'next/server'
/**
* Tests for file parse API route
*
* @vitest-environment node
*/
import {
createMockRequest,
mockAuth,
mockCryptoUuid,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
function setupFileApiMocks(
options: {
authenticated?: boolean
storageProvider?: 's3' | 'blob' | 'local'
cloudEnabled?: boolean
} = {}
) {
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth()
if (authenticated) {
authMocks.setAuthenticated()
} else {
authMocks.setUnauthenticated()
}
vi.doMock('@/lib/auth/hybrid', () => ({
checkHybridAuth: vi.fn().mockResolvedValue({
success: authenticated,
userId: authenticated ? 'test-user-id' : undefined,
error: authenticated ? undefined : 'Unauthorized',
}),
}))
vi.doMock('@/app/api/files/authorization', () => ({
verifyFileAccess: vi.fn().mockResolvedValue(true),
verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
}))
vi.doMock('@/lib/uploads', () => ({
getStorageProvider: vi.fn().mockReturnValue(storageProvider),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
}))
return { auth: authMocks }
}
import { createMockRequest, setupFileApiMocks } from '@/app/api/__test-utils__/utils'
const mockJoin = vi.fn((...args: string[]): string => {
if (args[0] === '/test/uploads') {

View File

@@ -1,6 +1,6 @@
import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'
/**
* Tests for file presigned API route
@@ -8,106 +8,6 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
* @vitest-environment node
*/
function setupFileApiMocks(
options: {
authenticated?: boolean
storageProvider?: 's3' | 'blob' | 'local'
cloudEnabled?: boolean
} = {}
) {
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth()
if (authenticated) {
authMocks.setAuthenticated()
} else {
authMocks.setUnauthenticated()
}
vi.doMock('@/lib/auth/hybrid', () => ({
checkHybridAuth: vi.fn().mockResolvedValue({
success: authenticated,
userId: authenticated ? 'test-user-id' : undefined,
error: authenticated ? undefined : 'Unauthorized',
}),
}))
vi.doMock('@/app/api/files/authorization', () => ({
verifyFileAccess: vi.fn().mockResolvedValue(true),
verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
}))
const useBlobStorage = storageProvider === 'blob' && cloudEnabled
const useS3Storage = storageProvider === 's3' && cloudEnabled
vi.doMock('@/lib/uploads/config', () => ({
USE_BLOB_STORAGE: useBlobStorage,
USE_S3_STORAGE: useS3Storage,
UPLOAD_DIR: '/uploads',
getStorageConfig: vi.fn().mockReturnValue(
useBlobStorage
? {
accountName: 'testaccount',
accountKey: 'testkey',
connectionString: 'testconnection',
containerName: 'testcontainer',
}
: {
bucket: 'test-bucket',
region: 'us-east-1',
}
),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
getStorageProvider: vi
.fn()
.mockReturnValue(
storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 'S3' : 'Local'
),
}))
const mockGeneratePresignedUploadUrl = vi.fn().mockImplementation(async (opts) => {
const timestamp = Date.now()
const safeFileName = opts.fileName.replace(/[^a-zA-Z0-9.-]/g, '_')
const key = `${opts.context}/${timestamp}-ik3a6w4-${safeFileName}`
return {
url: 'https://example.com/presigned-url',
key,
}
})
vi.doMock('@/lib/uploads/core/storage-service', () => ({
hasCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
generatePresignedUploadUrl: mockGeneratePresignedUploadUrl,
generatePresignedDownloadUrl: vi.fn().mockResolvedValue('https://example.com/presigned-url'),
}))
vi.doMock('@/lib/uploads/utils/validation', () => ({
validateFileType: vi.fn().mockReturnValue(null),
}))
vi.doMock('@/lib/uploads', () => ({
CopilotFiles: {
generateCopilotUploadUrl: vi.fn().mockResolvedValue({
url: 'https://example.com/presigned-url',
key: 'copilot/test-key.txt',
}),
isImageFileType: vi.fn().mockReturnValue(true),
},
getStorageProvider: vi
.fn()
.mockReturnValue(
storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 'S3' : 'Local'
),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
}))
return { auth: authMocks }
}
describe('/api/files/presigned', () => {
beforeEach(() => {
vi.clearAllMocks()
@@ -310,7 +210,7 @@ describe('/api/files/presigned', () => {
const data = await response.json()
expect(response.status).toBe(200)
expect(data.fileInfo.key).toMatch(/^knowledge-base\/.*knowledge-doc\.pdf$/)
expect(data.fileInfo.key).toMatch(/^kb\/.*knowledge-doc\.pdf$/)
expect(data.directUploadSupported).toBe(true)
})

View File

@@ -1,49 +1,11 @@
import { NextRequest } from 'next/server'
/**
* Tests for file serve API route
*
* @vitest-environment node
*/
import {
defaultMockUser,
mockAuth,
mockCryptoUuid,
mockUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
function setupApiTestMocks(
options: {
authenticated?: boolean
user?: { id: string; email: string }
withFileSystem?: boolean
withUploadUtils?: boolean
} = {}
) {
const { authenticated = true, user = defaultMockUser, withFileSystem = false } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth(user)
if (authenticated) {
authMocks.setAuthenticated(user)
} else {
authMocks.setUnauthenticated()
}
if (withFileSystem) {
vi.doMock('fs/promises', () => ({
readFile: vi.fn().mockResolvedValue(Buffer.from('test content')),
access: vi.fn().mockResolvedValue(undefined),
stat: vi.fn().mockResolvedValue({ isFile: () => true, size: 100 }),
}))
}
return { auth: authMocks }
}
import { setupApiTestMocks } from '@/app/api/__test-utils__/utils'
describe('File Serve API Route', () => {
beforeEach(() => {
@@ -69,17 +31,6 @@ describe('File Serve API Route', () => {
existsSync: vi.fn().mockReturnValue(true),
}))
vi.doMock('@/lib/uploads', () => ({
CopilotFiles: {
downloadCopilotFile: vi.fn(),
},
isUsingCloudStorage: vi.fn().mockReturnValue(false),
}))
vi.doMock('@/lib/uploads/utils/file-utils', () => ({
inferContextFromKey: vi.fn().mockReturnValue('workspace'),
}))
vi.doMock('@/app/api/files/utils', () => ({
FileNotFoundError: class FileNotFoundError extends Error {
constructor(message: string) {
@@ -175,17 +126,6 @@ describe('File Serve API Route', () => {
verifyFileAccess: vi.fn().mockResolvedValue(true),
}))
vi.doMock('@/lib/uploads', () => ({
CopilotFiles: {
downloadCopilotFile: vi.fn(),
},
isUsingCloudStorage: vi.fn().mockReturnValue(false),
}))
vi.doMock('@/lib/uploads/utils/file-utils', () => ({
inferContextFromKey: vi.fn().mockReturnValue('workspace'),
}))
const req = new NextRequest(
'http://localhost:3000/api/files/serve/workspace/test-workspace-id/nested-path-file.txt'
)

View File

@@ -1,76 +1,11 @@
import { NextRequest } from 'next/server'
/**
* Tests for file upload API route
*
* @vitest-environment node
*/
import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
function setupFileApiMocks(
options: {
authenticated?: boolean
storageProvider?: 's3' | 'blob' | 'local'
cloudEnabled?: boolean
} = {}
) {
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
const authMocks = mockAuth()
if (authenticated) {
authMocks.setAuthenticated()
} else {
authMocks.setUnauthenticated()
}
vi.doMock('@/lib/auth/hybrid', () => ({
checkHybridAuth: vi.fn().mockResolvedValue({
success: authenticated,
userId: authenticated ? 'test-user-id' : undefined,
error: authenticated ? undefined : 'Unauthorized',
}),
}))
vi.doMock('@/app/api/files/authorization', () => ({
verifyFileAccess: vi.fn().mockResolvedValue(true),
verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
verifyKBFileAccess: vi.fn().mockResolvedValue(true),
verifyCopilotFileAccess: vi.fn().mockResolvedValue(true),
}))
vi.doMock('@/lib/uploads/contexts/workspace', () => ({
uploadWorkspaceFile: vi.fn().mockResolvedValue({
id: 'test-file-id',
name: 'test.txt',
url: '/api/files/serve/workspace/test-workspace-id/test-file.txt',
size: 100,
type: 'text/plain',
key: 'workspace/test-workspace-id/1234567890-test.txt',
uploadedAt: new Date().toISOString(),
expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(),
}),
}))
const uploadFileMock = vi.fn().mockResolvedValue({
path: '/api/files/serve/test-key.txt',
key: 'test-key.txt',
name: 'test.txt',
size: 100,
type: 'text/plain',
})
vi.doMock('@/lib/uploads', () => ({
getStorageProvider: vi.fn().mockReturnValue(storageProvider),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),
uploadFile: uploadFileMock,
}))
return { auth: authMocks }
}
import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'
describe('File Upload API Route', () => {
const createMockFormData = (files: File[], context = 'workspace'): FormData => {

View File

@@ -3,24 +3,15 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
type CapturedFolderValues,
createMockRequest,
type MockUser,
mockAuth,
mockConsoleLogger,
mockLogger,
setupCommonApiMocks,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/** Type for captured folder values in tests */
interface CapturedFolderValues {
name?: string
color?: string
parentId?: string | null
isExpanded?: boolean
sortOrder?: number
updatedAt?: Date
}
} from '@/app/api/__test-utils__/utils'
interface FolderDbMockOptions {
folderLookupResult?: any
@@ -30,8 +21,6 @@ interface FolderDbMockOptions {
}
describe('Individual Folder API Route', () => {
let mockLogger: ReturnType<typeof mockConsoleLogger>
const TEST_USER: MockUser = {
id: 'user-123',
email: 'test@example.com',
@@ -50,8 +39,7 @@ describe('Individual Folder API Route', () => {
updatedAt: new Date('2024-01-01T00:00:00Z'),
}
let mockAuthenticatedUser: (user?: MockUser) => void
let mockUnauthenticated: () => void
const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth(TEST_USER)
const mockGetUserEntityPermissions = vi.fn()
function createFolderDbMock(options: FolderDbMockOptions = {}) {
@@ -122,10 +110,6 @@ describe('Individual Folder API Route', () => {
vi.resetModules()
vi.clearAllMocks()
setupCommonApiMocks()
mockLogger = mockConsoleLogger()
const auth = mockAuth(TEST_USER)
mockAuthenticatedUser = auth.mockAuthenticatedUser
mockUnauthenticated = auth.mockUnauthenticated
mockGetUserEntityPermissions.mockResolvedValue('admin')

View File

@@ -3,46 +3,17 @@
*
* @vitest-environment node
*/
import { createMockRequest, mockAuth, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
interface CapturedFolderValues {
name?: string
color?: string
parentId?: string | null
isExpanded?: boolean
sortOrder?: number
updatedAt?: Date
}
function createMockTransaction(mockData: {
selectData?: Array<{ id: string; [key: string]: unknown }>
insertResult?: Array<{ id: string; [key: string]: unknown }>
}) {
const { selectData = [], insertResult = [] } = mockData
return vi.fn().mockImplementation(async (callback: (tx: unknown) => Promise<unknown>) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue(selectData),
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue(insertResult),
}),
}),
}
return await callback(tx)
})
}
import {
type CapturedFolderValues,
createMockRequest,
createMockTransaction,
mockAuth,
mockLogger,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Folders API Route', () => {
let mockLogger: ReturnType<typeof mockConsoleLogger>
const mockFolders = [
{
id: 'folder-1',
@@ -70,8 +41,7 @@ describe('Folders API Route', () => {
},
]
let mockAuthenticatedUser: () => void
let mockUnauthenticated: () => void
const { mockAuthenticatedUser, mockUnauthenticated } = mockAuth()
const mockUUID = 'mock-uuid-12345678-90ab-cdef-1234-567890abcdef'
const mockSelect = vi.fn()
@@ -93,10 +63,6 @@ describe('Folders API Route', () => {
})
setupCommonApiMocks()
mockLogger = mockConsoleLogger()
const auth = mockAuth()
mockAuthenticatedUser = auth.mockAuthenticatedUser
mockUnauthenticated = auth.mockUnauthenticated
mockSelect.mockReturnValue({ from: mockFrom })
mockFrom.mockReturnValue({ where: mockWhere })

View File

@@ -9,7 +9,6 @@ import { addCorsHeaders, validateAuthToken } from '@/lib/core/security/deploymen
import { generateRequestId } from '@/lib/core/utils/request'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -35,17 +34,22 @@ async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
// Find the start block (starter or start_trigger type)
const startBlock = blocks.find(
(block) =>
block.type === 'starter' || block.type === 'start_trigger' || block.type === 'input_trigger'
(block) => block.type === 'starter' || block.type === 'start_trigger'
)
if (!startBlock) {
return []
}
// Extract inputFormat from subBlocks
const subBlocks = startBlock.subBlocks as Record<string, any> | null
return normalizeInputFormatValue(subBlocks?.inputFormat?.value)
if (!subBlocks?.inputFormat?.value) {
return []
}
return Array.isArray(subBlocks.inputFormat.value) ? subBlocks.inputFormat.value : []
} catch (error) {
logger.error('Error fetching workflow input schema:', error)
return []

View File

@@ -3,9 +3,10 @@
*
* @vitest-environment node
*/
import { createMockRequest, loggerMock } from '@sim/testing'
import { loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest } from '@/app/api/__test-utils__/utils'
vi.mock('@/lib/execution/isolated-vm', () => ({
executeInIsolatedVM: vi.fn().mockImplementation(async (req) => {

View File

@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
mockKnowledgeSchemas()

View File

@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
mockKnowledgeSchemas()

View File

@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
mockKnowledgeSchemas()
mockDrizzleOrm()

View File

@@ -3,14 +3,14 @@
*
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockRequest,
mockAuth,
mockConsoleLogger,
mockDrizzleOrm,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
mockKnowledgeSchemas()
mockDrizzleOrm()

View File

@@ -5,13 +5,13 @@
*
* @vitest-environment node
*/
import { createEnvMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createEnvMock,
createMockRequest,
mockConsoleLogger,
mockKnowledgeSchemas,
} from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
} from '@/app/api/__test-utils__/utils'
vi.mock('drizzle-orm', () => ({
and: vi.fn().mockImplementation((...args) => ({ and: args })),

View File

@@ -4,8 +4,6 @@ import {
invitation,
member,
organization,
permissionGroup,
permissionGroupMember,
permissions,
subscription as subscriptionTable,
user,
@@ -19,7 +17,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -385,47 +382,6 @@ export async function PUT(
// Don't fail the whole invitation acceptance due to this
}
// Auto-assign to permission group if one has autoAddNewMembers enabled
try {
const hasAccessControl = await hasAccessControlAccess(session.user.id)
if (hasAccessControl) {
const [autoAddGroup] = await tx
.select({ id: permissionGroup.id, name: permissionGroup.name })
.from(permissionGroup)
.where(
and(
eq(permissionGroup.organizationId, organizationId),
eq(permissionGroup.autoAddNewMembers, true)
)
)
.limit(1)
if (autoAddGroup) {
await tx.insert(permissionGroupMember).values({
id: randomUUID(),
permissionGroupId: autoAddGroup.id,
userId: session.user.id,
assignedBy: null,
assignedAt: new Date(),
})
logger.info('Auto-assigned new member to permission group', {
userId: session.user.id,
organizationId,
permissionGroupId: autoAddGroup.id,
permissionGroupName: autoAddGroup.name,
})
}
}
} catch (error) {
logger.error('Failed to auto-assign user to permission group', {
userId: session.user.id,
organizationId,
error,
})
// Don't fail the whole invitation acceptance due to this
}
const linkedWorkspaceInvitations = await tx
.select()
.from(workspaceInvitation)

View File

@@ -25,19 +25,12 @@ const configSchema = z.object({
disableMcpTools: z.boolean().optional(),
disableCustomTools: z.boolean().optional(),
hideTemplates: z.boolean().optional(),
disableInvitations: z.boolean().optional(),
hideDeployApi: z.boolean().optional(),
hideDeployMcp: z.boolean().optional(),
hideDeployA2a: z.boolean().optional(),
hideDeployChatbot: z.boolean().optional(),
hideDeployTemplate: z.boolean().optional(),
})
const updateSchema = z.object({
name: z.string().trim().min(1).max(100).optional(),
description: z.string().max(500).nullable().optional(),
config: configSchema.optional(),
autoAddNewMembers: z.boolean().optional(),
})
async function getPermissionGroupWithAccess(groupId: string, userId: string) {
@@ -51,7 +44,6 @@ async function getPermissionGroupWithAccess(groupId: string, userId: string) {
createdBy: permissionGroup.createdBy,
createdAt: permissionGroup.createdAt,
updatedAt: permissionGroup.updatedAt,
autoAddNewMembers: permissionGroup.autoAddNewMembers,
})
.from(permissionGroup)
.where(eq(permissionGroup.id, groupId))
@@ -148,27 +140,11 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
? { ...currentConfig, ...updates.config }
: currentConfig
// If setting autoAddNewMembers to true, unset it on other groups in the org first
if (updates.autoAddNewMembers === true) {
await db
.update(permissionGroup)
.set({ autoAddNewMembers: false, updatedAt: new Date() })
.where(
and(
eq(permissionGroup.organizationId, result.group.organizationId),
eq(permissionGroup.autoAddNewMembers, true)
)
)
}
await db
.update(permissionGroup)
.set({
...(updates.name !== undefined && { name: updates.name }),
...(updates.description !== undefined && { description: updates.description }),
...(updates.autoAddNewMembers !== undefined && {
autoAddNewMembers: updates.autoAddNewMembers,
}),
config: newConfig,
updatedAt: new Date(),
})

View File

@@ -26,12 +26,6 @@ const configSchema = z.object({
disableMcpTools: z.boolean().optional(),
disableCustomTools: z.boolean().optional(),
hideTemplates: z.boolean().optional(),
disableInvitations: z.boolean().optional(),
hideDeployApi: z.boolean().optional(),
hideDeployMcp: z.boolean().optional(),
hideDeployA2a: z.boolean().optional(),
hideDeployChatbot: z.boolean().optional(),
hideDeployTemplate: z.boolean().optional(),
})
const createSchema = z.object({
@@ -39,7 +33,6 @@ const createSchema = z.object({
name: z.string().trim().min(1).max(100),
description: z.string().max(500).optional(),
config: configSchema.optional(),
autoAddNewMembers: z.boolean().optional(),
})
export async function GET(req: Request) {
@@ -75,7 +68,6 @@ export async function GET(req: Request) {
createdBy: permissionGroup.createdBy,
createdAt: permissionGroup.createdAt,
updatedAt: permissionGroup.updatedAt,
autoAddNewMembers: permissionGroup.autoAddNewMembers,
creatorName: user.name,
creatorEmail: user.email,
})
@@ -119,8 +111,7 @@ export async function POST(req: Request) {
}
const body = await req.json()
const { organizationId, name, description, config, autoAddNewMembers } =
createSchema.parse(body)
const { organizationId, name, description, config } = createSchema.parse(body)
const membership = await db
.select({ id: member.id, role: member.role })
@@ -163,19 +154,6 @@ export async function POST(req: Request) {
...config,
}
// If autoAddNewMembers is true, unset it on any existing groups first
if (autoAddNewMembers) {
await db
.update(permissionGroup)
.set({ autoAddNewMembers: false, updatedAt: new Date() })
.where(
and(
eq(permissionGroup.organizationId, organizationId),
eq(permissionGroup.autoAddNewMembers, true)
)
)
}
const now = new Date()
const newGroup = {
id: crypto.randomUUID(),
@@ -186,7 +164,6 @@ export async function POST(req: Request) {
createdBy: session.user.id,
createdAt: now,
updatedAt: now,
autoAddNewMembers: autoAddNewMembers || false,
}
await db.insert(permissionGroup).values(newGroup)

View File

@@ -3,9 +3,10 @@
*
* @vitest-environment node
*/
import { createMockRequest, loggerMock } from '@sim/testing'
import { loggerMock } from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest } from '@/app/api/__test-utils__/utils'
describe('Custom Tools API Routes', () => {
const sampleTools = [
@@ -363,7 +364,7 @@ describe('Custom Tools API Routes', () => {
})
it('should reject requests missing tool ID', async () => {
const req = new NextRequest('http://localhost:3000/api/tools/custom')
const req = createMockRequest('DELETE')
const { DELETE } = await import('@/app/api/tools/custom/route')

View File

@@ -1,169 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
export const dynamic = 'force-dynamic'
const logger = createLogger('PulseParseAPI')
const PulseParseSchema = z.object({
apiKey: z.string().min(1, 'API key is required'),
filePath: z.string().min(1, 'File path is required'),
pages: z.string().optional(),
extractFigure: z.boolean().optional(),
figureDescription: z.boolean().optional(),
returnHtml: z.boolean().optional(),
chunking: z.string().optional(),
chunkSize: z.number().optional(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized Pulse parse attempt`, {
error: authResult.error || 'Missing userId',
})
return NextResponse.json(
{
success: false,
error: authResult.error || 'Unauthorized',
},
{ status: 401 }
)
}
const userId = authResult.userId
const body = await request.json()
const validatedData = PulseParseSchema.parse(body)
logger.info(`[${requestId}] Pulse parse request`, {
filePath: validatedData.filePath,
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
userId,
})
let fileUrl = validatedData.filePath
if (validatedData.filePath?.includes('/api/files/serve/')) {
try {
const storageKey = extractStorageKey(validatedData.filePath)
const context = inferContextFromKey(storageKey)
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
if (!hasAccess) {
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
userId,
key: storageKey,
context,
})
return NextResponse.json(
{
success: false,
error: 'File not found',
},
{ status: 404 }
)
}
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
} catch (error) {
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
return NextResponse.json(
{
success: false,
error: 'Failed to generate file access URL',
},
{ status: 500 }
)
}
} else if (validatedData.filePath?.startsWith('/')) {
const baseUrl = getBaseUrl()
fileUrl = `${baseUrl}${validatedData.filePath}`
}
const formData = new FormData()
formData.append('file_url', fileUrl)
if (validatedData.pages) {
formData.append('pages', validatedData.pages)
}
if (validatedData.extractFigure !== undefined) {
formData.append('extract_figure', String(validatedData.extractFigure))
}
if (validatedData.figureDescription !== undefined) {
formData.append('figure_description', String(validatedData.figureDescription))
}
if (validatedData.returnHtml !== undefined) {
formData.append('return_html', String(validatedData.returnHtml))
}
if (validatedData.chunking) {
formData.append('chunking', validatedData.chunking)
}
if (validatedData.chunkSize !== undefined) {
formData.append('chunk_size', String(validatedData.chunkSize))
}
const pulseResponse = await fetch('https://api.runpulse.com/extract', {
method: 'POST',
headers: {
'x-api-key': validatedData.apiKey,
},
body: formData,
})
if (!pulseResponse.ok) {
const errorText = await pulseResponse.text()
logger.error(`[${requestId}] Pulse API error:`, errorText)
return NextResponse.json(
{
success: false,
error: `Pulse API error: ${pulseResponse.statusText}`,
},
{ status: pulseResponse.status }
)
}
const pulseData = await pulseResponse.json()
logger.info(`[${requestId}] Pulse parse successful`)
return NextResponse.json({
success: true,
output: pulseData,
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{
success: false,
error: 'Invalid request data',
details: error.errors,
},
{ status: 400 }
)
}
logger.error(`[${requestId}] Error in Pulse parse:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Internal server error',
},
{ status: 500 }
)
}
}

View File

@@ -1,167 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
export const dynamic = 'force-dynamic'
const logger = createLogger('ReductoParseAPI')
const ReductoParseSchema = z.object({
apiKey: z.string().min(1, 'API key is required'),
filePath: z.string().min(1, 'File path is required'),
pages: z.array(z.number()).optional(),
tableOutputFormat: z.enum(['html', 'md']).optional(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized Reducto parse attempt`, {
error: authResult.error || 'Missing userId',
})
return NextResponse.json(
{
success: false,
error: authResult.error || 'Unauthorized',
},
{ status: 401 }
)
}
const userId = authResult.userId
const body = await request.json()
const validatedData = ReductoParseSchema.parse(body)
logger.info(`[${requestId}] Reducto parse request`, {
filePath: validatedData.filePath,
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
userId,
})
let fileUrl = validatedData.filePath
if (validatedData.filePath?.includes('/api/files/serve/')) {
try {
const storageKey = extractStorageKey(validatedData.filePath)
const context = inferContextFromKey(storageKey)
const hasAccess = await verifyFileAccess(
storageKey,
userId,
undefined, // customConfig
context, // context
false // isLocal
)
if (!hasAccess) {
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
userId,
key: storageKey,
context,
})
return NextResponse.json(
{
success: false,
error: 'File not found',
},
{ status: 404 }
)
}
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
} catch (error) {
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
return NextResponse.json(
{
success: false,
error: 'Failed to generate file access URL',
},
{ status: 500 }
)
}
} else if (validatedData.filePath?.startsWith('/')) {
const baseUrl = getBaseUrl()
fileUrl = `${baseUrl}${validatedData.filePath}`
}
const reductoBody: Record<string, unknown> = {
input: fileUrl,
}
if (validatedData.pages && validatedData.pages.length > 0) {
reductoBody.settings = {
page_range: validatedData.pages,
}
}
if (validatedData.tableOutputFormat) {
reductoBody.formatting = {
table_output_format: validatedData.tableOutputFormat,
}
}
const reductoResponse = await fetch('https://platform.reducto.ai/parse', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Accept: 'application/json',
Authorization: `Bearer ${validatedData.apiKey}`,
},
body: JSON.stringify(reductoBody),
})
if (!reductoResponse.ok) {
const errorText = await reductoResponse.text()
logger.error(`[${requestId}] Reducto API error:`, errorText)
return NextResponse.json(
{
success: false,
error: `Reducto API error: ${reductoResponse.statusText}`,
},
{ status: reductoResponse.status }
)
}
const reductoData = await reductoResponse.json()
logger.info(`[${requestId}] Reducto parse successful`)
return NextResponse.json({
success: true,
output: reductoData,
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{
success: false,
error: 'Invalid request data',
details: error.errors,
},
{ status: 400 }
)
}
logger.error(`[${requestId}] Error in Reducto parse:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Internal server error',
},
{ status: 500 }
)
}
}

View File

@@ -27,11 +27,10 @@ const SettingsSchema = z.object({
superUserModeEnabled: z.boolean().optional(),
errorNotificationsEnabled: z.boolean().optional(),
snapToGridSize: z.number().min(0).max(50).optional(),
showActionBar: z.boolean().optional(),
})
const defaultSettings = {
theme: 'dark',
theme: 'system',
autoConnect: true,
telemetryEnabled: true,
emailPreferences: {},
@@ -40,7 +39,6 @@ const defaultSettings = {
superUserModeEnabled: false,
errorNotificationsEnabled: true,
snapToGridSize: 0,
showActionBar: true,
}
export async function GET() {
@@ -75,7 +73,6 @@ export async function GET() {
superUserModeEnabled: userSettings.superUserModeEnabled ?? true,
errorNotificationsEnabled: userSettings.errorNotificationsEnabled ?? true,
snapToGridSize: userSettings.snapToGridSize ?? 0,
showActionBar: userSettings.showActionBar ?? true,
},
},
{ status: 200 }

View File

@@ -1,8 +1,6 @@
import { db, workflow } from '@sim/db'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { generateRequestId } from '@/lib/core/utils/request'
import { cleanupWebhooksForWorkflow } from '@/lib/webhooks/deploy'
import {
deployWorkflow,
loadWorkflowFromNormalizedTables,
@@ -82,11 +80,10 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
export const DELETE = withAdminAuthParams<RouteParams>(async (request, context) => {
const { id: workflowId } = await context.params
const requestId = generateRequestId()
try {
const [workflowRecord] = await db
.select()
.select({ id: workflow.id })
.from(workflow)
.where(eq(workflow.id, workflowId))
.limit(1)
@@ -95,13 +92,6 @@ export const DELETE = withAdminAuthParams<RouteParams>(async (request, context)
return notFoundResponse('Workflow')
}
// Clean up external webhook subscriptions before undeploying
await cleanupWebhooksForWorkflow(
workflowId,
workflowRecord as Record<string, unknown>,
requestId
)
const result = await undeployWorkflow({ workflowId })
if (!result.success) {
return internalErrorResponse(result.error || 'Failed to undeploy workflow')

View File

@@ -7,11 +7,6 @@ import { getSession } from '@/lib/auth'
import { validateInteger } from '@/lib/core/security/input-validation'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import {
cleanupExternalWebhook,
createExternalWebhookSubscription,
shouldRecreateExternalWebhookSubscription,
} from '@/lib/webhooks/provider-subscriptions'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WebhookAPI')
@@ -182,46 +177,6 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
const existingProviderConfig =
(webhookData.webhook.providerConfig as Record<string, unknown>) || {}
let nextProviderConfig =
providerConfig !== undefined &&
resolvedProviderConfig &&
typeof resolvedProviderConfig === 'object'
? (resolvedProviderConfig as Record<string, unknown>)
: existingProviderConfig
const nextProvider = (provider ?? webhookData.webhook.provider) as string
if (
providerConfig !== undefined &&
shouldRecreateExternalWebhookSubscription({
previousProvider: webhookData.webhook.provider as string,
nextProvider,
previousConfig: existingProviderConfig,
nextConfig: nextProviderConfig,
})
) {
await cleanupExternalWebhook(
{ ...webhookData.webhook, providerConfig: existingProviderConfig },
webhookData.workflow,
requestId
)
const result = await createExternalWebhookSubscription(
request,
{
...webhookData.webhook,
provider: nextProvider,
providerConfig: nextProviderConfig,
},
webhookData.workflow,
session.user.id,
requestId
)
nextProviderConfig = result.updatedProviderConfig as Record<string, unknown>
}
logger.debug(`[${requestId}] Updating webhook properties`, {
hasPathUpdate: path !== undefined,
hasProviderUpdate: provider !== undefined,
@@ -233,16 +188,16 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
// Merge providerConfig to preserve credential-related fields
let finalProviderConfig = webhooks[0].webhook.providerConfig
if (providerConfig !== undefined) {
const existingConfig = existingProviderConfig
const existingConfig = (webhooks[0].webhook.providerConfig as Record<string, unknown>) || {}
finalProviderConfig = {
...nextProviderConfig,
...resolvedProviderConfig,
credentialId: existingConfig.credentialId,
credentialSetId: existingConfig.credentialSetId,
userId: existingConfig.userId,
historyId: existingConfig.historyId,
lastCheckedTimestamp: existingConfig.lastCheckedTimestamp,
setupCompleted: existingConfig.setupCompleted,
externalId: nextProviderConfig.externalId ?? existingConfig.externalId,
externalId: existingConfig.externalId,
}
}

View File

@@ -7,8 +7,9 @@ import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { createExternalWebhookSubscription } from '@/lib/webhooks/provider-subscriptions'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
const logger = createLogger('WebhooksAPI')
@@ -256,7 +257,7 @@ export async function POST(request: NextRequest) {
const finalProviderConfig = providerConfig || {}
const { resolveEnvVarsInObject } = await import('@/lib/webhooks/env-resolver')
let resolvedProviderConfig = await resolveEnvVarsInObject(
const resolvedProviderConfig = await resolveEnvVarsInObject(
finalProviderConfig,
userId,
workflowRecord.workspaceId || undefined
@@ -413,33 +414,149 @@ export async function POST(request: NextRequest) {
}
// --- End Credential Set Handling ---
// Create external subscriptions before saving to DB to prevent orphaned records
let externalSubscriptionId: string | undefined
let externalSubscriptionCreated = false
const createTempWebhookData = (providerConfigOverride = resolvedProviderConfig) => ({
const createTempWebhookData = () => ({
id: targetWebhookId || nanoid(),
path: finalPath,
provider,
providerConfig: providerConfigOverride,
providerConfig: resolvedProviderConfig,
})
try {
const result = await createExternalWebhookSubscription(
request,
createTempWebhookData(),
workflowRecord,
userId,
requestId
)
resolvedProviderConfig = result.updatedProviderConfig as Record<string, unknown>
externalSubscriptionCreated = result.externalSubscriptionCreated
} catch (err) {
logger.error(`[${requestId}] Error creating external webhook subscription`, err)
return NextResponse.json(
{
error: 'Failed to create external webhook subscription',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
if (provider === 'airtable') {
logger.info(`[${requestId}] Creating Airtable subscription before saving to database`)
try {
externalSubscriptionId = await createAirtableWebhookSubscription(
request,
userId,
createTempWebhookData(),
requestId
)
if (externalSubscriptionId) {
resolvedProviderConfig.externalId = externalSubscriptionId
externalSubscriptionCreated = true
}
} catch (err) {
logger.error(`[${requestId}] Error creating Airtable webhook subscription`, err)
return NextResponse.json(
{
error: 'Failed to create webhook in Airtable',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'calendly') {
logger.info(`[${requestId}] Creating Calendly subscription before saving to database`)
try {
externalSubscriptionId = await createCalendlyWebhookSubscription(
request,
userId,
createTempWebhookData(),
requestId
)
if (externalSubscriptionId) {
resolvedProviderConfig.externalId = externalSubscriptionId
externalSubscriptionCreated = true
}
} catch (err) {
logger.error(`[${requestId}] Error creating Calendly webhook subscription`, err)
return NextResponse.json(
{
error: 'Failed to create webhook in Calendly',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'microsoft-teams') {
const { createTeamsSubscription } = await import('@/lib/webhooks/provider-subscriptions')
logger.info(`[${requestId}] Creating Teams subscription before saving to database`)
try {
await createTeamsSubscription(request, createTempWebhookData(), workflowRecord, requestId)
externalSubscriptionCreated = true
} catch (err) {
logger.error(`[${requestId}] Error creating Teams subscription`, err)
return NextResponse.json(
{
error: 'Failed to create Teams subscription',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'telegram') {
const { createTelegramWebhook } = await import('@/lib/webhooks/provider-subscriptions')
logger.info(`[${requestId}] Creating Telegram webhook before saving to database`)
try {
await createTelegramWebhook(request, createTempWebhookData(), requestId)
externalSubscriptionCreated = true
} catch (err) {
logger.error(`[${requestId}] Error creating Telegram webhook`, err)
return NextResponse.json(
{
error: 'Failed to create Telegram webhook',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'webflow') {
logger.info(`[${requestId}] Creating Webflow subscription before saving to database`)
try {
externalSubscriptionId = await createWebflowWebhookSubscription(
request,
userId,
createTempWebhookData(),
requestId
)
if (externalSubscriptionId) {
resolvedProviderConfig.externalId = externalSubscriptionId
externalSubscriptionCreated = true
}
} catch (err) {
logger.error(`[${requestId}] Error creating Webflow webhook subscription`, err)
return NextResponse.json(
{
error: 'Failed to create webhook in Webflow',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
if (provider === 'typeform') {
const { createTypeformWebhook } = await import('@/lib/webhooks/provider-subscriptions')
logger.info(`[${requestId}] Creating Typeform webhook before saving to database`)
try {
const usedTag = await createTypeformWebhook(request, createTempWebhookData(), requestId)
if (!resolvedProviderConfig.webhookTag) {
resolvedProviderConfig.webhookTag = usedTag
logger.info(`[${requestId}] Stored auto-generated webhook tag: ${usedTag}`)
}
externalSubscriptionCreated = true
} catch (err) {
logger.error(`[${requestId}] Error creating Typeform webhook`, err)
return NextResponse.json(
{
error: 'Failed to create webhook in Typeform',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
// Now save to database (only if subscription succeeded or provider doesn't need external subscription)
@@ -500,11 +617,7 @@ export async function POST(request: NextRequest) {
logger.error(`[${requestId}] DB save failed, cleaning up external subscription`, dbError)
try {
const { cleanupExternalWebhook } = await import('@/lib/webhooks/provider-subscriptions')
await cleanupExternalWebhook(
createTempWebhookData(resolvedProviderConfig),
workflowRecord,
requestId
)
await cleanupExternalWebhook(createTempWebhookData(), workflowRecord, requestId)
} catch (cleanupError) {
logger.error(
`[${requestId}] Failed to cleanup external subscription after DB save failure`,
@@ -628,6 +741,110 @@ export async function POST(request: NextRequest) {
}
// --- End RSS specific logic ---
if (savedWebhook && provider === 'grain') {
logger.info(`[${requestId}] Grain provider detected. Creating Grain webhook subscription.`)
try {
const grainResult = await createGrainWebhookSubscription(
request,
{
id: savedWebhook.id,
path: savedWebhook.path,
providerConfig: savedWebhook.providerConfig,
},
requestId
)
if (grainResult) {
// Update the webhook record with the external Grain hook ID and event types for filtering
const updatedConfig = {
...(savedWebhook.providerConfig as Record<string, any>),
externalId: grainResult.id,
eventTypes: grainResult.eventTypes,
}
await db
.update(webhook)
.set({
providerConfig: updatedConfig,
updatedAt: new Date(),
})
.where(eq(webhook.id, savedWebhook.id))
savedWebhook.providerConfig = updatedConfig
logger.info(`[${requestId}] Successfully created Grain webhook`, {
grainHookId: grainResult.id,
eventTypes: grainResult.eventTypes,
webhookId: savedWebhook.id,
})
}
} catch (err) {
logger.error(
`[${requestId}] Error creating Grain webhook subscription, rolling back webhook`,
err
)
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
return NextResponse.json(
{
error: 'Failed to create webhook in Grain',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
// --- End Grain specific logic ---
// --- Lemlist specific logic ---
if (savedWebhook && provider === 'lemlist') {
logger.info(
`[${requestId}] Lemlist provider detected. Creating Lemlist webhook subscription.`
)
try {
const lemlistResult = await createLemlistWebhookSubscription(
{
id: savedWebhook.id,
path: savedWebhook.path,
providerConfig: savedWebhook.providerConfig,
},
requestId
)
if (lemlistResult) {
// Update the webhook record with the external Lemlist hook ID
const updatedConfig = {
...(savedWebhook.providerConfig as Record<string, any>),
externalId: lemlistResult.id,
}
await db
.update(webhook)
.set({
providerConfig: updatedConfig,
updatedAt: new Date(),
})
.where(eq(webhook.id, savedWebhook.id))
savedWebhook.providerConfig = updatedConfig
logger.info(`[${requestId}] Successfully created Lemlist webhook`, {
lemlistHookId: lemlistResult.id,
webhookId: savedWebhook.id,
})
}
} catch (err) {
logger.error(
`[${requestId}] Error creating Lemlist webhook subscription, rolling back webhook`,
err
)
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
return NextResponse.json(
{
error: 'Failed to create webhook in Lemlist',
details: err instanceof Error ? err.message : 'Unknown error',
},
{ status: 500 }
)
}
}
// --- End Lemlist specific logic ---
if (!targetWebhookId && savedWebhook) {
try {
PlatformEvents.webhookCreated({
@@ -651,3 +868,616 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
 * Creates a webhook subscription in Airtable pointing at this deployment's trigger URL.
 *
 * Requires `baseId` and `tableId` in the provider config and a connected Airtable
 * OAuth account for `userId`. Subscribes to table-data changes scoped to the given
 * table; optionally requests full cell values when `includeCellValuesInFieldIds` is 'all'.
 *
 * @param request - Incoming request (unused here; kept for signature parity with sibling helpers).
 * @param userId - User whose Airtable OAuth token is used for the API call.
 * @param webhookData - Webhook record shape ({ id, path, providerConfig }).
 * @param requestId - Correlation ID for logging.
 * @returns The Airtable webhook ID on success (stored by the caller as the external ID).
 * @throws Error with a user-friendly message when config, auth, or the Airtable API call fails.
 */
async function createAirtableWebhookSubscription(
  request: NextRequest,
  userId: string,
  webhookData: any,
  requestId: string
): Promise<string | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { baseId, tableId, includeCellValuesInFieldIds } = providerConfig || {}
    if (!baseId || !tableId) {
      logger.warn(`[${requestId}] Missing baseId or tableId for Airtable webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error(
        'Base ID and Table ID are required to create Airtable webhook. Please provide valid Airtable base and table IDs.'
      )
    }
    const accessToken = await getOAuthToken(userId, 'airtable')
    if (!accessToken) {
      logger.warn(
        `[${requestId}] Could not retrieve Airtable access token for user ${userId}. Cannot create webhook in Airtable.`
      )
      throw new Error(
        'Airtable account connection required. Please connect your Airtable account in the trigger configuration and try again.'
      )
    }
    const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
    const airtableApiUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`
    const specification: any = {
      options: {
        filters: {
          dataTypes: ['tableData'], // Watch table data changes
          recordChangeScope: tableId, // Watch only the specified table
        },
      },
    }
    // Conditionally add the 'includes' field based on the config
    if (includeCellValuesInFieldIds === 'all') {
      specification.options.includes = {
        includeCellValuesInFieldIds: 'all',
      }
    }
    const requestBody: any = {
      notificationUrl: notificationUrl,
      specification: specification,
    }
    const airtableResponse = await fetch(airtableApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(requestBody),
    })
    // Airtable often returns 200 OK even for errors in the body, so always inspect the payload.
    // Guard the parse: a non-JSON body (e.g. an HTML error page from a proxy) previously threw a
    // raw SyntaxError here instead of producing the user-friendly messages below.
    const responseBody = await airtableResponse.json().catch(() => ({}) as Record<string, any>)
    if (!airtableResponse.ok || responseBody.error) {
      const errorMessage =
        responseBody.error?.message || responseBody.error || 'Unknown Airtable API error'
      const errorType = responseBody.error?.type
      logger.error(
        `[${requestId}] Failed to create webhook in Airtable for webhook ${webhookData.id}. Status: ${airtableResponse.status}`,
        { type: errorType, message: errorMessage, response: responseBody }
      )
      let userFriendlyMessage = 'Failed to create webhook subscription in Airtable'
      if (airtableResponse.status === 404) {
        userFriendlyMessage =
          'Airtable base or table not found. Please verify that the Base ID and Table ID are correct and that you have access to them.'
      } else if (errorMessage && errorMessage !== 'Unknown Airtable API error') {
        userFriendlyMessage = `Airtable error: ${errorMessage}`
      }
      throw new Error(userFriendlyMessage)
    }
    logger.info(
      `[${requestId}] Successfully created webhook in Airtable for webhook ${webhookData.id}.`,
      {
        airtableWebhookId: responseBody.id,
      }
    )
    return responseBody.id
  } catch (error: any) {
    logger.error(
      `[${requestId}] Exception during Airtable webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    // Re-throw the error so it can be caught by the outer try-catch
    throw error
  }
}
/**
 * Registers an organization-scoped webhook subscription with the Calendly API.
 *
 * Requires `apiKey` (Personal Access Token), `organization` URI, and `triggerId`
 * in the provider config. The trigger ID selects which Calendly events the
 * subscription listens to.
 *
 * @param request - Incoming request (unused here; kept for signature parity with sibling helpers).
 * @param userId - Acting user (unused here; kept for signature parity with sibling helpers).
 * @param webhookData - Webhook record shape ({ id, path, providerConfig }).
 * @param requestId - Correlation ID for logging.
 * @returns The Calendly webhook subscription ID extracted from the resource URI.
 * @throws Error with a user-friendly message when config or the Calendly API call fails.
 */
async function createCalendlyWebhookSubscription(
  request: NextRequest,
  userId: string,
  webhookData: any,
  requestId: string
): Promise<string | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { apiKey, organization, triggerId } = providerConfig || {}

    // Validate required configuration, surfacing a specific message per missing field.
    if (!apiKey) {
      logger.warn(`[${requestId}] Missing apiKey for Calendly webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error(
        'Personal Access Token is required to create Calendly webhook. Please provide your Calendly Personal Access Token.'
      )
    }
    if (!organization) {
      logger.warn(`[${requestId}] Missing organization URI for Calendly webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error(
        'Organization URI is required to create Calendly webhook. Please provide your Organization URI from the "Get Current User" operation.'
      )
    }
    if (!triggerId) {
      logger.warn(`[${requestId}] Missing triggerId for Calendly webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error('Trigger ID is required to create Calendly webhook')
    }

    const callbackUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`

    // Translate Sim trigger IDs into the Calendly event names they subscribe to.
    const triggerEvents: Record<string, string[]> = {
      calendly_invitee_created: ['invitee.created'],
      calendly_invitee_canceled: ['invitee.canceled'],
      calendly_routing_form_submitted: ['routing_form_submission.created'],
      calendly_webhook: ['invitee.created', 'invitee.canceled', 'routing_form_submission.created'],
    }
    const subscribedEvents = triggerEvents[triggerId] || ['invitee.created']

    const endpoint = 'https://api.calendly.com/webhook_subscriptions'
    const calendlyResponse = await fetch(endpoint, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${apiKey}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        url: callbackUrl,
        events: subscribedEvents,
        organization,
        scope: 'organization',
      }),
    })

    if (!calendlyResponse.ok) {
      const errorPayload = await calendlyResponse.json().catch(() => ({}))
      const errorMessage =
        errorPayload.message || errorPayload.title || 'Unknown Calendly API error'
      logger.error(
        `[${requestId}] Failed to create webhook in Calendly for webhook ${webhookData.id}. Status: ${calendlyResponse.status}`,
        { response: errorPayload }
      )
      // Map well-known HTTP statuses to actionable guidance; otherwise surface the API's message.
      const statusMessages: Record<number, string> = {
        401: 'Calendly authentication failed. Please verify your Personal Access Token is correct.',
        403: 'Calendly access denied. Please ensure you have appropriate permissions and a paid Calendly subscription.',
        404: 'Calendly organization not found. Please verify the Organization URI is correct.',
      }
      let userFriendlyMessage = statusMessages[calendlyResponse.status]
      if (!userFriendlyMessage) {
        userFriendlyMessage =
          errorMessage && errorMessage !== 'Unknown Calendly API error'
            ? `Calendly error: ${errorMessage}`
            : 'Failed to create webhook subscription in Calendly'
      }
      throw new Error(userFriendlyMessage)
    }

    const payload = await calendlyResponse.json()
    const subscriptionUri = payload.resource?.uri
    if (!subscriptionUri) {
      logger.error(
        `[${requestId}] Calendly webhook created but no webhook URI returned for webhook ${webhookData.id}`,
        { response: payload }
      )
      throw new Error('Calendly webhook creation succeeded but no webhook URI was returned')
    }

    // The URI looks like https://api.calendly.com/webhook_subscriptions/WEBHOOK_ID;
    // the trailing path segment is the subscription ID we persist.
    const webhookId = subscriptionUri.split('/').pop()
    if (!webhookId) {
      logger.error(
        `[${requestId}] Could not extract webhook ID from Calendly URI: ${subscriptionUri}`,
        {
          response: payload,
        }
      )
      throw new Error('Failed to extract webhook ID from Calendly response')
    }

    logger.info(
      `[${requestId}] Successfully created webhook in Calendly for webhook ${webhookData.id}.`,
      {
        calendlyWebhookUri: subscriptionUri,
        calendlyWebhookId: webhookId,
      }
    )
    return webhookId
  } catch (error: any) {
    logger.error(
      `[${requestId}] Exception during Calendly webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    // Propagate so the route-level handler can roll back and respond with an error.
    throw error
  }
}
/**
 * Creates a webhook subscription in Webflow for the given site and trigger type.
 *
 * Requires `siteId` and a known `triggerId` in the provider config plus a connected
 * Webflow OAuth account for `userId`. Collection-item triggers may be filtered to a
 * specific `collectionId`; form-submission triggers may be filtered to a `formId`.
 *
 * @param request - Incoming request (unused here; kept for signature parity with sibling helpers).
 * @param userId - User whose Webflow OAuth token is used for the API call.
 * @param webhookData - Webhook record shape ({ id, path, providerConfig }).
 * @param requestId - Correlation ID for logging.
 * @returns The Webflow webhook ID (`id`, or legacy `_id`) on success.
 * @throws Error when config, auth, or the Webflow API call fails.
 */
async function createWebflowWebhookSubscription(
  request: NextRequest,
  userId: string,
  webhookData: any,
  requestId: string
): Promise<string | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { siteId, triggerId, collectionId, formId } = providerConfig || {}
    if (!siteId) {
      logger.warn(`[${requestId}] Missing siteId for Webflow webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error('Site ID is required to create Webflow webhook')
    }
    if (!triggerId) {
      logger.warn(`[${requestId}] Missing triggerId for Webflow webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error('Trigger type is required to create Webflow webhook')
    }
    const accessToken = await getOAuthToken(userId, 'webflow')
    if (!accessToken) {
      logger.warn(
        `[${requestId}] Could not retrieve Webflow access token for user ${userId}. Cannot create webhook in Webflow.`
      )
      throw new Error(
        'Webflow account connection required. Please connect your Webflow account in the trigger configuration and try again.'
      )
    }
    const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
    // Map trigger IDs to Webflow trigger types
    const triggerTypeMap: Record<string, string> = {
      webflow_collection_item_created: 'collection_item_created',
      webflow_collection_item_changed: 'collection_item_changed',
      webflow_collection_item_deleted: 'collection_item_deleted',
      webflow_form_submission: 'form_submission',
    }
    const webflowTriggerType = triggerTypeMap[triggerId]
    if (!webflowTriggerType) {
      logger.warn(`[${requestId}] Invalid triggerId for Webflow: ${triggerId}`, {
        webhookId: webhookData.id,
      })
      throw new Error(`Invalid Webflow trigger type: ${triggerId}`)
    }
    const webflowApiUrl = `https://api.webflow.com/v2/sites/${siteId}/webhooks`
    const requestBody: any = {
      triggerType: webflowTriggerType,
      url: notificationUrl,
    }
    // Add filter for collection-based triggers
    if (collectionId && webflowTriggerType.startsWith('collection_item_')) {
      requestBody.filter = {
        resource_type: 'collection',
        resource_id: collectionId,
      }
    }
    // Add filter for form submissions
    if (formId && webflowTriggerType === 'form_submission') {
      requestBody.filter = {
        resource_type: 'form',
        resource_id: formId,
      }
    }
    const webflowResponse = await fetch(webflowApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        'Content-Type': 'application/json',
        accept: 'application/json',
      },
      body: JSON.stringify(requestBody),
    })
    // Guard the parse: a non-JSON body (e.g. an empty body or HTML error page) previously
    // threw a raw SyntaxError here instead of the error handling below.
    const responseBody = await webflowResponse.json().catch(() => ({}) as Record<string, any>)
    if (!webflowResponse.ok || responseBody.error) {
      const errorMessage = responseBody.message || responseBody.error || 'Unknown Webflow API error'
      logger.error(
        `[${requestId}] Failed to create webhook in Webflow for webhook ${webhookData.id}. Status: ${webflowResponse.status}`,
        { message: errorMessage, response: responseBody }
      )
      throw new Error(errorMessage)
    }
    logger.info(
      `[${requestId}] Successfully created webhook in Webflow for webhook ${webhookData.id}.`,
      {
        webflowWebhookId: responseBody.id || responseBody._id,
      }
    )
    return responseBody.id || responseBody._id
  } catch (error: any) {
    logger.error(
      `[${requestId}] Exception during Webflow webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    // Re-throw so the route-level handler can respond appropriately
    throw error
  }
}
/**
 * Creates a webhook (hook) in Grain via its public API v2.
 *
 * Requires `apiKey` (Personal Access Token) in the provider config. The trigger ID
 * is mapped to one of Grain's two hook types (recording_added / upload_status), and
 * separately to the event-type list the caller stores for inbound filtering.
 * Optional `includeHighlights` / `includeParticipants` / `includeAiSummary` flags
 * request extra payload fields from Grain.
 *
 * @param request - Incoming request (unused here; kept for signature parity with sibling helpers).
 * @param webhookData - Webhook record shape ({ id, path, providerConfig }).
 * @param requestId - Correlation ID for logging.
 * @returns The Grain hook ID plus the event types to filter on, or throws on failure.
 * @throws Error with a user-friendly message when config or the Grain API call fails.
 */
async function createGrainWebhookSubscription(
  request: NextRequest,
  webhookData: any,
  requestId: string
): Promise<{ id: string; eventTypes: string[] } | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { apiKey, triggerId, includeHighlights, includeParticipants, includeAiSummary } =
      providerConfig || {}
    if (!apiKey) {
      logger.warn(`[${requestId}] Missing apiKey for Grain webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error(
        'Grain API Key is required. Please provide your Grain Personal Access Token in the trigger configuration.'
      )
    }
    // Map trigger IDs to Grain API hook_type (only 2 options: recording_added, upload_status)
    const hookTypeMap: Record<string, string> = {
      grain_webhook: 'recording_added',
      grain_recording_created: 'recording_added',
      grain_recording_updated: 'recording_added',
      grain_highlight_created: 'recording_added',
      grain_highlight_updated: 'recording_added',
      grain_story_created: 'recording_added',
      grain_upload_status: 'upload_status',
    }
    // Event types used by our trigger endpoint to filter incoming Grain events.
    const eventTypeMap: Record<string, string[]> = {
      grain_webhook: [],
      grain_recording_created: ['recording_added'],
      grain_recording_updated: ['recording_updated'],
      grain_highlight_created: ['highlight_created'],
      grain_highlight_updated: ['highlight_updated'],
      grain_story_created: ['story_created'],
      grain_upload_status: ['upload_status'],
    }
    const hookType = hookTypeMap[triggerId] ?? 'recording_added'
    const eventTypes = eventTypeMap[triggerId] ?? []
    if (!hookTypeMap[triggerId]) {
      logger.warn(
        `[${requestId}] Unknown triggerId for Grain: ${triggerId}, defaulting to recording_added`,
        {
          webhookId: webhookData.id,
        }
      )
    }
    logger.info(`[${requestId}] Creating Grain webhook`, {
      triggerId,
      hookType,
      eventTypes,
      webhookId: webhookData.id,
    })
    const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
    const grainApiUrl = 'https://api.grain.com/_/public-api/v2/hooks/create'
    const requestBody: Record<string, any> = {
      hook_url: notificationUrl,
      hook_type: hookType,
    }
    // Build include object based on configuration
    const include: Record<string, boolean> = {}
    if (includeHighlights) {
      include.highlights = true
    }
    if (includeParticipants) {
      include.participants = true
    }
    if (includeAiSummary) {
      include.ai_summary = true
    }
    if (Object.keys(include).length > 0) {
      requestBody.include = include
    }
    const grainResponse = await fetch(grainApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${apiKey}`,
        'Content-Type': 'application/json',
        'Public-Api-Version': '2025-10-31',
      },
      body: JSON.stringify(requestBody),
    })
    // Guard the parse: a non-JSON error body previously threw a raw SyntaxError here
    // instead of reaching the user-friendly error mapping below.
    const responseBody = await grainResponse.json().catch(() => ({}) as Record<string, any>)
    if (!grainResponse.ok || responseBody.error || responseBody.errors) {
      // Use the request-scoped prefix for log correlation (was '[App]', breaking the convention).
      logger.warn(`[${requestId}] Grain response body`, responseBody)
      const errorMessage =
        responseBody.errors?.detail ||
        responseBody.error?.message ||
        responseBody.error ||
        responseBody.message ||
        'Unknown Grain API error'
      logger.error(
        `[${requestId}] Failed to create webhook in Grain for webhook ${webhookData.id}. Status: ${grainResponse.status}`,
        { message: errorMessage, response: responseBody }
      )
      let userFriendlyMessage = 'Failed to create webhook subscription in Grain'
      if (grainResponse.status === 401) {
        userFriendlyMessage =
          'Invalid Grain API Key. Please verify your Personal Access Token is correct.'
      } else if (grainResponse.status === 403) {
        userFriendlyMessage =
          'Access denied. Please ensure your Grain API Key has appropriate permissions.'
      } else if (errorMessage && errorMessage !== 'Unknown Grain API error') {
        userFriendlyMessage = `Grain error: ${errorMessage}`
      }
      throw new Error(userFriendlyMessage)
    }
    logger.info(
      `[${requestId}] Successfully created webhook in Grain for webhook ${webhookData.id}.`,
      {
        grainWebhookId: responseBody.id,
        eventTypes,
      }
    )
    return { id: responseBody.id, eventTypes }
  } catch (error: any) {
    logger.error(
      `[${requestId}] Exception during Grain webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    // Re-throw so the route-level handler can roll back the webhook record
    throw error
  }
}
/**
 * Creates a webhook (hook) in Lemlist via its REST API.
 *
 * Requires `apiKey` in the provider config. The trigger ID selects a Lemlist event
 * type filter; the generic `lemlist_webhook` trigger omits the type so all events are
 * delivered. An optional `campaignId` restricts the hook to a single campaign.
 * Lemlist authenticates via Basic auth with an empty username and the API key as password.
 *
 * @param webhookData - Webhook record shape ({ id, path, providerConfig }).
 * @param requestId - Correlation ID for logging.
 * @returns The Lemlist hook ID (Lemlist returns it as `_id`) on success.
 * @throws Error with a user-friendly message when config or the Lemlist API call fails.
 */
async function createLemlistWebhookSubscription(
  webhookData: any,
  requestId: string
): Promise<{ id: string } | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { apiKey, triggerId, campaignId } = providerConfig || {}
    if (!apiKey) {
      logger.warn(`[${requestId}] Missing apiKey for Lemlist webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error(
        'Lemlist API Key is required. Please provide your Lemlist API Key in the trigger configuration.'
      )
    }
    // Map trigger IDs to Lemlist event types
    const eventTypeMap: Record<string, string | undefined> = {
      lemlist_email_replied: 'emailsReplied',
      lemlist_linkedin_replied: 'linkedinReplied',
      lemlist_interested: 'interested',
      lemlist_not_interested: 'notInterested',
      lemlist_email_opened: 'emailsOpened',
      lemlist_email_clicked: 'emailsClicked',
      lemlist_email_bounced: 'emailsBounced',
      lemlist_email_sent: 'emailsSent',
      lemlist_webhook: undefined, // Generic webhook - no type filter
    }
    const eventType = eventTypeMap[triggerId]
    logger.info(`[${requestId}] Creating Lemlist webhook`, {
      triggerId,
      eventType,
      hasCampaignId: !!campaignId,
      webhookId: webhookData.id,
    })
    const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
    const lemlistApiUrl = 'https://api.lemlist.com/api/hooks'
    // Build request body
    const requestBody: Record<string, any> = {
      targetUrl: notificationUrl,
    }
    // Add event type if specified (omit for generic webhook to receive all events)
    if (eventType) {
      requestBody.type = eventType
    }
    // Add campaign filter if specified
    if (campaignId) {
      requestBody.campaignId = campaignId
    }
    // Lemlist uses Basic Auth with empty username and API key as password
    const authString = Buffer.from(`:${apiKey}`).toString('base64')
    const lemlistResponse = await fetch(lemlistApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Basic ${authString}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(requestBody),
    })
    // Guard the parse: a non-JSON error body previously threw a raw SyntaxError here
    // instead of reaching the user-friendly error mapping below.
    const responseBody = await lemlistResponse.json().catch(() => ({}) as Record<string, any>)
    if (!lemlistResponse.ok || responseBody.error) {
      const errorMessage = responseBody.message || responseBody.error || 'Unknown Lemlist API error'
      logger.error(
        `[${requestId}] Failed to create webhook in Lemlist for webhook ${webhookData.id}. Status: ${lemlistResponse.status}`,
        { message: errorMessage, response: responseBody }
      )
      let userFriendlyMessage = 'Failed to create webhook subscription in Lemlist'
      if (lemlistResponse.status === 401) {
        userFriendlyMessage = 'Invalid Lemlist API Key. Please verify your API Key is correct.'
      } else if (lemlistResponse.status === 403) {
        userFriendlyMessage =
          'Access denied. Please ensure your Lemlist API Key has appropriate permissions.'
      } else if (errorMessage && errorMessage !== 'Unknown Lemlist API error') {
        userFriendlyMessage = `Lemlist error: ${errorMessage}`
      }
      throw new Error(userFriendlyMessage)
    }
    logger.info(
      `[${requestId}] Successfully created webhook in Lemlist for webhook ${webhookData.id}.`,
      {
        lemlistWebhookId: responseBody._id,
      }
    )
    return { id: responseBody._id }
  } catch (error: any) {
    logger.error(
      `[${requestId}] Exception during Lemlist webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    // Re-throw so the route-level handler can roll back the webhook record
    throw error
  }
}

View File

@@ -3,92 +3,15 @@
*
* @vitest-environment node
*/
import { createMockRequest, loggerMock } from '@sim/testing'
import { loggerMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
/** Mock execution dependencies for webhook tests */
function mockExecutionDependencies() {
vi.mock('@/lib/core/security/encryption', () => ({
decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'decrypted-value' }),
}))
vi.mock('@/lib/logs/execution/trace-spans/trace-spans', () => ({
buildTraceSpans: vi.fn().mockReturnValue({ traceSpans: [], totalDuration: 100 }),
}))
vi.mock('@/lib/workflows/utils', () => ({
updateWorkflowRunCounts: vi.fn().mockResolvedValue(undefined),
}))
vi.mock('@/serializer', () => ({
Serializer: vi.fn().mockImplementation(() => ({
serializeWorkflow: vi.fn().mockReturnValue({
version: '1.0',
blocks: [
{
id: 'starter-id',
metadata: { id: 'starter', name: 'Start' },
config: {},
inputs: {},
outputs: {},
position: { x: 100, y: 100 },
enabled: true,
},
{
id: 'agent-id',
metadata: { id: 'agent', name: 'Agent 1' },
config: {},
inputs: {},
outputs: {},
position: { x: 634, y: -167 },
enabled: true,
},
],
edges: [
{
id: 'edge-1',
source: 'starter-id',
target: 'agent-id',
sourceHandle: 'source',
targetHandle: 'target',
},
],
loops: {},
parallels: {},
}),
})),
}))
}
/** Mock Trigger.dev SDK */
function mockTriggerDevSdk() {
vi.mock('@trigger.dev/sdk', () => ({
tasks: { trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }) },
task: vi.fn().mockReturnValue({}),
}))
}
/**
* Test data store - isolated per test via beforeEach reset
* This replaces the global mutable state pattern with local test data
*/
const testData = {
webhooks: [] as Array<{
id: string
provider: string
path: string
isActive: boolean
providerConfig?: Record<string, unknown>
workflowId: string
rateLimitCount?: number
rateLimitPeriod?: number
}>,
workflows: [] as Array<{
id: string
userId: string
workspaceId?: string
}>,
}
import {
createMockRequest,
globalMockData,
mockExecutionDependencies,
mockTriggerDevSdk,
} from '@/app/api/__test-utils__/utils'
const {
generateRequestHashMock,
@@ -236,8 +159,8 @@ vi.mock('@/lib/workflows/persistence/utils', () => ({
vi.mock('@/lib/webhooks/processor', () => ({
findAllWebhooksForPath: vi.fn().mockImplementation(async (options: { path: string }) => {
// Filter webhooks by path from testData
const matchingWebhooks = testData.webhooks.filter(
// Filter webhooks by path from globalMockData
const matchingWebhooks = globalMockData.webhooks.filter(
(wh) => wh.path === options.path && wh.isActive
)
@@ -247,7 +170,7 @@ vi.mock('@/lib/webhooks/processor', () => ({
// Return array of {webhook, workflow} objects
return matchingWebhooks.map((wh) => {
const matchingWorkflow = testData.workflows.find((w) => w.id === wh.workflowId) || {
const matchingWorkflow = globalMockData.workflows.find((w) => w.id === wh.workflowId) || {
id: wh.workflowId || 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -360,15 +283,14 @@ describe('Webhook Trigger API Route', () => {
beforeEach(() => {
vi.clearAllMocks()
// Reset test data arrays
testData.webhooks.length = 0
testData.workflows.length = 0
globalMockData.webhooks.length = 0
globalMockData.workflows.length = 0
globalMockData.schedules.length = 0
mockExecutionDependencies()
mockTriggerDevSdk()
// Set up default workflow for tests
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -404,7 +326,7 @@ describe('Webhook Trigger API Route', () => {
describe('Generic Webhook Authentication', () => {
it('should process generic webhook without authentication', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -414,7 +336,7 @@ describe('Webhook Trigger API Route', () => {
rateLimitCount: 100,
rateLimitPeriod: 60,
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -432,7 +354,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should authenticate with Bearer token when no custom header is configured', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -440,7 +362,7 @@ describe('Webhook Trigger API Route', () => {
providerConfig: { requireAuth: true, token: 'test-token-123' },
workflowId: 'test-workflow-id',
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -459,7 +381,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should authenticate with custom header when configured', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -471,7 +393,7 @@ describe('Webhook Trigger API Route', () => {
},
workflowId: 'test-workflow-id',
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -490,7 +412,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should handle case insensitive Bearer token authentication', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -498,7 +420,7 @@ describe('Webhook Trigger API Route', () => {
providerConfig: { requireAuth: true, token: 'case-test-token' },
workflowId: 'test-workflow-id',
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -532,7 +454,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should handle case insensitive custom header authentication', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -544,7 +466,7 @@ describe('Webhook Trigger API Route', () => {
},
workflowId: 'test-workflow-id',
})
testData.workflows.push({
globalMockData.workflows.push({
id: 'test-workflow-id',
userId: 'test-user-id',
workspaceId: 'test-workspace-id',
@@ -573,7 +495,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject wrong Bearer token', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -597,7 +519,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject wrong custom header token', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -625,7 +547,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject missing authentication when required', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -645,7 +567,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject Bearer token when custom header is configured', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -673,7 +595,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject wrong custom header name', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -701,7 +623,7 @@ describe('Webhook Trigger API Route', () => {
})
it('should reject when auth is required but no token is configured', async () => {
testData.webhooks.push({
globalMockData.webhooks.push({
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
@@ -709,7 +631,7 @@ describe('Webhook Trigger API Route', () => {
providerConfig: { requireAuth: true },
workflowId: 'test-workflow-id',
})
testData.workflows.push({ id: 'test-workflow-id', userId: 'test-user-id' })
globalMockData.workflows.push({ id: 'test-workflow-id', userId: 'test-user-id' })
const headers = {
'Content-Type': 'application/json',

View File

@@ -22,6 +22,13 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
.select({
id: chat.id,
identifier: chat.identifier,
title: chat.title,
description: chat.description,
customizations: chat.customizations,
authType: chat.authType,
allowedEmails: chat.allowedEmails,
outputConfigs: chat.outputConfigs,
password: chat.password,
isActive: chat.isActive,
})
.from(chat)
@@ -34,6 +41,13 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
? {
id: deploymentResults[0].id,
identifier: deploymentResults[0].identifier,
title: deploymentResults[0].title,
description: deploymentResults[0].description,
customizations: deploymentResults[0].customizations,
authType: deploymentResults[0].authType,
allowedEmails: deploymentResults[0].allowedEmails,
outputConfigs: deploymentResults[0].outputConfigs,
hasPassword: Boolean(deploymentResults[0].password),
}
: null

View File

@@ -4,7 +4,6 @@ import { and, desc, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import { cleanupWebhooksForWorkflow, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
import {
deployWorkflow,
loadWorkflowFromNormalizedTables,
@@ -131,22 +130,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
}
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
request,
workflowId: id,
workflow: workflowData,
userId: actorUserId,
blocks: normalizedData.blocks,
requestId,
})
if (!triggerSaveResult.success) {
return createErrorResponse(
triggerSaveResult.error?.message || 'Failed to save trigger configuration',
triggerSaveResult.error?.status || 500
)
}
const deployResult = await deployWorkflow({
workflowId: id,
deployedBy: actorUserId,
@@ -219,18 +202,11 @@ export async function DELETE(
try {
logger.debug(`[${requestId}] Undeploying workflow: ${id}`)
const { error, workflow: workflowData } = await validateWorkflowPermissions(
id,
requestId,
'admin'
)
const { error } = await validateWorkflowPermissions(id, requestId, 'admin')
if (error) {
return createErrorResponse(error.message, error.status)
}
// Clean up external webhook subscriptions before undeploying
await cleanupWebhooksForWorkflow(id, workflowData as Record<string, unknown>, requestId)
const result = await undeployWorkflow({ workflowId: id })
if (!result.success) {
return createErrorResponse(result.error || 'Failed to undeploy workflow', 500)

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { webhook, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
@@ -13,6 +13,7 @@ import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validat
import { getWorkflowAccessContext } from '@/lib/workflows/utils'
import type { BlockState } from '@/stores/workflows/workflow/types'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { getTrigger } from '@/triggers'
const logger = createLogger('WorkflowStateAPI')
@@ -202,6 +203,8 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
)
}
await syncWorkflowWebhooks(workflowId, workflowState.blocks)
// Extract and persist custom tools to database
try {
const workspaceId = workflowData.workspaceId
@@ -287,3 +290,213 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
 * Reads a sub-block's stored value from a block, collapsing both `null` and
 * `undefined` into a single `undefined` sentinel so callers have one "absent"
 * case to check.
 */
function getSubBlockValue<T = unknown>(block: BlockState, subBlockId: string): T | undefined {
  const raw = block.subBlocks?.[subBlockId]?.value
  // `== null` deliberately matches both null and undefined.
  return raw == null ? undefined : (raw as T)
}
/**
 * Reconciles the webhook rows for every trigger block in a workflow.
 * Thin adapter that wires webhook-specific extraction/persistence hooks into
 * the generic block-resource sync driver.
 */
async function syncWorkflowWebhooks(
  workflowId: string,
  blocks: Record<string, any>
): Promise<void> {
  const webhookSyncConfig = {
    resourceName: 'webhook',
    subBlockId: 'webhookId',
    buildMetadata: buildWebhookMetadata,
    applyMetadata: upsertWebhookRecord,
  }
  await syncBlockResources(workflowId, blocks, webhookSyncConfig)
}
/** Normalized trigger/webhook info extracted from a trigger block's sub-block values. */
interface WebhookMetadata {
  // Public path the webhook listens on; falls back to the block id when unset.
  triggerPath: string
  // Trigger provider id, or null when the block has no resolvable trigger definition.
  provider: string | null
  // Provider-specific config merged with credential/trigger identifiers.
  providerConfig: Record<string, any>
}
// Prefix marking a triggerCredentials value that references a credential SET
// (fan-out to many credentials) rather than a single credential id.
const CREDENTIAL_SET_PREFIX = 'credentialSet:'
/**
 * Derives webhook metadata (path, provider, provider config) from a trigger
 * block's sub-block values. Credential handling: a `credentialSet:`-prefixed
 * credentials value selects a credential set; any other value is treated as an
 * individual credential id.
 */
function buildWebhookMetadata(block: BlockState): WebhookMetadata | null {
  const resolvedTriggerId =
    getSubBlockValue<string>(block, 'triggerId') ||
    getSubBlockValue<string>(block, 'selectedTriggerId')
  const rawConfig = getSubBlockValue<Record<string, any>>(block, 'triggerConfig') || {}
  const credentialsValue = getSubBlockValue<string>(block, 'triggerCredentials')
  const path = getSubBlockValue<string>(block, 'triggerPath') || block.id

  const triggerDefinition = resolvedTriggerId ? getTrigger(resolvedTriggerId) : undefined

  // Split the single credentials value into exactly one of the two id kinds.
  let credentialId: string | undefined
  let credentialSetId: string | undefined
  if (credentialsValue?.startsWith(CREDENTIAL_SET_PREFIX)) {
    credentialSetId = credentialsValue.slice(CREDENTIAL_SET_PREFIX.length)
  } else {
    credentialId = credentialsValue
  }

  // Start from the raw trigger config (guarding against non-object values),
  // then layer the identifiers on top so they win over stale config keys.
  const providerConfig: Record<string, any> = {
    ...(typeof rawConfig === 'object' ? rawConfig : {}),
  }
  if (credentialId) providerConfig.credentialId = credentialId
  if (credentialSetId) providerConfig.credentialSetId = credentialSetId
  if (resolvedTriggerId) providerConfig.triggerId = resolvedTriggerId

  return {
    triggerPath: path,
    provider: triggerDefinition?.provider || null,
    providerConfig,
  }
}
/**
 * Persists webhook state for one trigger block after a workflow save.
 *
 * Three paths, in order:
 * 1. Credential-set triggers: delegate to `syncWebhooksForCredentialSet`,
 *    which handles fan-out to each credential in the set. Errors are logged
 *    but not rethrown (best-effort during save).
 * 2. Existing webhook row: update it only when block/workflow/path drifted.
 * 3. No row: recreate the webhook record that the block references.
 *
 * @param workflowId - Workflow that owns the block.
 * @param block - Trigger block whose webhook is being synced.
 * @param webhookId - Webhook id stored in the block's `webhookId` sub-block.
 * @param metadata - Path/provider/config extracted from the block.
 */
async function upsertWebhookRecord(
  workflowId: string,
  block: BlockState,
  webhookId: string,
  metadata: WebhookMetadata
): Promise<void> {
  const providerConfig = metadata.providerConfig as Record<string, unknown>
  const credentialSetId = providerConfig?.credentialSetId as string | undefined
  // For credential sets, delegate to the sync function which handles fan-out
  if (credentialSetId && metadata.provider) {
    // Dynamic imports keep the heavy webhook/oauth modules off this route's
    // initial load path when no credential-set triggers are present.
    const { syncWebhooksForCredentialSet } = await import('@/lib/webhooks/utils.server')
    const { getProviderIdFromServiceId } = await import('@/lib/oauth')
    const oauthProviderId = getProviderIdFromServiceId(metadata.provider)
    const requestId = crypto.randomUUID().slice(0, 8)
    // Extract base config (without credential-specific fields)
    const {
      credentialId: _cId,
      credentialSetId: _csId,
      userId: _uId,
      ...baseConfig
    } = providerConfig
    try {
      await syncWebhooksForCredentialSet({
        workflowId,
        blockId: block.id,
        provider: metadata.provider,
        basePath: metadata.triggerPath,
        credentialSetId,
        oauthProviderId,
        providerConfig: baseConfig as Record<string, any>,
        requestId,
      })
      logger.info('Synced credential set webhooks during workflow save', {
        workflowId,
        blockId: block.id,
        credentialSetId,
      })
    } catch (error) {
      // Best-effort: a failed fan-out must not fail the workflow save.
      logger.error('Failed to sync credential set webhooks during workflow save', {
        workflowId,
        blockId: block.id,
        credentialSetId,
        error,
      })
    }
    return
  }
  // For individual credentials, use the existing single webhook logic
  const [existing] = await db.select().from(webhook).where(eq(webhook.id, webhookId)).limit(1)
  if (existing) {
    // Only touch the row when ownership or path actually changed.
    const needsUpdate =
      existing.blockId !== block.id ||
      existing.workflowId !== workflowId ||
      existing.path !== metadata.triggerPath
    if (needsUpdate) {
      await db
        .update(webhook)
        .set({
          workflowId,
          blockId: block.id,
          path: metadata.triggerPath,
          // Preserve existing provider/config when the new metadata is empty.
          provider: metadata.provider || existing.provider,
          providerConfig: Object.keys(metadata.providerConfig).length
            ? metadata.providerConfig
            : existing.providerConfig,
          isActive: true,
          updatedAt: new Date(),
        })
        .where(eq(webhook.id, webhookId))
    }
    return
  }
  // The block references a webhook id with no backing row — recreate it.
  await db.insert(webhook).values({
    id: webhookId,
    workflowId,
    blockId: block.id,
    path: metadata.triggerPath,
    provider: metadata.provider,
    providerConfig: metadata.providerConfig,
    credentialSetId: null,
    isActive: true,
    createdAt: new Date(),
    updatedAt: new Date(),
  })
  logger.info('Recreated missing webhook after workflow save', {
    workflowId,
    blockId: block.id,
    webhookId,
  })
}
/**
 * Hooks for syncing one kind of per-block resource (e.g. webhooks) during a
 * workflow save. `T` is the resource-specific metadata shape.
 */
interface BlockResourceSyncConfig<T> {
  // Human-readable resource name used in log messages.
  resourceName: string
  // Sub-block id that stores the resource id on each block.
  subBlockId: string
  // Extracts metadata from a block; returning null skips the block (logged as a warning).
  buildMetadata: (block: BlockState, resourceId: string) => T | null
  // Persists the metadata for one block; errors are caught and logged by the driver.
  applyMetadata: (
    workflowId: string,
    block: BlockState,
    resourceId: string,
    metadata: T
  ) => Promise<void>
}
/**
 * Generic driver: walks every block, reads the configured resource-id
 * sub-block, builds metadata, and applies it. Per-block failures are logged
 * and skipped so a single bad block cannot halt the sync of the others.
 */
async function syncBlockResources<T>(
  workflowId: string,
  blocks: Record<string, any>,
  config: BlockResourceSyncConfig<T>
): Promise<void> {
  const candidates = Object.values(blocks || {}).filter(Boolean) as BlockState[]
  for (const candidate of candidates) {
    const resourceId = getSubBlockValue<string>(candidate, config.subBlockId)
    if (!resourceId) continue

    const metadata = config.buildMetadata(candidate, resourceId)
    if (!metadata) {
      logger.warn(`Skipping ${config.resourceName} sync due to invalid configuration`, {
        workflowId,
        blockId: candidate.id,
        resourceId,
        resourceName: config.resourceName,
      })
      continue
    }

    // Sequential on purpose: applyMetadata may hit the DB/external services,
    // and ordering mirrors the original save flow.
    try {
      await config.applyMetadata(workflowId, candidate, resourceId, metadata)
    } catch (error) {
      logger.error(`Failed to sync ${config.resourceName}`, {
        workflowId,
        blockId: candidate.id,
        resourceId,
        resourceName: config.resourceName,
        error,
      })
    }
  }
}

View File

@@ -4,29 +4,29 @@
*
* @vitest-environment node
*/
import {
databaseMock,
defaultMockUser,
mockAuth,
mockCryptoUuid,
setupCommonApiMocks,
} from '@sim/testing'
import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
createMockDatabase,
mockAuth,
mockCryptoUuid,
mockUser,
setupCommonApiMocks,
} from '@/app/api/__test-utils__/utils'
describe('Workflow Variables API Route', () => {
let authMocks: ReturnType<typeof mockAuth>
let databaseMocks: ReturnType<typeof createMockDatabase>
const mockGetWorkflowAccessContext = vi.fn()
beforeEach(() => {
vi.resetModules()
setupCommonApiMocks()
mockCryptoUuid('mock-request-id-12345678')
authMocks = mockAuth(defaultMockUser)
authMocks = mockAuth(mockUser)
mockGetWorkflowAccessContext.mockReset()
vi.doMock('@sim/db', () => databaseMock)
vi.doMock('@/lib/workflows/utils', () => ({
getWorkflowAccessContext: mockGetWorkflowAccessContext,
}))
@@ -203,6 +203,10 @@ describe('Workflow Variables API Route', () => {
isWorkspaceOwner: false,
})
databaseMocks = createMockDatabase({
update: { results: [{}] },
})
const variables = {
'var-1': {
id: 'var-1',

View File

@@ -1,5 +1,5 @@
import { createMockRequest, mockAuth, mockConsoleLogger } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, mockAuth, mockConsoleLogger } from '@/app/api/__test-utils__/utils'
describe('Workspace Invitations API Route', () => {
const mockWorkspace = { id: 'workspace-1', name: 'Test Workspace' }

View File

@@ -12,7 +12,6 @@ import { HydrationErrorHandler } from '@/app/_shell/hydration-error-handler'
import { QueryProvider } from '@/app/_shell/providers/query-provider'
import { SessionProvider } from '@/app/_shell/providers/session-provider'
import { ThemeProvider } from '@/app/_shell/providers/theme-provider'
import { TooltipProvider } from '@/app/_shell/providers/tooltip-provider'
import { season } from '@/app/_styles/fonts/season/season'
export const viewport: Viewport = {
@@ -209,9 +208,7 @@ export default function RootLayout({ children }: { children: React.ReactNode })
<ThemeProvider>
<QueryProvider>
<SessionProvider>
<TooltipProvider>
<BrandedLayout>{children}</BrandedLayout>
</TooltipProvider>
<BrandedLayout>{children}</BrandedLayout>
</SessionProvider>
</QueryProvider>
</ThemeProvider>

View File

@@ -21,15 +21,12 @@ import {
Combobox,
Connections,
Copy,
Cursor,
DatePicker,
DocumentAttachment,
Duplicate,
Expand,
Eye,
FolderCode,
FolderPlus,
Hand,
HexSimple,
Input,
Key as KeyIcon,
@@ -994,14 +991,11 @@ export default function PlaygroundPage() {
{ Icon: ChevronDown, name: 'ChevronDown' },
{ Icon: Connections, name: 'Connections' },
{ Icon: Copy, name: 'Copy' },
{ Icon: Cursor, name: 'Cursor' },
{ Icon: DocumentAttachment, name: 'DocumentAttachment' },
{ Icon: Duplicate, name: 'Duplicate' },
{ Icon: Expand, name: 'Expand' },
{ Icon: Eye, name: 'Eye' },
{ Icon: FolderCode, name: 'FolderCode' },
{ Icon: FolderPlus, name: 'FolderPlus' },
{ Icon: Hand, name: 'Hand' },
{ Icon: HexSimple, name: 'HexSimple' },
{ Icon: KeyIcon, name: 'Key' },
{ Icon: Layout, name: 'Layout' },

View File

@@ -1,12 +1,15 @@
'use client'
import { Tooltip } from '@/components/emcn'
import { season } from '@/app/_styles/fonts/season/season'
export default function TemplatesLayoutClient({ children }: { children: React.ReactNode }) {
return (
<div className={`${season.variable} relative flex min-h-screen flex-col font-season`}>
<div className='-z-50 pointer-events-none fixed inset-0 bg-white' />
{children}
</div>
<Tooltip.Provider delayDuration={600} skipDelayDuration={0}>
<div className={`${season.variable} relative flex min-h-screen flex-col font-season`}>
<div className='-z-50 pointer-events-none fixed inset-0 bg-white' />
{children}
</div>
</Tooltip.Provider>
)
}

View File

@@ -1,5 +1,6 @@
'use client'
import { Tooltip } from '@/components/emcn'
import { GlobalCommandsProvider } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { ProviderModelsLoader } from '@/app/workspace/[workspaceId]/providers/provider-models-loader'
import { SettingsLoader } from '@/app/workspace/[workspaceId]/providers/settings-loader'
@@ -12,14 +13,16 @@ export default function WorkspaceLayout({ children }: { children: React.ReactNod
<SettingsLoader />
<ProviderModelsLoader />
<GlobalCommandsProvider>
<div className='flex h-screen w-full bg-[var(--bg)]'>
<WorkspacePermissionsProvider>
<div className='shrink-0' suppressHydrationWarning>
<Sidebar />
</div>
{children}
</WorkspacePermissionsProvider>
</div>
<Tooltip.Provider delayDuration={600} skipDelayDuration={0}>
<div className='flex h-screen w-full bg-[var(--bg)]'>
<WorkspacePermissionsProvider>
<div className='shrink-0' suppressHydrationWarning>
<Sidebar />
</div>
{children}
</WorkspacePermissionsProvider>
</div>
</Tooltip.Provider>
</GlobalCommandsProvider>
</>
)

View File

@@ -19,7 +19,6 @@ export type CommandId =
| 'clear-terminal-console'
| 'focus-toolbar-search'
| 'clear-notifications'
| 'fit-to-view'
/**
* Static metadata for a global command.
@@ -105,11 +104,6 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
shortcut: 'Mod+E',
allowInEditable: false,
},
'fit-to-view': {
id: 'fit-to-view',
shortcut: 'Mod+Shift+F',
allowInEditable: false,
},
}
/**

View File

@@ -1,2 +0,0 @@
export type { BlockInfo, BlockMenuProps } from './block-menu'
export { BlockMenu } from './block-menu'

View File

@@ -1,2 +0,0 @@
export type { CanvasMenuProps } from './canvas-menu'
export { CanvasMenu } from './canvas-menu'

View File

@@ -20,7 +20,6 @@ import {
PopoverItem,
PopoverScrollArea,
PopoverTrigger,
Tooltip,
Trash,
} from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
@@ -30,7 +29,7 @@ import {
extractPathFromOutputId,
parseOutputContentSafely,
} from '@/lib/core/utils/response-format'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
import { StartBlockPath, TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { START_BLOCK_RESERVED_FIELDS } from '@/lib/workflows/types'
import {
@@ -94,9 +93,6 @@ interface ProcessedAttachment {
dataUrl: string
}
/** Timeout for FileReader operations in milliseconds */
const FILE_READ_TIMEOUT_MS = 60000
/**
* Reads files and converts them to data URLs for image display
* @param chatFiles - Array of chat files to process
@@ -110,37 +106,8 @@ const processFileAttachments = async (chatFiles: ChatFile[]): Promise<ProcessedA
try {
dataUrl = await new Promise<string>((resolve, reject) => {
const reader = new FileReader()
let settled = false
const timeoutId = setTimeout(() => {
if (!settled) {
settled = true
reader.abort()
reject(new Error(`File read timed out after ${FILE_READ_TIMEOUT_MS}ms`))
}
}, FILE_READ_TIMEOUT_MS)
reader.onload = () => {
if (!settled) {
settled = true
clearTimeout(timeoutId)
resolve(reader.result as string)
}
}
reader.onerror = () => {
if (!settled) {
settled = true
clearTimeout(timeoutId)
reject(reader.error)
}
}
reader.onabort = () => {
if (!settled) {
settled = true
clearTimeout(timeoutId)
reject(new Error('File read aborted'))
}
}
reader.onload = () => resolve(reader.result as string)
reader.onerror = reject
reader.readAsDataURL(file.file)
})
} catch (error) {
@@ -234,6 +201,7 @@ export function Chat() {
const triggerWorkflowUpdate = useWorkflowStore((state) => state.triggerUpdate)
const setSubBlockValue = useSubBlockStore((state) => state.setValue)
// Chat state (UI and messages from unified store)
const {
isChatOpen,
chatPosition,
@@ -261,16 +229,19 @@ export function Chat() {
const { data: session } = useSession()
const { addToQueue } = useOperationQueue()
// Local state
const [chatMessage, setChatMessage] = useState('')
const [promptHistory, setPromptHistory] = useState<string[]>([])
const [historyIndex, setHistoryIndex] = useState(-1)
const [moreMenuOpen, setMoreMenuOpen] = useState(false)
// Refs
const inputRef = useRef<HTMLInputElement>(null)
const timeoutRef = useRef<NodeJS.Timeout | null>(null)
const streamReaderRef = useRef<ReadableStreamDefaultReader<Uint8Array> | null>(null)
const preventZoomRef = usePreventZoom()
// File upload hook
const {
chatFiles,
uploadErrors,
@@ -285,38 +256,6 @@ export function Chat() {
handleDrop,
} = useChatFileUpload()
const filePreviewUrls = useRef<Map<string, string>>(new Map())
const getFilePreviewUrl = useCallback((file: ChatFile): string | null => {
if (!file.type.startsWith('image/')) return null
const existing = filePreviewUrls.current.get(file.id)
if (existing) return existing
const url = URL.createObjectURL(file.file)
filePreviewUrls.current.set(file.id, url)
return url
}, [])
useEffect(() => {
const currentFileIds = new Set(chatFiles.map((f) => f.id))
const urlMap = filePreviewUrls.current
for (const [fileId, url] of urlMap.entries()) {
if (!currentFileIds.has(fileId)) {
URL.revokeObjectURL(url)
urlMap.delete(fileId)
}
}
return () => {
for (const url of urlMap.values()) {
URL.revokeObjectURL(url)
}
urlMap.clear()
}
}, [chatFiles])
/**
* Resolves the unified start block for chat execution, if available.
*/
@@ -382,11 +321,13 @@ export function Chat() {
const shouldShowConfigureStartInputsButton =
Boolean(startBlockId) && missingStartReservedFields.length > 0
// Get actual position (default if not set)
const actualPosition = useMemo(
() => getChatPosition(chatPosition, chatWidth, chatHeight),
[chatPosition, chatWidth, chatHeight]
)
// Drag hook
const { handleMouseDown } = useFloatDrag({
position: actualPosition,
width: chatWidth,
@@ -394,6 +335,7 @@ export function Chat() {
onPositionChange: setChatPosition,
})
// Boundary sync hook - keeps chat within bounds when layout changes
useFloatBoundarySync({
isOpen: isChatOpen,
position: actualPosition,
@@ -402,6 +344,7 @@ export function Chat() {
onPositionChange: setChatPosition,
})
// Resize hook - enables resizing from all edges and corners
const {
cursor: resizeCursor,
handleMouseMove: handleResizeMouseMove,
@@ -415,11 +358,13 @@ export function Chat() {
onDimensionsChange: setChatDimensions,
})
// Get output entries from console
const outputEntries = useMemo(() => {
if (!activeWorkflowId) return []
return entries.filter((entry) => entry.workflowId === activeWorkflowId && entry.output)
}, [entries, activeWorkflowId])
// Get filtered messages for current workflow
const workflowMessages = useMemo(() => {
if (!activeWorkflowId) return []
return messages
@@ -427,11 +372,14 @@ export function Chat() {
.sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime())
}, [messages, activeWorkflowId])
// Check if any message is currently streaming
const isStreaming = useMemo(() => {
// Match copilot semantics: only treat as streaming if the LAST message is streaming
const lastMessage = workflowMessages[workflowMessages.length - 1]
return Boolean(lastMessage?.isStreaming)
}, [workflowMessages])
// Map chat messages to copilot message format (type -> role) for scroll hook
const messagesForScrollHook = useMemo(() => {
return workflowMessages.map((msg) => ({
...msg,
@@ -439,6 +387,8 @@ export function Chat() {
}))
}, [workflowMessages])
// Scroll management hook - reuse copilot's implementation
// Use immediate scroll behavior to keep the view pinned to the bottom during streaming
const { scrollAreaRef, scrollToBottom } = useScrollManagement(
messagesForScrollHook,
isStreaming,
@@ -447,6 +397,7 @@ export function Chat() {
}
)
// Memoize user messages for performance
const userMessages = useMemo(() => {
return workflowMessages
.filter((msg) => msg.type === 'user')
@@ -454,6 +405,7 @@ export function Chat() {
.filter((content): content is string => typeof content === 'string')
}, [workflowMessages])
// Update prompt history when workflow changes
useEffect(() => {
if (!activeWorkflowId) {
setPromptHistory([])
@@ -466,7 +418,7 @@ export function Chat() {
}, [activeWorkflowId, userMessages])
/**
* Auto-scroll to bottom when messages load and chat is open
* Auto-scroll to bottom when messages load
*/
useEffect(() => {
if (workflowMessages.length > 0 && isChatOpen) {
@@ -474,6 +426,7 @@ export function Chat() {
}
}, [workflowMessages.length, scrollToBottom, isChatOpen])
// Get selected workflow outputs (deduplicated)
const selectedOutputs = useMemo(() => {
if (!activeWorkflowId) return []
const selected = selectedWorkflowOutputs[activeWorkflowId]
@@ -494,6 +447,7 @@ export function Chat() {
}, delay)
}, [])
// Cleanup on unmount
useEffect(() => {
return () => {
timeoutRef.current && clearTimeout(timeoutRef.current)
@@ -501,6 +455,7 @@ export function Chat() {
}
}, [])
// React to execution cancellation from run button
useEffect(() => {
if (!isExecuting && isStreaming) {
const lastMessage = workflowMessages[workflowMessages.length - 1]
@@ -544,6 +499,7 @@ export function Chat() {
const chunk = decoder.decode(value, { stream: true })
buffer += chunk
// Process only complete SSE messages; keep any partial trailing data in buffer
const separatorIndex = buffer.lastIndexOf('\n\n')
if (separatorIndex === -1) {
continue
@@ -593,6 +549,7 @@ export function Chat() {
}
finalizeMessageStream(responseMessageId)
} finally {
// Only clear ref if it's still our reader (prevents clobbering a new stream)
if (streamReaderRef.current === reader) {
streamReaderRef.current = null
}
@@ -912,7 +869,7 @@ export function Chat() {
<div className='flex flex-shrink-0 items-center gap-[8px]'>
{/* More menu with actions */}
<Popover variant='default' size='sm' open={moreMenuOpen} onOpenChange={setMoreMenuOpen}>
<Popover variant='default' open={moreMenuOpen} onOpenChange={setMoreMenuOpen}>
<PopoverTrigger asChild>
<Button
variant='ghost'
@@ -1021,7 +978,8 @@ export function Chat() {
{chatFiles.length > 0 && (
<div className='mt-[4px] flex gap-[6px] overflow-x-auto [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden'>
{chatFiles.map((file) => {
const previewUrl = getFilePreviewUrl(file)
const isImage = file.type.startsWith('image/')
const previewUrl = isImage ? URL.createObjectURL(file.file) : null
return (
<div
@@ -1038,6 +996,7 @@ export function Chat() {
src={previewUrl}
alt={file.name}
className='h-full w-full object-cover'
onLoad={() => URL.revokeObjectURL(previewUrl)}
/>
) : (
<div className='min-w-0 flex-1'>
@@ -1083,21 +1042,17 @@ export function Chat() {
{/* Buttons positioned absolutely on the right */}
<div className='-translate-y-1/2 absolute top-1/2 right-[2px] flex items-center gap-[10px]'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Badge
onClick={() => document.getElementById('floating-chat-file-input')?.click()}
className={cn(
'!bg-transparent !border-0 cursor-pointer rounded-[6px] p-[0px]',
(!activeWorkflowId || isExecuting || chatFiles.length >= 15) &&
'cursor-not-allowed opacity-50'
)}
>
<Paperclip className='!h-3.5 !w-3.5' />
</Badge>
</Tooltip.Trigger>
<Tooltip.Content>Attach file</Tooltip.Content>
</Tooltip.Root>
<Badge
onClick={() => document.getElementById('floating-chat-file-input')?.click()}
title='Attach file'
className={cn(
'!bg-transparent !border-0 cursor-pointer rounded-[6px] p-[0px]',
(!activeWorkflowId || isExecuting || chatFiles.length >= 15) &&
'cursor-not-allowed opacity-50'
)}
>
<Paperclip className='!h-3.5 !w-3.5' />
</Badge>
{isStreaming ? (
<Button

View File

@@ -113,17 +113,16 @@ export function ChatMessage({ message }: ChatMessageProps) {
{message.attachments && message.attachments.length > 0 && (
<div className='mb-2 flex flex-wrap gap-[6px]'>
{message.attachments.map((attachment) => {
const isImage = attachment.type.startsWith('image/')
const hasValidDataUrl =
attachment.dataUrl?.trim() && attachment.dataUrl.startsWith('data:')
// Only treat as displayable image if we have both image type AND valid data URL
const canDisplayAsImage = attachment.type.startsWith('image/') && hasValidDataUrl
return (
<div
key={attachment.id}
className={`group relative flex-shrink-0 overflow-hidden rounded-[6px] bg-[var(--surface-2)] ${
hasValidDataUrl ? 'cursor-pointer' : ''
} ${canDisplayAsImage ? 'h-[40px] w-[40px]' : 'flex min-w-[80px] max-w-[120px] items-center justify-center px-[8px] py-[2px]'}`}
} ${isImage ? 'h-[40px] w-[40px]' : 'flex min-w-[80px] max-w-[120px] items-center justify-center px-[8px] py-[2px]'}`}
onClick={(e) => {
if (hasValidDataUrl) {
e.preventDefault()
@@ -132,7 +131,7 @@ export function ChatMessage({ message }: ChatMessageProps) {
}
}}
>
{canDisplayAsImage ? (
{isImage && hasValidDataUrl ? (
<img
src={attachment.dataUrl}
alt={attachment.name}

View File

@@ -331,16 +331,13 @@ export function OutputSelect({
return (
<Combobox
size='sm'
className='!w-fit !py-[2px] min-w-[100px] rounded-[6px] px-[9px]'
className='!w-fit !py-[2px] [&>svg]:!ml-[4px] [&>svg]:!h-3 [&>svg]:!w-3 [&>span]:!text-[var(--text-secondary)] min-w-[100px] rounded-[6px] bg-transparent px-[9px] hover:bg-[var(--surface-5)] dark:hover:border-[var(--surface-6)] dark:hover:bg-transparent [&>span]:text-center'
groups={comboboxGroups}
options={[]}
multiSelect
multiSelectValues={normalizedSelectedValues}
onMultiSelectChange={onOutputSelect}
placeholder={selectedDisplayText}
overlayContent={
<span className='truncate text-[var(--text-primary)]'>{selectedDisplayText}</span>
}
disabled={disabled || workflowOutputs.length === 0}
align={align}
maxHeight={maxHeight}

View File

@@ -24,11 +24,10 @@ export function useChatFileUpload() {
/**
* Validate and add files
* Uses functional state update to avoid stale closure issues with rapid file additions
*/
const addFiles = useCallback((files: File[]) => {
setChatFiles((currentFiles) => {
const remainingSlots = Math.max(0, MAX_FILES - currentFiles.length)
const addFiles = useCallback(
(files: File[]) => {
const remainingSlots = Math.max(0, MAX_FILES - chatFiles.length)
const candidateFiles = files.slice(0, remainingSlots)
const errors: string[] = []
const validNewFiles: ChatFile[] = []
@@ -40,14 +39,11 @@ export function useChatFileUpload() {
continue
}
// Check for duplicates against current files and newly added valid files
const isDuplicateInCurrent = currentFiles.some(
// Check for duplicates
const isDuplicate = chatFiles.some(
(existingFile) => existingFile.name === file.name && existingFile.size === file.size
)
const isDuplicateInNew = validNewFiles.some(
(newFile) => newFile.name === file.name && newFile.size === file.size
)
if (isDuplicateInCurrent || isDuplicateInNew) {
if (isDuplicate) {
errors.push(`${file.name} already added`)
continue
}
@@ -61,20 +57,20 @@ export function useChatFileUpload() {
})
}
// Update errors outside the state setter to avoid nested state updates
if (errors.length > 0) {
// Use setTimeout to avoid state update during render
setTimeout(() => setUploadErrors(errors), 0)
} else if (validNewFiles.length > 0) {
setTimeout(() => setUploadErrors([]), 0)
setUploadErrors(errors)
}
if (validNewFiles.length > 0) {
return [...currentFiles, ...validNewFiles]
setChatFiles([...chatFiles, ...validNewFiles])
// Clear errors when files are successfully added
if (errors.length === 0) {
setUploadErrors([])
}
}
return currentFiles
})
}, [])
},
[chatFiles]
)
/**
* Remove a file

View File

@@ -1,6 +1,5 @@
'use client'
import type { RefObject } from 'react'
import {
Popover,
PopoverAnchor,
@@ -8,48 +7,14 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
/**
* Block information for context menu actions
*/
export interface BlockInfo {
id: string
type: string
enabled: boolean
horizontalHandles: boolean
parentId?: string
parentType?: string
}
/**
* Props for BlockMenu component
*/
export interface BlockMenuProps {
isOpen: boolean
position: { x: number; y: number }
menuRef: RefObject<HTMLDivElement | null>
onClose: () => void
selectedBlocks: BlockInfo[]
onCopy: () => void
onPaste: () => void
onDuplicate: () => void
onDelete: () => void
onToggleEnabled: () => void
onToggleHandles: () => void
onRemoveFromSubflow: () => void
onOpenEditor: () => void
onRename: () => void
hasClipboard?: boolean
showRemoveFromSubflow?: boolean
disableEdit?: boolean
}
import type { BlockContextMenuProps } from './types'
/**
* Context menu for workflow block(s).
* Displays block-specific actions in a popover at right-click position.
* Supports multi-selection - actions apply to all selected blocks.
*/
export function BlockMenu({
export function BlockContextMenu({
isOpen,
position,
menuRef,
@@ -67,7 +32,7 @@ export function BlockMenu({
hasClipboard = false,
showRemoveFromSubflow = false,
disableEdit = false,
}: BlockMenuProps) {
}: BlockContextMenuProps) {
const isSingleBlock = selectedBlocks.length === 1
const allEnabled = selectedBlocks.every((b) => b.enabled)

View File

@@ -0,0 +1,8 @@
export { BlockContextMenu } from './block-context-menu'
export { PaneContextMenu } from './pane-context-menu'
export type {
BlockContextMenuProps,
ContextMenuBlockInfo,
ContextMenuPosition,
PaneContextMenuProps,
} from './types'

View File

@@ -1,6 +1,5 @@
'use client'
import type { RefObject } from 'react'
import {
Popover,
PopoverAnchor,
@@ -8,40 +7,13 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
import type { PaneContextMenuProps } from './types'
/**
* Props for CanvasMenu component
*/
export interface CanvasMenuProps {
isOpen: boolean
position: { x: number; y: number }
menuRef: RefObject<HTMLDivElement | null>
onClose: () => void
onUndo: () => void
onRedo: () => void
onPaste: () => void
onAddBlock: () => void
onAutoLayout: () => void
onFitToView: () => void
onOpenLogs: () => void
onToggleVariables: () => void
onToggleChat: () => void
onInvite: () => void
isVariablesOpen?: boolean
isChatOpen?: boolean
hasClipboard?: boolean
disableEdit?: boolean
disableAdmin?: boolean
canUndo?: boolean
canRedo?: boolean
isInvitationsDisabled?: boolean
}
/**
* Context menu for workflow canvas.
* Context menu for workflow canvas pane.
* Displays canvas-level actions when right-clicking empty space.
*/
export function CanvasMenu({
export function PaneContextMenu({
isOpen,
position,
menuRef,
@@ -51,7 +23,6 @@ export function CanvasMenu({
onPaste,
onAddBlock,
onAutoLayout,
onFitToView,
onOpenLogs,
onToggleVariables,
onToggleChat,
@@ -64,7 +35,7 @@ export function CanvasMenu({
canUndo = false,
canRedo = false,
isInvitationsDisabled = false,
}: CanvasMenuProps) {
}: PaneContextMenuProps) {
return (
<Popover
open={isOpen}
@@ -142,14 +113,6 @@ export function CanvasMenu({
<span>Auto-layout</span>
<span className='ml-auto opacity-70 group-hover:opacity-100'>L</span>
</PopoverItem>
<PopoverItem
onClick={() => {
onFitToView()
onClose()
}}
>
Fit to View
</PopoverItem>
{/* Navigation actions */}
<PopoverDivider />

View File

@@ -0,0 +1,99 @@
import type { RefObject } from 'react'
/**
 * Position for context menu placement.
 * x/y are pixel coordinates — presumably viewport-relative; confirm at the call site.
 */
export interface ContextMenuPosition {
  x: number
  y: number
}
/**
 * Block information passed to context menu for action handling
 */
export interface ContextMenuBlockInfo {
  /** Block ID */
  id: string
  /** Block type (e.g., 'agent', 'function', 'loop') */
  type: string
  /** Whether block is enabled */
  enabled: boolean
  /** Whether block uses horizontal handles */
  horizontalHandles: boolean
  /** Parent subflow ID if nested in loop/parallel */
  parentId?: string
  /** Parent type ('loop' | 'parallel') if nested */
  parentType?: string
}
/**
 * Props for BlockContextMenu component
 */
export interface BlockContextMenuProps {
  /** Whether the context menu is open */
  isOpen: boolean
  /** Position of the context menu */
  position: ContextMenuPosition
  /** Ref for the menu element (for click-outside detection) */
  menuRef: RefObject<HTMLDivElement | null>
  /** Callback when menu should close */
  onClose: () => void
  /** Selected block(s) info — actions apply to every block in this list */
  selectedBlocks: ContextMenuBlockInfo[]
  /** Callbacks for menu actions */
  onCopy: () => void
  onPaste: () => void
  onDuplicate: () => void
  onDelete: () => void
  onToggleEnabled: () => void
  onToggleHandles: () => void
  onRemoveFromSubflow: () => void
  onOpenEditor: () => void
  onRename: () => void
  /** Whether clipboard has content for pasting */
  hasClipboard?: boolean
  /** Whether remove from subflow option should be shown */
  showRemoveFromSubflow?: boolean
  /** Whether edit actions are disabled (no permission) */
  disableEdit?: boolean
}
/**
 * Props for PaneContextMenu component
 */
export interface PaneContextMenuProps {
  /** Whether the context menu is open */
  isOpen: boolean
  /** Position of the context menu */
  position: ContextMenuPosition
  /** Ref for the menu element */
  menuRef: RefObject<HTMLDivElement | null>
  /** Callback when menu should close */
  onClose: () => void
  /** Callbacks for menu actions */
  onUndo: () => void
  onRedo: () => void
  onPaste: () => void
  onAddBlock: () => void
  onAutoLayout: () => void
  onOpenLogs: () => void
  onToggleVariables: () => void
  onToggleChat: () => void
  onInvite: () => void
  /** Whether the variables panel is currently open */
  isVariablesOpen?: boolean
  /** Whether the chat panel is currently open */
  isChatOpen?: boolean
  /** Whether clipboard has content for pasting */
  hasClipboard?: boolean
  /** Whether edit actions are disabled (no permission) */
  disableEdit?: boolean
  /** Whether admin actions are disabled (no admin permission) */
  disableAdmin?: boolean
  /** Whether undo is available */
  canUndo?: boolean
  /** Whether redo is available */
  canRedo?: boolean
  /** Whether invitations are disabled (feature flag or permission group) */
  isInvitationsDisabled?: boolean
}

View File

@@ -2,6 +2,7 @@
import { memo, useMemo } from 'react'
import { useViewport } from 'reactflow'
import { useSession } from '@/lib/auth/auth-client'
import { getUserColor } from '@/lib/workspaces/colors'
import { usePreventZoom } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useSocket } from '@/app/workspace/providers/socket-provider'
@@ -19,31 +20,30 @@ interface CursorRenderData {
}
const CursorsComponent = () => {
const { presenceUsers, currentSocketId } = useSocket()
const { presenceUsers } = useSocket()
const viewport = useViewport()
const session = useSession()
const currentUserId = session.data?.user?.id
const preventZoomRef = usePreventZoom()
const cursors = useMemo<CursorRenderData[]>(() => {
return presenceUsers
.filter((user): user is typeof user & { cursor: CursorPoint } => Boolean(user.cursor))
.filter((user) => user.socketId !== currentSocketId)
.filter((user) => user.userId !== currentUserId)
.map((user) => ({
id: user.socketId,
name: user.userName?.trim() || 'Collaborator',
cursor: user.cursor,
color: getUserColor(user.userId),
}))
}, [currentSocketId, presenceUsers])
}, [currentUserId, presenceUsers])
if (!cursors.length) {
return null
}
return (
<div
ref={preventZoomRef}
className='pointer-events-none absolute inset-0 z-[5] select-none overflow-hidden'
>
<div ref={preventZoomRef} className='pointer-events-none absolute inset-0 z-30 select-none'>
{cursors.map(({ id, name, cursor, color }) => {
const x = cursor.x * viewport.zoom + viewport.x
const y = cursor.y * viewport.zoom + viewport.y

View File

@@ -4,204 +4,36 @@ import clsx from 'clsx'
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { createCommand } from '@/app/workspace/[workspaceId]/utils/commands-utils'
import { usePreventZoom } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useNotificationStore } from '@/stores/notifications'
import { useCopilotStore, usePanelStore } from '@/stores/panel'
import { useTerminalStore } from '@/stores/terminal'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('DiffControls')
const NOTIFICATION_WIDTH = 240
const NOTIFICATION_GAP = 16
export const DiffControls = memo(function DiffControls() {
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
const isPanelResizing = usePanelStore((state) => state.isResizing)
const { isDiffReady, hasActiveDiff, acceptChanges, rejectChanges, baselineWorkflow } =
useWorkflowDiffStore(
useCallback(
(state) => ({
isDiffReady: state.isDiffReady,
hasActiveDiff: state.hasActiveDiff,
acceptChanges: state.acceptChanges,
rejectChanges: state.rejectChanges,
baselineWorkflow: state.baselineWorkflow,
}),
[]
)
)
const { updatePreviewToolCallState, currentChat, messages } = useCopilotStore(
const { isDiffReady, hasActiveDiff, acceptChanges, rejectChanges } = useWorkflowDiffStore(
useCallback(
(state) => ({
updatePreviewToolCallState: state.updatePreviewToolCallState,
currentChat: state.currentChat,
messages: state.messages,
isDiffReady: state.isDiffReady,
hasActiveDiff: state.hasActiveDiff,
acceptChanges: state.acceptChanges,
rejectChanges: state.rejectChanges,
}),
[]
)
)
const { activeWorkflowId } = useWorkflowRegistry(
useCallback((state) => ({ activeWorkflowId: state.activeWorkflowId }), [])
const { updatePreviewToolCallState } = useCopilotStore(
useCallback(
(state) => ({
updatePreviewToolCallState: state.updatePreviewToolCallState,
}),
[]
)
)
const allNotifications = useNotificationStore((state) => state.notifications)
const hasVisibleNotifications = useMemo(() => {
if (!activeWorkflowId) return false
return allNotifications.some((n) => !n.workflowId || n.workflowId === activeWorkflowId)
}, [allNotifications, activeWorkflowId])
const createCheckpoint = useCallback(async () => {
if (!activeWorkflowId || !currentChat?.id) {
logger.warn('Cannot create checkpoint: missing workflowId or chatId', {
workflowId: activeWorkflowId,
chatId: currentChat?.id,
})
return false
}
try {
logger.info('Creating checkpoint before accepting changes')
// Use the baseline workflow (state before diff) instead of current state
// This ensures reverting to the checkpoint restores the pre-diff state
const rawState = baselineWorkflow || useWorkflowStore.getState().getWorkflowState()
// The baseline already has merged subblock values, but we'll merge again to be safe
// This ensures all user inputs and subblock data are captured
const blocksWithSubblockValues = mergeSubblockState(rawState.blocks, activeWorkflowId)
// Filter and complete blocks to ensure all required fields are present
// This matches the validation logic from /api/workflows/[id]/state
const filteredBlocks = Object.entries(blocksWithSubblockValues).reduce(
(acc, [blockId, block]) => {
if (block.type && block.name) {
// Ensure all required fields are present
acc[blockId] = {
...block,
id: block.id || blockId, // Ensure id field is set
enabled: block.enabled !== undefined ? block.enabled : true,
horizontalHandles:
block.horizontalHandles !== undefined ? block.horizontalHandles : true,
height: block.height !== undefined ? block.height : 90,
subBlocks: block.subBlocks || {},
outputs: block.outputs || {},
data: block.data || {},
position: block.position || { x: 0, y: 0 }, // Ensure position exists
}
}
return acc
},
{} as typeof rawState.blocks
)
// Clean the workflow state - only include valid fields, exclude null/undefined values
const workflowState = {
blocks: filteredBlocks,
edges: rawState.edges || [],
loops: rawState.loops || {},
parallels: rawState.parallels || {},
lastSaved: rawState.lastSaved || Date.now(),
deploymentStatuses: rawState.deploymentStatuses || {},
}
logger.info('Prepared complete workflow state for checkpoint', {
blocksCount: Object.keys(workflowState.blocks).length,
edgesCount: workflowState.edges.length,
loopsCount: Object.keys(workflowState.loops).length,
parallelsCount: Object.keys(workflowState.parallels).length,
hasRequiredFields: Object.values(workflowState.blocks).every(
(block) => block.id && block.type && block.name && block.position
),
hasSubblockValues: Object.values(workflowState.blocks).some((block) =>
Object.values(block.subBlocks || {}).some(
(subblock) => subblock.value !== null && subblock.value !== undefined
)
),
sampleBlock: Object.values(workflowState.blocks)[0],
})
// Find the most recent user message ID from the current chat
const userMessages = messages.filter((msg) => msg.role === 'user')
const lastUserMessage = userMessages[userMessages.length - 1]
const messageId = lastUserMessage?.id
logger.info('Creating checkpoint with message association', {
totalMessages: messages.length,
userMessageCount: userMessages.length,
lastUserMessageId: messageId,
chatId: currentChat.id,
entireMessageArray: messages,
allMessageIds: messages.map((m) => ({
id: m.id,
role: m.role,
content: m.content.substring(0, 50),
})),
selectedUserMessages: userMessages.map((m) => ({
id: m.id,
content: m.content.substring(0, 100),
})),
allRawMessageIds: messages.map((m) => m.id),
userMessageIds: userMessages.map((m) => m.id),
checkpointData: {
workflowId: activeWorkflowId,
chatId: currentChat.id,
messageId: messageId,
messageFound: !!lastUserMessage,
},
})
const response = await fetch('/api/copilot/checkpoints', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
workflowId: activeWorkflowId,
chatId: currentChat.id,
messageId,
workflowState: JSON.stringify(workflowState),
}),
})
if (!response.ok) {
throw new Error(`Failed to create checkpoint: ${response.statusText}`)
}
const result = await response.json()
const newCheckpoint = result.checkpoint
logger.info('Checkpoint created successfully', {
messageId,
chatId: currentChat.id,
checkpointId: newCheckpoint?.id,
})
// Update the copilot store immediately to show the checkpoint icon
if (newCheckpoint && messageId) {
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
const existingCheckpoints = currentCheckpoints[messageId] || []
const updatedCheckpoints = {
...currentCheckpoints,
[messageId]: [newCheckpoint, ...existingCheckpoints],
}
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
logger.info('Updated copilot store with new checkpoint', {
messageId,
checkpointId: newCheckpoint.id,
})
}
return true
} catch (error) {
logger.error('Failed to create checkpoint:', error)
return false
}
}, [activeWorkflowId, currentChat, messages, baselineWorkflow])
const handleAccept = useCallback(() => {
logger.info('Accepting proposed changes with backup protection')
@@ -238,12 +70,8 @@ export const DiffControls = memo(function DiffControls() {
})
// Create checkpoint in the background (fire-and-forget) so it doesn't block UI
createCheckpoint().catch((error) => {
logger.warn('Failed to create checkpoint after accept:', error)
})
logger.info('Accept triggered; UI will update optimistically')
}, [createCheckpoint, updatePreviewToolCallState, acceptChanges])
}, [updatePreviewToolCallState, acceptChanges])
const handleReject = useCallback(() => {
logger.info('Rejecting proposed changes (optimistic)')
@@ -304,15 +132,16 @@ export const DiffControls = memo(function DiffControls() {
const isResizing = isTerminalResizing || isPanelResizing
const notificationOffset = hasVisibleNotifications ? NOTIFICATION_WIDTH + NOTIFICATION_GAP : 0
return (
<div
ref={preventZoomRef}
className={clsx('fixed z-30', !isResizing && 'transition-[bottom] duration-100 ease-out')}
className={clsx(
'fixed z-30',
!isResizing && 'transition-[bottom,right] duration-100 ease-out'
)}
style={{
bottom: 'calc(var(--terminal-height) + 16px)',
right: `calc(var(--panel-width) + 16px + ${notificationOffset}px)`,
right: 'calc(var(--panel-width) + 16px)',
}}
>
<div

View File

@@ -1,5 +1,3 @@
export { BlockMenu } from './block-menu'
export { CanvasMenu } from './canvas-menu'
export { CommandList } from './command-list/command-list'
export { Cursors } from './cursors/cursors'
export { DiffControls } from './diff-controls/diff-controls'
@@ -10,5 +8,4 @@ export { SubflowNodeComponent } from './subflows/subflow-node'
export { Terminal } from './terminal/terminal'
export { WandPromptBar } from './wand-prompt-bar/wand-prompt-bar'
export { WorkflowBlock } from './workflow-block/workflow-block'
export { WorkflowControls } from './workflow-controls'
export { WorkflowEdge } from './workflow-edge/workflow-edge'

View File

@@ -4,13 +4,13 @@ import type { NodeProps } from 'reactflow'
import remarkGfm from 'remark-gfm'
import { cn } from '@/lib/core/utils/cn'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { ActionBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/action-bar/action-bar'
import { useBlockVisual } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import {
BLOCK_DIMENSIONS,
useBlockDimensions,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-block-dimensions'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { ActionBar } from '../workflow-block/components'
import type { WorkflowBlockProps } from '../workflow-block/types'
interface NoteBlockNodeData extends WorkflowBlockProps {}

View File

@@ -11,7 +11,7 @@ import {
openCopilotWithMessage,
useNotificationStore,
} from '@/stores/notifications'
import { usePanelStore } from '@/stores/panel'
import { useSidebarStore } from '@/stores/sidebar/store'
import { useTerminalStore } from '@/stores/terminal'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -19,9 +19,9 @@ const logger = createLogger('Notifications')
const MAX_VISIBLE_NOTIFICATIONS = 4
/**
* Notifications display component.
* Positioned in the bottom-right workspace area, reactive to panel width and terminal height.
* Shows both global notifications and workflow-specific notifications.
* Notifications display component
* Positioned in the bottom-left workspace area, reactive to sidebar width and terminal height
* Shows both global notifications and workflow-specific notifications
*/
export const Notifications = memo(function Notifications() {
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
@@ -37,7 +37,7 @@ export const Notifications = memo(function Notifications() {
.slice(0, MAX_VISIBLE_NOTIFICATIONS)
}, [allNotifications, activeWorkflowId])
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
const isPanelResizing = usePanelStore((state) => state.isResizing)
const isSidebarResizing = useSidebarStore((state) => state.isResizing)
/**
* Executes a notification action and handles side effects.
@@ -105,19 +105,15 @@ export const Notifications = memo(function Notifications() {
return null
}
const isResizing = isTerminalResizing || isPanelResizing
const isResizing = isTerminalResizing || isSidebarResizing
return (
<div
ref={preventZoomRef}
className={clsx(
'fixed z-30 flex flex-col items-start',
!isResizing && 'transition-[bottom,right] duration-100 ease-out'
'fixed bottom-[calc(var(--terminal-height)+16px)] left-[calc(var(--sidebar-width)+16px)] z-30 flex flex-col items-start',
!isResizing && 'transition-[bottom,left] duration-100 ease-out'
)}
style={{
bottom: 'calc(var(--terminal-height) + 16px)',
right: 'calc(var(--panel-width) + 16px)',
}}
>
{[...visibleNotifications].reverse().map((notification, index, stacked) => {
const depth = stacked.length - index - 1
@@ -127,13 +123,8 @@ export const Notifications = memo(function Notifications() {
return (
<div
key={notification.id}
style={
{
'--stack-offset': `${xOffset}px`,
animation: 'notification-enter 200ms ease-out forwards',
} as React.CSSProperties
}
className={`relative h-[80px] w-[240px] overflow-hidden rounded-[4px] border bg-[var(--surface-2)] ${
style={{ transform: `translateX(${xOffset}px)` }}
className={`relative h-[80px] w-[240px] overflow-hidden rounded-[4px] border bg-[var(--surface-2)] transition-transform duration-200 ${
index > 0 ? '-mt-[80px]' : ''
}`}
>

View File

@@ -1,10 +1,20 @@
'use client'
import { memo, useEffect, useRef, useState } from 'react'
import { memo, useEffect, useMemo, useRef, useState } from 'react'
import clsx from 'clsx'
import { ChevronUp } from 'lucide-react'
import CopilotMarkdownRenderer from './markdown-renderer'
/**
 * Strips `<thinking>` tags from streamed copilot content.
 *
 * Handles both raw tags (`<thinking>`, `</thinking>`, including any
 * attributes) and HTML-escaped variants (`&lt;thinking&gt;`), then trims
 * surrounding whitespace so persisted messages render cleanly.
 */
function stripThinkingTags(text: string): string {
  const withoutRawTags = text.replace(/<\/?thinking[^>]*>/gi, '')
  const withoutEscapedTags = withoutRawTags.replace(/&lt;\/?thinking[^&]*&gt;/gi, '')
  return withoutEscapedTags.trim()
}
/**
* Max height for thinking content before internal scrolling kicks in
*/
@@ -187,6 +197,9 @@ export function ThinkingBlock({
label = 'Thought',
hasSpecialTags = false,
}: ThinkingBlockProps) {
// Strip thinking tags from content on render to handle persisted messages
const cleanContent = useMemo(() => stripThinkingTags(content || ''), [content])
const [isExpanded, setIsExpanded] = useState(false)
const [duration, setDuration] = useState(0)
const [userHasScrolledAway, setUserHasScrolledAway] = useState(false)
@@ -209,10 +222,10 @@ export function ThinkingBlock({
return
}
if (!userCollapsedRef.current && content && content.trim().length > 0) {
if (!userCollapsedRef.current && cleanContent && cleanContent.length > 0) {
setIsExpanded(true)
}
}, [isStreaming, content, hasFollowingContent, hasSpecialTags])
}, [isStreaming, cleanContent, hasFollowingContent, hasSpecialTags])
// Reset start time when streaming begins
useEffect(() => {
@@ -298,7 +311,7 @@ export function ThinkingBlock({
return `${seconds}s`
}
const hasContent = content && content.trim().length > 0
const hasContent = cleanContent.length > 0
// Thinking is "done" when streaming ends OR when there's following content (like a tool call) OR when special tags appear
const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags
const durationText = `${label} for ${formatDuration(duration)}`
@@ -374,7 +387,7 @@ export function ThinkingBlock({
isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
)}
>
<SmoothThinkingText content={content} isStreaming={isStreaming && !hasFollowingContent} />
<SmoothThinkingText content={cleanContent} isStreaming={isStreaming && !hasFollowingContent} />
</div>
</div>
)
@@ -412,7 +425,7 @@ export function ThinkingBlock({
>
{/* Completed thinking text - dimmed with markdown */}
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
<CopilotMarkdownRenderer content={content} />
<CopilotMarkdownRenderer content={cleanContent} />
</div>
</div>
</div>

View File

@@ -1,6 +1,6 @@
'use client'
import { type FC, memo, useCallback, useMemo, useState } from 'react'
import { type FC, memo, useCallback, useMemo, useRef, useState } from 'react'
import { RotateCcw } from 'lucide-react'
import { Button } from '@/components/emcn'
import {
@@ -93,6 +93,8 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
// UI state
const [isHoveringMessage, setIsHoveringMessage] = useState(false)
const cancelEditRef = useRef<(() => void) | null>(null)
// Checkpoint management hook
const {
showRestoreConfirmation,
@@ -112,7 +114,8 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
messages,
messageCheckpoints,
onRevertModeChange,
onEditModeChange
onEditModeChange,
() => cancelEditRef.current?.()
)
// Message editing hook
@@ -142,6 +145,8 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
pendingEditRef,
})
cancelEditRef.current = handleCancelEdit
// Get clean text content with double newline parsing
const cleanTextContent = useMemo(() => {
if (!message.content) return ''

View File

@@ -22,7 +22,8 @@ export function useCheckpointManagement(
messages: CopilotMessage[],
messageCheckpoints: any[],
onRevertModeChange?: (isReverting: boolean) => void,
onEditModeChange?: (isEditing: boolean) => void
onEditModeChange?: (isEditing: boolean) => void,
onCancelEdit?: () => void
) {
const [showRestoreConfirmation, setShowRestoreConfirmation] = useState(false)
const [showCheckpointDiscardModal, setShowCheckpointDiscardModal] = useState(false)
@@ -57,7 +58,7 @@ export function useCheckpointManagement(
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
const updatedCheckpoints = {
...currentCheckpoints,
[message.id]: messageCheckpoints.slice(1),
[message.id]: [],
}
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
@@ -140,7 +141,7 @@ export function useCheckpointManagement(
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
const updatedCheckpoints = {
...currentCheckpoints,
[message.id]: messageCheckpoints.slice(1),
[message.id]: [],
}
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
@@ -154,6 +155,8 @@ export function useCheckpointManagement(
}
setShowCheckpointDiscardModal(false)
onEditModeChange?.(false)
onCancelEdit?.()
const { sendMessage } = useCopilotStore.getState()
if (pendingEditRef.current) {
@@ -180,13 +183,22 @@ export function useCheckpointManagement(
} finally {
setIsProcessingDiscard(false)
}
}, [messageCheckpoints, revertToCheckpoint, message, messages])
}, [
messageCheckpoints,
revertToCheckpoint,
message,
messages,
onEditModeChange,
onCancelEdit,
])
/**
* Cancels checkpoint discard and clears pending edit
*/
const handleCancelCheckpointDiscard = useCallback(() => {
setShowCheckpointDiscardModal(false)
onEditModeChange?.(false)
onCancelEdit?.()
pendingEditRef.current = null
}, [])
@@ -218,7 +230,7 @@ export function useCheckpointManagement(
}
pendingEditRef.current = null
}
}, [message, messages])
}, [message, messages, onEditModeChange, onCancelEdit])
/**
* Handles keyboard events for restore confirmation (Escape/Enter)

View File

@@ -1446,8 +1446,10 @@ function WorkflowEditSummary({ toolCall }: { toolCall: CopilotToolCall }) {
blockType = blockType || op.block_type || ''
}
// Fallback name to type or ID
if (!blockName) blockName = blockType || blockId
if (!blockName) blockName = blockType || ''
if (!blockName && !blockType) {
continue
}
const change: BlockChange = { blockId, blockName, blockType }

View File

@@ -22,6 +22,9 @@ interface UseContextManagementProps {
export function useContextManagement({ message, initialContexts }: UseContextManagementProps) {
const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>(initialContexts ?? [])
const initializedRef = useRef(false)
const escapeRegex = useCallback((value: string) => {
return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
}, [])
// Initialize with initial contexts when they're first provided (for edit mode)
useEffect(() => {
@@ -78,10 +81,8 @@ export function useContextManagement({ message, initialContexts }: UseContextMan
// Check for slash command tokens or mention tokens based on kind
const isSlashCommand = c.kind === 'slash_command'
const prefix = isSlashCommand ? '/' : '@'
const tokenWithSpaces = ` ${prefix}${c.label} `
const tokenAtStart = `${prefix}${c.label} `
// Token can appear with leading space OR at the start of the message
return message.includes(tokenWithSpaces) || message.startsWith(tokenAtStart)
const tokenPattern = new RegExp(`(^|\\s)${escapeRegex(prefix)}${escapeRegex(c.label)}(\\s|$)`)
return tokenPattern.test(message)
})
return filtered.length === prev.length ? prev : filtered
})

View File

@@ -76,6 +76,15 @@ export function useMentionTokens({
ranges.push({ start: idx, end: idx + token.length, label })
fromIndex = idx + token.length
}
// Token at end of message without trailing space: "@label" or " /label"
const tokenAtEnd = `${prefix}${label}`
if (message.endsWith(tokenAtEnd)) {
const idx = message.lastIndexOf(tokenAtEnd)
const hasLeadingSpace = idx > 0 && message[idx - 1] === ' '
const start = hasLeadingSpace ? idx - 1 : idx
ranges.push({ start, end: message.length, label })
}
}
ranges.sort((a, b) => a.start - b.start)

View File

@@ -613,7 +613,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const insertTriggerAndOpenMenu = useCallback(
(trigger: '@' | '/') => {
if (disabled || isLoading) return
if (disabled) return
const textarea = mentionMenu.textareaRef.current
if (!textarea) return
@@ -642,7 +642,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
mentionMenu.setSubmenuActiveIndex(0)
},
[disabled, isLoading, mentionMenu, message, setMessage]
[disabled, mentionMenu, message, setMessage]
)
const handleOpenMentionMenuWithAt = useCallback(
@@ -735,10 +735,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
variant='outline'
onClick={handleOpenMentionMenuWithAt}
title='Insert @'
className={cn(
'cursor-pointer rounded-[6px] p-[4.5px]',
(disabled || isLoading) && 'cursor-not-allowed'
)}
className={cn('cursor-pointer rounded-[6px] p-[4.5px]', disabled && 'cursor-not-allowed')}
>
<AtSign className='h-3 w-3' strokeWidth={1.75} />
</Badge>
@@ -747,10 +744,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
variant='outline'
onClick={handleOpenSlashMenu}
title='Insert /'
className={cn(
'cursor-pointer rounded-[6px] p-[4.5px]',
(disabled || isLoading) && 'cursor-not-allowed'
)}
className={cn('cursor-pointer rounded-[6px] p-[4.5px]', disabled && 'cursor-not-allowed')}
>
<span className='flex h-3 w-3 items-center justify-center font-medium text-[11px] leading-none'>
/

View File

@@ -22,7 +22,7 @@ import {
import { Skeleton } from '@/components/ui'
import type { AgentAuthentication, AgentCapabilities } from '@/lib/a2a/types'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
import { StartBlockPath, TriggerUtils } from '@/lib/workflows/triggers/triggers'
import {
useA2AAgentByWorkflow,

View File

@@ -3,12 +3,16 @@
import { useState } from 'react'
import { Check, Clipboard } from 'lucide-react'
import {
Badge,
Button,
ButtonGroup,
ButtonGroupItem,
Code,
Combobox,
Label,
Popover,
PopoverContent,
PopoverItem,
PopoverTrigger,
Tooltip,
} from '@/components/emcn'
import { Skeleton } from '@/components/ui'
@@ -598,19 +602,48 @@ console.log(limits);`
<span>{copied.async ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
<Combobox
size='sm'
className='!w-fit !py-[2px] min-w-[100px] rounded-[6px] px-[9px]'
options={[
{ label: 'Execute Job', value: 'execute' },
{ label: 'Check Status', value: 'status' },
{ label: 'Rate Limits', value: 'rate-limits' },
]}
value={asyncExampleType}
onChange={(value) => setAsyncExampleType(value as AsyncExampleType)}
align='end'
dropdownWidth={160}
/>
<Popover>
<PopoverTrigger asChild>
<div className='min-w-0 max-w-full'>
<Badge
variant='outline'
className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
>
<span className='whitespace-nowrap text-[12px]'>
{getAsyncExampleTitle()}
</span>
</Badge>
</div>
</PopoverTrigger>
<PopoverContent
side='bottom'
align='end'
sideOffset={4}
maxHeight={300}
maxWidth={300}
minWidth={160}
border
>
<PopoverItem
active={asyncExampleType === 'execute'}
onClick={() => setAsyncExampleType('execute')}
>
Execute Job
</PopoverItem>
<PopoverItem
active={asyncExampleType === 'status'}
onClick={() => setAsyncExampleType('status')}
>
Check Status
</PopoverItem>
<PopoverItem
active={asyncExampleType === 'rate-limits'}
onClick={() => setAsyncExampleType('rate-limits')}
>
Rate Limits
</PopoverItem>
</PopoverContent>
</Popover>
</div>
</div>
<Code.Viewer

View File

@@ -2,11 +2,11 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Maximize2 } from 'lucide-react'
import {
Button,
ButtonGroup,
ButtonGroupItem,
Expand,
Label,
Modal,
ModalBody,
@@ -222,7 +222,7 @@ export function GeneralDeploy({
onClick={() => setShowExpandedPreview(true)}
className='absolute right-[8px] bottom-[8px] z-10 h-[28px] w-[28px] cursor-pointer border border-[var(--border)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
>
<Expand className='h-[14px] w-[14px]' />
<Maximize2 className='h-[14px] w-[14px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>See preview</Tooltip.Content>
@@ -334,6 +334,7 @@ export function GeneralDeploy({
}}
onPaneClick={() => setExpandedSelectedBlockId(null)}
selectedBlockId={expandedSelectedBlockId}
lightweight
/>
</div>
{expandedSelectedBlockId && workflowToShow.blocks?.[expandedSelectedBlockId] && (

View File

@@ -14,7 +14,7 @@ import {
} from '@/components/emcn'
import { Skeleton } from '@/components/ui'
import { generateToolInputSchema, sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format-utils'
import { isValidStartBlockType } from '@/lib/workflows/triggers/trigger-utils'
import type { InputFormatField } from '@/lib/workflows/types'
import {

View File

@@ -23,7 +23,6 @@ import { CreateApiKeyModal } from '@/app/workspace/[workspaceId]/w/components/si
import { startsWithUuid } from '@/executor/constants'
import { useApiKeys } from '@/hooks/queries/api-keys'
import { useWorkspaceSettings } from '@/hooks/queries/workspace'
import { usePermissionConfig } from '@/hooks/use-permission-config'
import { useSettingsModalStore } from '@/stores/modals/settings/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -114,12 +113,16 @@ export function DeployModal({
const [existingChat, setExistingChat] = useState<ExistingChat | null>(null)
const [isLoadingChat, setIsLoadingChat] = useState(false)
const [formSubmitting, setFormSubmitting] = useState(false)
const [formExists, setFormExists] = useState(false)
const [isFormValid, setIsFormValid] = useState(false)
const [chatSuccess, setChatSuccess] = useState(false)
const [formSuccess, setFormSuccess] = useState(false)
const [isCreateKeyModalOpen, setIsCreateKeyModalOpen] = useState(false)
const userPermissions = useUserPermissionsContext()
const canManageWorkspaceKeys = userPermissions.canAdmin
const { config: permissionConfig } = usePermissionConfig()
const { data: apiKeysData, isLoading: isLoadingKeys } = useApiKeys(workflowWorkspaceId || '')
const { data: workspaceSettingsData, isLoading: isLoadingSettings } = useWorkspaceSettings(
workflowWorkspaceId || ''
@@ -186,7 +189,6 @@ export function DeployModal({
useEffect(() => {
if (open && workflowId) {
setActiveTab('general')
setApiDeployError(null)
fetchChatDeploymentInfo()
}
}, [open, workflowId, fetchChatDeploymentInfo])
@@ -505,7 +507,6 @@ export function DeployModal({
const handleCloseModal = () => {
setIsSubmitting(false)
setChatSubmitting(false)
setApiDeployError(null)
onOpenChange(false)
}
@@ -515,6 +516,12 @@ export function DeployModal({
setTimeout(() => setChatSuccess(false), 2000)
}
const handleFormDeployed = async () => {
await handlePostDeploymentUpdate()
setFormSuccess(true)
setTimeout(() => setFormSuccess(false), 2000)
}
const handlePostDeploymentUpdate = async () => {
if (!workflowId) return
@@ -623,6 +630,17 @@ export function DeployModal({
deleteTrigger?.click()
}, [])
const handleFormFormSubmit = useCallback(() => {
const form = document.getElementById('form-deploy-form') as HTMLFormElement
form?.requestSubmit()
}, [])
const handleFormDelete = useCallback(() => {
const form = document.getElementById('form-deploy-form')
const deleteTrigger = form?.querySelector('[data-delete-trigger]') as HTMLButtonElement
deleteTrigger?.click()
}, [])
return (
<>
<Modal open={open} onOpenChange={handleCloseModal}>
@@ -636,31 +654,15 @@ export function DeployModal({
>
<ModalTabsList activeValue={activeTab}>
<ModalTabsTrigger value='general'>General</ModalTabsTrigger>
{!permissionConfig.hideDeployApi && (
<ModalTabsTrigger value='api'>API</ModalTabsTrigger>
)}
{!permissionConfig.hideDeployMcp && (
<ModalTabsTrigger value='mcp'>MCP</ModalTabsTrigger>
)}
{!permissionConfig.hideDeployA2a && (
<ModalTabsTrigger value='a2a'>A2A</ModalTabsTrigger>
)}
{!permissionConfig.hideDeployChatbot && (
<ModalTabsTrigger value='chat'>Chat</ModalTabsTrigger>
)}
<ModalTabsTrigger value='api'>API</ModalTabsTrigger>
<ModalTabsTrigger value='mcp'>MCP</ModalTabsTrigger>
<ModalTabsTrigger value='a2a'>A2A</ModalTabsTrigger>
<ModalTabsTrigger value='chat'>Chat</ModalTabsTrigger>
{/* <ModalTabsTrigger value='form'>Form</ModalTabsTrigger> */}
{!permissionConfig.hideDeployTemplate && (
<ModalTabsTrigger value='template'>Template</ModalTabsTrigger>
)}
<ModalTabsTrigger value='template'>Template</ModalTabsTrigger>
</ModalTabsList>
<ModalBody className='min-h-0 flex-1'>
{apiDeployError && (
<div className='mb-3 rounded-[4px] border border-destructive/30 bg-destructive/10 p-3 text-destructive text-sm'>
<div className='font-semibold'>Deployment Error</div>
<div>{apiDeployError}</div>
</div>
)}
<ModalTabsContent value='general'>
<GeneralDeploy
workflowId={workflowId}

View File

@@ -336,10 +336,6 @@ export function Code({
setCode('')
}
handleStreamChunkRef.current = (chunk: string) => {
setCode((prev) => prev + chunk)
}
handleGeneratedContentRef.current = (generatedCode: string) => {
setCode(generatedCode)
if (!isPreview && !disabled) {
@@ -695,7 +691,11 @@ export function Code({
/>
)}
<CodeEditor.Container onDragOver={(e) => e.preventDefault()} onDrop={handleDrop}>
<CodeEditor.Container
onDragOver={(e) => e.preventDefault()}
onDrop={handleDrop}
isStreaming={isAiStreaming}
>
<div className='absolute top-2 right-3 z-10 flex items-center gap-1 opacity-0 transition-opacity group-hover:opacity-100'>
{wandConfig?.enabled &&
!isAiStreaming &&
@@ -761,11 +761,6 @@ export function Code({
}}
onFocus={() => {
hasEditedSinceFocusRef.current = false
// Show tag dropdown on focus when code is empty
if (!isPreview && !disabled && !readOnly && code.trim() === '') {
setShowTags(true)
setCursorPosition(0)
}
}}
highlight={createHighlightFunction(effectiveLanguage, shouldHighlightReference)}
{...getCodeEditorProps({ isStreaming: isAiStreaming, isPreview, disabled })}

View File

@@ -115,7 +115,6 @@ export function ConditionInput({
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
const containerRef = useRef<HTMLDivElement>(null)
const inputRefs = useRef<Map<string, HTMLTextAreaElement>>(new Map())
/**
* Determines if a reference string should be highlighted in the editor.
@@ -729,20 +728,6 @@ export function ConditionInput({
})
}, [conditionalBlocks.length])
// Capture textarea refs from Editor components (condition mode)
useEffect(() => {
if (!isRouterMode && containerRef.current) {
conditionalBlocks.forEach((block) => {
const textarea = containerRef.current?.querySelector(
`[data-block-id="${block.id}"] textarea`
) as HTMLTextAreaElement | null
if (textarea) {
inputRefs.current.set(block.id, textarea)
}
})
}
}, [conditionalBlocks, isRouterMode])
// Show loading or empty state if not ready or no blocks
if (!isReady || conditionalBlocks.length === 0) {
return (
@@ -857,9 +842,6 @@ export function ConditionInput({
onDrop={(e) => handleDrop(block.id, e)}
>
<Textarea
ref={(el) => {
if (el) inputRefs.current.set(block.id, el)
}}
data-router-block-id={block.id}
value={block.value}
onChange={(e) => {
@@ -887,15 +869,6 @@ export function ConditionInput({
)
}
}}
onFocus={() => {
if (!isPreview && !disabled && block.value.trim() === '') {
setConditionalBlocks((blocks) =>
blocks.map((b) =>
b.id === block.id ? { ...b, showTags: true, cursorPosition: 0 } : b
)
)
}
}}
onBlur={() => {
setTimeout(() => {
setConditionalBlocks((blocks) =>
@@ -956,11 +929,6 @@ export function ConditionInput({
)
)
}}
inputRef={
{
current: inputRefs.current.get(block.id) || null,
} as React.RefObject<HTMLTextAreaElement>
}
/>
)}
</div>
@@ -1038,15 +1006,6 @@ export function ConditionInput({
)
}
}}
onFocus={() => {
if (!isPreview && !disabled && block.value.trim() === '') {
setConditionalBlocks((blocks) =>
blocks.map((b) =>
b.id === block.id ? { ...b, showTags: true, cursorPosition: 0 } : b
)
)
}
}}
highlight={(codeToHighlight) => {
const placeholders: {
placeholder: string
@@ -1154,11 +1113,6 @@ export function ConditionInput({
)
)
}}
inputRef={
{
current: inputRefs.current.get(block.id) || null,
} as React.RefObject<HTMLTextAreaElement>
}
/>
)}
</div>

View File

@@ -288,7 +288,6 @@ export function DocumentTagEntry({
onKeyDown={handlers.onKeyDown}
onDrop={handlers.onDrop}
onDragOver={handlers.onDragOver}
onFocus={handlers.onFocus}
onScroll={(e) => syncOverlayScroll(cellKey, e.currentTarget.scrollLeft)}
onPaste={() =>
setTimeout(() => {

View File

@@ -214,7 +214,6 @@ export function EvalInput({
onKeyDown={handlers.onKeyDown}
onDrop={handlers.onDrop}
onDragOver={handlers.onDragOver}
onFocus={handlers.onFocus}
placeholder='How accurate is the response?'
disabled={isPreview || disabled}
className={cn(

View File

@@ -33,4 +33,5 @@ export { Table } from './table/table'
export { Text } from './text/text'
export { TimeInput } from './time-input/time-input'
export { ToolInput } from './tool-input/tool-input'
export { TriggerSave } from './trigger-save/trigger-save'
export { VariablesInput } from './variables-input/variables-input'

View File

@@ -1,4 +1,4 @@
import { useMemo, useRef, useState } from 'react'
import { useEffect, useMemo, useRef, useState } from 'react'
import { Badge, Input } from '@/components/emcn'
import { Label } from '@/components/ui/label'
import { cn } from '@/lib/core/utils/cn'
@@ -7,7 +7,39 @@ import { TagDropdown } from '@/app/workspace/[workspaceId]/w/[workflowId]/compon
import { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { useWorkflowInputFields } from '@/hooks/queries/workflows'
/**
 * Represents a field in the input format configuration
 */
interface InputFormatField {
  // Field identifier; validated as a non-empty string by isInputFormatField.
  name: string
  // Optional value-type label for the field; any string is accepted here.
  type?: string
}
/**
 * Represents an input trigger block structure
 */
interface InputTriggerBlock {
  // Modern trigger block kinds that can declare an input format.
  type: 'input_trigger' | 'start_trigger'
  // Input format lives under subBlocks.inputFormat.value when present.
  subBlocks?: {
    inputFormat?: { value?: InputFormatField[] }
  }
}
/**
 * Represents a legacy starter block structure
 */
interface StarterBlockLegacy {
  type: 'starter'
  // Newer storage location for the input format on starter blocks.
  subBlocks?: {
    inputFormat?: { value?: InputFormatField[] }
  }
  // Legacy storage location (config.params.inputFormat), kept for back-compat.
  config?: {
    params?: {
      inputFormat?: InputFormatField[]
    }
  }
}
/**
* Props for the InputMappingField component
@@ -38,6 +70,73 @@ interface InputMappingProps {
disabled?: boolean
}
/**
 * Type guard to check if a value is an InputTriggerBlock.
 * @param value - The value to check
 * @returns True if the value is a non-null object whose type is
 *   'input_trigger' or 'start_trigger'
 */
function isInputTriggerBlock(value: unknown): value is InputTriggerBlock {
  // Guard object-ness first: the previous version read `.type` off the raw
  // value before the null check, which threw a TypeError for null/undefined.
  if (value === null || typeof value !== 'object') return false
  const type = (value as { type?: unknown }).type
  return type === 'input_trigger' || type === 'start_trigger'
}
/**
 * Type guard to check if a value is a StarterBlockLegacy.
 * @param value - The value to check
 * @returns True if the value is a StarterBlockLegacy
 */
function isStarterBlock(value: unknown): value is StarterBlockLegacy {
  // Only non-null objects can be starter blocks; bail out early otherwise.
  if (!value || typeof value !== 'object') return false
  return (value as { type?: unknown }).type === 'starter'
}
/**
 * Type guard to check if a value is an InputFormatField.
 * @param value - The value to check
 * @returns True if the value is an InputFormatField
 */
function isInputFormatField(value: unknown): value is InputFormatField {
  // Must be a non-null object that carries a `name` property at all.
  if (value === null || typeof value !== 'object' || !('name' in value)) return false
  const candidate = value as { name: unknown; type?: unknown }
  // `name` must be a non-blank string; `type`, when present, must be a string.
  const nameIsValid = typeof candidate.name === 'string' && candidate.name.trim() !== ''
  const typeIsValid = candidate.type === undefined || typeof candidate.type === 'string'
  return nameIsValid && typeIsValid
}
/**
 * Extracts input format fields from workflow blocks.
 * Prefers the modern input/start trigger block; falls back to the legacy
 * starter block (checking its subBlocks first, then legacy config params).
 * @param blocks - The workflow blocks to extract from
 * @returns Array of input format fields or null if not found
 */
function extractInputFormatFields(blocks: Record<string, unknown>): InputFormatField[] | null {
  // Normalize an untyped candidate list into well-formed fields.
  const toFields = (candidates: unknown[]): InputFormatField[] =>
    candidates.filter(isInputFormatField).map((field) => ({ name: field.name, type: field.type }))

  const blockValues = Object.values(blocks)

  const trigger = blockValues.find(isInputTriggerBlock)
  if (trigger) {
    const triggerFormat = trigger.subBlocks?.inputFormat?.value
    if (Array.isArray(triggerFormat)) return toFields(triggerFormat)
  }

  const starter = blockValues.find(isStarterBlock)
  if (starter) {
    const modernFormat = starter.subBlocks?.inputFormat?.value
    const legacyFormat = starter.config?.params?.inputFormat
    const source = Array.isArray(modernFormat) ? modernFormat : legacyFormat
    if (Array.isArray(source)) return toFields(source)
  }

  return null
}
/**
* InputMapping component displays and manages input field mappings for workflow execution
* @param props - The component props
@@ -69,10 +168,62 @@ export function InputMapping({
const inputRefs = useRef<Map<string, HTMLInputElement>>(new Map())
const overlayRefs = useRef<Map<string, HTMLDivElement>>(new Map())
const workflowId = typeof selectedWorkflowId === 'string' ? selectedWorkflowId : undefined
const { data: childInputFields = [], isLoading } = useWorkflowInputFields(workflowId)
const [childInputFields, setChildInputFields] = useState<InputFormatField[]>([])
const [isLoading, setIsLoading] = useState(false)
const [collapsedFields, setCollapsedFields] = useState<Record<string, boolean>>({})
useEffect(() => {
let isMounted = true
const controller = new AbortController()
async function fetchChildSchema() {
if (!selectedWorkflowId) {
if (isMounted) {
setChildInputFields([])
setIsLoading(false)
}
return
}
try {
if (isMounted) setIsLoading(true)
const res = await fetch(`/api/workflows/${selectedWorkflowId}`, {
signal: controller.signal,
})
if (!res.ok) {
if (isMounted) {
setChildInputFields([])
setIsLoading(false)
}
return
}
const { data } = await res.json()
const blocks = (data?.state?.blocks as Record<string, unknown>) || {}
const fields = extractInputFormatFields(blocks)
if (isMounted) {
setChildInputFields(fields || [])
setIsLoading(false)
}
} catch (error) {
if (isMounted) {
setChildInputFields([])
setIsLoading(false)
}
}
}
fetchChildSchema()
return () => {
isMounted = false
controller.abort()
}
}, [selectedWorkflowId])
const valueObj: Record<string, string> = useMemo(() => {
if (isPreview && previewValue && typeof previewValue === 'object') {
return previewValue as Record<string, string>
@@ -237,7 +388,6 @@ function InputMappingField({
onKeyDown={handlers.onKeyDown}
onDrop={handlers.onDrop}
onDragOver={handlers.onDragOver}
onFocus={handlers.onFocus}
onScroll={(e) => handleScroll(e)}
onPaste={() =>
setTimeout(() => {

View File

@@ -276,7 +276,6 @@ export function KnowledgeTagFilters({
onKeyDown={handlers.onKeyDown}
onDrop={handlers.onDrop}
onDragOver={handlers.onDragOver}
onFocus={handlers.onFocus}
onScroll={(e) => syncOverlayScroll(cellKey, e.currentTarget.scrollLeft)}
onPaste={() =>
setTimeout(() => {

View File

@@ -1,4 +1,3 @@
import type React from 'react'
import {
useCallback,
useEffect,
@@ -160,27 +159,6 @@ export function LongInput({
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
/**
* Callback to show tag dropdown when input is empty and focused
*/
const shouldForceTagDropdown = useCallback(
({
value,
}: {
value: string
cursor: number
event: 'focus'
}): { show: boolean } | undefined => {
if (isPreview || disabled) return { show: false }
// Show tag dropdown on focus when input is empty
if (value.trim() === '') {
return { show: true }
}
return { show: false }
},
[isPreview, disabled]
)
// During streaming, use local content; otherwise use the controller value
const value = useMemo(() => {
if (wandHook.isStreaming) return localContent
@@ -316,7 +294,6 @@ export function LongInput({
disabled={disabled}
isStreaming={wandHook.isStreaming}
previewValue={previewValue}
shouldForceTagDropdown={shouldForceTagDropdown}
>
{({ ref, onChange: handleChange, onKeyDown, onDrop, onDragOver, onFocus }) => {
const setRefs = (el: HTMLTextAreaElement | null) => {
@@ -326,7 +303,7 @@ export function LongInput({
return (
<div
ref={containerRef}
className='group relative w-full'
className={cn('group relative w-full', wandHook.isStreaming && 'streaming-effect')}
style={{ height: `${height}px` }}
>
<Textarea

View File

@@ -1,4 +1,3 @@
import type { RefObject } from 'react'
import { useCallback, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
@@ -112,14 +111,7 @@ function McpInputWithTags({
data-lpignore='true'
data-1p-ignore
readOnly
onFocus={(e) => {
e.currentTarget.removeAttribute('readOnly')
// Show tag dropdown on focus when input is empty
if (!disabled && (value?.trim() === '' || !value)) {
setShowTags(true)
setCursorPosition(0)
}
}}
onFocus={(e) => e.currentTarget.removeAttribute('readOnly')}
className={cn(!isPassword && 'text-transparent caret-foreground')}
/>
{!isPassword && (
@@ -144,7 +136,6 @@ function McpInputWithTags({
setShowTags(false)
setActiveSourceBlockId(null)
}}
inputRef={inputRef as RefObject<HTMLInputElement>}
/>
</div>
)
@@ -234,13 +225,6 @@ function McpTextareaWithTags({
onChange={handleChange}
onDrop={handleDrop}
onDragOver={handleDragOver}
onFocus={() => {
// Show tag dropdown on focus when input is empty
if (!disabled && (value?.trim() === '' || !value)) {
setShowTags(true)
setCursorPosition(0)
}
}}
placeholder={placeholder}
disabled={disabled}
rows={rows}
@@ -270,7 +254,6 @@ function McpTextareaWithTags({
setShowTags(false)
setActiveSourceBlockId(null)
}}
inputRef={textareaRef as RefObject<HTMLTextAreaElement>}
/>
</div>
)

View File

@@ -1,4 +1,4 @@
import { useCallback, useEffect, useImperativeHandle, useMemo, useRef, useState } from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
import { Button, Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
import { Trash } from '@/components/emcn/icons/trash'
@@ -8,30 +8,12 @@ import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/
import { TagDropdown } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown'
import { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { WandControlHandlers } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { useWand } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-wand'
import type { SubBlockConfig } from '@/blocks/types'
// Pixel bounds for the message textareas (applied by autoResizeTextarea).
const MIN_TEXTAREA_HEIGHT_PX = 80
const MAX_TEXTAREA_HEIGHT_PX = 320
/** Pattern to match complete message objects in JSON */
// NOTE: global flag means this regex is stateful — callers must reset
// lastIndex before an exec loop (see extractStreamingMessages).
const COMPLETE_MESSAGE_PATTERN =
  /"role"\s*:\s*"(system|user|assistant)"[^}]*"content"\s*:\s*"((?:[^"\\]|\\.)*)"/g
/** Pattern to match incomplete content at end of buffer */
const INCOMPLETE_CONTENT_PATTERN = /"content"\s*:\s*"((?:[^"\\]|\\.)*)$/
/** Pattern to match role before content */
const ROLE_BEFORE_CONTENT_PATTERN = /"role"\s*:\s*"(system|user|assistant)"[^{]*$/
/**
 * Unescapes JSON string content.
 * Uses a single-pass replace so each escape sequence is consumed atomically.
 * The previous chained-replace version processed backslash-n before
 * backslash-backslash, so an escaped backslash followed by a literal 'n'
 * was wrongly collapsed into a real newline.
 */
const unescapeContent = (str: string): string =>
  str.replace(/\\(n|"|\\)/g, (_match, escaped: string) => (escaped === 'n' ? '\n' : escaped))
/**
* Interface for individual message in the messages array
*/
@@ -56,8 +38,6 @@ interface MessagesInputProps {
previewValue?: Message[] | null
/** Whether the input is disabled */
disabled?: boolean
/** Ref to expose wand control handlers to parent */
wandControlRef?: React.MutableRefObject<WandControlHandlers | null>
}
/**
@@ -75,7 +55,6 @@ export function MessagesInput({
isPreview = false,
previewValue,
disabled = false,
wandControlRef,
}: MessagesInputProps) {
const [messages, setMessages] = useSubBlockValue<Message[]>(blockId, subBlockId, false)
const [localMessages, setLocalMessages] = useState<Message[]>([{ role: 'user', content: '' }])
@@ -89,142 +68,6 @@ export function MessagesInput({
disabled,
})
/**
* Gets the current messages as JSON string for wand context
*/
const getMessagesJson = useCallback((): string => {
if (localMessages.length === 0) return ''
// Filter out empty messages for cleaner context
const nonEmptyMessages = localMessages.filter((m) => m.content.trim() !== '')
if (nonEmptyMessages.length === 0) return ''
return JSON.stringify(nonEmptyMessages, null, 2)
}, [localMessages])
/**
* Streaming buffer for accumulating JSON content
*/
const streamBufferRef = useRef<string>('')
/**
* Parses and validates messages from JSON content
*/
const parseMessages = useCallback((content: string): Message[] | null => {
try {
const parsed = JSON.parse(content)
if (Array.isArray(parsed)) {
const validMessages: Message[] = parsed
.filter(
(m): m is { role: string; content: string } =>
typeof m === 'object' &&
m !== null &&
typeof m.role === 'string' &&
typeof m.content === 'string'
)
.map((m) => ({
role: (['system', 'user', 'assistant'].includes(m.role)
? m.role
: 'user') as Message['role'],
content: m.content,
}))
return validMessages.length > 0 ? validMessages : null
}
} catch {
// Parsing failed
}
return null
}, [])
/**
* Extracts messages from streaming JSON buffer
* Uses simple pattern matching for efficiency
*/
const extractStreamingMessages = useCallback(
(buffer: string): Message[] => {
// Try complete JSON parse first
const complete = parseMessages(buffer)
if (complete) return complete
const result: Message[] = []
// Reset regex lastIndex for global pattern
COMPLETE_MESSAGE_PATTERN.lastIndex = 0
let match
while ((match = COMPLETE_MESSAGE_PATTERN.exec(buffer)) !== null) {
result.push({ role: match[1] as Message['role'], content: unescapeContent(match[2]) })
}
// Check for incomplete message at end (content still streaming)
const lastContentIdx = buffer.lastIndexOf('"content"')
if (lastContentIdx !== -1) {
const tail = buffer.slice(lastContentIdx)
const incomplete = tail.match(INCOMPLETE_CONTENT_PATTERN)
if (incomplete) {
const head = buffer.slice(0, lastContentIdx)
const roleMatch = head.match(ROLE_BEFORE_CONTENT_PATTERN)
if (roleMatch) {
const content = unescapeContent(incomplete[1])
// Only add if not duplicate of last complete message
if (result.length === 0 || result[result.length - 1].content !== content) {
result.push({ role: roleMatch[1] as Message['role'], content })
}
}
}
}
return result
},
[parseMessages]
)
/**
* Wand hook for AI-assisted content generation
*/
const wandHook = useWand({
wandConfig: config.wandConfig,
currentValue: getMessagesJson(),
onStreamStart: () => {
streamBufferRef.current = ''
setLocalMessages([{ role: 'system', content: '' }])
},
onStreamChunk: (chunk) => {
streamBufferRef.current += chunk
const extracted = extractStreamingMessages(streamBufferRef.current)
if (extracted.length > 0) {
setLocalMessages(extracted)
}
},
onGeneratedContent: (content) => {
const validMessages = parseMessages(content)
if (validMessages) {
setLocalMessages(validMessages)
setMessages(validMessages)
} else {
// Fallback: treat as raw system prompt
const trimmed = content.trim()
if (trimmed) {
const fallback: Message[] = [{ role: 'system', content: trimmed }]
setLocalMessages(fallback)
setMessages(fallback)
}
}
},
})
/**
* Expose wand control handlers to parent via ref
*/
useImperativeHandle(
wandControlRef,
() => ({
onWandTrigger: (prompt: string) => {
wandHook.generateStream({ prompt })
},
isWandActive: wandHook.isPromptVisible,
isWandStreaming: wandHook.isStreaming,
}),
[wandHook]
)
/**
* Initialize local state from stored or preview value
*/
@@ -465,7 +308,7 @@ export function MessagesInput({
}, [currentMessages, autoResizeTextarea])
return (
<div className='flex w-full flex-col gap-[10px]'>
<div className='flex w-full flex-col gap-3'>
{currentMessages.map((message, index) => (
<div
key={`message-${index}`}
@@ -521,7 +364,7 @@ export function MessagesInput({
type='button'
disabled={isPreview || disabled}
className={cn(
'group -ml-1.5 -my-1 flex items-center gap-1 rounded px-1.5 py-1 font-medium text-[13px] text-[var(--text-primary)] leading-none transition-colors hover:bg-[var(--surface-5)] hover:text-[var(--text-secondary)]',
'-ml-1.5 -my-1 rounded px-1.5 py-1 font-medium text-[13px] text-[var(--text-primary)] leading-none transition-colors hover:bg-[var(--surface-5)] hover:text-[var(--text-secondary)]',
(isPreview || disabled) &&
'cursor-default hover:bg-transparent hover:text-[var(--text-primary)]'
)}
@@ -529,14 +372,6 @@ export function MessagesInput({
aria-label='Select message role'
>
{formatRole(message.role)}
{!isPreview && !disabled && (
<ChevronDown
className={cn(
'h-3 w-3 flex-shrink-0 transition-transform duration-100',
openPopoverIndex === index && 'rotate-180'
)}
/>
)}
</button>
</PopoverTrigger>
<PopoverContent minWidth={140} align='start'>
@@ -651,7 +486,6 @@ export function MessagesInput({
}}
onDrop={fieldHandlers.onDrop}
onDragOver={fieldHandlers.onDragOver}
onFocus={fieldHandlers.onFocus}
onScroll={(e) => {
const overlay = overlayRefs.current[fieldId]
if (overlay) {

View File

@@ -192,24 +192,6 @@ export function ShortInput({
[isApiKeyField, isPreview, disabled, readOnly]
)
const shouldForceTagDropdown = useCallback(
({
value,
}: {
value: string
cursor: number
event: 'focus'
}): { show: boolean } | undefined => {
if (isPreview || disabled || readOnly) return { show: false }
// Show tag dropdown on focus when input is empty (unless it's an API key field)
if (!isApiKeyField && value.trim() === '') {
return { show: true }
}
return { show: false }
},
[isPreview, disabled, readOnly, isApiKeyField]
)
const baseValue = isPreview ? previewValue : propValue !== undefined ? propValue : undefined
const effectiveValue =
@@ -334,7 +316,6 @@ export function ShortInput({
isStreaming={wandHook.isStreaming}
previewValue={previewValue}
shouldForceEnvDropdown={shouldForceEnvDropdown}
shouldForceTagDropdown={shouldForceTagDropdown}
>
{({
ref,
@@ -375,9 +356,9 @@ export function ShortInput({
type='text'
value={displayValue}
onChange={handleChange as (e: React.ChangeEvent<HTMLInputElement>) => void}
onFocus={(e) => {
onFocus={() => {
setIsFocused(true)
onFocus(e)
onFocus()
}}
onBlur={handleBlur}
onDrop={onDrop as (e: React.DragEvent<HTMLInputElement>) => void}

Some files were not shown because too many files have changed in this diff Show More