Compare commits

..

5 Commits

Author SHA1 Message Date
waleed
82fba6dc07 consolidated admin controls to settings, added impersonation 2026-01-15 15:59:57 -08:00
waleed
29ab351d0d readded migrations 2026-01-15 15:13:26 -08:00
waleed
432a40efc2 Merge branch 'staging' into feat/impersonation 2026-01-15 15:12:55 -08:00
waleed
73873bb4c6 remove migrations 2026-01-15 15:12:32 -08:00
waleed
197ada5df2 feat(impersonation): migrate to betterauth admin plugin for admin status, add impersonation 2026-01-15 15:10:58 -08:00
303 changed files with 6386 additions and 23452 deletions

View File

@@ -9,12 +9,12 @@
<p align="center">
<a href="https://sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/sim.ai-6F3DFA" alt="Sim.ai"></a>
<a href="https://discord.gg/Hr4UWYEcTT" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Discord-Join%20Server-5865F2?logo=discord&logoColor=white" alt="Discord"></a>
<a href="https://x.com/simdotai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/twitter/follow/simdotai?style=social" alt="Twitter"></a>
<a href="https://x.com/simdotai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/twitter/follow/simstudioai?style=social" alt="Twitter"></a>
<a href="https://docs.sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Docs-6F3DFA.svg" alt="Documentation"></a>
</p>
<p align="center">
<a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a> <a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20Studio%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
<a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20Studio%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
</p>
### Build Workflows with Ease

View File

@@ -4678,349 +4678,3 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
/**
 * ReductoIcon — the Reducto brand mark: a white "R"-shaped block outline
 * drawn as a single even-odd path on a 400x400 canvas.
 * Standard SVG props are spread first so callers can override any default
 * attribute (size, viewBox, etc.).
 */
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
  // Outer block with a notched corner; inner cut-out via even-odd fill rule.
  const markPath =
    'M85.3434 70.7805H314.657V240.307L226.44 329.219H85.3434V70.7805ZM107.796 93.2319H292.205V204.487H206.493V306.767H107.801L107.796 93.2319Z'
  return (
    <svg
      {...props}
      xmlns='http://www.w3.org/2000/svg'
      viewBox='50 40 300 320'
      width='400'
      height='400'
      fill='none'
    >
      <path d={markPath} fill='#FFFFFF' fillRule='evenodd' clipRule='evenodd' />
    </svg>
  )
}
/**
 * PulseIcon — the Pulse brand mark: an 8x8 grid of small rounded squares.
 * Most cells are a faint dark tint (#030712 at 10% opacity); a few are
 * highlighted solid blue (#0E7BC9) to form the logo pattern.
 * Standard SVG props are spread onto the root <svg>; explicit attributes
 * below override anything passed in, since they come after the spread.
 */
export function PulseIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      {...props}
      width='24'
      height='24'
      viewBox='0 6 24 24'
      fill='none'
      xmlns='http://www.w3.org/2000/svg'
    >
      {/* Row 1 (y=6): all faint */}
      <path
        d='M0 6.63667C0 6.28505 0.284685 6 0.635863 6H1.54133C1.89251 6 2.17719 6.28505 2.17719 6.63667V7.54329C2.17719 7.89492 1.89251 8.17997 1.54133 8.17997H0.635863C0.284686 8.17997 0 7.89492 0 7.54329V6.63667Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M3.11318 6.63667C3.11318 6.28505 3.39787 6 3.74905 6H4.65452C5.00569 6 5.29038 6.28505 5.29038 6.63667V7.54329C5.29038 7.89492 5.00569 8.17997 4.65452 8.17997H3.74905C3.39787 8.17997 3.11318 7.89492 3.11318 7.54329V6.63667Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M6.22637 6.63667C6.22637 6.28505 6.51105 6 6.86223 6H7.7677C8.11888 6 8.40356 6.28505 8.40356 6.63667V7.54329C8.40356 7.89492 8.11888 8.17997 7.7677 8.17997H6.86223C6.51105 8.17997 6.22637 7.89492 6.22637 7.54329V6.63667Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M9.33955 6.63667C9.33955 6.28505 9.62424 6 9.97541 6H10.8809C11.2321 6 11.5167 6.28505 11.5167 6.63667V7.54329C11.5167 7.89492 11.2321 8.17997 10.8809 8.17997H9.97541C9.62424 8.17997 9.33955 7.89492 9.33955 7.54329V6.63667Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M12.4527 6.63667C12.4527 6.28505 12.7374 6 13.0886 6H13.9941C14.3452 6 14.6299 6.28505 14.6299 6.63667V7.54329C14.6299 7.89492 14.3452 8.17997 13.9941 8.17997H13.0886C12.7374 8.17997 12.4527 7.89492 12.4527 7.54329V6.63667Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M15.5659 6.63667C15.5659 6.28505 15.8506 6 16.2018 6H17.1073C17.4584 6 17.7431 6.28505 17.7431 6.63667V7.54329C17.7431 7.89492 17.4584 8.17997 17.1073 8.17997H16.2018C15.8506 8.17997 15.5659 7.89492 15.5659 7.54329V6.63667Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M18.6791 6.63667C18.6791 6.28505 18.9638 6 19.315 6H20.2204C20.5716 6 20.8563 6.28505 20.8563 6.63667V7.54329C20.8563 7.89492 20.5716 8.17997 20.2204 8.17997H19.315C18.9638 8.17997 18.6791 7.89492 18.6791 7.54329V6.63667Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M21.7923 6.63667C21.7923 6.28505 22.077 6 22.4282 6H23.3336C23.6848 6 23.9695 6.28505 23.9695 6.63667V7.54329C23.9695 7.89492 23.6848 8.17997 23.3336 8.17997H22.4282C22.077 8.17997 21.7923 7.89492 21.7923 7.54329V6.63667Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      {/* Row 2 (y=9.11715): all faint */}
      <path
        d='M0 9.75382C0 9.4022 0.284685 9.11715 0.635863 9.11715H1.54133C1.89251 9.11715 2.17719 9.4022 2.17719 9.75382V10.6604C2.17719 11.0121 1.89251 11.2971 1.54133 11.2971H0.635863C0.284686 11.2971 0 11.0121 0 10.6604V9.75382Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M3.11318 9.75382C3.11318 9.4022 3.39787 9.11715 3.74905 9.11715H4.65452C5.00569 9.11715 5.29038 9.4022 5.29038 9.75382V10.6604C5.29038 11.0121 5.00569 11.2971 4.65452 11.2971H3.74905C3.39787 11.2971 3.11318 11.0121 3.11318 10.6604V9.75382Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M6.22637 9.75382C6.22637 9.4022 6.51105 9.11715 6.86223 9.11715H7.7677C8.11888 9.11715 8.40356 9.4022 8.40356 9.75382V10.6604C8.40356 11.0121 8.11888 11.2971 7.7677 11.2971H6.86223C6.51105 11.2971 6.22637 11.0121 6.22637 10.6604V9.75382Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M9.33955 9.75382C9.33955 9.4022 9.62424 9.11715 9.97541 9.11715H10.8809C11.2321 9.11715 11.5167 9.4022 11.5167 9.75382V10.6604C11.5167 11.0121 11.2321 11.2971 10.8809 11.2971H9.97541C9.62424 11.2971 9.33955 11.0121 9.33955 10.6604V9.75382Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M12.4527 9.75382C12.4527 9.4022 12.7374 9.11715 13.0886 9.11715H13.9941C14.3452 9.11715 14.6299 9.4022 14.6299 9.75382V10.6604C14.6299 11.0121 14.3452 11.2971 13.9941 11.2971H13.0886C12.7374 11.2971 12.4527 11.0121 12.4527 10.6604V9.75382Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M15.5659 9.75382C15.5659 9.4022 15.8506 9.11715 16.2018 9.11715H17.1073C17.4584 9.11715 17.7431 9.4022 17.7431 9.75382V10.6604C17.7431 11.0121 17.4584 11.2971 17.1073 11.2971H16.2018C15.8506 11.2971 15.5659 11.0121 15.5659 10.6604V9.75382Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M18.6791 9.75382C18.6791 9.4022 18.9638 9.11715 19.315 9.11715H20.2204C20.5716 9.11715 20.8563 9.4022 20.8563 9.75382V10.6604C20.8563 11.0121 20.5716 11.2971 20.2204 11.2971H19.315C18.9638 11.2971 18.6791 11.0121 18.6791 10.6604V9.75382Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M21.7923 9.75382C21.7923 9.4022 22.077 9.11715 22.4282 9.11715H23.3336C23.6848 9.11715 23.9695 9.4022 23.9695 9.75382V10.6604C23.9695 11.0121 23.6848 11.2971 23.3336 11.2971H22.4282C22.077 11.2971 21.7923 11.0121 21.7923 10.6604V9.75382Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      {/* Row 3 (y=12.2343): column 4 is blue */}
      <path
        d='M0 12.871C0 12.5193 0.284685 12.2343 0.635863 12.2343H1.54133C1.89251 12.2343 2.17719 12.5193 2.17719 12.871V13.7776C2.17719 14.1292 1.89251 14.4143 1.54133 14.4143H0.635863C0.284686 14.4143 0 14.1292 0 13.7776V12.871Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M3.11318 12.871C3.11318 12.5193 3.39787 12.2343 3.74905 12.2343H4.65452C5.00569 12.2343 5.29038 12.5193 5.29038 12.871V13.7776C5.29038 14.1292 5.00569 14.4143 4.65452 14.4143H3.74905C3.39787 14.4143 3.11318 14.1292 3.11318 13.7776V12.871Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M6.22637 12.871C6.22637 12.5193 6.51105 12.2343 6.86223 12.2343H7.7677C8.11888 12.2343 8.40356 12.5193 8.40356 12.871V13.7776C8.40356 14.1292 8.11888 14.4143 7.7677 14.4143H6.86223C6.51105 14.4143 6.22637 14.1292 6.22637 13.7776V12.871Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M9.33955 12.871C9.33955 12.5193 9.62424 12.2343 9.97541 12.2343H10.8809C11.2321 12.2343 11.5167 12.5193 11.5167 12.871V13.7776C11.5167 14.1292 11.2321 14.4143 10.8809 14.4143H9.97541C9.62424 14.4143 9.33955 14.1292 9.33955 13.7776V12.871Z'
        fill='#0E7BC9'
      />
      <path
        d='M12.4527 12.871C12.4527 12.5193 12.7374 12.2343 13.0886 12.2343H13.9941C14.3452 12.2343 14.6299 12.5193 14.6299 12.871V13.7776C14.6299 14.1292 14.3452 14.4143 13.9941 14.4143H13.0886C12.7374 14.4143 12.4527 14.1292 12.4527 13.7776V12.871Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M15.5659 12.871C15.5659 12.5193 15.8506 12.2343 16.2018 12.2343H17.1073C17.4584 12.2343 17.7431 12.5193 17.7431 12.871V13.7776C17.7431 14.1292 17.4584 14.4143 17.1073 14.4143H16.2018C15.8506 14.4143 15.5659 14.1292 15.5659 13.7776V12.871Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M18.6791 12.871C18.6791 12.5193 18.9638 12.2343 19.315 12.2343H20.2204C20.5716 12.2343 20.8563 12.5193 20.8563 12.871V13.7776C20.8563 14.1292 20.5716 14.4143 20.2204 14.4143H19.315C18.9638 14.4143 18.6791 14.1292 18.6791 13.7776V12.871Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M21.7923 12.871C21.7923 12.5193 22.077 12.2343 22.4282 12.2343H23.3336C23.6848 12.2343 23.9695 12.5193 23.9695 12.871V13.7776C23.9695 14.1292 23.6848 14.4143 23.3336 14.4143H22.4282C22.077 14.4143 21.7923 14.1292 21.7923 13.7776V12.871Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      {/* Row 4 (y=15.3514): columns 3 and 5 are blue */}
      <path
        d='M0 15.9881C0 15.6365 0.284685 15.3514 0.635863 15.3514H1.54133C1.89251 15.3514 2.17719 15.6365 2.17719 15.9881V16.8947C2.17719 17.2464 1.89251 17.5314 1.54133 17.5314H0.635863C0.284686 17.5314 0 17.2464 0 16.8947V15.9881Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M3.11318 15.9881C3.11318 15.6365 3.39787 15.3514 3.74905 15.3514H4.65452C5.00569 15.3514 5.29038 15.6365 5.29038 15.9881V16.8947C5.29038 17.2464 5.00569 17.5314 4.65452 17.5314H3.74905C3.39787 17.5314 3.11318 17.2464 3.11318 16.8947V15.9881Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M6.22637 15.9881C6.22637 15.6365 6.51105 15.3514 6.86223 15.3514H7.7677C8.11888 15.3514 8.40356 15.6365 8.40356 15.9881V16.8947C8.40356 17.2464 8.11888 17.5314 7.7677 17.5314H6.86223C6.51105 17.5314 6.22637 17.2464 6.22637 16.8947V15.9881Z'
        fill='#0E7BC9'
      />
      <path
        d='M9.33955 15.9881C9.33955 15.6365 9.62424 15.3514 9.97541 15.3514H10.8809C11.2321 15.3514 11.5167 15.6365 11.5167 15.9881V16.8947C11.5167 17.2464 11.2321 17.5314 10.8809 17.5314H9.97541C9.62424 17.5314 9.33955 17.2464 9.33955 16.8947V15.9881Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M12.4527 15.9881C12.4527 15.6365 12.7374 15.3514 13.0886 15.3514H13.9941C14.3452 15.3514 14.6299 15.6365 14.6299 15.9881V16.8947C14.6299 17.2464 14.3452 17.5314 13.9941 17.5314H13.0886C12.7374 17.5314 12.4527 17.2464 12.4527 16.8947V15.9881Z'
        fill='#0E7BC9'
      />
      <path
        d='M15.5659 15.9881C15.5659 15.6365 15.8506 15.3514 16.2018 15.3514H17.1073C17.4584 15.3514 17.7431 15.6365 17.7431 15.9881V16.8947C17.7431 17.2464 17.4584 17.5314 17.1073 17.5314H16.2018C15.8506 17.5314 15.5659 17.2464 15.5659 16.8947V15.9881Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M18.6791 15.9881C18.6791 15.6365 18.9638 15.3514 19.315 15.3514H20.2204C20.5716 15.3514 20.8563 15.6365 20.8563 15.9881V16.8947C20.8563 17.2464 20.5716 17.5314 20.2204 17.5314H19.315C18.9638 17.5314 18.6791 17.2464 18.6791 16.8947V15.9881Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M21.7923 15.9881C21.7923 15.6365 22.077 15.3514 22.4282 15.3514H23.3336C23.6848 15.3514 23.9695 15.6365 23.9695 15.9881V16.8947C23.9695 17.2464 23.6848 17.5314 23.3336 17.5314H22.4282C22.077 17.5314 21.7923 17.2464 21.7923 16.8947V15.9881Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      {/* Row 5 (y=18.4686): columns 1, 2, 6 and 8 are blue */}
      <path
        d='M0 19.1053C0 18.7536 0.284685 18.4686 0.635863 18.4686H1.54133C1.89251 18.4686 2.17719 18.7536 2.17719 19.1053V20.0119C2.17719 20.3635 1.89251 20.6486 1.54133 20.6486H0.635863C0.284686 20.6486 0 20.3635 0 20.0119V19.1053Z'
        fill='#0E7BC9'
      />
      <path
        d='M3.11318 19.1053C3.11318 18.7536 3.39787 18.4686 3.74905 18.4686H4.65452C5.00569 18.4686 5.29038 18.7536 5.29038 19.1053V20.0119C5.29038 20.3635 5.00569 20.6486 4.65452 20.6486H3.74905C3.39787 20.6486 3.11318 20.3635 3.11318 20.0119V19.1053Z'
        fill='#0E7BC9'
      />
      <path
        d='M6.22637 19.1053C6.22637 18.7536 6.51105 18.4686 6.86223 18.4686H7.7677C8.11888 18.4686 8.40356 18.7536 8.40356 19.1053V20.0119C8.40356 20.3635 8.11888 20.6486 7.7677 20.6486H6.86223C6.51105 20.6486 6.22637 20.3635 6.22637 20.0119V19.1053Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M9.33955 19.1053C9.33955 18.7536 9.62424 18.4686 9.97541 18.4686H10.8809C11.2321 18.4686 11.5167 18.7536 11.5167 19.1053V20.0119C11.5167 20.3635 11.2321 20.6486 10.8809 20.6486H9.97541C9.62424 20.6486 9.33955 20.3635 9.33955 20.0119V19.1053Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M12.4527 19.1053C12.4527 18.7536 12.7374 18.4686 13.0886 18.4686H13.9941C14.3452 18.4686 14.6299 18.7536 14.6299 19.1053V20.0119C14.6299 20.3635 14.3452 20.6486 13.9941 20.6486H13.0886C12.7374 20.6486 12.4527 20.3635 12.4527 20.0119V19.1053Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M15.5659 19.1053C15.5659 18.7536 15.8506 18.4686 16.2018 18.4686H17.1073C17.4584 18.4686 17.7431 18.7536 17.7431 19.1053V20.0119C17.7431 20.3635 17.4584 20.6486 17.1073 20.6486H16.2018C15.8506 20.6486 15.5659 20.3635 15.5659 20.0119V19.1053Z'
        fill='#0E7BC9'
      />
      <path
        d='M18.6791 19.1053C18.6791 18.7536 18.9638 18.4686 19.315 18.4686H20.2204C20.5716 18.4686 20.8563 18.7536 20.8563 19.1053V20.0119C20.8563 20.3635 20.5716 20.6486 20.2204 20.6486H19.315C18.9638 20.6486 18.6791 20.3635 18.6791 20.0119V19.1053Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M21.7923 19.1053C21.7923 18.7536 22.077 18.4686 22.4282 18.4686H23.3336C23.6848 18.4686 23.9695 18.7536 23.9695 19.1053V20.0119C23.9695 20.3635 23.6848 20.6486 23.3336 20.6486H22.4282C22.077 20.6486 21.7923 20.3635 21.7923 20.0119V19.1053Z'
        fill='#0E7BC9'
      />
      {/* Row 6 (y=21.5857): column 7 is blue */}
      <path
        d='M0 22.2224C0 21.8708 0.284685 21.5857 0.635863 21.5857H1.54133C1.89251 21.5857 2.17719 21.8708 2.17719 22.2224V23.129C2.17719 23.4807 1.89251 23.7657 1.54133 23.7657H0.635863C0.284686 23.7657 0 23.4807 0 23.129V22.2224Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M3.11318 22.2224C3.11318 21.8708 3.39787 21.5857 3.74905 21.5857H4.65452C5.00569 21.5857 5.29038 21.8708 5.29038 22.2224V23.129C5.29038 23.4807 5.00569 23.7657 4.65452 23.7657H3.74905C3.39787 23.7657 3.11318 23.4807 3.11318 23.129V22.2224Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M6.22637 22.2224C6.22637 21.8708 6.51105 21.5857 6.86223 21.5857H7.7677C8.11888 21.5857 8.40356 21.8708 8.40356 22.2224V23.129C8.40356 23.4807 8.11888 23.7657 7.7677 23.7657H6.86223C6.51105 23.7657 6.22637 23.4807 6.22637 23.129V22.2224Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M9.33955 22.2224C9.33955 21.8708 9.62424 21.5857 9.97541 21.5857H10.8809C11.2321 21.5857 11.5167 21.8708 11.5167 22.2224V23.129C11.5167 23.4807 11.2321 23.7657 10.8809 23.7657H9.97541C9.62424 23.7657 9.33955 23.4807 9.33955 23.129V22.2224Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M12.4527 22.2224C12.4527 21.8708 12.7374 21.5857 13.0886 21.5857H13.9941C14.3452 21.5857 14.6299 21.8708 14.6299 22.2224V23.129C14.6299 23.4807 14.3452 23.7657 13.9941 23.7657H13.0886C12.7374 23.7657 12.4527 23.4807 12.4527 23.129V22.2224Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M15.5659 22.2224C15.5659 21.8708 15.8506 21.5857 16.2018 21.5857H17.1073C17.4584 21.5857 17.7431 21.8708 17.7431 22.2224V23.129C17.7431 23.4807 17.4584 23.7657 17.1073 23.7657H16.2018C15.8506 23.7657 15.5659 23.4807 15.5659 23.129V22.2224Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M18.6791 22.2224C18.6791 21.8708 18.9638 21.5857 19.315 21.5857H20.2204C20.5716 21.5857 20.8563 21.8708 20.8563 22.2224V23.129C20.8563 23.4807 20.5716 23.7657 20.2204 23.7657H19.315C18.9638 23.7657 18.6791 23.4807 18.6791 23.129V22.2224Z'
        fill='#0E7BC9'
      />
      <path
        d='M21.7923 22.2224C21.7923 21.8708 22.077 21.5857 22.4282 21.5857H23.3336C23.6848 21.5857 23.9695 21.8708 23.9695 22.2224V23.129C23.9695 23.4807 23.6848 23.7657 23.3336 23.7657H22.4282C22.077 23.7657 21.7923 23.4807 21.7923 23.129V22.2224Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      {/* Row 7 (y=24.7029): all faint */}
      <path
        d='M0 25.3396C0 24.9879 0.284685 24.7029 0.635863 24.7029H1.54133C1.89251 24.7029 2.17719 24.9879 2.17719 25.3396V26.2462C2.17719 26.5978 1.89251 26.8829 1.54133 26.8829H0.635863C0.284686 26.8829 0 26.5978 0 26.2462V25.3396Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M3.11318 25.3396C3.11318 24.9879 3.39787 24.7029 3.74905 24.7029H4.65452C5.00569 24.7029 5.29038 24.9879 5.29038 25.3396V26.2462C5.29038 26.5978 5.00569 26.8829 4.65452 26.8829H3.74905C3.39787 26.8829 3.11318 26.5978 3.11318 26.2462V25.3396Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M6.22637 25.3396C6.22637 24.9879 6.51105 24.7029 6.86223 24.7029H7.7677C8.11888 24.7029 8.40356 24.9879 8.40356 25.3396V26.2462C8.40356 26.5978 8.11888 26.8829 7.7677 26.8829H6.86223C6.51105 26.8829 6.22637 26.5978 6.22637 26.2462V25.3396Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M9.33955 25.3396C9.33955 24.9879 9.62424 24.7029 9.97541 24.7029H10.8809C11.2321 24.7029 11.5167 24.9879 11.5167 25.3396V26.2462C11.5167 26.5978 11.2321 26.8829 10.8809 26.8829H9.97541C9.62424 26.8829 9.33955 26.5978 9.33955 26.2462V25.3396Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M12.4527 25.3396C12.4527 24.9879 12.7374 24.7029 13.0886 24.7029H13.9941C14.3452 24.7029 14.6299 24.9879 14.6299 25.3396V26.2462C14.6299 26.5978 14.3452 26.8829 13.9941 26.8829H13.0886C12.7374 26.8829 12.4527 26.5978 12.4527 26.2462V25.3396Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M15.5659 25.3396C15.5659 24.9879 15.8506 24.7029 16.2018 24.7029H17.1073C17.4584 24.7029 17.7431 24.9879 17.7431 25.3396V26.2462C17.7431 26.5978 17.4584 26.8829 17.1073 26.8829H16.2018C15.8506 26.8829 15.5659 26.5978 15.5659 26.2462V25.3396Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M18.6791 25.3396C18.6791 24.9879 18.9638 24.7029 19.315 24.7029H20.2204C20.5716 24.7029 20.8563 24.9879 20.8563 25.3396V26.2462C20.8563 26.5978 20.5716 26.8829 20.2204 26.8829H19.315C18.9638 26.8829 18.6791 26.5978 18.6791 26.2462V25.3396Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M21.7923 25.3396C21.7923 24.9879 22.077 24.7029 22.4282 24.7029H23.3336C23.6848 24.7029 23.9695 24.9879 23.9695 25.3396V26.2462C23.9695 26.5978 23.6848 26.8829 23.3336 26.8829H22.4282C22.077 26.8829 21.7923 26.5978 21.7923 26.2462V25.3396Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      {/* Row 8 (y=27.82): all faint */}
      <path
        d='M0 28.4567C0 28.1051 0.284685 27.82 0.635863 27.82H1.54133C1.89251 27.82 2.17719 28.1051 2.17719 28.4567V29.3633C2.17719 29.715 1.89251 30 1.54133 30H0.635863C0.284686 30 0 29.715 0 29.3633V28.4567Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M3.11318 28.4567C3.11318 28.1051 3.39787 27.82 3.74905 27.82H4.65452C5.00569 27.82 5.29038 28.1051 5.29038 28.4567V29.3633C5.29038 29.715 5.00569 30 4.65452 30H3.74905C3.39787 30 3.11318 29.715 3.11318 29.3633V28.4567Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M6.22637 28.4567C6.22637 28.1051 6.51105 27.82 6.86223 27.82H7.7677C8.11888 27.82 8.40356 28.1051 8.40356 28.4567V29.3633C8.40356 29.715 8.11888 30 7.7677 30H6.86223C6.51105 30 6.22637 29.715 6.22637 29.3633V28.4567Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M9.33955 28.4567C9.33955 28.1051 9.62424 27.82 9.97541 27.82H10.8809C11.2321 27.82 11.5167 28.1051 11.5167 28.4567V29.3633C11.5167 29.715 11.2321 30 10.8809 30H9.97541C9.62424 30 9.33955 29.715 9.33955 29.3633V28.4567Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M12.4527 28.4567C12.4527 28.1051 12.7374 27.82 13.0886 27.82H13.9941C14.3452 27.82 14.6299 28.1051 14.6299 28.4567V29.3633C14.6299 29.715 14.3452 30 13.9941 30H13.0886C12.7374 30 12.4527 29.715 12.4527 29.3633V28.4567Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M15.5659 28.4567C15.5659 28.1051 15.8506 27.82 16.2018 27.82H17.1073C17.4584 27.82 17.7431 28.1051 17.7431 28.4567V29.3633C17.7431 29.715 17.4584 30 17.1073 30H16.2018C15.8506 30 15.5659 29.715 15.5659 29.3633V28.4567Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M18.6791 28.4567C18.6791 28.1051 18.9638 27.82 19.315 27.82H20.2204C20.5716 27.82 20.8563 28.1051 20.8563 28.4567V29.3633C20.8563 29.715 20.5716 30 20.2204 30H19.315C18.9638 30 18.6791 29.715 18.6791 29.3633V28.4567Z'
        fill='#030712'
        fillOpacity='0.1'
      />
      <path
        d='M21.7923 28.4567C21.7923 28.1051 22.077 27.82 22.4282 27.82H23.3336C23.6848 27.82 23.9695 28.1051 23.9695 28.4567V29.3633C23.9695 29.715 23.6848 30 23.3336 30H22.4282C22.077 30 21.7923 29.715 21.7923 29.3633V28.4567Z'
        fill='#030712'
        fillOpacity='0.1'
      />
    </svg>
  )
}

View File

@@ -84,11 +84,9 @@ import {
PolymarketIcon,
PostgresIcon,
PosthogIcon,
PulseIcon,
QdrantIcon,
RDSIcon,
RedditIcon,
ReductoIcon,
ResendIcon,
S3Icon,
SalesforceIcon,
@@ -210,11 +208,9 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
polymarket: PolymarketIcon,
postgresql: PostgresIcon,
posthog: PosthogIcon,
pulse: PulseIcon,
qdrant: QdrantIcon,
rds: RDSIcon,
reddit: RedditIcon,
reducto: ReductoIcon,
resend: ResendIcon,
s3: S3Icon,
salesforce: SalesforceIcon,

View File

@@ -1,3 +1,3 @@
{
"pages": ["index", "basics", "api", "logging", "costs"]
"pages": ["index", "basics", "api", "form", "logging", "costs"]
}

View File

@@ -36,47 +36,43 @@ Connect Google Vault to create exports, list exports, and manage holds within ma
### `google_vault_create_matters_export`
Create an export in a matter
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | The matter ID |
| `exportName` | string | Yes | Name for the export \(avoid special characters\) |
| `corpus` | string | Yes | Data corpus to export \(MAIL, DRIVE, GROUPS, HANGOUTS_CHAT, VOICE\) |
| `accountEmails` | string | No | Comma-separated list of user emails to scope export |
| `orgUnitId` | string | No | Organization unit ID to scope export \(alternative to emails\) |
| `startTime` | string | No | Start time for date filtering \(ISO 8601 format, e.g., 2024-01-01T00:00:00Z\) |
| `endTime` | string | No | End time for date filtering \(ISO 8601 format, e.g., 2024-12-31T23:59:59Z\) |
| `terms` | string | No | Search query terms to filter exported content |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `export` | json | Created export object |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
### `google_vault_list_matters_export`
List exports for a matter
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | The matter ID |
| `pageSize` | number | No | Number of exports to return per page |
| `pageToken` | string | No | Token for pagination |
| `exportId` | string | No | Optional export ID to fetch a specific export |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `exports` | json | Array of export objects |
| `export` | json | Single export object \(when exportId is provided\) |
| `nextPageToken` | string | Token for fetching next page of results |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
### `google_vault_download_export_file`
@@ -86,10 +82,10 @@ Download a single file from a Google Vault export (GCS object)
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | The matter ID |
| `bucketName` | string | Yes | GCS bucket name from cloudStorageSink.files.bucketName |
| `objectName` | string | Yes | GCS object name from cloudStorageSink.files.objectName |
| `fileName` | string | No | Optional filename override for the downloaded file |
| `matterId` | string | Yes | No description |
| `bucketName` | string | Yes | No description |
| `objectName` | string | Yes | No description |
| `fileName` | string | No | No description |
#### Output
@@ -99,84 +95,82 @@ Download a single file from a Google Vault export (GCS object)
### `google_vault_create_matters_holds`
Create a hold in a matter
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | The matter ID |
| `holdName` | string | Yes | Name for the hold |
| `corpus` | string | Yes | Data corpus to hold \(MAIL, DRIVE, GROUPS, HANGOUTS_CHAT, VOICE\) |
| `accountEmails` | string | No | Comma-separated list of user emails to put on hold |
| `orgUnitId` | string | No | Organization unit ID to put on hold \(alternative to accounts\) |
| `terms` | string | No | Search terms to filter held content \(for MAIL and GROUPS corpus\) |
| `startTime` | string | No | Start time for date filtering \(ISO 8601 format, for MAIL and GROUPS corpus\) |
| `endTime` | string | No | End time for date filtering \(ISO 8601 format, for MAIL and GROUPS corpus\) |
| `includeSharedDrives` | boolean | No | Include files in shared drives \(for DRIVE corpus\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `hold` | json | Created hold object |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
### `google_vault_list_matters_holds`
List holds for a matter
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | The matter ID |
| `pageSize` | number | No | Number of holds to return per page |
| `pageToken` | string | No | Token for pagination |
| `holdId` | string | No | Optional hold ID to fetch a specific hold |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `holds` | json | Array of hold objects |
| `hold` | json | Single hold object \(when holdId is provided\) |
| `nextPageToken` | string | Token for fetching next page of results |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
### `google_vault_create_matters`
Create a new matter in Google Vault
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | Yes | Name for the new matter |
| `description` | string | No | Optional description for the matter |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `matter` | json | Created matter object |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
### `google_vault_list_matters`
List matters, or get a specific matter if matterId is provided
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `pageSize` | number | No | Number of matters to return per page |
| `pageToken` | string | No | Token for pagination |
| `matterId` | string | No | Optional matter ID to fetch a specific matter |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `matters` | json | Array of matter objects |
| `matter` | json | Single matter object \(when matterId is provided\) |
| `nextPageToken` | string | Token for fetching next page of results |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |

View File

@@ -79,11 +79,9 @@
"polymarket",
"postgresql",
"posthog",
"pulse",
"qdrant",
"rds",
"reddit",
"reducto",
"resend",
"s3",
"salesforce",

View File

@@ -1,72 +0,0 @@
---
title: Pulse
description: Extract text from documents using Pulse OCR
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="pulse"
color="#E0E0E0"
/>
{/* MANUAL-CONTENT-START:intro */}
The [Pulse](https://www.pulseapi.com/) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
With Pulse, you can:
- **Extract text from documents**: Quickly convert scanned PDFs, images, and Office documents to usable text, markdown, or JSON.
- **Process documents by URL or upload**: Simply provide a file URL or use upload to extract text from local documents or remote resources.
- **Flexible output formats**: Choose between markdown, plain text, or JSON representations of the extracted content for downstream processing.
- **Selective page processing**: Specify a range of pages to process, reducing processing time and cost when you only need part of a document.
- **Figure and table extraction**: Optionally extract figures and tables, with automatic caption and description generation for populated context.
- **Get processing insights**: Receive detailed metadata on each job, including file type, page count, processing time, and more.
- **Integration-ready responses**: Incorporate extracted content into research, workflow automation, or data analysis pipelines.
Ideal for automating tedious document review, enabling content summarization, research, and more, Pulse Parser brings real-world documents into the digital workflow era.
If you need accurate, scalable, and developer-friendly document parsing capabilities—across formats, languages, and layouts—Pulse empowers your agents to read the world.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via URL or upload.
## Tools
### `pulse_parser`
Parse documents (PDF, images, Office docs) using Pulse OCR API
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `filePath` | string | Yes | URL to a document to be processed |
| `fileUpload` | object | No | File upload data from file-upload component |
| `pages` | string | No | Page range to process \(1-indexed, e.g., "1-2,5"\) |
| `extractFigure` | boolean | No | Enable figure extraction from the document |
| `figureDescription` | boolean | No | Generate descriptions/captions for extracted figures |
| `returnHtml` | boolean | No | Include HTML in the response |
| `chunking` | string | No | Chunking strategies \(comma-separated: semantic, header, page, recursive\) |
| `chunkSize` | number | No | Maximum characters per chunk when chunking is enabled |
| `apiKey` | string | Yes | Pulse API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `markdown` | string | Extracted content in markdown format |
| `page_count` | number | Number of pages in the document |
| `job_id` | string | Unique job identifier |
| `bounding_boxes` | json | Bounding box layout information |
| `extraction_url` | string | URL for extraction results \(for large documents\) |
| `html` | string | HTML content if requested |
| `structured_output` | json | Structured output if schema was provided |
| `chunks` | json | Chunked content if chunking was enabled |
| `figures` | json | Extracted figures if figure extraction was enabled |

View File

@@ -1,63 +0,0 @@
---
title: Reducto
description: Extract text from PDF documents
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="reducto"
color="#5c0c5c"
/>
{/* MANUAL-CONTENT-START:intro */}
The [Reducto](https://reducto.ai/) tool enables fast and accurate extraction of text and data from PDF documents via OCR (Optical Character Recognition). Reducto is designed for agent workflows, making it easy to process uploaded or linked PDFs and transform their contents into ready-to-use information.
With the Reducto tool, you can:
- **Extract text and tables from PDFs**: Quickly convert scanned or digital PDFs to text, markdown, or structured JSON.
- **Parse PDFs from uploads or URLs**: Process documents either by uploading a PDF or specifying a direct URL.
- **Customize output formatting**: Choose your preferred output format—markdown, plain text, or JSON—and specify table formats as markdown or HTML.
- **Select specific pages**: Optionally extract content from particular pages to optimize processing and focus on what matters.
- **Receive detailed processing metadata**: Alongside extracted content, get job details, processing times, source file info, page counts, and OCR usage stats for audit and automation.
Whether you're automating workflow steps, extracting business-critical information, or unlocking archival documents for search and analysis, Reducto's OCR parser gives you structured, actionable data from even the most complex PDFs.
Looking for reliable and scalable PDF parsing? Reducto is optimized for developer and agent use—providing accuracy, speed, and flexibility for modern document understanding.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF documents, or from a URL.
## Tools
### `reducto_parser`
Parse PDF documents using Reducto OCR API
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `filePath` | string | Yes | URL to a PDF document to be processed |
| `fileUpload` | object | No | File upload data from file-upload component |
| `pages` | array | No | Specific pages to process \(1-indexed page numbers\) |
| `tableOutputFormat` | string | No | Table output format \(html or markdown\). Defaults to markdown. |
| `apiKey` | string | Yes | Reducto API key \(REDUCTO_API_KEY\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `job_id` | string | Unique identifier for the processing job |
| `duration` | number | Processing time in seconds |
| `usage` | json | Resource consumption data |
| `result` | json | Parsed document content with chunks and blocks |
| `pdf_url` | string | Storage URL of converted PDF |
| `studio_link` | string | Link to Reducto studio interface |

View File

@@ -11,8 +11,10 @@
"next-env.d.ts",
"**/*.ts",
"**/*.tsx",
".next/types/**/*.ts",
"content/docs/execution/index.mdx",
"content/docs/connections/index.mdx"
"content/docs/connections/index.mdx",
".next/dev/types/**/*.ts"
],
"exclude": ["node_modules", ".next"]
"exclude": ["node_modules"]
}

View File

@@ -16,12 +16,9 @@ export function PostHogProvider({ children }: { children: React.ReactNode }) {
ui_host: 'https://us.posthog.com',
defaults: '2025-05-24',
person_profiles: 'identified_only',
autocapture: false,
capture_pageview: false,
capture_pageview: true,
capture_pageleave: false,
capture_performance: false,
capture_dead_clicks: false,
enable_heatmaps: false,
session_recording: {
maskAllInputs: false,
maskInputOptions: {
@@ -32,7 +29,13 @@ export function PostHogProvider({ children }: { children: React.ReactNode }) {
recordHeaders: false,
recordBody: false,
},
autocapture: {
dom_event_allowlist: ['click', 'submit', 'change'],
element_allowlist: ['button', 'a', 'input'],
},
capture_dead_clicks: false,
persistence: 'localStorage+cookie',
enable_heatmaps: false,
})
}
}, [])

View File

@@ -20,6 +20,7 @@ export type AppSession = {
id?: string
userId?: string
activeOrganizationId?: string
impersonatedBy?: string | null
}
} | null

View File

@@ -11,7 +11,7 @@
*/
:root {
--sidebar-width: 232px; /* SIDEBAR_WIDTH.DEFAULT */
--panel-width: 320px; /* PANEL_WIDTH.DEFAULT */
--panel-width: 290px; /* PANEL_WIDTH.DEFAULT */
--toolbar-triggers-height: 300px; /* TOOLBAR_TRIGGERS_HEIGHT.DEFAULT */
--editor-connections-height: 172px; /* EDITOR_CONNECTIONS_HEIGHT.DEFAULT */
--terminal-height: 155px; /* TERMINAL_HEIGHT.DEFAULT */
@@ -77,6 +77,24 @@
cursor: grabbing !important;
}
/**
* Selected node ring indicator
* Uses a pseudo-element overlay to match the original behavior (absolute inset-0 z-40)
*/
.react-flow__node.selected > div > div {
position: relative;
}
.react-flow__node.selected > div > div::after {
content: "";
position: absolute;
inset: 0;
z-index: 40;
border-radius: 8px;
box-shadow: 0 0 0 1.75px var(--brand-secondary);
pointer-events: none;
}
/**
* Color tokens - single source of truth for all colors
* Light mode: Warm theme
@@ -558,6 +576,32 @@ input[type="search"]::-ms-clear {
transition-duration: 300ms;
}
.streaming-effect {
@apply relative overflow-hidden;
}
.streaming-effect::after {
content: "";
@apply pointer-events-none absolute left-0 top-0 h-full w-full;
background: linear-gradient(
90deg,
rgba(128, 128, 128, 0) 0%,
rgba(128, 128, 128, 0.1) 50%,
rgba(128, 128, 128, 0) 100%
);
animation: code-shimmer 1.5s infinite;
z-index: 10;
}
.dark .streaming-effect::after {
background: linear-gradient(
90deg,
rgba(180, 180, 180, 0) 0%,
rgba(180, 180, 180, 0.1) 50%,
rgba(180, 180, 180, 0) 100%
);
}
.loading-placeholder::placeholder {
animation: placeholder-pulse 1.5s ease-in-out infinite;
}

View File

@@ -0,0 +1,363 @@
'use client'
import { useCallback, useState } from 'react'
import { AlertCircle, ArrowLeft, ChevronLeft, ChevronRight, Loader2, Search } from 'lucide-react'
import Link from 'next/link'
import { useRouter } from 'next/navigation'
import {
Avatar,
AvatarFallback,
AvatarImage,
Badge,
Button,
Input,
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from '@/components/emcn'
import { client } from '@/lib/auth/auth-client'
/** Page size used for the user search results table. */
const USERS_PER_PAGE = 10

/** One user row as returned by /api/admin/impersonate/search. */
interface User {
  id: string
  name: string
  email: string
  image: string | null
  role: string | null
  createdAt: string // ISO timestamp string (serialized by the search API)
}

/** Pagination metadata echoed back by the search API. */
interface Pagination {
  total: number
  limit: number
  offset: number
}

/** Props for the ImpersonateClient component. */
interface ImpersonateClientProps {
  currentUserId: string
}
/**
 * Extracts initials from a user's name.
 *
 * Returns an empty string for a missing/blank name, the uppercased first
 * letter for a single-word name, and the uppercased first letters of the
 * first and last words otherwise.
 */
function getInitials(name: string | undefined | null): string {
  const trimmed = name?.trim()
  if (!trimmed) return ''

  const words = trimmed.split(' ')
  const firstInitial = words[0][0]
  if (words.length < 2) {
    return firstInitial.toUpperCase()
  }

  const lastInitial = words[words.length - 1][0]
  return `${firstInitial}${lastInitial}`.toUpperCase()
}
/**
 * Formats a date string to a readable format,
 * e.g. "Jan 15, 2026" (en-US locale).
 */
function formatDate(dateString: string): string {
  const formatOptions: Intl.DateTimeFormatOptions = {
    year: 'numeric',
    month: 'short',
    day: 'numeric',
  }
  return new Date(dateString).toLocaleDateString('en-US', formatOptions)
}
/**
 * Client UI for admin user impersonation.
 *
 * Provides a name/email search (backed by /api/admin/impersonate/search)
 * with paginated results, and a per-row "Impersonate" action that starts an
 * impersonation session via the auth client's admin plugin and then
 * redirects to /workspace.
 */
export default function ImpersonateClient({ currentUserId }: ImpersonateClientProps) {
  const router = useRouter()

  // Search input and the most recent result set.
  const [searchTerm, setSearchTerm] = useState('')
  const [users, setUsers] = useState<User[]>([])
  const [pagination, setPagination] = useState<Pagination>({
    total: 0,
    limit: USERS_PER_PAGE,
    offset: 0,
  })
  const [currentPage, setCurrentPage] = useState(1)

  // Request state: search in flight, last error message, and the id of the
  // row whose impersonation request is pending (disables that row's button).
  const [searching, setSearching] = useState(false)
  const [error, setError] = useState<string | null>(null)
  const [impersonatingId, setImpersonatingId] = useState<string | null>(null)

  // Derived pagination flags.
  const totalPages = Math.ceil(pagination.total / pagination.limit)
  const hasNextPage = currentPage < totalPages
  const hasPrevPage = currentPage > 1

  // Fetches one page of search results for the current term. A blank term
  // clears the table without calling the API.
  const searchUsers = useCallback(
    async (page = 1) => {
      if (!searchTerm.trim()) {
        setUsers([])
        setPagination({ total: 0, limit: USERS_PER_PAGE, offset: 0 })
        return
      }

      setSearching(true)
      setError(null)
      const offset = (page - 1) * USERS_PER_PAGE

      try {
        const response = await fetch(
          `/api/admin/impersonate/search?q=${encodeURIComponent(searchTerm.trim())}&limit=${USERS_PER_PAGE}&offset=${offset}`
        )

        if (!response.ok) {
          throw new Error('Failed to search users')
        }

        const data = await response.json()
        setUsers(data.users)
        setPagination(data.pagination)
        setCurrentPage(page)
      } catch (err) {
        setError(err instanceof Error ? err.message : 'Failed to search users')
        setUsers([])
      } finally {
        setSearching(false)
      }
    },
    [searchTerm]
  )

  // Enter in the search input triggers a fresh search from page 1.
  const handleKeyDown = (e: React.KeyboardEvent) => {
    if (e.key === 'Enter') {
      searchUsers(1)
    }
  }

  // Jumps to an arbitrary (in-range) page of the current search.
  const goToPage = useCallback(
    (page: number) => {
      if (page >= 1 && page <= totalPages) {
        searchUsers(page)
      }
    },
    [totalPages, searchUsers]
  )

  const nextPage = useCallback(() => {
    if (hasNextPage) {
      searchUsers(currentPage + 1)
    }
  }, [hasNextPage, currentPage, searchUsers])

  const prevPage = useCallback(() => {
    if (hasPrevPage) {
      searchUsers(currentPage - 1)
    }
  }, [hasPrevPage, currentPage, searchUsers])

  // Starts impersonation for the given user. Self-impersonation is rejected
  // client-side; on success we navigate to the workspace as that user, so
  // impersonatingId is intentionally only reset on failure.
  const handleImpersonate = async (userId: string) => {
    if (userId === currentUserId) {
      setError('You cannot impersonate yourself')
      return
    }

    setImpersonatingId(userId)
    setError(null)

    try {
      const result = await client.admin.impersonateUser({
        userId,
      })

      if (result.error) {
        throw new Error(result.error.message || 'Failed to impersonate user')
      }

      // Redirect to workspace after successful impersonation
      router.push('/workspace')
      router.refresh()
    } catch (err) {
      setError(err instanceof Error ? err.message : 'Failed to impersonate user')
      setImpersonatingId(null)
    }
  }

  return (
    <div className='flex min-h-screen flex-col bg-[var(--bg)]'>
      {/* Header */}
      <div className='border-[var(--border)] border-b bg-[var(--bg-secondary)] px-6 py-4'>
        <div className='mx-auto flex max-w-5xl items-center gap-4'>
          <Link href='/workspace'>
            <Button variant='ghost' size='sm' className='gap-2'>
              <ArrowLeft className='h-4 w-4' />
              Back to Workspace
            </Button>
          </Link>
          <div className='h-6 w-px bg-[var(--border)]' />
          <h1 className='font-semibold text-[var(--text)] text-lg'>User Impersonation</h1>
        </div>
      </div>

      {/* Content */}
      <div className='mx-auto w-full max-w-5xl p-6'>
        {/* Search */}
        <div className='mb-6'>
          <label
            htmlFor='user-search'
            className='mb-2 block font-medium text-[var(--text-secondary)] text-sm'
          >
            Search for a user by name or email
          </label>
          <div className='flex gap-2'>
            <div className='relative flex-1'>
              <Search className='-translate-y-1/2 absolute top-1/2 left-3 h-4 w-4 text-[var(--text-muted)]' />
              <Input
                id='user-search'
                type='text'
                placeholder='Enter name or email...'
                value={searchTerm}
                onChange={(e) => setSearchTerm(e.target.value)}
                onKeyDown={handleKeyDown}
                className='pl-10'
              />
            </div>
            <Button onClick={() => searchUsers(1)} disabled={searching || !searchTerm.trim()}>
              {searching ? <Loader2 className='h-4 w-4 animate-spin' /> : 'Search'}
            </Button>
          </div>
        </div>

        {/* Error */}
        {error && (
          <div className='mb-6 rounded-lg border border-red-500/30 bg-red-500/10 p-4'>
            <div className='flex gap-3'>
              <AlertCircle className='h-5 w-5 flex-shrink-0 text-red-500' />
              <p className='text-red-200 text-sm'>{error}</p>
            </div>
          </div>
        )}

        {/* Results */}
        {users.length > 0 && (
          <div className='rounded-lg border border-[var(--border)] bg-[var(--bg-secondary)]'>
            <div className='border-[var(--border)] border-b px-4 py-3'>
              <p className='text-[var(--text-secondary)] text-sm'>
                Found {pagination.total} user{pagination.total !== 1 ? 's' : ''}
              </p>
            </div>
            <Table>
              <TableHeader>
                <TableRow>
                  <TableHead>User</TableHead>
                  <TableHead>Email</TableHead>
                  <TableHead>Role</TableHead>
                  <TableHead>Created</TableHead>
                  <TableHead className='text-right'>Action</TableHead>
                </TableRow>
              </TableHeader>
              <TableBody>
                {users.map((user) => (
                  <TableRow key={user.id}>
                    <TableCell>
                      <div className='flex items-center gap-3'>
                        <Avatar size='sm'>
                          <AvatarImage src={user.image || undefined} alt={user.name} />
                          <AvatarFallback>{getInitials(user.name)}</AvatarFallback>
                        </Avatar>
                        <div className='flex items-center gap-2'>
                          <span className='font-medium text-[var(--text)]'>{user.name}</span>
                          {user.id === currentUserId && <Badge variant='blue'>You</Badge>}
                        </div>
                      </div>
                    </TableCell>
                    <TableCell className='text-[var(--text-secondary)]'>{user.email}</TableCell>
                    <TableCell>
                      {user.role ? (
                        <Badge variant='gray'>{user.role}</Badge>
                      ) : (
                        <span className='text-[var(--text-muted)]'>-</span>
                      )}
                    </TableCell>
                    <TableCell className='text-[var(--text-secondary)]'>
                      {formatDate(user.createdAt)}
                    </TableCell>
                    <TableCell className='text-right'>
                      <Button
                        variant='outline'
                        size='sm'
                        onClick={() => handleImpersonate(user.id)}
                        disabled={impersonatingId === user.id || user.id === currentUserId}
                      >
                        {impersonatingId === user.id ? (
                          <>
                            <Loader2 className='mr-2 h-3 w-3 animate-spin' />
                            Impersonating...
                          </>
                        ) : (
                          'Impersonate'
                        )}
                      </Button>
                    </TableCell>
                  </TableRow>
                ))}
              </TableBody>
            </Table>

            {/* Pagination */}
            {totalPages > 1 && (
              <div className='flex items-center justify-center border-[var(--border)] border-t px-4 py-3'>
                <div className='flex items-center gap-1'>
                  <Button
                    variant='ghost'
                    size='sm'
                    onClick={prevPage}
                    disabled={!hasPrevPage || searching}
                  >
                    <ChevronLeft className='h-3.5 w-3.5' />
                  </Button>
                  <div className='mx-3 flex items-center gap-4'>
                    {/* Window of up to 5 page buttons, kept centered on the
                        current page and clamped to the valid page range. */}
                    {Array.from({ length: Math.min(totalPages, 5) }, (_, i) => {
                      let page: number
                      if (totalPages <= 5) {
                        page = i + 1
                      } else if (currentPage <= 3) {
                        page = i + 1
                      } else if (currentPage >= totalPages - 2) {
                        page = totalPages - 4 + i
                      } else {
                        page = currentPage - 2 + i
                      }
                      if (page < 1 || page > totalPages) return null
                      return (
                        <button
                          key={page}
                          onClick={() => goToPage(page)}
                          disabled={searching}
                          className={`font-medium text-sm transition-colors hover:text-[var(--text)] disabled:opacity-50 ${
                            page === currentPage ? 'text-[var(--text)]' : 'text-[var(--text-muted)]'
                          }`}
                        >
                          {page}
                        </button>
                      )
                    })}
                  </div>
                  <Button
                    variant='ghost'
                    size='sm'
                    onClick={nextPage}
                    disabled={!hasNextPage || searching}
                  >
                    <ChevronRight className='h-3.5 w-3.5' />
                  </Button>
                </div>
              </div>
            )}
          </div>
        )}

        {/* Empty state */}
        {searchTerm && !searching && users.length === 0 && !error && (
          <div className='rounded-lg border border-[var(--border)] bg-[var(--bg-secondary)] p-8 text-center'>
            <p className='text-[var(--text-secondary)]'>No users found matching your search</p>
          </div>
        )}
      </div>
    </div>
  )
}

View File

@@ -0,0 +1,31 @@
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { notFound } from 'next/navigation'
import { getSession } from '@/lib/auth'
import ImpersonateClient from './impersonate-client'
export const dynamic = 'force-dynamic'
/**
 * Admin impersonation page - allows superadmins to impersonate other users.
 *
 * Resolves the current session, verifies the user's role is 'superadmin'
 * (any other case renders a 404 via notFound), then hands off to the
 * client component.
 */
export default async function ImpersonatePage() {
  const session = await getSession()
  if (!session?.user?.id) {
    notFound()
  }

  const roleRows = await db
    .select({ role: user.role })
    .from(user)
    .where(eq(user.id, session.user.id))
    .limit(1)

  const isSuperadmin = roleRows[0]?.role === 'superadmin'
  if (!isSuperadmin) {
    notFound()
  }

  return <ImpersonateClient currentUserId={session.user.id} />
}

View File

@@ -0,0 +1,94 @@
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { count, eq, ilike, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
const logger = createLogger('ImpersonateSearchAPI')
const DEFAULT_LIMIT = 10
const MAX_LIMIT = 50
/**
 * GET /api/admin/impersonate/search
 *
 * Search for users to impersonate. Only accessible by superadmins.
 *
 * Query params:
 * - q: Search term (searches name and email; minimum 2 characters)
 * - limit: Number of results per page (default: 10, max: 50)
 * - offset: Number of results to skip (default: 0)
 *
 * Response: { users: Array<{ id, name, email, image, role, createdAt }>, pagination: { total, limit, offset } }
 */
export async function GET(request: NextRequest) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      // 404 (not 401/403) so the endpoint's existence is not revealed.
      return new NextResponse(null, { status: 404 })
    }

    const [currentUser] = await db
      .select({ role: user.role })
      .from(user)
      .where(eq(user.id, session.user.id))
      .limit(1)

    if (currentUser?.role !== 'superadmin') {
      return new NextResponse(null, { status: 404 })
    }

    const { searchParams } = new URL(request.url)
    const query = searchParams.get('q')?.trim()

    // Parse pagination params defensively: Number.parseInt yields NaN for
    // garbage input, and NaN would otherwise pass through Math.min/Math.max
    // unchanged and end up in the SQL LIMIT/OFFSET.
    const rawLimit = Number.parseInt(searchParams.get('limit') ?? '', 10)
    const limit = Number.isNaN(rawLimit)
      ? DEFAULT_LIMIT
      : Math.min(Math.max(1, rawLimit), MAX_LIMIT)
    const rawOffset = Number.parseInt(searchParams.get('offset') ?? '', 10)
    const offset = Number.isNaN(rawOffset) ? 0 : Math.max(0, rawOffset)

    // Require at least 2 characters to avoid scanning the whole user table.
    if (!query || query.length < 2) {
      return NextResponse.json({
        users: [],
        pagination: { total: 0, limit, offset },
      })
    }

    // NOTE(review): '%' and '_' in the query act as LIKE wildcards and are
    // not escaped here, so typing them gives wildcard semantics. Likely fine
    // for an admin-only search — confirm this is intended.
    const searchPattern = `%${query}%`
    const whereCondition = or(ilike(user.name, searchPattern), ilike(user.email, searchPattern))

    const [totalResult] = await db.select({ count: count() }).from(user).where(whereCondition)

    const users = await db
      .select({
        id: user.id,
        name: user.name,
        email: user.email,
        image: user.image,
        role: user.role,
        createdAt: user.createdAt,
      })
      .from(user)
      .where(whereCondition)
      // Deterministic ordering (created time, then id as a tiebreaker) so
      // limit/offset pagination returns stable, non-overlapping pages.
      .orderBy(user.createdAt, user.id)
      .limit(limit)
      .offset(offset)

    logger.info(`Superadmin ${session.user.id} searched for users with query: ${query}`)

    return NextResponse.json({
      users: users.map((u) => ({
        ...u,
        // Serialize dates explicitly so clients receive ISO strings.
        createdAt: u.createdAt.toISOString(),
      })),
      pagination: {
        total: totalResult?.count ?? 0,
        limit,
        offset,
      },
    })
  } catch (error) {
    logger.error('Failed to search users for impersonation', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

View File

@@ -8,7 +8,6 @@ import { getSession } from '@/lib/auth'
import { generateChatTitle } from '@/lib/copilot/chat-title'
import { getCopilotModel } from '@/lib/copilot/config'
import { SIM_AGENT_API_URL_DEFAULT, SIM_AGENT_VERSION } from '@/lib/copilot/constants'
import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
@@ -41,8 +40,34 @@ const ChatMessageSchema = z.object({
userMessageId: z.string().optional(), // ID from frontend for the user message
chatId: z.string().optional(),
workflowId: z.string().min(1, 'Workflow ID is required'),
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
model: z
.enum([
'gpt-5-fast',
'gpt-5',
'gpt-5-medium',
'gpt-5-high',
'gpt-5.1-fast',
'gpt-5.1',
'gpt-5.1-medium',
'gpt-5.1-high',
'gpt-5-codex',
'gpt-5.1-codex',
'gpt-5.2',
'gpt-5.2-codex',
'gpt-5.2-pro',
'gpt-4o',
'gpt-4.1',
'o3',
'claude-4-sonnet',
'claude-4.5-haiku',
'claude-4.5-sonnet',
'claude-4.5-opus',
'claude-4.1-opus',
'gemini-3-pro',
])
.optional()
.default('claude-4.5-opus'),
mode: z.enum(['ask', 'agent', 'plan']).optional().default('agent'),
prefetch: z.boolean().optional(),
createNewChat: z.boolean().optional().default(false),
stream: z.boolean().optional().default(true),
@@ -270,8 +295,7 @@ export async function POST(req: NextRequest) {
}
const defaults = getCopilotModel('chat')
const selectedModel = model || defaults.model
const envModel = env.COPILOT_MODEL || defaults.model
const modelToUse = env.COPILOT_MODEL || defaults.model
let providerConfig: CopilotProviderConfig | undefined
const providerEnv = env.COPILOT_PROVIDER as any
@@ -280,7 +304,7 @@ export async function POST(req: NextRequest) {
if (providerEnv === 'azure-openai') {
providerConfig = {
provider: 'azure-openai',
model: envModel,
model: modelToUse,
apiKey: env.AZURE_OPENAI_API_KEY,
apiVersion: 'preview',
endpoint: env.AZURE_OPENAI_ENDPOINT,
@@ -288,7 +312,7 @@ export async function POST(req: NextRequest) {
} else if (providerEnv === 'vertex') {
providerConfig = {
provider: 'vertex',
model: envModel,
model: modelToUse,
apiKey: env.COPILOT_API_KEY,
vertexProject: env.VERTEX_PROJECT,
vertexLocation: env.VERTEX_LOCATION,
@@ -296,15 +320,12 @@ export async function POST(req: NextRequest) {
} else {
providerConfig = {
provider: providerEnv,
model: selectedModel,
model: modelToUse,
apiKey: env.COPILOT_API_KEY,
}
}
}
const effectiveMode = mode === 'agent' ? 'build' : mode
const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
// Determine conversationId to use for this request
const effectiveConversationId =
(currentChat?.conversationId as string | undefined) || conversationId
@@ -324,7 +345,7 @@ export async function POST(req: NextRequest) {
}
} | null = null
if (effectiveMode === 'build') {
if (mode === 'agent') {
// Build base tools (executed locally, not deferred)
// Include function_execute for code execution capability
baseTools = [
@@ -431,8 +452,8 @@ export async function POST(req: NextRequest) {
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
model: selectedModel,
mode: transportMode,
model: model,
mode: mode,
messageId: userMessageIdToUse,
version: SIM_AGENT_VERSION,
...(providerConfig ? { provider: providerConfig } : {}),
@@ -456,7 +477,7 @@ export async function POST(req: NextRequest) {
hasConversationId: !!effectiveConversationId,
hasFileAttachments: processedFileContents.length > 0,
messageLength: message.length,
mode: effectiveMode,
mode,
hasTools: integrationTools.length > 0,
toolCount: integrationTools.length,
hasBaseTools: baseTools.length > 0,

View File

@@ -4,7 +4,6 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { COPILOT_MODES } from '@/lib/copilot/models'
import {
authenticateCopilotRequestSessionOnly,
createInternalServerErrorResponse,
@@ -46,7 +45,7 @@ const UpdateMessagesSchema = z.object({
planArtifact: z.string().nullable().optional(),
config: z
.object({
mode: z.enum(COPILOT_MODES).optional(),
mode: z.enum(['ask', 'build', 'plan']).optional(),
model: z.string().optional(),
})
.nullable()

View File

@@ -14,7 +14,8 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
import { REFERENCE } from '@/executor/constants'
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
import { executeTool } from '@/tools'
import { getTool, resolveToolId } from '@/tools/utils'
@@ -27,6 +28,45 @@ const ExecuteToolSchema = z.object({
workflowId: z.string().optional(),
})
/**
* Resolves all {{ENV_VAR}} references in a value recursively
* Works with strings, arrays, and objects
*/
function resolveEnvVarReferences(value: any, envVars: Record<string, string>): any {
if (typeof value === 'string') {
// Check for exact match: entire string is "{{VAR_NAME}}"
const exactMatchPattern = new RegExp(
`^\\${REFERENCE.ENV_VAR_START}([^}]+)\\${REFERENCE.ENV_VAR_END}$`
)
const exactMatch = exactMatchPattern.exec(value)
if (exactMatch) {
const envVarName = exactMatch[1].trim()
return envVars[envVarName] ?? value
}
// Check for embedded references: "prefix {{VAR}} suffix"
const envVarPattern = createEnvVarPattern()
return value.replace(envVarPattern, (match, varName) => {
const trimmedName = varName.trim()
return envVars[trimmedName] ?? match
})
}
if (Array.isArray(value)) {
return value.map((item) => resolveEnvVarReferences(item, envVars))
}
if (value !== null && typeof value === 'object') {
const resolved: Record<string, any> = {}
for (const [key, val] of Object.entries(value)) {
resolved[key] = resolveEnvVarReferences(val, envVars)
}
return resolved
}
return value
}
export async function POST(req: NextRequest) {
const tracker = createRequestTracker()
@@ -105,17 +145,7 @@ export async function POST(req: NextRequest) {
// Build execution params starting with LLM-provided arguments
// Resolve all {{ENV_VAR}} references in the arguments
const executionParams: Record<string, any> = resolveEnvVarReferences(
toolArgs,
decryptedEnvVars,
{
resolveExactMatch: true,
allowEmbedded: true,
trimKeys: true,
onMissing: 'keep',
deep: true,
}
) as Record<string, any>
const executionParams: Record<string, any> = resolveEnvVarReferences(toolArgs, decryptedEnvVars)
logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {
toolName,

View File

@@ -2,13 +2,12 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import type { CopilotModelId } from '@/lib/copilot/models'
import { db } from '@/../../packages/db'
import { settings } from '@/../../packages/db/schema'
const logger = createLogger('CopilotUserModelsAPI')
const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
'gpt-4o': false,
'gpt-4.1': false,
'gpt-5-fast': false,
@@ -29,7 +28,7 @@ const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
'claude-4.5-haiku': true,
'claude-4.5-sonnet': true,
'claude-4.5-opus': true,
'claude-4.1-opus': false,
// 'claude-4.1-opus': true,
'gemini-3-pro': true,
}
@@ -55,9 +54,7 @@ export async function GET(request: NextRequest) {
const mergedModels = { ...DEFAULT_ENABLED_MODELS }
for (const [modelId, enabled] of Object.entries(userModelsMap)) {
if (modelId in mergedModels) {
mergedModels[modelId as CopilotModelId] = enabled
}
mergedModels[modelId] = enabled
}
const hasNewModels = Object.keys(DEFAULT_ENABLED_MODELS).some(

View File

@@ -22,15 +22,15 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
const hasAdminPrivileges =
currentUser[0]?.role === 'admin' || currentUser[0]?.role === 'superadmin'
if (!currentUser[0]?.isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to verify creator: ${id}`)
return NextResponse.json({ error: 'Only super users can verify creators' }, { status: 403 })
if (!hasAdminPrivileges) {
logger.warn(`[${requestId}] Non-admin user attempted to verify creator: ${id}`)
return NextResponse.json({ error: 'Only admin users can verify creators' }, { status: 403 })
}
// Check if creator exists
const existingCreator = await db
.select()
.from(templateCreators)
@@ -42,7 +42,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Creator not found' }, { status: 404 })
}
// Update creator verified status to true
await db
.update(templateCreators)
.set({ verified: true, updatedAt: new Date() })
@@ -75,15 +74,15 @@ export async function DELETE(
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
const hasAdminPrivileges =
currentUser[0]?.role === 'admin' || currentUser[0]?.role === 'superadmin'
if (!currentUser[0]?.isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to unverify creator: ${id}`)
return NextResponse.json({ error: 'Only super users can unverify creators' }, { status: 403 })
if (!hasAdminPrivileges) {
logger.warn(`[${requestId}] Non-admin user attempted to unverify creator: ${id}`)
return NextResponse.json({ error: 'Only admin users can unverify creators' }, { status: 403 })
}
// Check if creator exists
const existingCreator = await db
.select()
.from(templateCreators)
@@ -95,7 +94,6 @@ export async function DELETE(
return NextResponse.json({ error: 'Creator not found' }, { status: 404 })
}
// Update creator verified status to false
await db
.update(templateCreators)
.set({ verified: false, updatedAt: new Date() })

View File

@@ -11,7 +11,6 @@ import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -36,7 +35,10 @@ async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
const startBlock = blocks.find((block) => isValidStartBlockType(block.type))
const startBlock = blocks.find(
(block) =>
block.type === 'starter' || block.type === 'start_trigger' || block.type === 'input_trigger'
)
if (!startBlock) {
return []

View File

@@ -9,7 +9,6 @@ import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
import {
createEnvVarPattern,
createWorkflowVariablePattern,
resolveEnvVarReferences,
} from '@/executor/utils/reference-validation'
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
@@ -480,29 +479,9 @@ function resolveEnvironmentVariables(
const replacements: Array<{ match: string; index: number; varName: string; varValue: string }> =
[]
const resolverVars: Record<string, string> = {}
Object.entries(params).forEach(([key, value]) => {
if (value) {
resolverVars[key] = String(value)
}
})
Object.entries(envVars).forEach(([key, value]) => {
if (value) {
resolverVars[key] = value
}
})
while ((match = regex.exec(code)) !== null) {
const varName = match[1].trim()
const resolved = resolveEnvVarReferences(match[0], resolverVars, {
allowEmbedded: true,
resolveExactMatch: true,
trimKeys: true,
onMissing: 'empty',
deep: false,
})
const varValue =
typeof resolved === 'string' ? resolved : resolved == null ? '' : String(resolved)
const varValue = envVars[varName] || params[varName] || ''
replacements.push({
match: match[0],
index: match.index,

View File

@@ -20,7 +20,6 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal'
import { getBaseUrl } from '@/lib/core/utils/urls'
const logger = createLogger('WorkflowMcpServeAPI')
@@ -53,8 +52,6 @@ async function getServer(serverId: string) {
id: workflowMcpServer.id,
name: workflowMcpServer.name,
workspaceId: workflowMcpServer.workspaceId,
isPublic: workflowMcpServer.isPublic,
createdBy: workflowMcpServer.createdBy,
})
.from(workflowMcpServer)
.where(eq(workflowMcpServer.id, serverId))
@@ -93,11 +90,9 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
}
if (!server.isPublic) {
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
@@ -143,8 +138,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
id,
serverId,
rpcParams as { name: string; arguments?: Record<string, unknown> },
apiKey,
server.isPublic ? server.createdBy : undefined
apiKey
)
default:
@@ -206,8 +200,7 @@ async function handleToolsCall(
id: RequestId,
serverId: string,
params: { name: string; arguments?: Record<string, unknown> } | undefined,
apiKey?: string | null,
publicServerOwnerId?: string
apiKey?: string | null
): Promise<NextResponse> {
try {
if (!params?.name) {
@@ -250,13 +243,7 @@ async function handleToolsCall(
const executeUrl = `${getBaseUrl()}/api/workflows/${tool.workflowId}/execute`
const headers: Record<string, string> = { 'Content-Type': 'application/json' }
if (publicServerOwnerId) {
const internalToken = await generateInternalToken(publicServerOwnerId)
headers.Authorization = `Bearer ${internalToken}`
} else if (apiKey) {
headers['X-API-Key'] = apiKey
}
if (apiKey) headers['X-API-Key'] = apiKey
logger.info(`Executing workflow ${tool.workflowId} via MCP tool ${params.name}`)

View File

@@ -5,7 +5,8 @@ import { McpClient } from '@/lib/mcp/client'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import type { McpServerConfig, McpTransport } from '@/lib/mcp/types'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
import { REFERENCE } from '@/executor/constants'
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
const logger = createLogger('McpServerTestAPI')
@@ -23,23 +24,22 @@ function isUrlBasedTransport(transport: McpTransport): boolean {
* Resolve environment variables in strings
*/
function resolveEnvVars(value: string, envVars: Record<string, string>): string {
const missingVars: string[] = []
const resolvedValue = resolveEnvVarReferences(value, envVars, {
allowEmbedded: true,
resolveExactMatch: true,
trimKeys: true,
onMissing: 'keep',
deep: false,
missingKeys: missingVars,
}) as string
const envVarPattern = createEnvVarPattern()
const envMatches = value.match(envVarPattern)
if (!envMatches) return value
if (missingVars.length > 0) {
const uniqueMissing = Array.from(new Set(missingVars))
uniqueMissing.forEach((envKey) => {
let resolvedValue = value
for (const match of envMatches) {
const envKey = match.slice(REFERENCE.ENV_VAR_START.length, -REFERENCE.ENV_VAR_END.length).trim()
const envValue = envVars[envKey]
if (envValue === undefined) {
logger.warn(`Environment variable "${envKey}" not found in MCP server test`)
})
}
continue
}
resolvedValue = resolvedValue.replace(match, envValue)
}
return resolvedValue
}

View File

@@ -31,7 +31,6 @@ export const GET = withMcpAuth<RouteParams>('read')(
createdBy: workflowMcpServer.createdBy,
name: workflowMcpServer.name,
description: workflowMcpServer.description,
isPublic: workflowMcpServer.isPublic,
createdAt: workflowMcpServer.createdAt,
updatedAt: workflowMcpServer.updatedAt,
})
@@ -99,9 +98,6 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
if (body.description !== undefined) {
updateData.description = body.description?.trim() || null
}
if (body.isPublic !== undefined) {
updateData.isPublic = body.isPublic
}
const [updatedServer] = await db
.update(workflowMcpServer)

View File

@@ -26,6 +26,7 @@ export const GET = withMcpAuth<RouteParams>('read')(
logger.info(`[${requestId}] Getting tool ${toolId} from server ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -71,6 +72,7 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
logger.info(`[${requestId}] Updating tool ${toolId} in server ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -137,6 +139,7 @@ export const DELETE = withMcpAuth<RouteParams>('write')(
logger.info(`[${requestId}] Deleting tool ${toolId} from server ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)

View File

@@ -6,10 +6,24 @@ import type { NextRequest } from 'next/server'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
const logger = createLogger('WorkflowMcpToolsAPI')
/**
* Check if a workflow has a valid start block by loading from database
*/
async function hasValidStartBlock(workflowId: string): Promise<boolean> {
try {
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
return hasValidStartBlockInState(normalizedData)
} catch (error) {
logger.warn('Error checking for start block:', error)
return false
}
}
export const dynamic = 'force-dynamic'
interface RouteParams {
@@ -26,6 +40,7 @@ export const GET = withMcpAuth<RouteParams>('read')(
logger.info(`[${requestId}] Listing tools for workflow MCP server: ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -38,6 +53,7 @@ export const GET = withMcpAuth<RouteParams>('read')(
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
// Get tools with workflow details
const tools = await db
.select({
id: workflowMcpTool.id,
@@ -91,6 +107,7 @@ export const POST = withMcpAuth<RouteParams>('write')(
)
}
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -103,6 +120,7 @@ export const POST = withMcpAuth<RouteParams>('write')(
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
// Verify workflow exists and is deployed
const [workflowRecord] = await db
.select({
id: workflow.id,
@@ -119,6 +137,7 @@ export const POST = withMcpAuth<RouteParams>('write')(
return createMcpErrorResponse(new Error('Workflow not found'), 'Workflow not found', 404)
}
// Verify workflow belongs to the same workspace
if (workflowRecord.workspaceId !== workspaceId) {
return createMcpErrorResponse(
new Error('Workflow does not belong to this workspace'),
@@ -135,6 +154,7 @@ export const POST = withMcpAuth<RouteParams>('write')(
)
}
// Verify workflow has a valid start block
const hasStartBlock = await hasValidStartBlock(body.workflowId)
if (!hasStartBlock) {
return createMcpErrorResponse(
@@ -144,6 +164,7 @@ export const POST = withMcpAuth<RouteParams>('write')(
)
}
// Check if tool already exists for this workflow
const [existingTool] = await db
.select({ id: workflowMcpTool.id })
.from(workflowMcpTool)
@@ -169,6 +190,7 @@ export const POST = withMcpAuth<RouteParams>('write')(
workflowRecord.description ||
`Execute ${workflowRecord.name} workflow`
// Create the tool
const toolId = crypto.randomUUID()
const [tool] = await db
.insert(workflowMcpTool)

View File

@@ -1,12 +1,10 @@
import { db } from '@sim/db'
import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, inArray, sql } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
const logger = createLogger('WorkflowMcpServersAPI')
@@ -27,18 +25,18 @@ export const GET = withMcpAuth('read')(
createdBy: workflowMcpServer.createdBy,
name: workflowMcpServer.name,
description: workflowMcpServer.description,
isPublic: workflowMcpServer.isPublic,
createdAt: workflowMcpServer.createdAt,
updatedAt: workflowMcpServer.updatedAt,
toolCount: sql<number>`(
SELECT COUNT(*)::int
FROM "workflow_mcp_tool"
SELECT COUNT(*)::int
FROM "workflow_mcp_tool"
WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id"
)`.as('tool_count'),
})
.from(workflowMcpServer)
.where(eq(workflowMcpServer.workspaceId, workspaceId))
// Fetch all tools for these servers
const serverIds = servers.map((s) => s.id)
const tools =
serverIds.length > 0
@@ -51,6 +49,7 @@ export const GET = withMcpAuth('read')(
.where(inArray(workflowMcpTool.serverId, serverIds))
: []
// Group tool names by server
const toolNamesByServer: Record<string, string[]> = {}
for (const tool of tools) {
if (!toolNamesByServer[tool.serverId]) {
@@ -59,6 +58,7 @@ export const GET = withMcpAuth('read')(
toolNamesByServer[tool.serverId].push(tool.toolName)
}
// Attach tool names to servers
const serversWithToolNames = servers.map((server) => ({
...server,
toolNames: toolNamesByServer[server.id] || [],
@@ -90,7 +90,6 @@ export const POST = withMcpAuth('write')(
logger.info(`[${requestId}] Creating workflow MCP server:`, {
name: body.name,
workspaceId,
workflowIds: body.workflowIds,
})
if (!body.name) {
@@ -111,76 +110,16 @@ export const POST = withMcpAuth('write')(
createdBy: userId,
name: body.name.trim(),
description: body.description?.trim() || null,
isPublic: body.isPublic ?? false,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning()
const workflowIds: string[] = body.workflowIds || []
const addedTools: Array<{ workflowId: string; toolName: string }> = []
if (workflowIds.length > 0) {
const workflows = await db
.select({
id: workflow.id,
name: workflow.name,
description: workflow.description,
isDeployed: workflow.isDeployed,
workspaceId: workflow.workspaceId,
})
.from(workflow)
.where(inArray(workflow.id, workflowIds))
for (const workflowRecord of workflows) {
if (workflowRecord.workspaceId !== workspaceId) {
logger.warn(
`[${requestId}] Skipping workflow ${workflowRecord.id} - does not belong to workspace`
)
continue
}
if (!workflowRecord.isDeployed) {
logger.warn(`[${requestId}] Skipping workflow ${workflowRecord.id} - not deployed`)
continue
}
const hasStartBlock = await hasValidStartBlock(workflowRecord.id)
if (!hasStartBlock) {
logger.warn(`[${requestId}] Skipping workflow ${workflowRecord.id} - no start block`)
continue
}
const toolName = sanitizeToolName(workflowRecord.name)
const toolDescription =
workflowRecord.description || `Execute ${workflowRecord.name} workflow`
const toolId = crypto.randomUUID()
await db.insert(workflowMcpTool).values({
id: toolId,
serverId,
workflowId: workflowRecord.id,
toolName,
toolDescription,
parameterSchema: {},
createdAt: new Date(),
updatedAt: new Date(),
})
addedTools.push({ workflowId: workflowRecord.id, toolName })
}
logger.info(
`[${requestId}] Added ${addedTools.length} tools to server ${serverId}:`,
addedTools.map((t) => t.toolName)
)
}
logger.info(
`[${requestId}] Successfully created workflow MCP server: ${body.name} (ID: ${serverId})`
)
return createMcpSuccessResponse({ server, addedTools }, 201)
return createMcpSuccessResponse({ server }, 201)
} catch (error) {
logger.error(`[${requestId}] Error creating workflow MCP server:`, error)
return createMcpErrorResponse(

View File

@@ -57,7 +57,6 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -93,17 +92,6 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
workflow: {
id: 'id',
userId: 'userId',
workspaceId: 'workspaceId',
},
}
})
@@ -146,7 +134,6 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -182,17 +169,6 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
workflow: {
id: 'id',
userId: 'userId',
workspaceId: 'workspaceId',
},
}
})
@@ -230,7 +206,6 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -253,17 +228,6 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
workflow: {
id: 'id',
userId: 'userId',
workspaceId: 'workspaceId',
},
}
})
@@ -301,7 +265,6 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -347,17 +310,6 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
workflow: {
id: 'id',
userId: 'userId',
workspaceId: 'workspaceId',
},
}
})

View File

@@ -1,7 +1,7 @@
import { db, workflowDeploymentVersion, workflowSchedule } from '@sim/db'
import { db, workflowSchedule } from '@sim/db'
import { createLogger } from '@sim/logger'
import { tasks } from '@trigger.dev/sdk'
import { and, eq, isNull, lt, lte, not, or, sql } from 'drizzle-orm'
import { and, eq, isNull, lt, lte, not, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
@@ -37,8 +37,7 @@ export async function GET(request: NextRequest) {
or(
isNull(workflowSchedule.lastQueuedAt),
lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt)
),
sql`${workflowSchedule.deploymentVersionId} = (select ${workflowDeploymentVersion.id} from ${workflowDeploymentVersion} where ${workflowDeploymentVersion.workflowId} = ${workflowSchedule.workflowId} and ${workflowDeploymentVersion.isActive} = true)`
)
)
)
.returning({

View File

@@ -29,23 +29,12 @@ vi.mock('@sim/db', () => ({
vi.mock('@sim/db/schema', () => ({
workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
workflowSchedule: {
workflowId: 'workflowId',
blockId: 'blockId',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
workflowSchedule: { workflowId: 'workflowId', blockId: 'blockId' },
}))
vi.mock('drizzle-orm', () => ({
eq: vi.fn(),
and: vi.fn(),
or: vi.fn(),
isNull: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
@@ -67,11 +56,6 @@ function mockDbChain(results: any[]) {
where: () => ({
limit: () => results[callIndex++] || [],
}),
leftJoin: () => ({
where: () => ({
limit: () => results[callIndex++] || [],
}),
}),
}),
}))
}
@@ -90,16 +74,7 @@ describe('Schedule GET API', () => {
it('returns schedule data for authorized user', async () => {
mockDbChain([
[{ userId: 'user-1', workspaceId: null }],
[
{
schedule: {
id: 'sched-1',
cronExpression: '0 9 * * *',
status: 'active',
failedCount: 0,
},
},
],
[{ id: 'sched-1', cronExpression: '0 9 * * *', status: 'active', failedCount: 0 }],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
@@ -153,7 +128,7 @@ describe('Schedule GET API', () => {
it('allows workspace members to view', async () => {
mockDbChain([
[{ userId: 'other-user', workspaceId: 'ws-1' }],
[{ schedule: { id: 'sched-1', status: 'active', failedCount: 0 } }],
[{ id: 'sched-1', status: 'active', failedCount: 0 }],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
@@ -164,7 +139,7 @@ describe('Schedule GET API', () => {
it('indicates disabled schedule with failures', async () => {
mockDbChain([
[{ userId: 'user-1', workspaceId: null }],
[{ schedule: { id: 'sched-1', status: 'disabled', failedCount: 100 } }],
[{ id: 'sched-1', status: 'disabled', failedCount: 100 }],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow, workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema'
import { workflow, workflowSchedule } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, or } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -62,24 +62,9 @@ export async function GET(req: NextRequest) {
}
const schedule = await db
.select({ schedule: workflowSchedule })
.select()
.from(workflowSchedule)
.leftJoin(
workflowDeploymentVersion,
and(
eq(workflowDeploymentVersion.workflowId, workflowSchedule.workflowId),
eq(workflowDeploymentVersion.isActive, true)
)
)
.where(
and(
...conditions,
or(
eq(workflowSchedule.deploymentVersionId, workflowDeploymentVersion.id),
and(isNull(workflowDeploymentVersion.id), isNull(workflowSchedule.deploymentVersionId))
)
)
)
.where(conditions.length > 1 ? and(...conditions) : conditions[0])
.limit(1)
const headers = new Headers()
@@ -89,7 +74,7 @@ export async function GET(req: NextRequest) {
return NextResponse.json({ schedule: null }, { headers })
}
const scheduleData = schedule[0].schedule
const scheduleData = schedule[0]
const isDisabled = scheduleData.status === 'disabled'
const hasFailures = scheduleData.failedCount > 0

View File

@@ -5,14 +5,14 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifySuperUser } from '@/lib/templates/permissions'
import { verifyAdminPrivileges } from '@/lib/templates/permissions'
const logger = createLogger('TemplateApprovalAPI')
export const revalidate = 0
/**
* POST /api/templates/[id]/approve - Approve a template (super users only)
* POST /api/templates/[id]/approve - Approve a template (admin users only)
*/
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
@@ -25,10 +25,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
const { hasAdminPrivileges } = await verifyAdminPrivileges(session.user.id)
if (!hasAdminPrivileges) {
logger.warn(`[${requestId}] Non-admin user attempted to approve template: ${id}`)
return NextResponse.json({ error: 'Only admin users can approve templates' }, { status: 403 })
}
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
@@ -42,7 +42,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
.set({ status: 'approved', updatedAt: new Date() })
.where(eq(templates.id, id))
logger.info(`[${requestId}] Template approved: ${id} by super user: ${session.user.id}`)
logger.info(`[${requestId}] Template approved: ${id} by admin: ${session.user.id}`)
return NextResponse.json({
message: 'Template approved successfully',
@@ -55,7 +55,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
/**
* DELETE /api/templates/[id]/approve - Unapprove a template (super users only)
* DELETE /api/templates/[id]/approve - Unapprove a template (admin users only)
*/
export async function DELETE(
_request: NextRequest,
@@ -71,10 +71,10 @@ export async function DELETE(
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
const { hasAdminPrivileges } = await verifyAdminPrivileges(session.user.id)
if (!hasAdminPrivileges) {
logger.warn(`[${requestId}] Non-admin user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only admin users can reject templates' }, { status: 403 })
}
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
@@ -88,7 +88,7 @@ export async function DELETE(
.set({ status: 'rejected', updatedAt: new Date() })
.where(eq(templates.id, id))
logger.info(`[${requestId}] Template rejected: ${id} by super user: ${session.user.id}`)
logger.info(`[${requestId}] Template rejected: ${id} by admin: ${session.user.id}`)
return NextResponse.json({
message: 'Template rejected successfully',

View File

@@ -5,14 +5,14 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifySuperUser } from '@/lib/templates/permissions'
import { verifyAdminPrivileges } from '@/lib/templates/permissions'
const logger = createLogger('TemplateRejectionAPI')
export const revalidate = 0
/**
* POST /api/templates/[id]/reject - Reject a template (super users only)
* POST /api/templates/[id]/reject - Reject a template (admin users only)
*/
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
@@ -25,10 +25,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
const { hasAdminPrivileges } = await verifyAdminPrivileges(session.user.id)
if (!hasAdminPrivileges) {
logger.warn(`[${requestId}] Non-admin user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only admin users can reject templates' }, { status: 403 })
}
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
@@ -42,7 +42,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
.set({ status: 'rejected', updatedAt: new Date() })
.where(eq(templates.id, id))
logger.info(`[${requestId}] Template rejected: ${id} by super user: ${session.user.id}`)
logger.info(`[${requestId}] Template rejected: ${id} by admin: ${session.user.id}`)
return NextResponse.json({
message: 'Template rejected successfully',

View File

@@ -23,13 +23,10 @@ const logger = createLogger('TemplatesAPI')
export const revalidate = 0
// Function to sanitize sensitive data from workflow state
// Now uses the more comprehensive sanitizeCredentials from credential-extractor
function sanitizeWorkflowState(state: any): any {
return sanitizeCredentials(state)
}
// Schema for creating a template
const CreateTemplateSchema = z.object({
workflowId: z.string().min(1, 'Workflow ID is required'),
name: z.string().min(1, 'Name is required').max(100, 'Name must be less than 100 characters'),
@@ -43,7 +40,6 @@ const CreateTemplateSchema = z.object({
tags: z.array(z.string()).max(10, 'Maximum 10 tags allowed').optional().default([]),
})
// Schema for query parameters
const QueryParamsSchema = z.object({
limit: z.coerce.number().optional().default(50),
offset: z.coerce.number().optional().default(0),
@@ -69,31 +65,21 @@ export async function GET(request: NextRequest) {
logger.debug(`[${requestId}] Fetching templates with params:`, params)
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
const isSuperUser = currentUser[0]?.isSuperUser || false
const isSuperUser = currentUser[0]?.role === 'admin' || currentUser[0]?.role === 'superadmin'
// Build query conditions
const conditions = []
// Apply workflow filter if provided (for getting template by workflow)
// When fetching by workflowId, we want to get the template regardless of status
// This is used by the deploy modal to check if a template exists
if (params.workflowId) {
conditions.push(eq(templates.workflowId, params.workflowId))
// Don't apply status filter when fetching by workflowId - we want to show
// the template to its owner even if it's pending
} else {
// Apply status filter - only approved templates for non-super users
if (params.status) {
conditions.push(eq(templates.status, params.status))
} else if (!isSuperUser || !params.includeAllStatuses) {
// Non-super users and super users without includeAllStatuses flag see only approved templates
conditions.push(eq(templates.status, 'approved'))
}
}
// Apply search filter if provided
if (params.search) {
const searchTerm = `%${params.search}%`
conditions.push(
@@ -104,10 +90,8 @@ export async function GET(request: NextRequest) {
)
}
// Combine conditions
const whereCondition = conditions.length > 0 ? and(...conditions) : undefined
// Apply ordering, limit, and offset with star information
const results = await db
.select({
id: templates.id,
@@ -138,7 +122,6 @@ export async function GET(request: NextRequest) {
.limit(params.limit)
.offset(params.offset)
// Get total count for pagination
const totalCount = await db
.select({ count: sql<number>`count(*)` })
.from(templates)
@@ -191,7 +174,6 @@ export async function POST(request: NextRequest) {
workflowId: data.workflowId,
})
// Verify the workflow exists and belongs to the user
const workflowExists = await db
.select({ id: workflow.id })
.from(workflow)
@@ -218,7 +200,6 @@ export async function POST(request: NextRequest) {
const templateId = uuidv4()
const now = new Date()
// Get the active deployment version for the workflow to copy its state
const activeVersion = await db
.select({
id: workflowDeploymentVersion.id,
@@ -243,10 +224,8 @@ export async function POST(request: NextRequest) {
)
}
// Ensure the state includes workflow variables (if not already included)
let stateWithVariables = activeVersion[0].state as any
if (stateWithVariables && !stateWithVariables.variables) {
// Fetch workflow variables if not in deployment version
const [workflowRecord] = await db
.select({ variables: workflow.variables })
.from(workflow)
@@ -259,10 +238,8 @@ export async function POST(request: NextRequest) {
}
}
// Extract credential requirements before sanitizing
const requiredCredentials = extractRequiredCredentials(stateWithVariables)
// Sanitize the workflow state to remove all credential values
const sanitizedState = sanitizeWorkflowState(stateWithVariables)
const newTemplate = {

View File

@@ -1,169 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
export const dynamic = 'force-dynamic'
const logger = createLogger('PulseParseAPI')
const PulseParseSchema = z.object({
apiKey: z.string().min(1, 'API key is required'),
filePath: z.string().min(1, 'File path is required'),
pages: z.string().optional(),
extractFigure: z.boolean().optional(),
figureDescription: z.boolean().optional(),
returnHtml: z.boolean().optional(),
chunking: z.string().optional(),
chunkSize: z.number().optional(),
})
/**
 * Proxies a document-parse request to the Pulse API (api.runpulse.com/extract).
 *
 * Flow:
 *  1. Authenticate the caller via hybrid auth (workflow context not required).
 *  2. Validate the JSON body against PulseParseSchema.
 *  3. If the file path points at our internal file-serve route, verify the
 *     caller may access that file and replace the path with a short-lived
 *     presigned download URL; otherwise resolve app-relative paths against
 *     the base URL.
 *  4. Forward the request to Pulse as multipart form data and relay the
 *     parsed output (or the upstream error status) back to the caller.
 *
 * Responses: 401 unauthenticated, 404 file not accessible, 400 invalid body,
 * the upstream status on a Pulse failure, 500 otherwise.
 */
export async function POST(request: NextRequest) {
  const requestId = generateRequestId()
  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      logger.warn(`[${requestId}] Unauthorized Pulse parse attempt`, {
        error: authResult.error || 'Missing userId',
      })
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Unauthorized',
        },
        { status: 401 }
      )
    }
    const userId = authResult.userId
    // Throws ZodError on invalid input; handled in the catch below as a 400.
    const body = await request.json()
    const validatedData = PulseParseSchema.parse(body)
    logger.info(`[${requestId}] Pulse parse request`, {
      filePath: validatedData.filePath,
      isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
      userId,
    })
    let fileUrl = validatedData.filePath
    // Internal file-serve URLs cannot be fetched by Pulse directly, so swap
    // them for a presigned storage URL after an access check.
    if (validatedData.filePath?.includes('/api/files/serve/')) {
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        const context = inferContextFromKey(storageKey)
        const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
        if (!hasAccess) {
          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
            userId,
            key: storageKey,
            context,
          })
          // NOTE(review): unauthorized access deliberately reads as 404
          // "File not found" — presumably to avoid leaking file existence;
          // confirm this is intentional.
          return NextResponse.json(
            {
              success: false,
              error: 'File not found',
            },
            { status: 404 }
          )
        }
        // 5 * 60 — 5-minute TTL (assuming the third argument is seconds;
        // TODO confirm units against StorageService).
        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        return NextResponse.json(
          {
            success: false,
            error: 'Failed to generate file access URL',
          },
          { status: 500 }
        )
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      // App-relative path: make it absolute so Pulse can fetch it.
      const baseUrl = getBaseUrl()
      fileUrl = `${baseUrl}${validatedData.filePath}`
    }
    // Build the multipart payload; field names follow Pulse's snake_case API.
    // Booleans/numbers are stringified explicitly since FormData only carries strings.
    const formData = new FormData()
    formData.append('file_url', fileUrl)
    if (validatedData.pages) {
      formData.append('pages', validatedData.pages)
    }
    if (validatedData.extractFigure !== undefined) {
      formData.append('extract_figure', String(validatedData.extractFigure))
    }
    if (validatedData.figureDescription !== undefined) {
      formData.append('figure_description', String(validatedData.figureDescription))
    }
    if (validatedData.returnHtml !== undefined) {
      formData.append('return_html', String(validatedData.returnHtml))
    }
    if (validatedData.chunking) {
      formData.append('chunking', validatedData.chunking)
    }
    if (validatedData.chunkSize !== undefined) {
      formData.append('chunk_size', String(validatedData.chunkSize))
    }
    const pulseResponse = await fetch('https://api.runpulse.com/extract', {
      method: 'POST',
      headers: {
        'x-api-key': validatedData.apiKey,
      },
      body: formData,
    })
    if (!pulseResponse.ok) {
      // Log the full upstream error body, but only surface the status text
      // to the caller; the upstream status code is passed through.
      const errorText = await pulseResponse.text()
      logger.error(`[${requestId}] Pulse API error:`, errorText)
      return NextResponse.json(
        {
          success: false,
          error: `Pulse API error: ${pulseResponse.statusText}`,
        },
        { status: pulseResponse.status }
      )
    }
    const pulseData = await pulseResponse.json()
    logger.info(`[${requestId}] Pulse parse successful`)
    return NextResponse.json({
      success: true,
      output: pulseData,
    })
  } catch (error) {
    // Schema violations surface as a 400 with field-level details.
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error in Pulse parse:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}

View File

@@ -1,167 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
// Disable route caching: every parse request must be authenticated and executed fresh.
export const dynamic = 'force-dynamic'
const logger = createLogger('ReductoParseAPI')
// Request-body contract for the Reducto parse proxy. Only apiKey and filePath
// are required; pages and table format are forwarded to Reducto when set.
const ReductoParseSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
  filePath: z.string().min(1, 'File path is required'),
  pages: z.array(z.number()).optional(),
  tableOutputFormat: z.enum(['html', 'md']).optional(),
})
/**
 * Proxies a document-parse request to the Reducto API (platform.reducto.ai/parse).
 *
 * Flow:
 *  1. Authenticate the caller via hybrid auth (workflow context not required).
 *  2. Validate the JSON body against ReductoParseSchema.
 *  3. If the file path points at our internal file-serve route, verify the
 *     caller may access that file and replace the path with a short-lived
 *     presigned download URL; otherwise resolve app-relative paths against
 *     the base URL.
 *  4. Forward the request to Reducto as JSON and relay the parsed output
 *     (or the upstream error status) back to the caller.
 *
 * Responses: 401 unauthenticated, 404 file not accessible, 400 invalid body,
 * the upstream status on a Reducto failure, 500 otherwise.
 */
export async function POST(request: NextRequest) {
  const requestId = generateRequestId()
  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      logger.warn(`[${requestId}] Unauthorized Reducto parse attempt`, {
        error: authResult.error || 'Missing userId',
      })
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Unauthorized',
        },
        { status: 401 }
      )
    }
    const userId = authResult.userId
    // Throws ZodError on invalid input; handled in the catch below as a 400.
    const body = await request.json()
    const validatedData = ReductoParseSchema.parse(body)
    logger.info(`[${requestId}] Reducto parse request`, {
      filePath: validatedData.filePath,
      isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
      userId,
    })
    let fileUrl = validatedData.filePath
    // Internal file-serve URLs cannot be fetched by Reducto directly, so swap
    // them for a presigned storage URL after an access check.
    if (validatedData.filePath?.includes('/api/files/serve/')) {
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        const context = inferContextFromKey(storageKey)
        const hasAccess = await verifyFileAccess(
          storageKey,
          userId,
          undefined, // customConfig
          context, // context
          false // isLocal
        )
        if (!hasAccess) {
          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
            userId,
            key: storageKey,
            context,
          })
          // NOTE(review): unauthorized access deliberately reads as 404
          // "File not found" — presumably to avoid leaking file existence;
          // confirm this is intentional.
          return NextResponse.json(
            {
              success: false,
              error: 'File not found',
            },
            { status: 404 }
          )
        }
        // 5 * 60 — 5-minute TTL (assuming the third argument is seconds;
        // TODO confirm units against StorageService).
        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        return NextResponse.json(
          {
            success: false,
            error: 'Failed to generate file access URL',
          },
          { status: 500 }
        )
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      // App-relative path: make it absolute so Reducto can fetch it.
      const baseUrl = getBaseUrl()
      fileUrl = `${baseUrl}${validatedData.filePath}`
    }
    // Build the JSON payload; optional fields are nested under Reducto's
    // `settings` / `formatting` objects only when present.
    const reductoBody: Record<string, unknown> = {
      input: fileUrl,
    }
    if (validatedData.pages && validatedData.pages.length > 0) {
      reductoBody.settings = {
        page_range: validatedData.pages,
      }
    }
    if (validatedData.tableOutputFormat) {
      reductoBody.formatting = {
        table_output_format: validatedData.tableOutputFormat,
      }
    }
    const reductoResponse = await fetch('https://platform.reducto.ai/parse', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Accept: 'application/json',
        Authorization: `Bearer ${validatedData.apiKey}`,
      },
      body: JSON.stringify(reductoBody),
    })
    if (!reductoResponse.ok) {
      // Log the full upstream error body, but only surface the status text
      // to the caller; the upstream status code is passed through.
      const errorText = await reductoResponse.text()
      logger.error(`[${requestId}] Reducto API error:`, errorText)
      return NextResponse.json(
        {
          success: false,
          error: `Reducto API error: ${reductoResponse.statusText}`,
        },
        { status: reductoResponse.status }
      )
    }
    const reductoData = await reductoResponse.json()
    logger.info(`[${requestId}] Reducto parse successful`)
    return NextResponse.json({
      success: true,
      output: reductoData,
    })
  } catch (error) {
    // Schema violations surface as a 400 with field-level details.
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error in Reducto parse:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}

View File

@@ -6,23 +6,26 @@ import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
const logger = createLogger('SuperUserAPI')
const logger = createLogger('AdminStatusAPI')
export const revalidate = 0
// GET /api/user/super-user - Check if current user is a super user (database status)
/**
* GET /api/user/admin-status - Check if current user has admin privileges
* Returns hasAdminPrivileges: true if user role is 'admin' or 'superadmin'
*/
export async function GET(request: NextRequest) {
const requestId = generateRequestId()
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized super user status check attempt`)
logger.warn(`[${requestId}] Unauthorized admin status check attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const currentUser = await db
.select({ isSuperUser: user.isSuperUser })
.select({ role: user.role })
.from(user)
.where(eq(user.id, session.user.id))
.limit(1)
@@ -32,11 +35,13 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'User not found' }, { status: 404 })
}
const role = currentUser[0].role
return NextResponse.json({
isSuperUser: currentUser[0].isSuperUser,
hasAdminPrivileges: role === 'admin' || role === 'superadmin',
role,
})
} catch (error) {
logger.error(`[${requestId}] Error checking super user status`, error)
logger.error(`[${requestId}] Error checking admin status`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -13,6 +13,8 @@
* GET /api/v1/admin/users/:id - Get user details
* GET /api/v1/admin/users/:id/billing - Get user billing info
* PATCH /api/v1/admin/users/:id/billing - Update user billing (limit, blocked)
* GET /api/v1/admin/users/:id/role - Get user role
* PATCH /api/v1/admin/users/:id/role - Update user role (user, admin, superadmin)
*
* Workspaces:
* GET /api/v1/admin/workspaces - List all workspaces

View File

@@ -105,6 +105,7 @@ export interface AdminUser {
email: string
emailVerified: boolean
image: string | null
role: string | null
createdAt: string
updatedAt: string
}
@@ -116,6 +117,7 @@ export function toAdminUser(dbUser: DbUser): AdminUser {
email: dbUser.email,
emailVerified: dbUser.emailVerified,
image: dbUser.image,
role: dbUser.role,
createdAt: dbUser.createdAt.toISOString(),
updatedAt: dbUser.updatedAt.toISOString(),
}

View File

@@ -0,0 +1,98 @@
/**
* GET /api/v1/admin/users/[id]/role
*
* Get a user's current role.
*
* Response: AdminSingleResponse<{ role: string | null }>
*
* PATCH /api/v1/admin/users/[id]/role
*
* Update a user's role.
*
* Body:
* - role: 'user' | 'admin' | 'superadmin' - The role to assign
*
* Response: AdminSingleResponse<AdminUser>
*/
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {
badRequestResponse,
internalErrorResponse,
notFoundResponse,
singleResponse,
} from '@/app/api/v1/admin/responses'
import { toAdminUser } from '@/app/api/v1/admin/types'
const logger = createLogger('AdminUserRoleAPI')
// Roles an admin may assign via this endpoint; kept as a const tuple so the
// ValidRole union below is derived from the single source of truth.
const VALID_ROLES = ['user', 'admin', 'superadmin'] as const
type ValidRole = (typeof VALID_ROLES)[number]
// Dynamic segment shape for /api/v1/admin/users/[id]/role.
interface RouteParams {
  id: string
}
/**
 * GET /api/v1/admin/users/[id]/role
 *
 * Looks up the target user's current role.
 *
 * Responses: 404 when the user does not exist, 200 with { role } otherwise,
 * 500 on unexpected errors.
 */
export const GET = withAdminAuthParams<RouteParams>(async (request, context) => {
  const { id: userId } = await context.params
  try {
    const rows = await db
      .select({ role: user.role })
      .from(user)
      .where(eq(user.id, userId))
      .limit(1)

    const record = rows.at(0)
    if (record === undefined) {
      return notFoundResponse('User')
    }

    logger.info(`Admin API: Retrieved role for user ${userId}`)
    return singleResponse({ role: record.role })
  } catch (error) {
    logger.error('Admin API: Failed to get user role', { error, userId })
    return internalErrorResponse('Failed to get user role')
  }
})
/**
 * PATCH /api/v1/admin/users/[id]/role
 *
 * Assigns one of VALID_ROLES to the target user.
 *
 * Responses:
 *  - 400 when the body is not valid JSON, `role` is missing, or the role is
 *    not one of VALID_ROLES
 *  - 404 when the user does not exist
 *  - 200 with the updated AdminUser on success
 *  - 500 on unexpected errors
 */
export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) => {
  const { id: userId } = await context.params
  try {
    // Parse the payload up front so malformed JSON surfaces as a 400 instead
    // of falling through to the generic 500 handler below.
    let body: Record<string, unknown>
    try {
      body = await request.json()
    } catch {
      return badRequestResponse('Request body must be valid JSON')
    }

    const [existing] = await db.select().from(user).where(eq(user.id, userId)).limit(1)

    if (!existing) {
      return notFoundResponse('User')
    }

    if (body.role === undefined) {
      return badRequestResponse('role is required')
    }

    if (!VALID_ROLES.includes(body.role as ValidRole)) {
      return badRequestResponse(`Invalid role. Must be one of: ${VALID_ROLES.join(', ')}`, {
        validRoles: VALID_ROLES,
      })
    }

    const [updated] = await db
      .update(user)
      .set({ role: body.role as ValidRole, updatedAt: new Date() })
      .where(eq(user.id, userId))
      .returning()

    // Guard against the row disappearing between the existence check and the
    // update (e.g. a concurrent delete) — .returning() would yield no rows.
    if (!updated) {
      return notFoundResponse('User')
    }

    logger.info(`Admin API: Updated user ${userId} role to ${body.role}`)

    return singleResponse(toAdminUser(updated))
  } catch (error) {
    logger.error('Admin API: Failed to update user role', { error, userId })
    return internalErrorResponse('Failed to update user role')
  }
})

View File

@@ -60,17 +60,7 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
return internalErrorResponse(deployResult.error || 'Failed to deploy workflow')
}
if (!deployResult.deploymentVersionId) {
await undeployWorkflow({ workflowId })
return internalErrorResponse('Failed to resolve deployment version')
}
const scheduleResult = await createSchedulesForDeploy(
workflowId,
normalizedData.blocks,
db,
deployResult.deploymentVersionId
)
const scheduleResult = await createSchedulesForDeploy(workflowId, normalizedData.blocks, db)
if (!scheduleResult.success) {
logger.warn(`Schedule creation failed for workflow ${workflowId}: ${scheduleResult.error}`)
}

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { webhook, workflow, workflowDeploymentVersion } from '@sim/db/schema'
import { webhook, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull, or } from 'drizzle-orm'
import { and, desc, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
@@ -71,23 +71,7 @@ export async function GET(request: NextRequest) {
})
.from(webhook)
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
.leftJoin(
workflowDeploymentVersion,
and(
eq(workflowDeploymentVersion.workflowId, workflow.id),
eq(workflowDeploymentVersion.isActive, true)
)
)
.where(
and(
eq(webhook.workflowId, workflowId),
eq(webhook.blockId, blockId),
or(
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
)
)
)
.where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
.orderBy(desc(webhook.updatedAt))
logger.info(
@@ -165,23 +149,7 @@ export async function POST(request: NextRequest) {
const existingForBlock = await db
.select({ id: webhook.id, path: webhook.path })
.from(webhook)
.leftJoin(
workflowDeploymentVersion,
and(
eq(workflowDeploymentVersion.workflowId, workflowId),
eq(workflowDeploymentVersion.isActive, true)
)
)
.where(
and(
eq(webhook.workflowId, workflowId),
eq(webhook.blockId, blockId),
or(
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
)
)
)
.where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
.limit(1)
if (existingForBlock.length > 0) {
@@ -257,23 +225,7 @@ export async function POST(request: NextRequest) {
const existingForBlock = await db
.select({ id: webhook.id })
.from(webhook)
.leftJoin(
workflowDeploymentVersion,
and(
eq(workflowDeploymentVersion.workflowId, workflowId),
eq(workflowDeploymentVersion.isActive, true)
)
)
.where(
and(
eq(webhook.workflowId, workflowId),
eq(webhook.blockId, blockId),
or(
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
)
)
)
.where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
.limit(1)
if (existingForBlock.length > 0) {
targetWebhookId = existingForBlock[0].id

View File

@@ -152,6 +152,7 @@ export async function POST(
const response = await queueWebhookExecution(foundWebhook, foundWorkflow, body, request, {
requestId,
path,
executionTarget: 'deployed',
})
responses.push(response)
}

View File

@@ -22,13 +22,6 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
.select({
id: chat.id,
identifier: chat.identifier,
title: chat.title,
description: chat.description,
customizations: chat.customizations,
authType: chat.authType,
allowedEmails: chat.allowedEmails,
outputConfigs: chat.outputConfigs,
password: chat.password,
isActive: chat.isActive,
})
.from(chat)
@@ -41,13 +34,6 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
? {
id: deploymentResults[0].id,
identifier: deploymentResults[0].identifier,
title: deploymentResults[0].title,
description: deploymentResults[0].description,
customizations: deploymentResults[0].customizations,
authType: deploymentResults[0].authType,
allowedEmails: deploymentResults[0].allowedEmails,
outputConfigs: deploymentResults[0].outputConfigs,
hasPassword: Boolean(deploymentResults[0].password),
}
: null

View File

@@ -10,11 +10,7 @@ import {
loadWorkflowFromNormalizedTables,
undeployWorkflow,
} from '@/lib/workflows/persistence/utils'
import {
cleanupDeploymentVersion,
createSchedulesForDeploy,
validateWorkflowSchedules,
} from '@/lib/workflows/schedules'
import { createSchedulesForDeploy, validateWorkflowSchedules } from '@/lib/workflows/schedules'
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -135,6 +131,22 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
}
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
request,
workflowId: id,
workflow: workflowData,
userId: actorUserId,
blocks: normalizedData.blocks,
requestId,
})
if (!triggerSaveResult.success) {
return createErrorResponse(
triggerSaveResult.error?.message || 'Failed to save trigger configuration',
triggerSaveResult.error?.status || 500
)
}
const deployResult = await deployWorkflow({
workflowId: id,
deployedBy: actorUserId,
@@ -146,58 +158,14 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
const deployedAt = deployResult.deployedAt!
const deploymentVersionId = deployResult.deploymentVersionId
if (!deploymentVersionId) {
await undeployWorkflow({ workflowId: id })
return createErrorResponse('Failed to resolve deployment version', 500)
}
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
request,
workflowId: id,
workflow: workflowData,
userId: actorUserId,
blocks: normalizedData.blocks,
requestId,
deploymentVersionId,
})
if (!triggerSaveResult.success) {
await cleanupDeploymentVersion({
workflowId: id,
workflow: workflowData as Record<string, unknown>,
requestId,
deploymentVersionId,
})
await undeployWorkflow({ workflowId: id })
return createErrorResponse(
triggerSaveResult.error?.message || 'Failed to save trigger configuration',
triggerSaveResult.error?.status || 500
)
}
let scheduleInfo: { scheduleId?: string; cronExpression?: string; nextRunAt?: Date } = {}
const scheduleResult = await createSchedulesForDeploy(
id,
normalizedData.blocks,
db,
deploymentVersionId
)
const scheduleResult = await createSchedulesForDeploy(id, normalizedData.blocks, db)
if (!scheduleResult.success) {
logger.error(
`[${requestId}] Failed to create schedule for workflow ${id}: ${scheduleResult.error}`
)
await cleanupDeploymentVersion({
workflowId: id,
workflow: workflowData as Record<string, unknown>,
requestId,
deploymentVersionId,
})
await undeployWorkflow({ workflowId: id })
return createErrorResponse(scheduleResult.error || 'Failed to create schedule', 500)
}
if (scheduleResult.scheduleId) {
} else if (scheduleResult.scheduleId) {
scheduleInfo = {
scheduleId: scheduleResult.scheduleId,
cronExpression: scheduleResult.cronExpression,

View File

@@ -1,19 +1,10 @@
import { db, workflowDeploymentVersion } from '@sim/db'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import { saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils'
import {
cleanupDeploymentVersion,
createSchedulesForDeploy,
validateWorkflowSchedules,
} from '@/lib/workflows/schedules'
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
import type { BlockState } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowActivateDeploymentAPI')
@@ -28,135 +19,30 @@ export async function POST(
const { id, version } = await params
try {
const {
error,
session,
workflow: workflowData,
} = await validateWorkflowPermissions(id, requestId, 'admin')
const { error } = await validateWorkflowPermissions(id, requestId, 'admin')
if (error) {
return createErrorResponse(error.message, error.status)
}
const actorUserId = session?.user?.id
if (!actorUserId) {
logger.warn(`[${requestId}] Unable to resolve actor user for deployment activation: ${id}`)
return createErrorResponse('Unable to determine activating user', 400)
}
const versionNum = Number(version)
if (!Number.isFinite(versionNum)) {
return createErrorResponse('Invalid version number', 400)
}
const [versionRow] = await db
.select({
id: workflowDeploymentVersion.id,
state: workflowDeploymentVersion.state,
})
.from(workflowDeploymentVersion)
.where(
and(
eq(workflowDeploymentVersion.workflowId, id),
eq(workflowDeploymentVersion.version, versionNum)
)
)
.limit(1)
if (!versionRow?.state) {
return createErrorResponse('Deployment version not found', 404)
}
const [currentActiveVersion] = await db
.select({ id: workflowDeploymentVersion.id })
.from(workflowDeploymentVersion)
.where(
and(
eq(workflowDeploymentVersion.workflowId, id),
eq(workflowDeploymentVersion.isActive, true)
)
)
.limit(1)
const previousVersionId = currentActiveVersion?.id
const deployedState = versionRow.state as { blocks?: Record<string, BlockState> }
const blocks = deployedState.blocks
if (!blocks || typeof blocks !== 'object') {
return createErrorResponse('Invalid deployed state structure', 500)
}
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
request,
workflowId: id,
workflow: workflowData as Record<string, unknown>,
userId: actorUserId,
blocks,
requestId,
deploymentVersionId: versionRow.id,
})
if (!triggerSaveResult.success) {
return createErrorResponse(
triggerSaveResult.error?.message || 'Failed to sync trigger configuration',
triggerSaveResult.error?.status || 500
)
}
const scheduleValidation = validateWorkflowSchedules(blocks)
if (!scheduleValidation.isValid) {
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
}
const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id)
if (!scheduleResult.success) {
await cleanupDeploymentVersion({
workflowId: id,
workflow: workflowData as Record<string, unknown>,
requestId,
deploymentVersionId: versionRow.id,
})
return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500)
}
const result = await activateWorkflowVersion({ workflowId: id, version: versionNum })
if (!result.success) {
await cleanupDeploymentVersion({
workflowId: id,
workflow: workflowData as Record<string, unknown>,
requestId,
deploymentVersionId: versionRow.id,
})
return createErrorResponse(result.error || 'Failed to activate deployment', 400)
}
if (previousVersionId && previousVersionId !== versionRow.id) {
try {
logger.info(
`[${requestId}] Cleaning up previous version ${previousVersionId} webhooks/schedules`
)
await cleanupDeploymentVersion({
workflowId: id,
workflow: workflowData as Record<string, unknown>,
requestId,
deploymentVersionId: previousVersionId,
})
logger.info(`[${requestId}] Previous version cleanup completed`)
} catch (cleanupError) {
logger.error(
`[${requestId}] Failed to clean up previous version ${previousVersionId}`,
cleanupError
)
}
if (result.state) {
await syncMcpToolsForWorkflow({
workflowId: id,
requestId,
state: result.state,
context: 'activate',
})
}
await syncMcpToolsForWorkflow({
workflowId: id,
requestId,
state: versionRow.state,
context: 'activate',
})
return createSuccessResponse({ success: true, deployedAt: result.deployedAt })
} catch (error: any) {
logger.error(`[${requestId}] Error activating deployment for workflow: ${id}`, error)

View File

@@ -110,7 +110,6 @@ type AsyncExecutionParams = {
userId: string
input: any
triggerType: CoreTriggerType
preflighted?: boolean
}
/**
@@ -133,7 +132,6 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextR
userId,
input,
triggerType,
preflighted: params.preflighted,
}
try {
@@ -266,7 +264,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
requestId
)
const shouldPreflightEnvVars = isAsyncMode && isTriggerDevEnabled
const preprocessResult = await preprocessExecution({
workflowId,
userId,
@@ -275,9 +272,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
requestId,
checkDeployment: !shouldUseDraftState,
loggingSession,
preflightEnvVars: shouldPreflightEnvVars,
useDraftState: shouldUseDraftState,
envUserId: isClientSession ? userId : undefined,
})
if (!preprocessResult.success) {
@@ -309,7 +303,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
userId: actorUserId,
input,
triggerType: loggingTriggerType,
preflighted: shouldPreflightEnvVars,
})
}

File diff suppressed because it is too large Load Diff

View File

@@ -38,6 +38,7 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
import type { CredentialRequirement } from '@/lib/workflows/credentials/credential-extractor'
import { WorkflowPreview } from '@/app/workspace/[workspaceId]/w/components/preview'
import { getBlock } from '@/blocks/registry'
import { useAdminStatus } from '@/hooks/queries/admin-status'
import { useStarTemplate, useTemplate } from '@/hooks/queries/templates'
const logger = createLogger('TemplateDetails')
@@ -150,7 +151,8 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
const [currentUserOrgRoles, setCurrentUserOrgRoles] = useState<
Array<{ organizationId: string; role: string }>
>([])
const [isSuperUser, setIsSuperUser] = useState(false)
const { data: adminStatus } = useAdminStatus(!!session?.user?.id)
const hasAdminPrivileges = adminStatus?.hasAdminPrivileges ?? false
const [isUsing, setIsUsing] = useState(false)
const [isEditing, setIsEditing] = useState(false)
const [isApproving, setIsApproving] = useState(false)
@@ -188,21 +190,6 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
}
}
const fetchSuperUserStatus = async () => {
if (!currentUserId) return
try {
const response = await fetch('/api/user/super-user')
if (response.ok) {
const data = await response.json()
setIsSuperUser(data.isSuperUser || false)
}
} catch (error) {
logger.error('Error fetching super user status:', error)
}
}
fetchSuperUserStatus()
fetchUserOrganizations()
}, [currentUserId])
@@ -650,7 +637,7 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
{/* Action buttons */}
<div className='flex items-center gap-[8px]'>
{/* Approve/Reject buttons for super users */}
{isSuperUser && template.status === 'pending' && (
{hasAdminPrivileges && template.status === 'pending' && (
<>
<Button
variant='active'
@@ -974,7 +961,7 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
<h3 className='font-sans font-semibold text-base text-foreground'>
About the Creator
</h3>
{isSuperUser && template.creator && (
{hasAdminPrivileges && template.creator && (
<Button
variant={template.creator.verified ? 'active' : 'default'}
onClick={handleToggleVerification}

View File

@@ -77,7 +77,7 @@ export function DeleteChunkModal({
</p>
</ModalBody>
<ModalFooter>
<Button variant='default' disabled={isDeleting} onClick={onClose}>
<Button variant='active' disabled={isDeleting} onClick={onClose}>
Cancel
</Button>
<Button variant='destructive' onClick={handleDeleteChunk} disabled={isDeleting}>

View File

@@ -392,7 +392,7 @@ export function DocumentTagsModal({
return (
<Modal open={open} onOpenChange={handleClose}>
<ModalContent size='sm'>
<ModalContent>
<ModalHeader>
<div className='flex items-center justify-between'>
<span>Document Tags</span>
@@ -486,7 +486,7 @@ export function DocumentTagsModal({
/>
)}
{tagNameConflict && (
<span className='text-[12px] text-[var(--text-error)]'>
<span className='text-[11px] text-[var(--text-error)]'>
A tag with this name already exists
</span>
)}
@@ -639,7 +639,7 @@ export function DocumentTagsModal({
/>
)}
{tagNameConflict && (
<span className='text-[12px] text-[var(--text-error)]'>
<span className='text-[11px] text-[var(--text-error)]'>
A tag with this name already exists
</span>
)}

View File

@@ -48,7 +48,7 @@ import { ActionBar } from '@/app/workspace/[workspaceId]/knowledge/[id]/componen
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useDocument, useDocumentChunks, useKnowledgeBase } from '@/hooks/kb/use-knowledge'
import { knowledgeKeys, useDocumentChunkSearchQuery } from '@/hooks/queries/knowledge'
import { knowledgeKeys } from '@/hooks/queries/knowledge'
const logger = createLogger('Document')
@@ -313,22 +313,69 @@ export function Document({
isFetching: isFetchingChunks,
} = useDocumentChunks(knowledgeBaseId, documentId, currentPageFromURL)
const {
data: searchResults = [],
isLoading: isLoadingSearch,
error: searchQueryError,
} = useDocumentChunkSearchQuery(
{
knowledgeBaseId,
documentId,
search: debouncedSearchQuery,
},
{
enabled: Boolean(debouncedSearchQuery.trim()),
}
)
const [searchResults, setSearchResults] = useState<ChunkData[]>([])
const [isLoadingSearch, setIsLoadingSearch] = useState(false)
const [searchError, setSearchError] = useState<string | null>(null)
const searchError = searchQueryError instanceof Error ? searchQueryError.message : null
useEffect(() => {
if (!debouncedSearchQuery.trim()) {
setSearchResults([])
setSearchError(null)
return
}
let isMounted = true
const searchAllChunks = async () => {
try {
setIsLoadingSearch(true)
setSearchError(null)
const allResults: ChunkData[] = []
let hasMore = true
let offset = 0
const limit = 100
while (hasMore && isMounted) {
const response = await fetch(
`/api/knowledge/${knowledgeBaseId}/documents/${documentId}/chunks?search=${encodeURIComponent(debouncedSearchQuery)}&limit=${limit}&offset=${offset}`
)
if (!response.ok) {
throw new Error('Search failed')
}
const result = await response.json()
if (result.success && result.data) {
allResults.push(...result.data)
hasMore = result.pagination?.hasMore || false
offset += limit
} else {
hasMore = false
}
}
if (isMounted) {
setSearchResults(allResults)
}
} catch (err) {
if (isMounted) {
setSearchError(err instanceof Error ? err.message : 'Search failed')
}
} finally {
if (isMounted) {
setIsLoadingSearch(false)
}
}
}
searchAllChunks()
return () => {
isMounted = false
}
}, [debouncedSearchQuery, knowledgeBaseId, documentId])
const [selectedChunks, setSelectedChunks] = useState<Set<string>>(new Set())
const [selectedChunk, setSelectedChunk] = useState<ChunkData | null>(null)
@@ -1161,19 +1208,15 @@ export function Document({
<ModalHeader>Delete Document</ModalHeader>
<ModalBody>
<p className='text-[12px] text-[var(--text-secondary)]'>
Are you sure you want to delete{' '}
<span className='font-medium text-[var(--text-primary)]'>
{effectiveDocumentName}
</span>
? This will permanently delete the document and all {documentData?.chunkCount ?? 0}{' '}
chunk
Are you sure you want to delete "{effectiveDocumentName}"? This will permanently
delete the document and all {documentData?.chunkCount ?? 0} chunk
{documentData?.chunkCount === 1 ? '' : 's'} within it.{' '}
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
</p>
</ModalBody>
<ModalFooter>
<Button
variant='default'
variant='active'
onClick={() => setShowDeleteDocumentDialog(false)}
disabled={isDeletingDocument}
>

View File

@@ -1523,16 +1523,15 @@ export function KnowledgeBase({
<ModalHeader>Delete Knowledge Base</ModalHeader>
<ModalBody>
<p className='text-[12px] text-[var(--text-secondary)]'>
Are you sure you want to delete{' '}
<span className='font-medium text-[var(--text-primary)]'>{knowledgeBaseName}</span>?
This will permanently delete the knowledge base and all {pagination.total} document
Are you sure you want to delete "{knowledgeBaseName}"? This will permanently delete
the knowledge base and all {pagination.total} document
{pagination.total === 1 ? '' : 's'} within it.{' '}
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
</p>
</ModalBody>
<ModalFooter>
<Button
variant='default'
variant='active'
onClick={() => setShowDeleteDialog(false)}
disabled={isDeleting}
>
@@ -1550,16 +1549,14 @@ export function KnowledgeBase({
<ModalHeader>Delete Document</ModalHeader>
<ModalBody>
<p className='text-[12px] text-[var(--text-secondary)]'>
Are you sure you want to delete{' '}
<span className='font-medium text-[var(--text-primary)]'>
{documents.find((doc) => doc.id === documentToDelete)?.filename ?? 'this document'}
</span>
? <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
Are you sure you want to delete "
{documents.find((doc) => doc.id === documentToDelete)?.filename ?? 'this document'}"?{' '}
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
</p>
</ModalBody>
<ModalFooter>
<Button
variant='default'
variant='active'
onClick={() => {
setShowDeleteDocumentModal(false)
setDocumentToDelete(null)
@@ -1585,7 +1582,7 @@ export function KnowledgeBase({
</p>
</ModalBody>
<ModalFooter>
<Button variant='default' onClick={() => setShowBulkDeleteModal(false)}>
<Button variant='active' onClick={() => setShowBulkDeleteModal(false)}>
Cancel
</Button>
<Button variant='destructive' onClick={confirmBulkDelete} disabled={isBulkOperating}>

View File

@@ -221,14 +221,14 @@ export function AddDocumentsModal({
return (
<Modal open={open} onOpenChange={handleClose}>
<ModalContent size='md'>
<ModalContent>
<ModalHeader>Add Documents</ModalHeader>
<ModalBody>
<div className='min-h-0 flex-1 overflow-y-auto'>
<div className='space-y-[12px]'>
{fileError && (
<p className='text-[12px] text-[var(--text-error)] leading-tight'>{fileError}</p>
<p className='text-[11px] text-[var(--text-error)] leading-tight'>{fileError}</p>
)}
<div className='flex flex-col gap-[8px]'>
@@ -336,7 +336,7 @@ export function AddDocumentsModal({
<ModalFooter>
<div className='flex w-full items-center justify-between gap-[12px]'>
{uploadError ? (
<p className='min-w-0 flex-1 truncate text-[12px] text-[var(--text-error)] leading-tight'>
<p className='min-w-0 flex-1 truncate text-[11px] text-[var(--text-error)] leading-tight'>
{uploadError.message}
</p>
) : (

View File

@@ -306,7 +306,7 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
return (
<>
<Modal open={open} onOpenChange={handleClose}>
<ModalContent size='sm'>
<ModalContent>
<ModalHeader>
<div className='flex items-center justify-between'>
<span>Tags</span>
@@ -400,7 +400,7 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
}}
/>
{tagNameConflict && (
<span className='text-[12px] text-[var(--text-error)]'>
<span className='text-[11px] text-[var(--text-error)]'>
A tag with this name already exists
</span>
)}
@@ -417,7 +417,7 @@ export function BaseTagsModal({ open, onOpenChange, knowledgeBaseId }: BaseTagsM
placeholder='Select type'
/>
{!hasAvailableSlots(createTagForm.fieldType) && (
<span className='text-[12px] text-[var(--text-error)]'>
<span className='text-[11px] text-[var(--text-error)]'>
No available slots for this type. Choose a different type.
</span>
)}

View File

@@ -77,7 +77,7 @@ export function RenameDocumentModal({
return (
<Modal open={open} onOpenChange={onOpenChange}>
<ModalContent size='sm'>
<ModalContent>
<ModalHeader>Rename Document</ModalHeader>
<form onSubmit={handleSubmit} className='flex min-h-0 flex-1 flex-col'>
<ModalBody className='!pb-[16px]'>
@@ -108,7 +108,7 @@ export function RenameDocumentModal({
<ModalFooter>
<div className='flex w-full items-center justify-between gap-[12px]'>
{error ? (
<p className='min-w-0 flex-1 truncate text-[12px] text-[var(--text-error)] leading-tight'>
<p className='min-w-0 flex-1 truncate text-[11px] text-[var(--text-error)] leading-tight'>
{error}
</p>
) : (

View File

@@ -332,7 +332,7 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
return (
<Modal open={open} onOpenChange={handleClose}>
<ModalContent size='lg'>
<ModalContent>
<ModalHeader>Create Knowledge Base</ModalHeader>
<form onSubmit={handleSubmit(onSubmit)} className='flex min-h-0 flex-1 flex-col'>
@@ -528,7 +528,7 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
)}
{fileError && (
<p className='text-[12px] text-[var(--text-error)] leading-tight'>{fileError}</p>
<p className='text-[11px] text-[var(--text-error)] leading-tight'>{fileError}</p>
)}
</div>
</div>
@@ -537,7 +537,7 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
<ModalFooter>
<div className='flex w-full items-center justify-between gap-[12px]'>
{submitStatus?.type === 'error' || uploadError ? (
<p className='min-w-0 flex-1 truncate text-[12px] text-[var(--text-error)] leading-tight'>
<p className='min-w-0 flex-1 truncate text-[11px] text-[var(--text-error)] leading-tight'>
{uploadError?.message || submitStatus?.message}
</p>
) : (

View File

@@ -38,7 +38,7 @@ export function DeleteKnowledgeBaseModal({
}: DeleteKnowledgeBaseModalProps) {
return (
<Modal open={isOpen} onOpenChange={onClose}>
<ModalContent size='sm'>
<ModalContent className='w-[400px]'>
<ModalHeader>Delete Knowledge Base</ModalHeader>
<ModalBody>
<p className='text-[12px] text-[var(--text-secondary)]'>
@@ -55,7 +55,7 @@ export function DeleteKnowledgeBaseModal({
</p>
</ModalBody>
<ModalFooter>
<Button variant='default' onClick={onClose} disabled={isDeleting}>
<Button variant='active' onClick={onClose} disabled={isDeleting}>
Cancel
</Button>
<Button variant='destructive' onClick={onConfirm} disabled={isDeleting}>

View File

@@ -98,7 +98,7 @@ export function EditKnowledgeBaseModal({
return (
<Modal open={open} onOpenChange={onOpenChange}>
<ModalContent size='sm'>
<ModalContent>
<ModalHeader>Edit Knowledge Base</ModalHeader>
<form onSubmit={handleSubmit(onSubmit)} className='flex min-h-0 flex-1 flex-col'>
@@ -118,7 +118,7 @@ export function EditKnowledgeBaseModal({
data-form-type='other'
/>
{errors.name && (
<p className='text-[12px] text-[var(--text-error)]'>{errors.name.message}</p>
<p className='text-[11px] text-[var(--text-error)]'>{errors.name.message}</p>
)}
</div>
@@ -132,7 +132,7 @@ export function EditKnowledgeBaseModal({
className={cn(errors.description && 'border-[var(--text-error)]')}
/>
{errors.description && (
<p className='text-[12px] text-[var(--text-error)]'>
<p className='text-[11px] text-[var(--text-error)]'>
{errors.description.message}
</p>
)}
@@ -143,7 +143,7 @@ export function EditKnowledgeBaseModal({
<ModalFooter>
<div className='flex w-full items-center justify-between gap-[12px]'>
{error ? (
<p className='min-w-0 flex-1 truncate text-[12px] text-[var(--text-error)] leading-tight'>
<p className='min-w-0 flex-1 truncate text-[11px] text-[var(--text-error)] leading-tight'>
{error}
</p>
) : (

View File

@@ -112,7 +112,7 @@ export function SlackChannelSelector({
{selectedChannel.isPrivate ? 'Private' : 'Public'} channel: #{selectedChannel.name}
</p>
)}
{error && <p className='text-[12px] text-[var(--text-error)]'>{error}</p>}
{error && <p className='text-[11px] text-[var(--text-error)]'>{error}</p>}
</div>
)
}

View File

@@ -1,10 +1,9 @@
'use client'
import { useMemo } from 'react'
import { useEffect, useMemo, useState } from 'react'
import { X } from 'lucide-react'
import { Badge, Combobox, type ComboboxOption } from '@/components/emcn'
import { Skeleton } from '@/components/ui'
import { useWorkflows } from '@/hooks/queries/workflows'
interface WorkflowSelectorProps {
workspaceId: string
@@ -26,9 +25,26 @@ export function WorkflowSelector({
onChange,
error,
}: WorkflowSelectorProps) {
const { data: workflows = [], isPending: isLoading } = useWorkflows(workspaceId, {
syncRegistry: false,
})
const [workflows, setWorkflows] = useState<Array<{ id: string; name: string }>>([])
const [isLoading, setIsLoading] = useState(true)
useEffect(() => {
const load = async () => {
try {
setIsLoading(true)
const response = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
if (response.ok) {
const data = await response.json()
setWorkflows(data.data || [])
}
} catch {
setWorkflows([])
} finally {
setIsLoading(false)
}
}
load()
}, [workspaceId])
const options: ComboboxOption[] = useMemo(() => {
return workflows.map((w) => ({

View File

@@ -634,7 +634,7 @@ export function NotificationSettings({
}}
/>
{formErrors.webhookUrl && (
<p className='text-[12px] text-[var(--text-error)]'>{formErrors.webhookUrl}</p>
<p className='text-[11px] text-[var(--text-error)]'>{formErrors.webhookUrl}</p>
)}
</div>
<div className='flex flex-col gap-[8px]'>
@@ -660,7 +660,7 @@ export function NotificationSettings({
placeholderWithTags='Add email'
/>
{formErrors.emailRecipients && (
<p className='text-[12px] text-[var(--text-error)]'>{formErrors.emailRecipients}</p>
<p className='text-[11px] text-[var(--text-error)]'>{formErrors.emailRecipients}</p>
)}
</div>
)}
@@ -707,7 +707,7 @@ export function NotificationSettings({
/>
)}
{formErrors.slackAccountId && (
<p className='text-[12px] text-[var(--text-error)]'>
<p className='text-[11px] text-[var(--text-error)]'>
{formErrors.slackAccountId}
</p>
)}
@@ -776,7 +776,7 @@ export function NotificationSettings({
allOptionLabel='All levels'
/>
{formErrors.levelFilter && (
<p className='text-[12px] text-[var(--text-error)]'>{formErrors.levelFilter}</p>
<p className='text-[11px] text-[var(--text-error)]'>{formErrors.levelFilter}</p>
)}
</div>
@@ -822,7 +822,7 @@ export function NotificationSettings({
allOptionLabel='All triggers'
/>
{formErrors.triggerFilter && (
<p className='text-[12px] text-[var(--text-error)]'>{formErrors.triggerFilter}</p>
<p className='text-[11px] text-[var(--text-error)]'>{formErrors.triggerFilter}</p>
)}
</div>
@@ -938,7 +938,7 @@ export function NotificationSettings({
}
/>
{formErrors.consecutiveFailures && (
<p className='text-[12px] text-[var(--text-error)]'>
<p className='text-[11px] text-[var(--text-error)]'>
{formErrors.consecutiveFailures}
</p>
)}
@@ -962,7 +962,7 @@ export function NotificationSettings({
}
/>
{formErrors.failureRatePercent && (
<p className='text-[12px] text-[var(--text-error)]'>
<p className='text-[11px] text-[var(--text-error)]'>
{formErrors.failureRatePercent}
</p>
)}
@@ -982,7 +982,7 @@ export function NotificationSettings({
}
/>
{formErrors.windowHours && (
<p className='text-[12px] text-[var(--text-error)]'>{formErrors.windowHours}</p>
<p className='text-[11px] text-[var(--text-error)]'>{formErrors.windowHours}</p>
)}
</div>
</div>
@@ -1004,7 +1004,7 @@ export function NotificationSettings({
}
/>
{formErrors.durationThresholdMs && (
<p className='text-[12px] text-[var(--text-error)]'>
<p className='text-[11px] text-[var(--text-error)]'>
{formErrors.durationThresholdMs}
</p>
)}
@@ -1028,7 +1028,7 @@ export function NotificationSettings({
}
/>
{formErrors.latencySpikePercent && (
<p className='text-[12px] text-[var(--text-error)]'>
<p className='text-[11px] text-[var(--text-error)]'>
{formErrors.latencySpikePercent}
</p>
)}
@@ -1048,7 +1048,7 @@ export function NotificationSettings({
}
/>
{formErrors.windowHours && (
<p className='text-[12px] text-[var(--text-error)]'>{formErrors.windowHours}</p>
<p className='text-[11px] text-[var(--text-error)]'>{formErrors.windowHours}</p>
)}
</div>
</div>
@@ -1071,7 +1071,7 @@ export function NotificationSettings({
}
/>
{formErrors.costThresholdDollars && (
<p className='text-[12px] text-[var(--text-error)]'>
<p className='text-[11px] text-[var(--text-error)]'>
{formErrors.costThresholdDollars}
</p>
)}
@@ -1094,7 +1094,7 @@ export function NotificationSettings({
}
/>
{formErrors.inactivityHours && (
<p className='text-[12px] text-[var(--text-error)]'>{formErrors.inactivityHours}</p>
<p className='text-[11px] text-[var(--text-error)]'>{formErrors.inactivityHours}</p>
)}
</div>
)}
@@ -1116,7 +1116,7 @@ export function NotificationSettings({
}
/>
{formErrors.errorCountThreshold && (
<p className='text-[12px] text-[var(--text-error)]'>
<p className='text-[11px] text-[var(--text-error)]'>
{formErrors.errorCountThreshold}
</p>
)}
@@ -1136,7 +1136,7 @@ export function NotificationSettings({
}
/>
{formErrors.windowHours && (
<p className='text-[12px] text-[var(--text-error)]'>{formErrors.windowHours}</p>
<p className='text-[11px] text-[var(--text-error)]'>{formErrors.windowHours}</p>
)}
</div>
</div>
@@ -1261,7 +1261,7 @@ export function NotificationSettings({
</Modal>
<Modal open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
<ModalContent size='sm'>
<ModalContent className='w-[400px]'>
<ModalHeader>Delete Notification</ModalHeader>
<ModalBody>
<p className='text-[12px] text-[var(--text-secondary)]'>

View File

@@ -39,9 +39,9 @@ export default async function TemplatesPage({ params }: TemplatesPageProps) {
redirect(`/workspace/${workspaceId}`)
}
// Determine effective super user (DB flag AND UI mode enabled)
// Determine effective super user (admin/superadmin role AND UI mode enabled)
const currentUser = await db
.select({ isSuperUser: user.isSuperUser })
.select({ role: user.role })
.from(user)
.where(eq(user.id, session.user.id))
.limit(1)
@@ -51,7 +51,7 @@ export default async function TemplatesPage({ params }: TemplatesPageProps) {
.where(eq(settings.userId, session.user.id))
.limit(1)
const isSuperUser = currentUser[0]?.isSuperUser || false
const isSuperUser = currentUser[0]?.role === 'admin' || currentUser[0]?.role === 'superadmin'
const superUserModeEnabled = userSettings[0]?.superUserModeEnabled ?? true
const effectiveSuperUser = isSuperUser && superUserModeEnabled

View File

@@ -8,7 +8,6 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
/**
* Block information for context menu actions
@@ -74,7 +73,9 @@ export function BlockMenu({
const allEnabled = selectedBlocks.every((b) => b.enabled)
const allDisabled = selectedBlocks.every((b) => !b.enabled)
const hasStarterBlock = selectedBlocks.some((b) => isValidStartBlockType(b.type))
const hasStarterBlock = selectedBlocks.some(
(b) => b.type === 'starter' || b.type === 'start_trigger'
)
const allNoteBlocks = selectedBlocks.every((b) => b.type === 'note')
const isSubflow =
isSingleBlock && (selectedBlocks[0]?.type === 'loop' || selectedBlocks[0]?.type === 'parallel')

View File

@@ -94,9 +94,6 @@ interface ProcessedAttachment {
dataUrl: string
}
/** Timeout for FileReader operations in milliseconds */
const FILE_READ_TIMEOUT_MS = 60000
/**
* Reads files and converts them to data URLs for image display
* @param chatFiles - Array of chat files to process
@@ -110,37 +107,8 @@ const processFileAttachments = async (chatFiles: ChatFile[]): Promise<ProcessedA
try {
dataUrl = await new Promise<string>((resolve, reject) => {
const reader = new FileReader()
let settled = false
const timeoutId = setTimeout(() => {
if (!settled) {
settled = true
reader.abort()
reject(new Error(`File read timed out after ${FILE_READ_TIMEOUT_MS}ms`))
}
}, FILE_READ_TIMEOUT_MS)
reader.onload = () => {
if (!settled) {
settled = true
clearTimeout(timeoutId)
resolve(reader.result as string)
}
}
reader.onerror = () => {
if (!settled) {
settled = true
clearTimeout(timeoutId)
reject(reader.error)
}
}
reader.onabort = () => {
if (!settled) {
settled = true
clearTimeout(timeoutId)
reject(new Error('File read aborted'))
}
}
reader.onload = () => resolve(reader.result as string)
reader.onerror = reject
reader.readAsDataURL(file.file)
})
} catch (error) {
@@ -234,6 +202,7 @@ export function Chat() {
const triggerWorkflowUpdate = useWorkflowStore((state) => state.triggerUpdate)
const setSubBlockValue = useSubBlockStore((state) => state.setValue)
// Chat state (UI and messages from unified store)
const {
isChatOpen,
chatPosition,
@@ -261,16 +230,19 @@ export function Chat() {
const { data: session } = useSession()
const { addToQueue } = useOperationQueue()
// Local state
const [chatMessage, setChatMessage] = useState('')
const [promptHistory, setPromptHistory] = useState<string[]>([])
const [historyIndex, setHistoryIndex] = useState(-1)
const [moreMenuOpen, setMoreMenuOpen] = useState(false)
// Refs
const inputRef = useRef<HTMLInputElement>(null)
const timeoutRef = useRef<NodeJS.Timeout | null>(null)
const streamReaderRef = useRef<ReadableStreamDefaultReader<Uint8Array> | null>(null)
const preventZoomRef = usePreventZoom()
// File upload hook
const {
chatFiles,
uploadErrors,
@@ -285,38 +257,6 @@ export function Chat() {
handleDrop,
} = useChatFileUpload()
const filePreviewUrls = useRef<Map<string, string>>(new Map())
const getFilePreviewUrl = useCallback((file: ChatFile): string | null => {
if (!file.type.startsWith('image/')) return null
const existing = filePreviewUrls.current.get(file.id)
if (existing) return existing
const url = URL.createObjectURL(file.file)
filePreviewUrls.current.set(file.id, url)
return url
}, [])
useEffect(() => {
const currentFileIds = new Set(chatFiles.map((f) => f.id))
const urlMap = filePreviewUrls.current
for (const [fileId, url] of urlMap.entries()) {
if (!currentFileIds.has(fileId)) {
URL.revokeObjectURL(url)
urlMap.delete(fileId)
}
}
return () => {
for (const url of urlMap.values()) {
URL.revokeObjectURL(url)
}
urlMap.clear()
}
}, [chatFiles])
/**
* Resolves the unified start block for chat execution, if available.
*/
@@ -382,11 +322,13 @@ export function Chat() {
const shouldShowConfigureStartInputsButton =
Boolean(startBlockId) && missingStartReservedFields.length > 0
// Get actual position (default if not set)
const actualPosition = useMemo(
() => getChatPosition(chatPosition, chatWidth, chatHeight),
[chatPosition, chatWidth, chatHeight]
)
// Drag hook
const { handleMouseDown } = useFloatDrag({
position: actualPosition,
width: chatWidth,
@@ -394,6 +336,7 @@ export function Chat() {
onPositionChange: setChatPosition,
})
// Boundary sync hook - keeps chat within bounds when layout changes
useFloatBoundarySync({
isOpen: isChatOpen,
position: actualPosition,
@@ -402,6 +345,7 @@ export function Chat() {
onPositionChange: setChatPosition,
})
// Resize hook - enables resizing from all edges and corners
const {
cursor: resizeCursor,
handleMouseMove: handleResizeMouseMove,
@@ -415,11 +359,13 @@ export function Chat() {
onDimensionsChange: setChatDimensions,
})
// Get output entries from console
const outputEntries = useMemo(() => {
if (!activeWorkflowId) return []
return entries.filter((entry) => entry.workflowId === activeWorkflowId && entry.output)
}, [entries, activeWorkflowId])
// Get filtered messages for current workflow
const workflowMessages = useMemo(() => {
if (!activeWorkflowId) return []
return messages
@@ -427,11 +373,14 @@ export function Chat() {
.sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime())
}, [messages, activeWorkflowId])
// Check if any message is currently streaming
const isStreaming = useMemo(() => {
// Match copilot semantics: only treat as streaming if the LAST message is streaming
const lastMessage = workflowMessages[workflowMessages.length - 1]
return Boolean(lastMessage?.isStreaming)
}, [workflowMessages])
// Map chat messages to copilot message format (type -> role) for scroll hook
const messagesForScrollHook = useMemo(() => {
return workflowMessages.map((msg) => ({
...msg,
@@ -439,6 +388,8 @@ export function Chat() {
}))
}, [workflowMessages])
// Scroll management hook - reuse copilot's implementation
// Use immediate scroll behavior to keep the view pinned to the bottom during streaming
const { scrollAreaRef, scrollToBottom } = useScrollManagement(
messagesForScrollHook,
isStreaming,
@@ -447,6 +398,7 @@ export function Chat() {
}
)
// Memoize user messages for performance
const userMessages = useMemo(() => {
return workflowMessages
.filter((msg) => msg.type === 'user')
@@ -454,6 +406,7 @@ export function Chat() {
.filter((content): content is string => typeof content === 'string')
}, [workflowMessages])
// Update prompt history when workflow changes
useEffect(() => {
if (!activeWorkflowId) {
setPromptHistory([])
@@ -466,7 +419,7 @@ export function Chat() {
}, [activeWorkflowId, userMessages])
/**
* Auto-scroll to bottom when messages load and chat is open
* Auto-scroll to bottom when messages load
*/
useEffect(() => {
if (workflowMessages.length > 0 && isChatOpen) {
@@ -474,6 +427,7 @@ export function Chat() {
}
}, [workflowMessages.length, scrollToBottom, isChatOpen])
// Get selected workflow outputs (deduplicated)
const selectedOutputs = useMemo(() => {
if (!activeWorkflowId) return []
const selected = selectedWorkflowOutputs[activeWorkflowId]
@@ -494,6 +448,7 @@ export function Chat() {
}, delay)
}, [])
// Cleanup on unmount
useEffect(() => {
return () => {
timeoutRef.current && clearTimeout(timeoutRef.current)
@@ -501,6 +456,7 @@ export function Chat() {
}
}, [])
// React to execution cancellation from run button
useEffect(() => {
if (!isExecuting && isStreaming) {
const lastMessage = workflowMessages[workflowMessages.length - 1]
@@ -544,6 +500,7 @@ export function Chat() {
const chunk = decoder.decode(value, { stream: true })
buffer += chunk
// Process only complete SSE messages; keep any partial trailing data in buffer
const separatorIndex = buffer.lastIndexOf('\n\n')
if (separatorIndex === -1) {
continue
@@ -593,6 +550,7 @@ export function Chat() {
}
finalizeMessageStream(responseMessageId)
} finally {
// Only clear ref if it's still our reader (prevents clobbering a new stream)
if (streamReaderRef.current === reader) {
streamReaderRef.current = null
}
@@ -995,7 +953,7 @@ export function Chat() {
<div className='flex items-start gap-2'>
<AlertCircle className='mt-0.5 h-3 w-3 shrink-0 text-[var(--text-error)]' />
<div className='flex-1'>
<div className='mb-1 font-medium text-[12px] text-[var(--text-error)]'>
<div className='mb-1 font-medium text-[11px] text-[var(--text-error)]'>
File upload error
</div>
<div className='space-y-1'>
@@ -1021,7 +979,8 @@ export function Chat() {
{chatFiles.length > 0 && (
<div className='mt-[4px] flex gap-[6px] overflow-x-auto [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden'>
{chatFiles.map((file) => {
const previewUrl = getFilePreviewUrl(file)
const isImage = file.type.startsWith('image/')
const previewUrl = isImage ? URL.createObjectURL(file.file) : null
return (
<div
@@ -1038,6 +997,7 @@ export function Chat() {
src={previewUrl}
alt={file.name}
className='h-full w-full object-cover'
onLoad={() => URL.revokeObjectURL(previewUrl)}
/>
) : (
<div className='min-w-0 flex-1'>

View File

@@ -113,17 +113,16 @@ export function ChatMessage({ message }: ChatMessageProps) {
{message.attachments && message.attachments.length > 0 && (
<div className='mb-2 flex flex-wrap gap-[6px]'>
{message.attachments.map((attachment) => {
const isImage = attachment.type.startsWith('image/')
const hasValidDataUrl =
attachment.dataUrl?.trim() && attachment.dataUrl.startsWith('data:')
// Only treat as displayable image if we have both image type AND valid data URL
const canDisplayAsImage = attachment.type.startsWith('image/') && hasValidDataUrl
return (
<div
key={attachment.id}
className={`group relative flex-shrink-0 overflow-hidden rounded-[6px] bg-[var(--surface-2)] ${
hasValidDataUrl ? 'cursor-pointer' : ''
} ${canDisplayAsImage ? 'h-[40px] w-[40px]' : 'flex min-w-[80px] max-w-[120px] items-center justify-center px-[8px] py-[2px]'}`}
} ${isImage ? 'h-[40px] w-[40px]' : 'flex min-w-[80px] max-w-[120px] items-center justify-center px-[8px] py-[2px]'}`}
onClick={(e) => {
if (hasValidDataUrl) {
e.preventDefault()
@@ -132,7 +131,7 @@ export function ChatMessage({ message }: ChatMessageProps) {
}
}}
>
{canDisplayAsImage ? (
{isImage && hasValidDataUrl ? (
<img
src={attachment.dataUrl}
alt={attachment.name}

View File

@@ -331,16 +331,13 @@ export function OutputSelect({
return (
<Combobox
size='sm'
className='!w-fit !py-[2px] min-w-[100px] rounded-[6px] px-[9px]'
className='!w-fit !py-[2px] [&>svg]:!ml-[4px] [&>svg]:!h-3 [&>svg]:!w-3 [&>span]:!text-[var(--text-secondary)] min-w-[100px] rounded-[6px] bg-transparent px-[9px] hover:bg-[var(--surface-5)] dark:hover:border-[var(--surface-6)] dark:hover:bg-transparent [&>span]:text-center'
groups={comboboxGroups}
options={[]}
multiSelect
multiSelectValues={normalizedSelectedValues}
onMultiSelectChange={onOutputSelect}
placeholder={selectedDisplayText}
overlayContent={
<span className='truncate text-[var(--text-primary)]'>{selectedDisplayText}</span>
}
disabled={disabled || workflowOutputs.length === 0}
align={align}
maxHeight={maxHeight}

View File

@@ -24,11 +24,10 @@ export function useChatFileUpload() {
/**
* Validate and add files
* Uses functional state update to avoid stale closure issues with rapid file additions
*/
const addFiles = useCallback((files: File[]) => {
setChatFiles((currentFiles) => {
const remainingSlots = Math.max(0, MAX_FILES - currentFiles.length)
const addFiles = useCallback(
(files: File[]) => {
const remainingSlots = Math.max(0, MAX_FILES - chatFiles.length)
const candidateFiles = files.slice(0, remainingSlots)
const errors: string[] = []
const validNewFiles: ChatFile[] = []
@@ -40,14 +39,11 @@ export function useChatFileUpload() {
continue
}
// Check for duplicates against current files and newly added valid files
const isDuplicateInCurrent = currentFiles.some(
// Check for duplicates
const isDuplicate = chatFiles.some(
(existingFile) => existingFile.name === file.name && existingFile.size === file.size
)
const isDuplicateInNew = validNewFiles.some(
(newFile) => newFile.name === file.name && newFile.size === file.size
)
if (isDuplicateInCurrent || isDuplicateInNew) {
if (isDuplicate) {
errors.push(`${file.name} already added`)
continue
}
@@ -61,20 +57,20 @@ export function useChatFileUpload() {
})
}
// Update errors outside the state setter to avoid nested state updates
if (errors.length > 0) {
// Use setTimeout to avoid state update during render
setTimeout(() => setUploadErrors(errors), 0)
} else if (validNewFiles.length > 0) {
setTimeout(() => setUploadErrors([]), 0)
setUploadErrors(errors)
}
if (validNewFiles.length > 0) {
return [...currentFiles, ...validNewFiles]
setChatFiles([...chatFiles, ...validNewFiles])
// Clear errors when files are successfully added
if (errors.length === 0) {
setUploadErrors([])
}
}
return currentFiles
})
}, [])
},
[chatFiles]
)
/**
* Remove a file

View File

@@ -2,6 +2,7 @@
import { memo, useMemo } from 'react'
import { useViewport } from 'reactflow'
import { useSession } from '@/lib/auth/auth-client'
import { getUserColor } from '@/lib/workspaces/colors'
import { usePreventZoom } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useSocket } from '@/app/workspace/providers/socket-provider'
@@ -19,31 +20,30 @@ interface CursorRenderData {
}
const CursorsComponent = () => {
const { presenceUsers, currentSocketId } = useSocket()
const { presenceUsers } = useSocket()
const viewport = useViewport()
const session = useSession()
const currentUserId = session.data?.user?.id
const preventZoomRef = usePreventZoom()
const cursors = useMemo<CursorRenderData[]>(() => {
return presenceUsers
.filter((user): user is typeof user & { cursor: CursorPoint } => Boolean(user.cursor))
.filter((user) => user.socketId !== currentSocketId)
.filter((user) => user.userId !== currentUserId)
.map((user) => ({
id: user.socketId,
name: user.userName?.trim() || 'Collaborator',
cursor: user.cursor,
color: getUserColor(user.userId),
}))
}, [currentSocketId, presenceUsers])
}, [currentUserId, presenceUsers])
if (!cursors.length) {
return null
}
return (
<div
ref={preventZoomRef}
className='pointer-events-none absolute inset-0 z-[5] select-none overflow-hidden'
>
<div ref={preventZoomRef} className='pointer-events-none absolute inset-0 z-30 select-none'>
{cursors.map(({ id, name, cursor, color }) => {
const x = cursor.x * viewport.zoom + viewport.x
const y = cursor.y * viewport.zoom + viewport.y

View File

@@ -9,6 +9,8 @@ import { useCopilotStore, usePanelStore } from '@/stores/panel'
import { useTerminalStore } from '@/stores/terminal'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('DiffControls')
const NOTIFICATION_WIDTH = 240
@@ -17,22 +19,26 @@ const NOTIFICATION_GAP = 16
export const DiffControls = memo(function DiffControls() {
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
const isPanelResizing = usePanelStore((state) => state.isResizing)
const { isDiffReady, hasActiveDiff, acceptChanges, rejectChanges } = useWorkflowDiffStore(
useCallback(
(state) => ({
isDiffReady: state.isDiffReady,
hasActiveDiff: state.hasActiveDiff,
acceptChanges: state.acceptChanges,
rejectChanges: state.rejectChanges,
}),
[]
const { isDiffReady, hasActiveDiff, acceptChanges, rejectChanges, baselineWorkflow } =
useWorkflowDiffStore(
useCallback(
(state) => ({
isDiffReady: state.isDiffReady,
hasActiveDiff: state.hasActiveDiff,
acceptChanges: state.acceptChanges,
rejectChanges: state.rejectChanges,
baselineWorkflow: state.baselineWorkflow,
}),
[]
)
)
)
const { updatePreviewToolCallState } = useCopilotStore(
const { updatePreviewToolCallState, currentChat, messages } = useCopilotStore(
useCallback(
(state) => ({
updatePreviewToolCallState: state.updatePreviewToolCallState,
currentChat: state.currentChat,
messages: state.messages,
}),
[]
)
@@ -48,6 +54,154 @@ export const DiffControls = memo(function DiffControls() {
return allNotifications.some((n) => !n.workflowId || n.workflowId === activeWorkflowId)
}, [allNotifications, activeWorkflowId])
/**
 * Creates a copilot checkpoint of the pre-diff workflow state and associates
 * it with the most recent user message, so that accepting diff changes can
 * later be reverted. Designed for fire-and-forget use: returns true on
 * success and false on any failure instead of throwing.
 */
const createCheckpoint = useCallback(async () => {
  if (!activeWorkflowId || !currentChat?.id) {
    logger.warn('Cannot create checkpoint: missing workflowId or chatId', {
      workflowId: activeWorkflowId,
      chatId: currentChat?.id,
    })
    return false
  }

  try {
    logger.info('Creating checkpoint before accepting changes')

    // Use the baseline workflow (state before diff) instead of current state
    // so that reverting to the checkpoint restores the pre-diff state.
    const rawState = baselineWorkflow || useWorkflowStore.getState().getWorkflowState()

    // The baseline already has merged subblock values, but merge again to be
    // safe so all user inputs and subblock data are captured.
    const blocksWithSubblockValues = mergeSubblockState(rawState.blocks, activeWorkflowId)

    // Filter and complete blocks to ensure all required fields are present.
    // This matches the validation logic from /api/workflows/[id]/state.
    const filteredBlocks = Object.entries(blocksWithSubblockValues).reduce(
      (acc, [blockId, block]) => {
        if (block.type && block.name) {
          // Ensure all required fields are present
          acc[blockId] = {
            ...block,
            id: block.id || blockId, // Ensure id field is set
            enabled: block.enabled !== undefined ? block.enabled : true,
            horizontalHandles:
              block.horizontalHandles !== undefined ? block.horizontalHandles : true,
            height: block.height !== undefined ? block.height : 90,
            subBlocks: block.subBlocks || {},
            outputs: block.outputs || {},
            data: block.data || {},
            position: block.position || { x: 0, y: 0 }, // Ensure position exists
          }
        }
        return acc
      },
      {} as typeof rawState.blocks
    )

    // Clean the workflow state - only include valid fields, exclude null/undefined values
    const workflowState = {
      blocks: filteredBlocks,
      edges: rawState.edges || [],
      loops: rawState.loops || {},
      parallels: rawState.parallels || {},
      lastSaved: rawState.lastSaved || Date.now(),
      deploymentStatuses: rawState.deploymentStatuses || {},
    }

    logger.info('Prepared complete workflow state for checkpoint', {
      blocksCount: Object.keys(workflowState.blocks).length,
      edgesCount: workflowState.edges.length,
      loopsCount: Object.keys(workflowState.loops).length,
      parallelsCount: Object.keys(workflowState.parallels).length,
      hasRequiredFields: Object.values(workflowState.blocks).every(
        (block) => block.id && block.type && block.name && block.position
      ),
      hasSubblockValues: Object.values(workflowState.blocks).some((block) =>
        Object.values(block.subBlocks || {}).some(
          (subblock) => subblock.value !== null && subblock.value !== undefined
        )
      ),
      sampleBlock: Object.values(workflowState.blocks)[0],
    })

    // Find the most recent user message ID from the current chat
    const userMessages = messages.filter((msg) => msg.role === 'user')
    const lastUserMessage = userMessages[userMessages.length - 1]
    const messageId = lastUserMessage?.id

    // Log only compact summaries here: dumping the entire message array and
    // duplicate id lists bloats logs and can leak user message content.
    logger.info('Creating checkpoint with message association', {
      totalMessages: messages.length,
      userMessageCount: userMessages.length,
      lastUserMessageId: messageId,
      chatId: currentChat.id,
      allMessageIds: messages.map((m) => ({
        id: m.id,
        role: m.role,
        content: m.content.substring(0, 50),
      })),
      selectedUserMessages: userMessages.map((m) => ({
        id: m.id,
        content: m.content.substring(0, 100),
      })),
      checkpointData: {
        workflowId: activeWorkflowId,
        chatId: currentChat.id,
        messageId: messageId,
        messageFound: !!lastUserMessage,
      },
    })

    const response = await fetch('/api/copilot/checkpoints', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        workflowId: activeWorkflowId,
        chatId: currentChat.id,
        messageId,
        // Server expects the state as a JSON string, not a nested object.
        workflowState: JSON.stringify(workflowState),
      }),
    })

    if (!response.ok) {
      throw new Error(`Failed to create checkpoint: ${response.statusText}`)
    }

    const result = await response.json()
    const newCheckpoint = result.checkpoint

    logger.info('Checkpoint created successfully', {
      messageId,
      chatId: currentChat.id,
      checkpointId: newCheckpoint?.id,
    })

    // Update the copilot store immediately so the checkpoint icon shows up
    // without waiting for a refetch.
    if (newCheckpoint && messageId) {
      const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
      const existingCheckpoints = currentCheckpoints[messageId] || []
      const updatedCheckpoints = {
        ...currentCheckpoints,
        [messageId]: [newCheckpoint, ...existingCheckpoints],
      }
      useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })

      logger.info('Updated copilot store with new checkpoint', {
        messageId,
        checkpointId: newCheckpoint.id,
      })
    }

    return true
  } catch (error) {
    // Swallow and report failure; checkpoint creation must never block
    // the accept flow that triggers it.
    logger.error('Failed to create checkpoint:', error)
    return false
  }
}, [activeWorkflowId, currentChat, messages, baselineWorkflow])
const handleAccept = useCallback(() => {
logger.info('Accepting proposed changes with backup protection')
@@ -84,8 +238,12 @@ export const DiffControls = memo(function DiffControls() {
})
// Create checkpoint in the background (fire-and-forget) so it doesn't block UI
createCheckpoint().catch((error) => {
logger.warn('Failed to create checkpoint after accept:', error)
})
logger.info('Accept triggered; UI will update optimistically')
}, [updatePreviewToolCallState, acceptChanges])
}, [createCheckpoint, updatePreviewToolCallState, acceptChanges])
const handleReject = useCallback(() => {
logger.info('Rejecting proposed changes (optimistic)')

View File

@@ -4,13 +4,13 @@ import type { NodeProps } from 'reactflow'
import remarkGfm from 'remark-gfm'
import { cn } from '@/lib/core/utils/cn'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { ActionBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/action-bar/action-bar'
import { useBlockVisual } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import {
BLOCK_DIMENSIONS,
useBlockDimensions,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-block-dimensions'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { ActionBar } from '../workflow-block/components'
import type { WorkflowBlockProps } from '../workflow-block/types'
interface NoteBlockNodeData extends WorkflowBlockProps {}

View File

@@ -1,5 +1,4 @@
import { memo, useEffect, useRef, useState } from 'react'
import { cn } from '@/lib/core/utils/cn'
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
/**
@@ -7,23 +6,14 @@ import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId
*/
const CHARACTER_DELAY = 3
/**
* Props for the StreamingIndicator component
*/
interface StreamingIndicatorProps {
/** Optional class name for layout adjustments */
className?: string
}
/**
* StreamingIndicator shows animated dots during message streaming
* Used as a standalone indicator when no content has arrived yet
*
* @param props - Component props
* @returns Animated loading indicator
*/
export const StreamingIndicator = memo(({ className }: StreamingIndicatorProps) => (
<div className={cn('flex h-[1.25rem] items-center text-muted-foreground', className)}>
export const StreamingIndicator = memo(() => (
<div className='flex h-[1.25rem] items-center text-muted-foreground'>
<div className='flex space-x-0.5'>
<div className='h-1 w-1 animate-bounce rounded-full bg-muted-foreground [animation-delay:0ms] [animation-duration:1.2s]' />
<div className='h-1 w-1 animate-bounce rounded-full bg-muted-foreground [animation-delay:150ms] [animation-duration:1.2s]' />

View File

@@ -1,20 +1,10 @@
'use client'
import { memo, useEffect, useMemo, useRef, useState } from 'react'
import { memo, useEffect, useRef, useState } from 'react'
import clsx from 'clsx'
import { ChevronUp } from 'lucide-react'
import CopilotMarkdownRenderer from './markdown-renderer'
/**
* Removes thinking tags (raw or escaped) from streamed content.
*/
function stripThinkingTags(text: string): string {
return text
.replace(/<\/?thinking[^>]*>/gi, '')
.replace(/&lt;\/?thinking[^&]*&gt;/gi, '')
.trim()
}
/**
* Max height for thinking content before internal scrolling kicks in
*/
@@ -197,9 +187,6 @@ export function ThinkingBlock({
label = 'Thought',
hasSpecialTags = false,
}: ThinkingBlockProps) {
// Strip thinking tags from content on render to handle persisted messages
const cleanContent = useMemo(() => stripThinkingTags(content || ''), [content])
const [isExpanded, setIsExpanded] = useState(false)
const [duration, setDuration] = useState(0)
const [userHasScrolledAway, setUserHasScrolledAway] = useState(false)
@@ -222,10 +209,10 @@ export function ThinkingBlock({
return
}
if (!userCollapsedRef.current && cleanContent && cleanContent.length > 0) {
if (!userCollapsedRef.current && content && content.trim().length > 0) {
setIsExpanded(true)
}
}, [isStreaming, cleanContent, hasFollowingContent, hasSpecialTags])
}, [isStreaming, content, hasFollowingContent, hasSpecialTags])
// Reset start time when streaming begins
useEffect(() => {
@@ -311,7 +298,7 @@ export function ThinkingBlock({
return `${seconds}s`
}
const hasContent = cleanContent.length > 0
const hasContent = content && content.trim().length > 0
// Thinking is "done" when streaming ends OR when there's following content (like a tool call) OR when special tags appear
const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags
const durationText = `${label} for ${formatDuration(duration)}`
@@ -387,10 +374,7 @@ export function ThinkingBlock({
isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
)}
>
<SmoothThinkingText
content={cleanContent}
isStreaming={isStreaming && !hasFollowingContent}
/>
<SmoothThinkingText content={content} isStreaming={isStreaming && !hasFollowingContent} />
</div>
</div>
)
@@ -428,7 +412,7 @@ export function ThinkingBlock({
>
{/* Completed thinking text - dimmed with markdown */}
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
<CopilotMarkdownRenderer content={cleanContent} />
<CopilotMarkdownRenderer content={content} />
</div>
</div>
</div>

View File

@@ -1,6 +1,6 @@
'use client'
import { type FC, memo, useCallback, useMemo, useRef, useState } from 'react'
import { type FC, memo, useCallback, useMemo, useState } from 'react'
import { RotateCcw } from 'lucide-react'
import { Button } from '@/components/emcn'
import {
@@ -93,8 +93,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
// UI state
const [isHoveringMessage, setIsHoveringMessage] = useState(false)
const cancelEditRef = useRef<(() => void) | null>(null)
// Checkpoint management hook
const {
showRestoreConfirmation,
@@ -114,8 +112,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
messages,
messageCheckpoints,
onRevertModeChange,
onEditModeChange,
() => cancelEditRef.current?.()
onEditModeChange
)
// Message editing hook
@@ -145,8 +142,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
pendingEditRef,
})
cancelEditRef.current = handleCancelEdit
// Get clean text content with double newline parsing
const cleanTextContent = useMemo(() => {
if (!message.content) return ''
@@ -493,9 +488,8 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
{/* Content blocks in chronological order */}
{memoizedContentBlocks}
{isStreaming && (
<StreamingIndicator className={!hasVisibleContent ? 'mt-1' : undefined} />
)}
{/* Streaming indicator always at bottom during streaming */}
{isStreaming && <StreamingIndicator />}
{message.errorType === 'usage_limit' && (
<div className='flex gap-1.5'>

View File

@@ -22,8 +22,7 @@ export function useCheckpointManagement(
messages: CopilotMessage[],
messageCheckpoints: any[],
onRevertModeChange?: (isReverting: boolean) => void,
onEditModeChange?: (isEditing: boolean) => void,
onCancelEdit?: () => void
onEditModeChange?: (isEditing: boolean) => void
) {
const [showRestoreConfirmation, setShowRestoreConfirmation] = useState(false)
const [showCheckpointDiscardModal, setShowCheckpointDiscardModal] = useState(false)
@@ -58,7 +57,7 @@ export function useCheckpointManagement(
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
const updatedCheckpoints = {
...currentCheckpoints,
[message.id]: [],
[message.id]: messageCheckpoints.slice(1),
}
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
@@ -94,6 +93,7 @@ export function useCheckpointManagement(
setShowRestoreConfirmation(false)
onRevertModeChange?.(false)
onEditModeChange?.(true)
logger.info('Checkpoint reverted and removed from message', {
messageId: message.id,
@@ -114,6 +114,7 @@ export function useCheckpointManagement(
messages,
currentChat,
onRevertModeChange,
onEditModeChange,
])
/**
@@ -139,7 +140,7 @@ export function useCheckpointManagement(
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
const updatedCheckpoints = {
...currentCheckpoints,
[message.id]: [],
[message.id]: messageCheckpoints.slice(1),
}
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
@@ -153,8 +154,6 @@ export function useCheckpointManagement(
}
setShowCheckpointDiscardModal(false)
onEditModeChange?.(false)
onCancelEdit?.()
const { sendMessage } = useCopilotStore.getState()
if (pendingEditRef.current) {
@@ -174,7 +173,6 @@ export function useCheckpointManagement(
fileAttachments: fileAttachments || message.fileAttachments,
contexts: contexts || (message as any).contexts,
messageId: message.id,
queueIfBusy: false,
})
}
pendingEditRef.current = null
@@ -182,17 +180,15 @@ export function useCheckpointManagement(
} finally {
setIsProcessingDiscard(false)
}
}, [messageCheckpoints, revertToCheckpoint, message, messages, onEditModeChange, onCancelEdit])
}, [messageCheckpoints, revertToCheckpoint, message, messages])
/**
* Cancels checkpoint discard and clears pending edit
*/
const handleCancelCheckpointDiscard = useCallback(() => {
setShowCheckpointDiscardModal(false)
onEditModeChange?.(false)
onCancelEdit?.()
pendingEditRef.current = null
}, [onEditModeChange, onCancelEdit])
}, [])
/**
* Continues with edit WITHOUT reverting checkpoint
@@ -218,12 +214,11 @@ export function useCheckpointManagement(
fileAttachments: fileAttachments || message.fileAttachments,
contexts: contexts || (message as any).contexts,
messageId: message.id,
queueIfBusy: false,
})
}
pendingEditRef.current = null
}
}, [message, messages, onEditModeChange, onCancelEdit])
}, [message, messages])
/**
* Handles keyboard events for restore confirmation (Escape/Enter)

View File

@@ -166,7 +166,6 @@ export function useMessageEditing(props: UseMessageEditingProps) {
fileAttachments: fileAttachments || message.fileAttachments,
contexts: contexts || (message as any).contexts,
messageId: message.id,
queueIfBusy: false,
})
}
},

View File

@@ -1446,10 +1446,8 @@ function WorkflowEditSummary({ toolCall }: { toolCall: CopilotToolCall }) {
blockType = blockType || op.block_type || ''
}
if (!blockName) blockName = blockType || ''
if (!blockName && !blockType) {
continue
}
// Fallback name to type or ID
if (!blockName) blockName = blockType || blockId
const change: BlockChange = { blockId, blockName, blockType }

View File

@@ -22,9 +22,6 @@ interface UseContextManagementProps {
export function useContextManagement({ message, initialContexts }: UseContextManagementProps) {
const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>(initialContexts ?? [])
const initializedRef = useRef(false)
const escapeRegex = useCallback((value: string) => {
return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
}, [])
// Initialize with initial contexts when they're first provided (for edit mode)
useEffect(() => {
@@ -81,10 +78,10 @@ export function useContextManagement({ message, initialContexts }: UseContextMan
// Check for slash command tokens or mention tokens based on kind
const isSlashCommand = c.kind === 'slash_command'
const prefix = isSlashCommand ? '/' : '@'
const tokenPattern = new RegExp(
`(^|\\s)${escapeRegex(prefix)}${escapeRegex(c.label)}(\\s|$)`
)
return tokenPattern.test(message)
const tokenWithSpaces = ` ${prefix}${c.label} `
const tokenAtStart = `${prefix}${c.label} `
// Token can appear with leading space OR at the start of the message
return message.includes(tokenWithSpaces) || message.startsWith(tokenAtStart)
})
return filtered.length === prev.length ? prev : filtered
})

View File

@@ -76,15 +76,6 @@ export function useMentionTokens({
ranges.push({ start: idx, end: idx + token.length, label })
fromIndex = idx + token.length
}
// Token at end of message without trailing space: "@label" or " /label"
const tokenAtEnd = `${prefix}${label}`
if (message.endsWith(tokenAtEnd)) {
const idx = message.lastIndexOf(tokenAtEnd)
const hasLeadingSpace = idx > 0 && message[idx - 1] === ' '
const start = hasLeadingSpace ? idx - 1 : idx
ranges.push({ start, end: message.length, label })
}
}
ranges.sort((a, b) => a.start - b.start)

View File

@@ -613,7 +613,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const insertTriggerAndOpenMenu = useCallback(
(trigger: '@' | '/') => {
if (disabled) return
if (disabled || isLoading) return
const textarea = mentionMenu.textareaRef.current
if (!textarea) return
@@ -642,7 +642,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
mentionMenu.setSubmenuActiveIndex(0)
},
[disabled, mentionMenu, message, setMessage]
[disabled, isLoading, mentionMenu, message, setMessage]
)
const handleOpenMentionMenuWithAt = useCallback(
@@ -737,7 +737,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
title='Insert @'
className={cn(
'cursor-pointer rounded-[6px] p-[4.5px]',
disabled && 'cursor-not-allowed'
(disabled || isLoading) && 'cursor-not-allowed'
)}
>
<AtSign className='h-3 w-3' strokeWidth={1.75} />
@@ -749,7 +749,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
title='Insert /'
className={cn(
'cursor-pointer rounded-[6px] p-[4.5px]',
disabled && 'cursor-not-allowed'
(disabled || isLoading) && 'cursor-not-allowed'
)}
>
<span className='flex h-3 w-3 items-center justify-center font-medium text-[11px] leading-none'>
@@ -816,7 +816,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
placeholder={fileAttachments.isDragging ? 'Drop files here...' : effectivePlaceholder}
disabled={disabled}
rows={2}
className='relative z-[2] m-0 box-border h-auto max-h-[120px] min-h-[48px] w-full resize-none overflow-y-auto overflow-x-hidden break-words border-0 bg-transparent px-[2px] py-1 font-medium font-sans text-sm text-transparent leading-[1.25rem] caret-foreground outline-none [-ms-overflow-style:none] [scrollbar-width:none] [text-rendering:auto] placeholder:text-[var(--text-muted)] focus-visible:ring-0 focus-visible:ring-offset-0 dark:placeholder:text-[var(--text-muted)] [&::-webkit-scrollbar]:hidden'
className='relative z-[2] m-0 box-border h-auto min-h-[48px] w-full resize-none overflow-y-auto overflow-x-hidden break-words border-0 bg-transparent px-[2px] py-1 font-medium font-sans text-sm text-transparent leading-[1.25rem] caret-foreground outline-none [-ms-overflow-style:none] [scrollbar-width:none] [text-rendering:auto] placeholder:text-[var(--text-muted)] focus-visible:ring-0 focus-visible:ring-offset-0 dark:placeholder:text-[var(--text-muted)] [&::-webkit-scrollbar]:hidden'
/>
{/* Mention Menu Portal */}

View File

@@ -83,7 +83,8 @@ interface A2aDeployProps {
workflowNeedsRedeployment?: boolean
onSubmittingChange?: (submitting: boolean) => void
onCanSaveChange?: (canSave: boolean) => void
/** Callback for when republish status changes - depends on local form state */
onAgentExistsChange?: (exists: boolean) => void
onPublishedChange?: (published: boolean) => void
onNeedsRepublishChange?: (needsRepublish: boolean) => void
onDeployWorkflow?: () => Promise<void>
}
@@ -98,6 +99,8 @@ export function A2aDeploy({
workflowNeedsRedeployment,
onSubmittingChange,
onCanSaveChange,
onAgentExistsChange,
onPublishedChange,
onNeedsRepublishChange,
onDeployWorkflow,
}: A2aDeployProps) {
@@ -233,6 +236,14 @@ export function A2aDeploy({
}
}, [existingAgent, workflowName, workflowDescription])
useEffect(() => {
onAgentExistsChange?.(!!existingAgent)
}, [existingAgent, onAgentExistsChange])
useEffect(() => {
onPublishedChange?.(existingAgent?.isPublished ?? false)
}, [existingAgent?.isPublished, onPublishedChange])
const hasFormChanges = useMemo(() => {
if (!existingAgent) return false
const savedSchemes = existingAgent.authentication?.schemes || []

View File

@@ -3,12 +3,16 @@
import { useState } from 'react'
import { Check, Clipboard } from 'lucide-react'
import {
Badge,
Button,
ButtonGroup,
ButtonGroupItem,
Code,
Combobox,
Label,
Popover,
PopoverContent,
PopoverItem,
PopoverTrigger,
Tooltip,
} from '@/components/emcn'
import { Skeleton } from '@/components/ui'
@@ -598,19 +602,48 @@ console.log(limits);`
<span>{copied.async ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
<Combobox
size='sm'
className='!w-fit !py-[2px] min-w-[100px] rounded-[6px] px-[9px]'
options={[
{ label: 'Execute Job', value: 'execute' },
{ label: 'Check Status', value: 'status' },
{ label: 'Rate Limits', value: 'rate-limits' },
]}
value={asyncExampleType}
onChange={(value) => setAsyncExampleType(value as AsyncExampleType)}
align='end'
dropdownWidth={160}
/>
<Popover>
<PopoverTrigger asChild>
<div className='min-w-0 max-w-full'>
<Badge
variant='outline'
className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
>
<span className='whitespace-nowrap text-[12px]'>
{getAsyncExampleTitle()}
</span>
</Badge>
</div>
</PopoverTrigger>
<PopoverContent
side='bottom'
align='end'
sideOffset={4}
maxHeight={300}
maxWidth={300}
minWidth={160}
border
>
<PopoverItem
active={asyncExampleType === 'execute'}
onClick={() => setAsyncExampleType('execute')}
>
Execute Job
</PopoverItem>
<PopoverItem
active={asyncExampleType === 'status'}
onClick={() => setAsyncExampleType('status')}
>
Check Status
</PopoverItem>
<PopoverItem
active={asyncExampleType === 'rate-limits'}
onClick={() => setAsyncExampleType('rate-limits')}
>
Rate Limits
</PopoverItem>
</PopoverContent>
</Popover>
</div>
</div>
<Code.Viewer

View File

@@ -29,11 +29,9 @@ import { OutputSelect } from '@/app/workspace/[workspaceId]/w/[workflowId]/compo
import {
type AuthType,
type ChatFormData,
useCreateChat,
useDeleteChat,
useUpdateChat,
} from '@/hooks/queries/chats'
import { useIdentifierValidation } from './hooks'
useChatDeployment,
useIdentifierValidation,
} from './hooks'
const logger = createLogger('ChatDeploy')
@@ -47,6 +45,7 @@ interface ChatDeployProps {
existingChat: ExistingChat | null
isLoadingChat: boolean
onRefetchChat: () => Promise<void>
onChatExistsChange?: (exists: boolean) => void
chatSubmitting: boolean
setChatSubmitting: (submitting: boolean) => void
onValidationChange?: (isValid: boolean) => void
@@ -98,6 +97,7 @@ export function ChatDeploy({
existingChat,
isLoadingChat,
onRefetchChat,
onChatExistsChange,
chatSubmitting,
setChatSubmitting,
onValidationChange,
@@ -121,11 +121,8 @@ export function ChatDeploy({
const [formData, setFormData] = useState<ChatFormData>(initialFormData)
const [errors, setErrors] = useState<FormErrors>({})
const { deployChat } = useChatDeployment()
const formRef = useRef<HTMLFormElement>(null)
const createChatMutation = useCreateChat()
const updateChatMutation = useUpdateChat()
const deleteChatMutation = useDeleteChat()
const [isIdentifierValid, setIsIdentifierValid] = useState(false)
const [hasInitializedForm, setHasInitializedForm] = useState(false)
@@ -234,26 +231,15 @@ export function ChatDeploy({
return
}
let chatUrl: string
if (existingChat?.id) {
const result = await updateChatMutation.mutateAsync({
chatId: existingChat.id,
workflowId,
formData,
imageUrl,
})
chatUrl = result.chatUrl
} else {
const result = await createChatMutation.mutateAsync({
workflowId,
formData,
apiKey: deploymentInfo?.apiKey,
imageUrl,
})
chatUrl = result.chatUrl
}
const chatUrl = await deployChat(
workflowId,
formData,
deploymentInfo,
existingChat?.id,
imageUrl
)
onChatExistsChange?.(true)
onDeployed?.()
onVersionActivated?.()
@@ -280,13 +266,18 @@ export function ChatDeploy({
try {
setIsDeleting(true)
await deleteChatMutation.mutateAsync({
chatId: existingChat.id,
workflowId,
const response = await fetch(`/api/chat/manage/${existingChat.id}`, {
method: 'DELETE',
})
if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to delete chat')
}
setImageUrl(null)
setHasInitializedForm(false)
onChatExistsChange?.(false)
await onRefetchChat()
onDeploymentComplete?.()
@@ -557,7 +548,7 @@ function IdentifierInput({
)}
</div>
</div>
{error && <p className='mt-[6.5px] text-[12px] text-[var(--text-error)]'>{error}</p>}
{error && <p className='mt-[6.5px] text-[11px] text-[var(--text-error)]'>{error}</p>}
<p className='mt-[6.5px] truncate text-[11px] text-[var(--text-secondary)]'>
{isEditingExisting && value ? (
<>
@@ -777,7 +768,7 @@ function AuthSelector({
disabled={disabled}
/>
{emailError && (
<p className='mt-[6.5px] text-[12px] text-[var(--text-error)]'>{emailError}</p>
<p className='mt-[6.5px] text-[11px] text-[var(--text-error)]'>{emailError}</p>
)}
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>
{authType === 'email'
@@ -787,7 +778,7 @@ function AuthSelector({
</div>
)}
{error && <p className='mt-[6.5px] text-[12px] text-[var(--text-error)]'>{error}</p>}
{error && <p className='mt-[6.5px] text-[11px] text-[var(--text-error)]'>{error}</p>}
</div>
)
}

View File

@@ -1 +1,2 @@
export { type AuthType, type ChatFormData, useChatDeployment } from './use-chat-deployment'
export { useIdentifierValidation } from './use-identifier-validation'

View File

@@ -0,0 +1,131 @@
import { useCallback } from 'react'
import { createLogger } from '@sim/logger'
import { z } from 'zod'
import type { OutputConfig } from '@/stores/chat/types'
const logger = createLogger('ChatDeployment')
/** Authentication modes supported by a deployed chat interface. */
export type AuthType = 'public' | 'password' | 'email' | 'sso'

/** Form state collected by the chat deployment UI and sent to the chat API. */
export interface ChatFormData {
  // URL slug for the chat; validated elsewhere as lowercase letters,
  // numbers, and hyphens only.
  identifier: string
  title: string
  description: string
  authType: AuthType
  // Only sent when authType === 'password'.
  password: string
  // Email allow-list; used when authType is 'email' or 'sso'.
  emails: string[]
  welcomeMessage: string
  // Encoded "blockId_path" selections of which block outputs the chat exposes.
  selectedOutputBlocks: string[]
}
// Zod schema validating the chat deployment payload before it is sent to the
// API (POST /api/chat or PATCH /api/chat/manage/:id). Parsing throws on
// malformed input so requests fail fast, client-side.
const chatSchema = z.object({
  workflowId: z.string().min(1, 'Workflow ID is required'),
  identifier: z
    .string()
    .min(1, 'Identifier is required')
    .regex(/^[a-z0-9-]+$/, 'Identifier can only contain lowercase letters, numbers, and hyphens'),
  title: z.string().min(1, 'Title is required'),
  description: z.string().optional(),
  customizations: z.object({
    primaryColor: z.string(),
    welcomeMessage: z.string(),
    imageUrl: z.string().optional(),
  }),
  authType: z.enum(['public', 'password', 'email', 'sso']).default('public'),
  // Password is only meaningful for authType 'password'; callers omit it otherwise.
  password: z.string().optional(),
  allowedEmails: z.array(z.string()).optional().default([]),
  // Structured output selections produced by parseOutputConfigs.
  outputConfigs: z
    .array(
      z.object({
        blockId: z.string(),
        path: z.string(),
      })
    )
    .optional()
    .default([]),
})
/**
 * Converts encoded "blockId_path" output selections into structured configs.
 *
 * Each entry is split at its FIRST underscore: the prefix is the block id and
 * the remainder (which may itself contain underscores) is the output path.
 * Entries with no underscore, or with an empty id or path, are dropped.
 */
function parseOutputConfigs(selectedOutputBlocks: string[]): OutputConfig[] {
  const configs: OutputConfig[] = []
  for (const outputId of selectedOutputBlocks) {
    const separatorIndex = outputId.indexOf('_')
    if (separatorIndex === -1) continue
    const blockId = outputId.slice(0, separatorIndex)
    const path = outputId.slice(separatorIndex + 1)
    if (blockId && path) {
      configs.push({ blockId, path })
    }
  }
  return configs
}
/**
* Hook for deploying or updating a chat interface
*/
export function useChatDeployment() {
const deployChat = useCallback(
async (
workflowId: string,
formData: ChatFormData,
deploymentInfo: { apiKey: string } | null,
existingChatId?: string,
imageUrl?: string | null
): Promise<string> => {
const outputConfigs = parseOutputConfigs(formData.selectedOutputBlocks)
const payload = {
workflowId,
identifier: formData.identifier.trim(),
title: formData.title.trim(),
description: formData.description.trim(),
customizations: {
primaryColor: 'var(--brand-primary-hover-hex)',
welcomeMessage: formData.welcomeMessage.trim(),
...(imageUrl && { imageUrl }),
},
authType: formData.authType,
password: formData.authType === 'password' ? formData.password : undefined,
allowedEmails:
formData.authType === 'email' || formData.authType === 'sso' ? formData.emails : [],
outputConfigs,
apiKey: deploymentInfo?.apiKey,
deployApiEnabled: !existingChatId,
}
chatSchema.parse(payload)
const endpoint = existingChatId ? `/api/chat/manage/${existingChatId}` : '/api/chat'
const method = existingChatId ? 'PATCH' : 'POST'
const response = await fetch(endpoint, {
method,
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload),
})
const result = await response.json()
if (!response.ok) {
if (result.error === 'Identifier already in use') {
throw new Error('This identifier is already in use')
}
throw new Error(result.error || `Failed to ${existingChatId ? 'update' : 'deploy'} chat`)
}
if (!result.chatUrl) {
throw new Error('Response missing chatUrl')
}
logger.info(`Chat ${existingChatId ? 'updated' : 'deployed'} successfully:`, result.chatUrl)
return result.chatUrl
},
[]
)
return { deployChat }
}

View File

@@ -216,7 +216,7 @@ export function FormBuilder({
)}
</div>
{titleError && (
<p className='mt-[4px] text-[12px] text-[var(--text-error)]'>{titleError}</p>
<p className='mt-[4px] text-[11px] text-[var(--text-error)]'>{titleError}</p>
)}
<div className='mt-[4px] flex items-center gap-[6px]'>
<input

View File

@@ -17,18 +17,11 @@ import { Skeleton } from '@/components/ui'
import { isDev } from '@/lib/core/config/feature-flags'
import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl, getEmailDomain } from '@/lib/core/utils/urls'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import {
type FieldConfig,
useCreateForm,
useDeleteForm,
useFormByWorkflow,
useUpdateForm,
} from '@/hooks/queries/forms'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { EmbedCodeGenerator } from './components/embed-code-generator'
import { FormBuilder } from './components/form-builder'
import { useFormDeployment } from './hooks/use-form-deployment'
import { useIdentifierValidation } from './hooks/use-identifier-validation'
const logger = createLogger('FormDeploy')
@@ -41,11 +34,38 @@ interface FormErrors {
general?: string
}
interface FieldConfig {
name: string
type: string
label: string
description?: string
required?: boolean
}
export interface ExistingForm {
id: string
identifier: string
title: string
description?: string
customizations: {
primaryColor?: string
thankYouMessage?: string
logoUrl?: string
fieldConfigs?: FieldConfig[]
}
authType: 'public' | 'password' | 'email'
hasPassword?: boolean
allowedEmails?: string[]
showBranding: boolean
isActive: boolean
}
interface FormDeployProps {
workflowId: string
onDeploymentComplete?: () => void
onValidationChange?: (isValid: boolean) => void
onSubmittingChange?: (isSubmitting: boolean) => void
onExistingFormChange?: (exists: boolean) => void
formSubmitting?: boolean
setFormSubmitting?: (submitting: boolean) => void
onDeployed?: () => Promise<void>
@@ -61,6 +81,7 @@ export function FormDeploy({
onDeploymentComplete,
onValidationChange,
onSubmittingChange,
onExistingFormChange,
formSubmitting,
setFormSubmitting,
onDeployed,
@@ -74,6 +95,8 @@ export function FormDeploy({
const [authType, setAuthType] = useState<'public' | 'password' | 'email'>('public')
const [password, setPassword] = useState('')
const [emailItems, setEmailItems] = useState<TagItem[]>([])
const [existingForm, setExistingForm] = useState<ExistingForm | null>(null)
const [isLoading, setIsLoading] = useState(true)
const [formUrl, setFormUrl] = useState('')
const [inputFields, setInputFields] = useState<{ name: string; type: string }[]>([])
const [showPasswordField, setShowPasswordField] = useState(false)
@@ -81,12 +104,7 @@ export function FormDeploy({
const [errors, setErrors] = useState<FormErrors>({})
const [isIdentifierValid, setIsIdentifierValid] = useState(false)
const { data: existingForm, isLoading } = useFormByWorkflow(workflowId)
const createFormMutation = useCreateForm()
const updateFormMutation = useUpdateForm()
const deleteFormMutation = useDeleteForm()
const isSubmitting = createFormMutation.isPending || updateFormMutation.isPending
const { createForm, updateForm, deleteForm, isSubmitting } = useFormDeployment()
const {
isChecking: isCheckingIdentifier,
@@ -106,54 +124,85 @@ export function FormDeploy({
setErrors((prev) => ({ ...prev, [field]: undefined }))
}
// Populate form fields when existing form data is loaded
// Fetch existing form deployment
useEffect(() => {
if (existingForm) {
setIdentifier(existingForm.identifier)
setTitle(existingForm.title)
setDescription(existingForm.description || '')
setThankYouMessage(
existingForm.customizations?.thankYouMessage ||
'Your response has been submitted successfully.'
)
setAuthType(existingForm.authType)
setEmailItems(
(existingForm.allowedEmails || []).map((email) => ({ value: email, isValid: true }))
)
if (existingForm.customizations?.fieldConfigs) {
setFieldConfigs(existingForm.customizations.fieldConfigs)
}
async function fetchExistingForm() {
if (!workflowId) return
const baseUrl = getBaseUrl()
try {
const url = new URL(baseUrl)
let host = url.host
if (host.startsWith('www.')) host = host.substring(4)
setFormUrl(`${url.protocol}//${host}/form/${existingForm.identifier}`)
} catch {
setFormUrl(
isDev
? `http://localhost:3000/form/${existingForm.identifier}`
: `https://sim.ai/form/${existingForm.identifier}`
)
}
} else if (!isLoading) {
const workflowName =
useWorkflowStore.getState().blocks[Object.keys(useWorkflowStore.getState().blocks)[0]]
?.name || 'Form'
setTitle(`${workflowName} Form`)
}
}, [existingForm, isLoading])
setIsLoading(true)
const response = await fetch(`/api/workflows/${workflowId}/form/status`)
if (response.ok) {
const data = await response.json()
if (data.isDeployed && data.form) {
const detailResponse = await fetch(`/api/form/manage/${data.form.id}`)
if (detailResponse.ok) {
const formDetail = await detailResponse.json()
const form = formDetail.form as ExistingForm
setExistingForm(form)
onExistingFormChange?.(true)
setIdentifier(form.identifier)
setTitle(form.title)
setDescription(form.description || '')
setThankYouMessage(
form.customizations?.thankYouMessage ||
'Your response has been submitted successfully.'
)
setAuthType(form.authType)
setEmailItems(
(form.allowedEmails || []).map((email) => ({ value: email, isValid: true }))
)
if (form.customizations?.fieldConfigs) {
setFieldConfigs(form.customizations.fieldConfigs)
}
const baseUrl = getBaseUrl()
try {
const url = new URL(baseUrl)
let host = url.host
if (host.startsWith('www.')) host = host.substring(4)
setFormUrl(`${url.protocol}//${host}/form/${form.identifier}`)
} catch {
setFormUrl(
isDev
? `http://localhost:3000/form/${form.identifier}`
: `https://sim.ai/form/${form.identifier}`
)
}
}
} else {
setExistingForm(null)
onExistingFormChange?.(false)
const workflowName =
useWorkflowStore.getState().blocks[Object.keys(useWorkflowStore.getState().blocks)[0]]
?.name || 'Form'
setTitle(`${workflowName} Form`)
}
}
} catch (err) {
logger.error('Error fetching form deployment:', err)
} finally {
setIsLoading(false)
}
}
fetchExistingForm()
}, [workflowId, onExistingFormChange])
// Get input fields from start block and initialize field configs
useEffect(() => {
const blocks = Object.values(useWorkflowStore.getState().blocks)
const startBlock = blocks.find((b) => isValidStartBlockType(b.type))
const startBlock = blocks.find((b) => b.type === 'starter' || b.type === 'start_trigger')
if (startBlock) {
const inputFormat = useSubBlockStore.getState().getValue(startBlock.id, 'inputFormat')
if (inputFormat && Array.isArray(inputFormat)) {
setInputFields(inputFormat)
// Initialize field configs if not already set
if (fieldConfigs.length === 0) {
setFieldConfigs(
inputFormat.map((f: { name: string; type?: string }) => ({
@@ -173,6 +222,7 @@ export function FormDeploy({
const allowedEmails = emailItems.filter((item) => item.isValid).map((item) => item.value)
// Validate form
useEffect(() => {
const isValid =
inputFields.length > 0 &&
@@ -203,6 +253,7 @@ export function FormDeploy({
e.preventDefault()
setErrors({})
// Validate before submit
if (!isIdentifierValid && identifier !== existingForm?.identifier) {
setError('identifier', 'Please wait for identifier validation to complete')
return
@@ -230,21 +281,17 @@ export function FormDeploy({
try {
if (existingForm) {
await updateFormMutation.mutateAsync({
formId: existingForm.id,
workflowId,
data: {
identifier,
title,
description,
customizations,
authType,
password: password || undefined,
allowedEmails,
},
await updateForm(existingForm.id, {
identifier,
title,
description,
customizations,
authType,
password: password || undefined,
allowedEmails,
})
} else {
const result = await createFormMutation.mutateAsync({
const result = await createForm({
workflowId,
identifier,
title,
@@ -257,6 +304,7 @@ export function FormDeploy({
if (result?.formUrl) {
setFormUrl(result.formUrl)
// Open the form in a new window after successful deployment
window.open(result.formUrl, '_blank', 'noopener,noreferrer')
}
}
@@ -270,6 +318,7 @@ export function FormDeploy({
const message = err instanceof Error ? err.message : 'An error occurred'
logger.error('Error deploying form:', err)
// Parse error message and show inline
if (message.toLowerCase().includes('identifier')) {
setError('identifier', message)
} else if (message.toLowerCase().includes('password')) {
@@ -293,8 +342,8 @@ export function FormDeploy({
password,
allowedEmails,
isIdentifierValid,
createFormMutation,
updateFormMutation,
createForm,
updateForm,
onDeployed,
onDeploymentComplete,
]
@@ -304,10 +353,9 @@ export function FormDeploy({
if (!existingForm) return
try {
await deleteFormMutation.mutateAsync({
formId: existingForm.id,
workflowId,
})
await deleteForm(existingForm.id)
setExistingForm(null)
onExistingFormChange?.(false)
setIdentifier('')
setTitle('')
setDescription('')
@@ -315,7 +363,7 @@ export function FormDeploy({
} catch (err) {
logger.error('Error deleting form:', err)
}
}, [existingForm, deleteFormMutation, workflowId])
}, [existingForm, deleteForm, onExistingFormChange])
if (isLoading) {
return (
@@ -399,7 +447,7 @@ export function FormDeploy({
</div>
</div>
{(identifierError || errors.identifier) && (
<p className='mt-[6.5px] text-[12px] text-[var(--text-error)]'>
<p className='mt-[6.5px] text-[11px] text-[var(--text-error)]'>
{identifierError || errors.identifier}
</p>
)}
@@ -483,7 +531,7 @@ export function FormDeploy({
</button>
</div>
{errors.password && (
<p className='mt-[6.5px] text-[12px] text-[var(--text-error)]'>{errors.password}</p>
<p className='mt-[6.5px] text-[11px] text-[var(--text-error)]'>{errors.password}</p>
)}
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>
{existingForm?.hasPassword
@@ -520,7 +568,7 @@ export function FormDeploy({
placeholderWithTags='Add another'
/>
{errors.emails && (
<p className='mt-[6.5px] text-[12px] text-[var(--text-error)]'>{errors.emails}</p>
<p className='mt-[6.5px] text-[11px] text-[var(--text-error)]'>{errors.emails}</p>
)}
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>
Add specific emails or entire domains (@example.com)
@@ -551,7 +599,7 @@ export function FormDeploy({
)}
{errors.general && (
<p className='mt-[6.5px] text-[12px] text-[var(--text-error)]'>{errors.general}</p>
<p className='mt-[6.5px] text-[11px] text-[var(--text-error)]'>{errors.general}</p>
)}
<button type='button' data-delete-trigger onClick={handleDelete} className='hidden' />

View File

@@ -0,0 +1,151 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
const logger = createLogger('useFormDeployment')
/** Customization options shared by form create/update payloads (colors, copy, logo). */
interface FormCustomizations {
  primaryColor?: string
  welcomeMessage?: string
  thankYouTitle?: string
  thankYouMessage?: string
  logoUrl?: string
}

/** Payload for creating a new form deployment (POST /api/form). */
interface CreateFormParams {
  workflowId: string
  identifier: string
  title: string
  description?: string
  customizations?: FormCustomizations
  authType?: 'public' | 'password' | 'email'
  password?: string
  allowedEmails?: string[]
  showBranding?: boolean
}

/** Partial payload for updating an existing form (PATCH /api/form/manage/:id). */
interface UpdateFormParams {
  identifier?: string
  title?: string
  description?: string
  customizations?: FormCustomizations
  authType?: 'public' | 'password' | 'email'
  password?: string
  allowedEmails?: string[]
  showBranding?: boolean
  isActive?: boolean
}

/** Result returned by the API after a successful form creation. */
interface CreateFormResult {
  id: string
  formUrl: string
}
/**
 * Extracts a human-readable message from an unknown thrown value.
 * Falls back to the given default when the value is not an Error with a message.
 */
function errorMessageFrom(err: unknown, fallback: string): string {
  return err instanceof Error && err.message ? err.message : fallback
}

/**
 * Client-side hook managing the form deployment lifecycle (create / update /
 * delete) against the `/api/form` endpoints.
 *
 * Returns stable callbacks plus:
 * - `isSubmitting`: true while any request is in flight
 * - `error`: message from the most recent failed request (cleared on the next attempt)
 *
 * All callbacks re-throw on failure so callers can render inline errors.
 */
export function useFormDeployment() {
  const [isSubmitting, setIsSubmitting] = useState(false)
  const [error, setError] = useState<string | null>(null)

  /** Creates a new form deployment; resolves with its id and public URL. */
  const createForm = useCallback(
    async (params: CreateFormParams): Promise<CreateFormResult | null> => {
      setIsSubmitting(true)
      setError(null)
      try {
        const response = await fetch('/api/form', {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify(params),
        })
        const data = await response.json()
        if (!response.ok) {
          throw new Error(data.error || 'Failed to create form')
        }
        logger.info('Form created successfully:', { id: data.id })
        return {
          id: data.id,
          formUrl: data.formUrl,
        }
      } catch (err: unknown) {
        setError(errorMessageFrom(err, 'Failed to create form'))
        logger.error('Error creating form:', err)
        throw err
      } finally {
        setIsSubmitting(false)
      }
    },
    []
  )

  /** Applies a partial update to an existing form deployment. */
  const updateForm = useCallback(async (formId: string, params: UpdateFormParams) => {
    setIsSubmitting(true)
    setError(null)
    try {
      const response = await fetch(`/api/form/manage/${formId}`, {
        method: 'PATCH',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify(params),
      })
      const data = await response.json()
      if (!response.ok) {
        throw new Error(data.error || 'Failed to update form')
      }
      logger.info('Form updated successfully:', { id: formId })
    } catch (err: unknown) {
      setError(errorMessageFrom(err, 'Failed to update form'))
      logger.error('Error updating form:', err)
      throw err
    } finally {
      setIsSubmitting(false)
    }
  }, [])

  /** Deletes a form deployment by id. */
  const deleteForm = useCallback(async (formId: string) => {
    setIsSubmitting(true)
    setError(null)
    try {
      const response = await fetch(`/api/form/manage/${formId}`, {
        method: 'DELETE',
      })
      if (!response.ok) {
        // Only the error path is expected to carry a JSON body; a successful
        // DELETE may legitimately return an empty (e.g. 204) response, which
        // would make an unconditional response.json() throw. Tolerate
        // non-JSON error bodies as well.
        const data = await response.json().catch(() => ({}))
        throw new Error(data.error || 'Failed to delete form')
      }
      logger.info('Form deleted successfully:', { id: formId })
    } catch (err: unknown) {
      setError(errorMessageFrom(err, 'Failed to delete form'))
      logger.error('Error deleting form:', err)
      throw err
    } finally {
      setIsSubmitting(false)
    }
  }, [])

  return {
    createForm,
    updateForm,
    deleteForm,
    isSubmitting,
    error,
  }
}

View File

@@ -334,6 +334,7 @@ export function GeneralDeploy({
}}
onPaneClick={() => setExpandedSelectedBlockId(null)}
selectedBlockId={expandedSelectedBlockId}
lightweight
/>
</div>
{expandedSelectedBlockId && workflowToShow.blocks?.[expandedSelectedBlockId] && (

View File

@@ -15,7 +15,7 @@ import {
import { Skeleton } from '@/components/ui'
import { generateToolInputSchema, sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { isValidStartBlockType } from '@/lib/workflows/triggers/trigger-utils'
import type { InputFormatField } from '@/lib/workflows/types'
import {
useAddWorkflowMcpTool,
@@ -43,6 +43,7 @@ interface McpDeployProps {
onAddedToServer?: () => void
onSubmittingChange?: (submitting: boolean) => void
onCanSaveChange?: (canSave: boolean) => void
onHasServersChange?: (hasServers: boolean) => void
}
/**
@@ -91,12 +92,17 @@ export function McpDeploy({
onAddedToServer,
onSubmittingChange,
onCanSaveChange,
onHasServersChange,
}: McpDeployProps) {
const params = useParams()
const workspaceId = params.workspaceId as string
const openSettingsModal = useSettingsModalStore((state) => state.openModal)
const { data: servers = [], isLoading: isLoadingServers } = useWorkflowMcpServers(workspaceId)
const {
data: servers = [],
isLoading: isLoadingServers,
refetch: refetchServers,
} = useWorkflowMcpServers(workspaceId)
const addToolMutation = useAddWorkflowMcpTool()
const deleteToolMutation = useDeleteWorkflowMcpTool()
const updateToolMutation = useUpdateWorkflowMcpTool()
@@ -255,6 +261,10 @@ export function McpDeploy({
onCanSaveChange?.(hasChanges && hasDeployedTools && !!toolName.trim())
}, [hasChanges, hasDeployedTools, toolName, onCanSaveChange])
useEffect(() => {
onHasServersChange?.(servers.length > 0)
}, [servers.length, onHasServersChange])
/**
* Save tool configuration to all deployed servers
*/
@@ -336,6 +346,7 @@ export function McpDeploy({
toolDescription: toolDescription.trim() || undefined,
parameterSchema,
})
refetchServers()
onAddedToServer?.()
logger.info(`Added workflow ${workflowId} as tool to server ${serverId}`)
} catch (error) {
@@ -364,6 +375,7 @@ export function McpDeploy({
delete next[serverId]
return next
})
refetchServers()
} catch (error) {
logger.error('Failed to remove tool:', error)
} finally {
@@ -386,6 +398,7 @@ export function McpDeploy({
parameterSchema,
addToolMutation,
deleteToolMutation,
refetchServers,
onAddedToServer,
]
)

View File

@@ -20,7 +20,6 @@ import { useSession } from '@/lib/auth/auth-client'
import { cn } from '@/lib/core/utils/cn'
import { captureAndUploadOGImage, OG_IMAGE_HEIGHT, OG_IMAGE_WIDTH } from '@/lib/og'
import { WorkflowPreview } from '@/app/workspace/[workspaceId]/w/components/preview'
import { useCreatorProfiles } from '@/hooks/queries/creator-profile'
import {
useCreateTemplate,
useDeleteTemplate,
@@ -48,11 +47,26 @@ const initialFormData: TemplateFormData = {
tags: [],
}
interface CreatorOption {
id: string
name: string
referenceType: 'user' | 'organization'
referenceId: string
}
interface TemplateStatus {
status: 'pending' | 'approved' | 'rejected' | null
views?: number
stars?: number
}
interface TemplateDeployProps {
workflowId: string
onDeploymentComplete?: () => void
onValidationChange?: (isValid: boolean) => void
onSubmittingChange?: (isSubmitting: boolean) => void
onExistingTemplateChange?: (exists: boolean) => void
onTemplateStatusChange?: (status: TemplateStatus | null) => void
}
export function TemplateDeploy({
@@ -60,9 +74,13 @@ export function TemplateDeploy({
onDeploymentComplete,
onValidationChange,
onSubmittingChange,
onExistingTemplateChange,
onTemplateStatusChange,
}: TemplateDeployProps) {
const { data: session } = useSession()
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [creatorOptions, setCreatorOptions] = useState<CreatorOption[]>([])
const [loadingCreators, setLoadingCreators] = useState(false)
const [isCapturing, setIsCapturing] = useState(false)
const previewContainerRef = useRef<HTMLDivElement>(null)
const ogCaptureRef = useRef<HTMLDivElement>(null)
@@ -70,7 +88,6 @@ export function TemplateDeploy({
const [formData, setFormData] = useState<TemplateFormData>(initialFormData)
const { data: existingTemplate, isLoading: isLoadingTemplate } = useTemplateByWorkflow(workflowId)
const { data: creatorProfiles = [], isLoading: loadingCreators } = useCreatorProfiles()
const createMutation = useCreateTemplate()
const updateMutation = useUpdateTemplate()
const deleteMutation = useDeleteTemplate()
@@ -95,15 +112,63 @@ export function TemplateDeploy({
}, [isSubmitting, onSubmittingChange])
useEffect(() => {
if (creatorProfiles.length === 1 && !formData.creatorId) {
updateField('creatorId', creatorProfiles[0].id)
logger.info('Auto-selected single creator profile:', creatorProfiles[0].name)
}
}, [creatorProfiles, formData.creatorId])
onExistingTemplateChange?.(!!existingTemplate)
}, [existingTemplate, onExistingTemplateChange])
useEffect(() => {
const handleCreatorProfileSaved = () => {
logger.info('Creator profile saved, reopening deploy modal...')
if (existingTemplate) {
onTemplateStatusChange?.({
status: existingTemplate.status as 'pending' | 'approved' | 'rejected',
views: existingTemplate.views,
stars: existingTemplate.stars,
})
} else {
onTemplateStatusChange?.(null)
}
}, [existingTemplate, onTemplateStatusChange])
const fetchCreatorOptions = async () => {
if (!session?.user?.id) return
setLoadingCreators(true)
try {
const response = await fetch('/api/creators')
if (response.ok) {
const data = await response.json()
const profiles = (data.profiles || []).map((profile: any) => ({
id: profile.id,
name: profile.name,
referenceType: profile.referenceType,
referenceId: profile.referenceId,
}))
setCreatorOptions(profiles)
return profiles
}
} catch (error) {
logger.error('Error fetching creator profiles:', error)
} finally {
setLoadingCreators(false)
}
return []
}
useEffect(() => {
fetchCreatorOptions()
}, [session?.user?.id])
useEffect(() => {
if (creatorOptions.length === 1 && !formData.creatorId) {
updateField('creatorId', creatorOptions[0].id)
logger.info('Auto-selected single creator profile:', creatorOptions[0].name)
}
}, [creatorOptions, formData.creatorId])
useEffect(() => {
const handleCreatorProfileSaved = async () => {
logger.info('Creator profile saved, refreshing profiles...')
await fetchCreatorOptions()
window.dispatchEvent(new CustomEvent('close-settings'))
setTimeout(() => {
window.dispatchEvent(new CustomEvent('open-deploy-modal', { detail: { tab: 'template' } }))
@@ -292,7 +357,7 @@ export function TemplateDeploy({
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
Creator <span className='text-[var(--text-error)]'>*</span>
</Label>
{creatorProfiles.length === 0 && !loadingCreators ? (
{creatorOptions.length === 0 && !loadingCreators ? (
<div className='space-y-[8px]'>
<p className='text-[12px] text-[var(--text-tertiary)]'>
A creator profile is required to publish templates.
@@ -320,9 +385,9 @@ export function TemplateDeploy({
</div>
) : (
<Combobox
options={creatorProfiles.map((profile) => ({
label: profile.name,
value: profile.id,
options={creatorOptions.map((option) => ({
label: option.name,
value: option.id,
}))}
value={formData.creatorId}
selectedValue={formData.creatorId}

View File

@@ -1,8 +1,7 @@
'use client'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useCallback, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import {
Badge,
Button,
@@ -18,22 +17,11 @@ import {
} from '@/components/emcn'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getInputFormatExample as getInputFormatExampleUtil } from '@/lib/workflows/operations/deployment-utils'
import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { CreateApiKeyModal } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/api-keys/components'
import { startsWithUuid } from '@/executor/constants'
import { useA2AAgentByWorkflow } from '@/hooks/queries/a2a/agents'
import { useApiKeys } from '@/hooks/queries/api-keys'
import {
deploymentKeys,
useActivateDeploymentVersion,
useChatDeploymentInfo,
useDeploymentInfo,
useDeploymentVersions,
useDeployWorkflow,
useUndeployWorkflow,
} from '@/hooks/queries/deployments'
import { useTemplateByWorkflow } from '@/hooks/queries/templates'
import { useWorkflowMcpServers } from '@/hooks/queries/workflow-mcp-servers'
import { useWorkspaceSettings } from '@/hooks/queries/workspace'
import { usePermissionConfig } from '@/hooks/use-permission-config'
import { useSettingsModalStore } from '@/stores/modals/settings/store'
@@ -60,7 +48,7 @@ interface DeployModalProps {
refetchDeployedState: () => Promise<void>
}
interface WorkflowDeploymentInfoUI {
interface WorkflowDeploymentInfo {
isDeployed: boolean
deployedAt?: string
apiKey: string
@@ -81,12 +69,16 @@ export function DeployModal({
isLoadingDeployedState,
refetchDeployedState,
}: DeployModalProps) {
const queryClient = useQueryClient()
const openSettingsModal = useSettingsModalStore((state) => state.openModal)
const deploymentStatus = useWorkflowRegistry((state) =>
state.getWorkflowDeploymentStatus(workflowId)
)
const isDeployed = deploymentStatus?.isDeployed ?? isDeployedProp
const setDeploymentStatus = useWorkflowRegistry((state) => state.setDeploymentStatus)
const [isSubmitting, setIsSubmitting] = useState(false)
const [isUndeploying, setIsUndeploying] = useState(false)
const [deploymentInfo, setDeploymentInfo] = useState<WorkflowDeploymentInfo | null>(null)
const [isLoading, setIsLoading] = useState(false)
const workflowMetadata = useWorkflowRegistry((state) =>
workflowId ? state.workflows[workflowId] : undefined
)
@@ -94,18 +86,33 @@ export function DeployModal({
const [activeTab, setActiveTab] = useState<TabView>('general')
const [chatSubmitting, setChatSubmitting] = useState(false)
const [apiDeployError, setApiDeployError] = useState<string | null>(null)
const [chatExists, setChatExists] = useState(false)
const [isChatFormValid, setIsChatFormValid] = useState(false)
const [selectedStreamingOutputs, setSelectedStreamingOutputs] = useState<string[]>([])
const [versions, setVersions] = useState<WorkflowDeploymentVersionResponse[]>([])
const [versionsLoading, setVersionsLoading] = useState(false)
const [showUndeployConfirm, setShowUndeployConfirm] = useState(false)
const [templateFormValid, setTemplateFormValid] = useState(false)
const [templateSubmitting, setTemplateSubmitting] = useState(false)
const [mcpToolSubmitting, setMcpToolSubmitting] = useState(false)
const [mcpToolCanSave, setMcpToolCanSave] = useState(false)
const [hasMcpServers, setHasMcpServers] = useState(false)
const [a2aSubmitting, setA2aSubmitting] = useState(false)
const [a2aCanSave, setA2aCanSave] = useState(false)
const [hasA2aAgent, setHasA2aAgent] = useState(false)
const [isA2aPublished, setIsA2aPublished] = useState(false)
const [a2aNeedsRepublish, setA2aNeedsRepublish] = useState(false)
const [showA2aDeleteConfirm, setShowA2aDeleteConfirm] = useState(false)
const [hasExistingTemplate, setHasExistingTemplate] = useState(false)
const [templateStatus, setTemplateStatus] = useState<{
status: 'pending' | 'approved' | 'rejected' | null
views?: number
stars?: number
} | null>(null)
const [existingChat, setExistingChat] = useState<ExistingChat | null>(null)
const [isLoadingChat, setIsLoadingChat] = useState(false)
const [chatSuccess, setChatSuccess] = useState(false)
@@ -126,107 +133,193 @@ export function DeployModal({
const createButtonDisabled =
isApiKeysLoading || (!allowPersonalApiKeys && !canManageWorkspaceKeys)
const {
data: deploymentInfoData,
isLoading: isLoadingDeploymentInfo,
refetch: refetchDeploymentInfo,
} = useDeploymentInfo(workflowId, { enabled: open && isDeployed })
const {
data: versionsData,
isLoading: versionsLoading,
refetch: refetchVersions,
} = useDeploymentVersions(workflowId, { enabled: open })
const {
isLoading: isLoadingChat,
chatExists,
existingChat,
refetch: refetchChatInfo,
} = useChatDeploymentInfo(workflowId, { enabled: open })
const { data: mcpServers = [] } = useWorkflowMcpServers(workflowWorkspaceId || '')
const hasMcpServers = mcpServers.length > 0
const { data: existingA2aAgent } = useA2AAgentByWorkflow(
workflowWorkspaceId || '',
workflowId || ''
)
const hasA2aAgent = !!existingA2aAgent
const isA2aPublished = existingA2aAgent?.isPublished ?? false
const { data: existingTemplate } = useTemplateByWorkflow(workflowId || '', {
enabled: !!workflowId,
})
const hasExistingTemplate = !!existingTemplate
const templateStatus = existingTemplate
? {
status: existingTemplate.status as 'pending' | 'approved' | 'rejected' | null,
views: existingTemplate.views,
stars: existingTemplate.stars,
}
: null
const deployMutation = useDeployWorkflow()
const undeployMutation = useUndeployWorkflow()
const activateVersionMutation = useActivateDeploymentVersion()
const versions = versionsData?.versions ?? []
const getApiKeyLabel = useCallback(
(value?: string | null) => {
if (value && value.trim().length > 0) {
return value
}
return workflowWorkspaceId ? 'Workspace API keys' : 'Personal API keys'
},
[workflowWorkspaceId]
)
const getApiHeaderPlaceholder = useCallback(
() => (workflowWorkspaceId ? 'YOUR_WORKSPACE_API_KEY' : 'YOUR_PERSONAL_API_KEY'),
[workflowWorkspaceId]
)
const getInputFormatExample = useCallback(
(includeStreaming = false) => {
return getInputFormatExampleUtil(includeStreaming, selectedStreamingOutputs)
},
[selectedStreamingOutputs]
)
const deploymentInfo: WorkflowDeploymentInfoUI | null = useMemo(() => {
if (!deploymentInfoData?.isDeployed || !workflowId) {
return null
const getApiKeyLabel = (value?: string | null) => {
if (value && value.trim().length > 0) {
return value
}
return workflowWorkspaceId ? 'Workspace API keys' : 'Personal API keys'
}
const endpoint = `${getBaseUrl()}/api/workflows/${workflowId}/execute`
const inputFormatExample = getInputFormatExample(selectedStreamingOutputs.length > 0)
const placeholderKey = getApiHeaderPlaceholder()
const getApiHeaderPlaceholder = () =>
workflowWorkspaceId ? 'YOUR_WORKSPACE_API_KEY' : 'YOUR_PERSONAL_API_KEY'
return {
isDeployed: deploymentInfoData.isDeployed,
deployedAt: deploymentInfoData.deployedAt ?? undefined,
apiKey: getApiKeyLabel(deploymentInfoData.apiKey),
endpoint,
exampleCommand: `curl -X POST -H "X-API-Key: ${placeholderKey}" -H "Content-Type: application/json"${inputFormatExample} ${endpoint}`,
needsRedeployment: deploymentInfoData.needsRedeployment,
const getInputFormatExample = (includeStreaming = false) => {
return getInputFormatExampleUtil(includeStreaming, selectedStreamingOutputs)
}
const fetchChatDeploymentInfo = useCallback(async () => {
if (!workflowId) return
try {
setIsLoadingChat(true)
const response = await fetch(`/api/workflows/${workflowId}/chat/status`)
if (response.ok) {
const data = await response.json()
if (data.isDeployed && data.deployment) {
const detailResponse = await fetch(`/api/chat/manage/${data.deployment.id}`)
if (detailResponse.ok) {
const chatDetail = await detailResponse.json()
setExistingChat(chatDetail)
setChatExists(true)
} else {
setExistingChat(null)
setChatExists(false)
}
} else {
setExistingChat(null)
setChatExists(false)
}
} else {
setExistingChat(null)
setChatExists(false)
}
} catch (error) {
logger.error('Error fetching chat deployment info:', { error })
setExistingChat(null)
setChatExists(false)
} finally {
setIsLoadingChat(false)
}
}, [
deploymentInfoData,
workflowId,
selectedStreamingOutputs,
getInputFormatExample,
getApiHeaderPlaceholder,
getApiKeyLabel,
])
}, [workflowId])
useEffect(() => {
if (open && workflowId) {
setActiveTab('general')
setApiDeployError(null)
fetchChatDeploymentInfo()
}
}, [open, workflowId])
}, [open, workflowId, fetchChatDeploymentInfo])
useEffect(() => {
async function fetchDeploymentInfo() {
if (!open || !workflowId || !isDeployed) {
setDeploymentInfo(null)
setIsLoading(false)
return
}
if (deploymentInfo?.isDeployed && !needsRedeployment) {
setIsLoading(false)
return
}
try {
setIsLoading(true)
const response = await fetch(`/api/workflows/${workflowId}/deploy`)
if (!response.ok) {
throw new Error('Failed to fetch deployment information')
}
const data = await response.json()
const endpoint = `${getBaseUrl()}/api/workflows/${workflowId}/execute`
const inputFormatExample = getInputFormatExample(selectedStreamingOutputs.length > 0)
const placeholderKey = workflowWorkspaceId ? 'YOUR_WORKSPACE_API_KEY' : 'YOUR_API_KEY'
setDeploymentInfo({
isDeployed: data.isDeployed,
deployedAt: data.deployedAt,
apiKey: data.apiKey || placeholderKey,
endpoint,
exampleCommand: `curl -X POST -H "X-API-Key: ${placeholderKey}" -H "Content-Type: application/json"${inputFormatExample} ${endpoint}`,
needsRedeployment,
})
} catch (error) {
logger.error('Error fetching deployment info:', { error })
} finally {
setIsLoading(false)
}
}
fetchDeploymentInfo()
}, [open, workflowId, isDeployed, needsRedeployment, deploymentInfo?.isDeployed])
const onDeploy = async () => {
setApiDeployError(null)
try {
setIsSubmitting(true)
const response = await fetch(`/api/workflows/${workflowId}/deploy`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
deployChatEnabled: false,
}),
})
if (!response.ok) {
const errorData = await response.json()
throw new Error(errorData.error || 'Failed to deploy workflow')
}
const responseData = await response.json()
const isDeployedStatus = responseData.isDeployed ?? false
const deployedAtTime = responseData.deployedAt ? new Date(responseData.deployedAt) : undefined
const apiKeyLabel = getApiKeyLabel(responseData.apiKey)
setDeploymentStatus(workflowId, isDeployedStatus, deployedAtTime, apiKeyLabel)
if (workflowId) {
useWorkflowRegistry.getState().setWorkflowNeedsRedeployment(workflowId, false)
}
await refetchDeployedState()
await fetchVersions()
const deploymentInfoResponse = await fetch(`/api/workflows/${workflowId}/deploy`)
if (deploymentInfoResponse.ok) {
const deploymentData = await deploymentInfoResponse.json()
const apiEndpoint = `${getBaseUrl()}/api/workflows/${workflowId}/execute`
const inputFormatExample = getInputFormatExample(selectedStreamingOutputs.length > 0)
const placeholderKey = getApiHeaderPlaceholder()
setDeploymentInfo({
isDeployed: deploymentData.isDeployed,
deployedAt: deploymentData.deployedAt,
apiKey: getApiKeyLabel(deploymentData.apiKey),
endpoint: apiEndpoint,
exampleCommand: `curl -X POST -H "X-API-Key: ${placeholderKey}" -H "Content-Type: application/json"${inputFormatExample} ${apiEndpoint}`,
needsRedeployment: false,
})
}
setApiDeployError(null)
} catch (error: unknown) {
logger.error('Error deploying workflow:', { error })
const errorMessage = error instanceof Error ? error.message : 'Failed to deploy workflow'
setApiDeployError(errorMessage)
} finally {
setIsSubmitting(false)
}
}
const fetchVersions = useCallback(async () => {
if (!workflowId) return
try {
const res = await fetch(`/api/workflows/${workflowId}/deployments`)
if (res.ok) {
const data = await res.json()
setVersions(Array.isArray(data.versions) ? data.versions : [])
} else {
setVersions([])
}
} catch {
setVersions([])
}
}, [workflowId])
useEffect(() => {
if (open && workflowId) {
setVersionsLoading(true)
fetchVersions().finally(() => setVersionsLoading(false))
}
}, [open, workflowId, fetchVersions])
useEffect(() => {
if (!open || selectedStreamingOutputs.length === 0) return
@@ -276,88 +369,181 @@ export function DeployModal({
}
}, [onOpenChange])
const onDeploy = useCallback(async () => {
if (!workflowId) return
setApiDeployError(null)
try {
await deployMutation.mutateAsync({ workflowId, deployChatEnabled: false })
await refetchDeployedState()
} catch (error: unknown) {
logger.error('Error deploying workflow:', { error })
const errorMessage = error instanceof Error ? error.message : 'Failed to deploy workflow'
setApiDeployError(errorMessage)
}
}, [workflowId, deployMutation, refetchDeployedState])
const handlePromoteToLive = useCallback(
async (version: number) => {
if (!workflowId) return
const previousVersions = [...versions]
setVersions((prev) =>
prev.map((v) => ({
...v,
isActive: v.version === version,
}))
)
try {
await activateVersionMutation.mutateAsync({ workflowId, version })
await refetchDeployedState()
const response = await fetch(
`/api/workflows/${workflowId}/deployments/${version}/activate`,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
}
)
if (!response.ok) {
const errorData = await response.json()
throw new Error(errorData.error || 'Failed to promote version')
}
const responseData = await response.json()
const deployedAtTime = responseData.deployedAt
? new Date(responseData.deployedAt)
: undefined
const apiKeyLabel = getApiKeyLabel(responseData.apiKey)
setDeploymentStatus(workflowId, true, deployedAtTime, apiKeyLabel)
refetchDeployedState()
fetchVersions()
const deploymentInfoResponse = await fetch(`/api/workflows/${workflowId}/deploy`)
if (deploymentInfoResponse.ok) {
const deploymentData = await deploymentInfoResponse.json()
const apiEndpoint = `${getBaseUrl()}/api/workflows/${workflowId}/execute`
const inputFormatExample = getInputFormatExample(selectedStreamingOutputs.length > 0)
const placeholderKey = getApiHeaderPlaceholder()
setDeploymentInfo({
isDeployed: deploymentData.isDeployed,
deployedAt: deploymentData.deployedAt,
apiKey: getApiKeyLabel(deploymentData.apiKey),
endpoint: apiEndpoint,
exampleCommand: `curl -X POST -H "X-API-Key: ${placeholderKey}" -H "Content-Type: application/json"${inputFormatExample} ${apiEndpoint}`,
needsRedeployment: false,
})
}
} catch (error) {
logger.error('Error promoting version:', { error })
setVersions(previousVersions)
throw error
}
},
[workflowId, activateVersionMutation, refetchDeployedState]
[workflowId, versions, refetchDeployedState, fetchVersions, selectedStreamingOutputs]
)
const handleUndeploy = useCallback(async () => {
if (!workflowId) return
const handleUndeploy = async () => {
try {
await undeployMutation.mutateAsync({ workflowId })
setIsUndeploying(true)
const response = await fetch(`/api/workflows/${workflowId}/deploy`, {
method: 'DELETE',
})
if (!response.ok) {
const errorData = await response.json()
throw new Error(errorData.error || 'Failed to undeploy workflow')
}
setDeploymentStatus(workflowId, false)
setChatExists(false)
setShowUndeployConfirm(false)
onOpenChange(false)
} catch (error: unknown) {
logger.error('Error undeploying workflow:', { error })
} finally {
setIsUndeploying(false)
}
}, [workflowId, undeployMutation, onOpenChange])
const handleRedeploy = useCallback(async () => {
if (!workflowId) return
setApiDeployError(null)
}
const handleRedeploy = async () => {
try {
await deployMutation.mutateAsync({ workflowId, deployChatEnabled: false })
setIsSubmitting(true)
const response = await fetch(`/api/workflows/${workflowId}/deploy`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
deployChatEnabled: false,
}),
})
if (!response.ok) {
const errorData = await response.json()
throw new Error(errorData.error || 'Failed to redeploy workflow')
}
const { isDeployed: newDeployStatus, deployedAt, apiKey } = await response.json()
setDeploymentStatus(
workflowId,
newDeployStatus,
deployedAt ? new Date(deployedAt) : undefined,
getApiKeyLabel(apiKey)
)
if (workflowId) {
useWorkflowRegistry.getState().setWorkflowNeedsRedeployment(workflowId, false)
}
await refetchDeployedState()
await fetchVersions()
setDeploymentInfo((prev) => (prev ? { ...prev, needsRedeployment: false } : prev))
} catch (error: unknown) {
logger.error('Error redeploying workflow:', { error })
const errorMessage = error instanceof Error ? error.message : 'Failed to redeploy workflow'
setApiDeployError(errorMessage)
} finally {
setIsSubmitting(false)
}
}, [workflowId, deployMutation, refetchDeployedState])
}
const handleCloseModal = useCallback(() => {
const handleCloseModal = () => {
setIsSubmitting(false)
setChatSubmitting(false)
setApiDeployError(null)
onOpenChange(false)
}, [onOpenChange])
const handleChatDeployed = useCallback(async () => {
if (!workflowId) return
queryClient.invalidateQueries({ queryKey: deploymentKeys.info(workflowId) })
queryClient.invalidateQueries({ queryKey: deploymentKeys.versions(workflowId) })
queryClient.invalidateQueries({ queryKey: deploymentKeys.chatStatus(workflowId) })
await refetchDeployedState()
useWorkflowRegistry.getState().setWorkflowNeedsRedeployment(workflowId, false)
}
const handleChatDeployed = async () => {
await handlePostDeploymentUpdate()
setChatSuccess(true)
setTimeout(() => setChatSuccess(false), 2000)
}, [workflowId, queryClient, refetchDeployedState])
}
// Stable wrapper around the chat-info query refetch, passed to children so
// they can refresh chat deployment data after mutating it.
const handleRefetchChat = useCallback(async () => {
  await refetchChatInfo()
}, [refetchChatInfo])
const handlePostDeploymentUpdate = async () => {
if (!workflowId) return
const handleChatFormSubmit = useCallback(() => {
setDeploymentStatus(workflowId, true, new Date(), getApiKeyLabel())
const deploymentInfoResponse = await fetch(`/api/workflows/${workflowId}/deploy`)
if (deploymentInfoResponse.ok) {
const deploymentData = await deploymentInfoResponse.json()
const apiEndpoint = `${getBaseUrl()}/api/workflows/${workflowId}/execute`
const inputFormatExample = getInputFormatExample(selectedStreamingOutputs.length > 0)
const placeholderKey = getApiHeaderPlaceholder()
setDeploymentInfo({
isDeployed: deploymentData.isDeployed,
deployedAt: deploymentData.deployedAt,
apiKey: getApiKeyLabel(deploymentData.apiKey),
endpoint: apiEndpoint,
exampleCommand: `curl -X POST -H "X-API-Key: ${placeholderKey}" -H "Content-Type: application/json"${inputFormatExample} ${apiEndpoint}`,
needsRedeployment: false,
})
}
await refetchDeployedState()
await fetchVersions()
useWorkflowRegistry.getState().setWorkflowNeedsRedeployment(workflowId, false)
}
const handleChatFormSubmit = () => {
const form = document.getElementById('chat-deploy-form') as HTMLFormElement
if (form) {
const updateTrigger = form.querySelector('[data-update-trigger]') as HTMLButtonElement
@@ -367,9 +553,9 @@ export function DeployModal({
form.requestSubmit()
}
}
}, [])
}
const handleChatDelete = useCallback(() => {
const handleChatDelete = () => {
const form = document.getElementById('chat-deploy-form') as HTMLFormElement
if (form) {
const deleteButton = form.querySelector('[data-delete-trigger]') as HTMLButtonElement
@@ -377,7 +563,7 @@ export function DeployModal({
deleteButton.click()
}
}
}, [])
}
const handleTemplateFormSubmit = useCallback(() => {
const form = document.getElementById('template-deploy-form') as HTMLFormElement
@@ -437,13 +623,6 @@ export function DeployModal({
deleteTrigger?.click()
}, [])
// Stable wrapper around the deployment-versions query refetch, suitable for
// passing down as a prop (identity changes only when refetchVersions does).
const handleFetchVersions = useCallback(async () => {
  await refetchVersions()
}, [refetchVersions])
// Busy flags derived directly from the React Query mutation states instead of
// separate local state, so they can never drift out of sync with the requests.
const isSubmitting = deployMutation.isPending
const isUndeploying = undeployMutation.isPending
return (
<>
<Modal open={open} onOpenChange={handleCloseModal}>
@@ -491,7 +670,7 @@ export function DeployModal({
versionsLoading={versionsLoading}
onPromoteToLive={handlePromoteToLive}
onLoadDeploymentComplete={handleCloseModal}
fetchVersions={handleFetchVersions}
fetchVersions={fetchVersions}
/>
</ModalTabsContent>
@@ -499,7 +678,7 @@ export function DeployModal({
<ApiDeploy
workflowId={workflowId}
deploymentInfo={deploymentInfo}
isLoading={isLoadingDeploymentInfo}
isLoading={isLoading}
needsRedeployment={needsRedeployment}
apiDeployError={apiDeployError}
getInputFormatExample={getInputFormatExample}
@@ -512,9 +691,10 @@ export function DeployModal({
<ChatDeploy
workflowId={workflowId || ''}
deploymentInfo={deploymentInfo}
existingChat={existingChat as ExistingChat | null}
existingChat={existingChat}
isLoadingChat={isLoadingChat}
onRefetchChat={handleRefetchChat}
onRefetchChat={fetchChatDeploymentInfo}
onChatExistsChange={setChatExists}
chatSubmitting={chatSubmitting}
setChatSubmitting={setChatSubmitting}
onValidationChange={setIsChatFormValid}
@@ -531,6 +711,8 @@ export function DeployModal({
onDeploymentComplete={handleCloseModal}
onValidationChange={setTemplateFormValid}
onSubmittingChange={setTemplateSubmitting}
onExistingTemplateChange={setHasExistingTemplate}
onTemplateStatusChange={setTemplateStatus}
/>
)}
</ModalTabsContent>
@@ -559,6 +741,7 @@ export function DeployModal({
isDeployed={isDeployed}
onSubmittingChange={setMcpToolSubmitting}
onCanSaveChange={setMcpToolCanSave}
onHasServersChange={setHasMcpServers}
/>
)}
</ModalTabsContent>
@@ -573,6 +756,8 @@ export function DeployModal({
workflowNeedsRedeployment={needsRedeployment}
onSubmittingChange={setA2aSubmitting}
onCanSaveChange={setA2aCanSave}
onAgentExistsChange={setHasA2aAgent}
onPublishedChange={setIsA2aPublished}
onNeedsRepublishChange={setA2aNeedsRepublish}
onDeployWorkflow={onDeploy}
/>
@@ -658,7 +843,7 @@ export function DeployModal({
onClick={handleMcpToolFormSubmit}
disabled={mcpToolSubmitting || !mcpToolCanSave}
>
{mcpToolSubmitting ? 'Saving...' : 'Save Tool'}
{mcpToolSubmitting ? 'Saving...' : 'Save Tool Schema'}
</Button>
</div>
</ModalFooter>

View File

@@ -336,10 +336,6 @@ export function Code({
setCode('')
}
handleStreamChunkRef.current = (chunk: string) => {
setCode((prev) => prev + chunk)
}
handleGeneratedContentRef.current = (generatedCode: string) => {
setCode(generatedCode)
if (!isPreview && !disabled) {
@@ -695,7 +691,11 @@ export function Code({
/>
)}
<CodeEditor.Container onDragOver={(e) => e.preventDefault()} onDrop={handleDrop}>
<CodeEditor.Container
onDragOver={(e) => e.preventDefault()}
onDrop={handleDrop}
isStreaming={isAiStreaming}
>
<div className='absolute top-2 right-3 z-10 flex items-center gap-1 opacity-0 transition-opacity group-hover:opacity-100'>
{wandConfig?.enabled &&
!isAiStreaming &&
@@ -761,11 +761,6 @@ export function Code({
}}
onFocus={() => {
hasEditedSinceFocusRef.current = false
// Show tag dropdown on focus when code is empty
if (!isPreview && !disabled && !readOnly && code.trim() === '') {
setShowTags(true)
setCursorPosition(0)
}
}}
highlight={createHighlightFunction(effectiveLanguage, shouldHighlightReference)}
{...getCodeEditorProps({ isStreaming: isAiStreaming, isPreview, disabled })}

View File

@@ -2,19 +2,16 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useReactFlow } from 'reactflow'
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
import { SubBlockInputController } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/sub-block-input-controller'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'
import { getDependsOnFields } from '@/blocks/utils'
import { usePermissionConfig } from '@/hooks/use-permission-config'
import { getProviderFromModel } from '@/providers/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/**
* Constants for ComboBox component behavior
@@ -94,24 +91,15 @@ export function ComboBox({
// Dependency tracking for fetchOptions
const dependsOnFields = useMemo(() => getDependsOnFields(dependsOn), [dependsOn])
const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId)
const blockState = useWorkflowStore((state) => state.blocks[blockId])
const blockConfig = blockState?.type ? getBlock(blockState.type) : null
const canonicalIndex = useMemo(
() => buildCanonicalIndex(blockConfig?.subBlocks || []),
[blockConfig?.subBlocks]
)
const canonicalModeOverrides = blockState?.data?.canonicalModes
const dependencyValues = useSubBlockStore(
useCallback(
(state) => {
if (dependsOnFields.length === 0 || !activeWorkflowId) return []
const workflowValues = state.workflowValues[activeWorkflowId] || {}
const blockValues = workflowValues[blockId] || {}
return dependsOnFields.map((depKey) =>
resolveDependencyValue(depKey, blockValues, canonicalIndex, canonicalModeOverrides)
)
return dependsOnFields.map((depKey) => blockValues[depKey] ?? null)
},
[dependsOnFields, activeWorkflowId, blockId, canonicalIndex, canonicalModeOverrides]
[dependsOnFields, activeWorkflowId, blockId]
)
)

View File

@@ -115,7 +115,6 @@ export function ConditionInput({
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
const containerRef = useRef<HTMLDivElement>(null)
const inputRefs = useRef<Map<string, HTMLTextAreaElement>>(new Map())
/**
* Determines if a reference string should be highlighted in the editor.
@@ -729,20 +728,6 @@ export function ConditionInput({
})
}, [conditionalBlocks.length])
// Capture textarea refs from Editor components (condition mode)
useEffect(() => {
if (!isRouterMode && containerRef.current) {
conditionalBlocks.forEach((block) => {
const textarea = containerRef.current?.querySelector(
`[data-block-id="${block.id}"] textarea`
) as HTMLTextAreaElement | null
if (textarea) {
inputRefs.current.set(block.id, textarea)
}
})
}
}, [conditionalBlocks, isRouterMode])
// Show loading or empty state if not ready or no blocks
if (!isReady || conditionalBlocks.length === 0) {
return (
@@ -857,9 +842,6 @@ export function ConditionInput({
onDrop={(e) => handleDrop(block.id, e)}
>
<Textarea
ref={(el) => {
if (el) inputRefs.current.set(block.id, el)
}}
data-router-block-id={block.id}
value={block.value}
onChange={(e) => {
@@ -887,15 +869,6 @@ export function ConditionInput({
)
}
}}
onFocus={() => {
if (!isPreview && !disabled && block.value.trim() === '') {
setConditionalBlocks((blocks) =>
blocks.map((b) =>
b.id === block.id ? { ...b, showTags: true, cursorPosition: 0 } : b
)
)
}
}}
onBlur={() => {
setTimeout(() => {
setConditionalBlocks((blocks) =>
@@ -956,11 +929,6 @@ export function ConditionInput({
)
)
}}
inputRef={
{
current: inputRefs.current.get(block.id) || null,
} as React.RefObject<HTMLTextAreaElement>
}
/>
)}
</div>
@@ -1038,15 +1006,6 @@ export function ConditionInput({
)
}
}}
onFocus={() => {
if (!isPreview && !disabled && block.value.trim() === '') {
setConditionalBlocks((blocks) =>
blocks.map((b) =>
b.id === block.id ? { ...b, showTags: true, cursorPosition: 0 } : b
)
)
}
}}
highlight={(codeToHighlight) => {
const placeholders: {
placeholder: string
@@ -1154,11 +1113,6 @@ export function ConditionInput({
)
)
}}
inputRef={
{
current: inputRefs.current.get(block.id) || null,
} as React.RefObject<HTMLTextAreaElement>
}
/>
)}
</div>

View File

@@ -1,6 +1,6 @@
'use client'
import { useMemo, useState } from 'react'
import { useMemo } from 'react'
import { createLogger } from '@sim/logger'
import { Check } from 'lucide-react'
import {
@@ -308,7 +308,6 @@ export function OAuthRequiredModal({
serviceId,
newScopes = [],
}: OAuthRequiredModalProps) {
const [error, setError] = useState<string | null>(null)
const { baseProvider } = parseProvider(provider)
const baseProviderConfig = OAUTH_PROVIDERS[baseProvider]
@@ -349,24 +348,23 @@ export function OAuthRequiredModal({
}, [requiredScopes, newScopesSet])
const handleConnectDirectly = async () => {
setError(null)
try {
const providerId = getProviderIdFromServiceId(serviceId)
onClose()
logger.info('Linking OAuth2:', {
providerId,
requiredScopes,
})
if (providerId === 'trello') {
onClose()
window.location.href = '/api/auth/trello/authorize'
return
}
if (providerId === 'shopify') {
onClose()
// Pass the current URL so we can redirect back after OAuth
const returnUrl = encodeURIComponent(window.location.href)
window.location.href = `/api/auth/shopify/authorize?returnUrl=${returnUrl}`
return
@@ -376,10 +374,8 @@ export function OAuthRequiredModal({
providerId,
callbackURL: window.location.href,
})
onClose()
} catch (err) {
logger.error('Error initiating OAuth flow:', { error: err })
setError('Failed to connect. Please try again.')
} catch (error) {
logger.error('Error initiating OAuth flow:', { error })
}
}
@@ -429,12 +425,10 @@ export function OAuthRequiredModal({
</ul>
</div>
)}
{error && <p className='text-[12px] text-[var(--text-error)]'>{error}</p>}
</div>
</ModalBody>
<ModalFooter>
<Button variant='default' onClick={onClose}>
<Button variant='active' onClick={onClose}>
Cancel
</Button>
<Button variant='tertiary' type='button' onClick={handleConnectDirectly}>

View File

@@ -288,7 +288,6 @@ export function DocumentTagEntry({
onKeyDown={handlers.onKeyDown}
onDrop={handlers.onDrop}
onDragOver={handlers.onDragOver}
onFocus={handlers.onFocus}
onScroll={(e) => syncOverlayScroll(cellKey, e.currentTarget.scrollLeft)}
onPaste={() =>
setTimeout(() => {

View File

@@ -1,15 +1,12 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Badge } from '@/components/emcn'
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'
import { getDependsOnFields } from '@/blocks/utils'
import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/**
* Dropdown option type - can be a simple string or an object with label, id, and optional icon
@@ -92,24 +89,15 @@ export function Dropdown({
const dependsOnFields = useMemo(() => getDependsOnFields(dependsOn), [dependsOn])
const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId)
const blockState = useWorkflowStore((state) => state.blocks[blockId])
const blockConfig = blockState?.type ? getBlock(blockState.type) : null
const canonicalIndex = useMemo(
() => buildCanonicalIndex(blockConfig?.subBlocks || []),
[blockConfig?.subBlocks]
)
const canonicalModeOverrides = blockState?.data?.canonicalModes
const dependencyValues = useSubBlockStore(
useCallback(
(state) => {
if (dependsOnFields.length === 0 || !activeWorkflowId) return []
const workflowValues = state.workflowValues[activeWorkflowId] || {}
const blockValues = workflowValues[blockId] || {}
return dependsOnFields.map((depKey) =>
resolveDependencyValue(depKey, blockValues, canonicalIndex, canonicalModeOverrides)
)
return dependsOnFields.map((depKey) => blockValues[depKey] ?? null)
},
[dependsOnFields, activeWorkflowId, blockId, canonicalIndex, canonicalModeOverrides]
[dependsOnFields, activeWorkflowId, blockId]
)
)

View File

@@ -214,7 +214,6 @@ export function EvalInput({
onKeyDown={handlers.onKeyDown}
onDrop={handlers.onDrop}
onDragOver={handlers.onDragOver}
onFocus={handlers.onFocus}
placeholder='How accurate is the response?'
disabled={isPreview || disabled}
className={cn(

Some files were not shown because too many files have changed in this diff Show More