mirror of
https://github.com/simstudioai/sim.git
synced 2026-03-15 03:00:33 -04:00
Compare commits
43 Commits
feat/tools
...
waleedlati
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
463ef80490 | ||
|
|
6a3e350ce4 | ||
|
|
9b72b5c83b | ||
|
|
e41fbcc266 | ||
|
|
4fd0989264 | ||
|
|
345a95f48d | ||
|
|
e07963f88c | ||
|
|
25c59e3e2e | ||
|
|
dde098e8e5 | ||
|
|
5ae0115444 | ||
|
|
fbafe204e5 | ||
|
|
ba7d6ff298 | ||
|
|
40016e79a1 | ||
|
|
e4fb8b2fdd | ||
|
|
d98545d554 | ||
|
|
fadbad4085 | ||
|
|
244e1ee495 | ||
|
|
1f3dc52d15 | ||
|
|
f625482bcb | ||
|
|
16f337f6fd | ||
|
|
063ec87ced | ||
|
|
870d4b55c6 | ||
|
|
95304b2941 | ||
|
|
8b0c47b06c | ||
|
|
774771fddd | ||
|
|
67f8a687f6 | ||
|
|
43c0f5b199 | ||
|
|
ff01825b20 | ||
|
|
58d0fda173 | ||
|
|
ecdb133d1b | ||
|
|
d06459f489 | ||
|
|
0574427d45 | ||
|
|
8f9b859a53 | ||
|
|
60f9eb21bf | ||
|
|
af592349d3 | ||
|
|
0d86ea01f0 | ||
|
|
115f04e989 | ||
|
|
34d92fae89 | ||
|
|
67aa4bb332 | ||
|
|
15ace5e63f | ||
|
|
fdca73679d | ||
|
|
da46a387c9 | ||
|
|
b7e377ec4b |
4
.gitignore
vendored
4
.gitignore
vendored
@@ -73,3 +73,7 @@ start-collector.sh
|
||||
## Helm Chart Tests
|
||||
helm/sim/test
|
||||
i18n.cache
|
||||
|
||||
## Claude Code
|
||||
.claude/launch.json
|
||||
.claude/worktrees/
|
||||
|
||||
@@ -939,6 +939,25 @@ export function GoogleIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function DevinIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 500 500' fill='none' xmlns='http://www.w3.org/2000/svg'>
|
||||
<path
|
||||
d='M59.29,209.39l48.87,28.21c1.75,1.01,3.71,1.51,5.67,1.51c1.95,0,3.92-0.52,5.67-1.51l48.87-28.21c0,0,0.14-0.11,0.2-0.16c0.74-0.45,1.44-0.99,2.07-1.6c0.09-0.09,0.18-0.2,0.27-0.29c0.54-0.58,1.03-1.21,1.44-1.89c0.06-0.11,0.16-0.2,0.2-0.32c0.43-0.74,0.74-1.53,0.99-2.37c0.05-0.18,0.09-0.36,0.14-0.54c0.2-0.86,0.36-1.74,0.36-2.66v-28.21c0-10.89,5.87-21.03,15.3-26.48c9.42-5.45,21.15-5.44,30.59,0l24.43,14.11c0.79,0.45,1.62,0.77,2.47,1.01c0.18,0.05,0.37,0.11,0.54,0.16c0.83,0.2,1.69,0.32,2.54,0.34c0.05,0,0.09,0,0.11,0c0.09,0,0.18-0.05,0.26-0.05c0.79,0,1.58-0.11,2.34-0.32c0.14-0.03,0.27-0.05,0.4-0.09c0.83-0.23,1.64-0.57,2.41-0.99c0.06-0.05,0.16-0.05,0.23-0.09l48.87-28.21c3.51-2.03,5.67-5.76,5.67-9.81V64.52c0-4.05-2.16-7.78-5.67-9.81l-48.91-28.19c-3.51-2.03-7.81-2.03-11.32,0l-48.87,28.21c0,0-0.14,0.11-0.2,0.16c-0.74,0.45-1.44,0.99-2.07,1.6c-0.09,0.09-0.18,0.2-0.27,0.29c-0.54,0.58-1.03,1.21-1.44,1.89c-0.06,0.11-0.16,0.2-0.2,0.31c-0.43,0.74-0.74,1.53-0.99,2.37c-0.05,0.18-0.09,0.36-0.14,0.54c-0.2,0.86-0.36,1.74-0.36,2.66v28.21c0,10.89-5.87,21.03-15.3,26.5c-9.42,5.44-21.15,5.44-30.59,0l-24.42-14.1c-0.79-0.45-1.63-0.77-2.47-1.01c-0.18-0.05-0.36-0.11-0.54-0.16c-0.84-0.2-1.69-0.31-2.55-0.34c-0.14,0-0.25,0-0.38,0c-0.81,0-1.6,0.11-2.37,0.31c-0.14,0.02-0.25,0.05-0.38,0.09c-0.82,0.23-1.63,0.57-2.4,1c-0.06,0.05-0.16,0.05-0.23,0.09l-48.84,28.24c-3.51,2.03-5.67,5.76-5.67,9.81v56.42c0,4.05,2.16,7.78,5.67,9.81C59.29,209.41,59.29,209.39,59.29,209.39z'
|
||||
fill='#2A6DCE'
|
||||
/>
|
||||
<path
|
||||
d='M325.46,223.49c9.42-5.44,21.15-5.44,30.59,0l24.43,14.11c0.79,0.45,1.62,0.77,2.47,1.01c0.18,0.05,0.36,0.11,0.54,0.16c0.83,0.2,1.69,0.31,2.54,0.34c0.05,0,0.09,0,0.11,0c0.09,0,0.18-0.03,0.26-0.05c0.79,0,1.58-0.11,2.34-0.31c0.14-0.03,0.27-0.05,0.4-0.09c0.83-0.23,1.62-0.57,2.41-0.99c0.06-0.05,0.16-0.05,0.25-0.09l48.87-28.21c3.51-2.03,5.67-5.76,5.67-9.81v-56.43c0-4.05-2.16-7.78-5.67-9.81l-48.84-28.22c-3.51-2.03-7.81-2.03-11.32,0l-48.87,28.21c0,0-0.14,0.11-0.2,0.16c-0.74,0.45-1.44,0.99-2.07,1.6c-0.09,0.09-0.18,0.2-0.26,0.29c-0.54,0.58-1.03,1.21-1.44,1.89c-0.06,0.11-0.16,0.2-0.2,0.32c-0.43,0.74-0.74,1.53-0.99,2.37c-0.05,0.18-0.09,0.36-0.14,0.54c-0.2,0.86-0.36,1.74-0.36,2.66v28.21c0,10.89-5.87,21.03-15.3,26.5c-9.42,5.44-21.15,5.44-30.59,0l-24.43-14.11c-0.79-0.45-1.62-0.77-2.47-1.01c-0.18-0.05-0.36-0.11-0.54-0.16c-0.83-0.2-1.69-0.32-2.54-0.34c-0.14,0-0.25,0-0.38,0c-0.81,0-1.6,0.11-2.37,0.32c-0.14,0.03-0.25,0.05-0.38,0.09c-0.83,0.23-1.64,0.57-2.41,0.99c-0.06,0.05-0.16,0.05-0.23,0.09l-48.87,28.21c-3.51,2.03-5.67,5.76-5.67,9.81v56.43c0,4.05,2.16,7.78,5.67,9.81l48.87,28.21c0,0,0.16,0.05,0.23,0.09c0.77,0.43,1.58,0.77,2.41,0.99c0.14,0.05,0.27,0.05,0.4,0.09c0.77,0.18,1.55,0.29,2.34,0.32c0.09,0,0.18,0.05,0.27,0.05c0.05,0,0.09,0,0.11,0c0.86,0,1.69-0.14,2.54-0.34c0.18-0.05,0.36-0.09,0.54-0.16c0.86-0.25,1.69-0.57,2.47-1.01l24.43-14.11c9.42-5.44,21.15-5.44,30.59,0c9.42,5.44,15.3,15.59,15.3,26.48v28.21c0,0.92,0.14,1.8,0.36,2.66c0.05,0.18,0.09,0.36,0.14,0.54c0.25,0.83,0.56,1.62,0.99,2.37c0.06,0.11,0.14,0.2,0.2,0.31c0.4,0.68,0.9,1.31,1.44,1.89c0.09,0.09,0.18,0.2,0.26,0.29c0.61,0.6,1.31,1.12,2.07,1.6c0.06,0.05,0.11,0.11,0.2,0.16l48.87,28.21c1.75,1.01,3.72,1.51,5.67,1.51s3.92-0.52,5.67-1.51l48.87-28.21c3.51-2.03,5.67-5.76,5.67-9.81v-56.43c0-4.05-2.16-7.78-5.67-9.81l-48.87-28.21c0,0-0.16-0.05-0.23-0.09c-0.77-0.43-1.58-0.77-2.41-0.99c-0.14-0.05-0.25-0.05-0.38-0.09c-0.79-0.18-1.57-0.29-2.38-0.32c-0.11,0-0.25,0-0.36,0c-0.86,0-1.71,0.14-2.54,0.34c-0.18,0.05-0.34,0.09-0.52,0.16c-0.86,0.25-1.69,0
.57-2.47,1.01l-24.43,14.11c-9.42,5.44-21.15,5.44-30.58,0c-9.42-5.44-15.3-15.59-15.3-26.5c0-10.91,5.87-21.03,15.3-26.48C325.55,223.49,325.46,223.49,325.46,223.49z'
|
||||
fill='#1DC19C'
|
||||
/>
|
||||
<path
|
||||
d='M304.5,369.22l-48.87-28.21c0,0-0.16-0.05-0.23-0.09c-0.77-0.43-1.57-0.77-2.41-0.99c-0.14-0.05-0.27-0.05-0.4-0.09c-0.79-0.18-1.57-0.29-2.37-0.32c-0.14,0-0.25,0-0.38,0c-0.86,0-1.71,0.14-2.54,0.34c-0.18,0.05-0.34,0.09-0.52,0.16c-0.86,0.25-1.69,0.57-2.47,1.01l-24.43,14.11c-9.42,5.44-21.15,5.44-30.58,0c-9.42-5.44-15.3-15.59-15.3-26.5v-28.22c0-0.92-0.14-1.8-0.36-2.66c-0.05-0.18-0.09-0.36-0.14-0.54c-0.25-0.83-0.57-1.62-0.99-2.37c-0.06-0.11-0.14-0.2-0.2-0.32c-0.4-0.68-0.9-1.31-1.44-1.89c-0.09-0.09-0.18-0.2-0.27-0.29c-0.6-0.6-1.31-1.12-2.07-1.6c-0.06-0.05-0.11-0.11-0.2-0.16l-48.87-28.21c-3.51-2.03-7.81-2.03-11.32,0L59.28,290.6c-3.51,2.03-5.67,5.76-5.67,9.81v56.43c0,4.05,2.16,7.78,5.67,9.81l48.87,28.21c0,0,0.16,0.06,0.23,0.09c0.77,0.43,1.55,0.77,2.38,0.99c0.14,0.05,0.27,0.06,0.4,0.09c0.77,0.18,1.55,0.29,2.34,0.32c0.09,0,0.18,0.05,0.29,0.05c0.05,0,0.09,0,0.14,0c0.86,0,1.69-0.14,2.52-0.34c0.18-0.05,0.36-0.09,0.54-0.16c0.86-0.25,1.69-0.57,2.47-1.01l24.43-14.11c9.42-5.44,21.15-5.44,30.59,0c9.42,5.44,15.3,15.59,15.3,26.48v28.21c0,0.92,0.14,1.8,0.36,2.66c0.05,0.18,0.09,0.36,0.14,0.54c0.25,0.83,0.57,1.62,0.99,2.37c0.06,0.11,0.14,0.2,0.2,0.32c0.4,0.68,0.9,1.31,1.44,1.89c0.09,0.09,0.18,0.2,0.27,0.29c0.61,0.61,1.31,1.12,2.07,1.6c0.06,0.05,0.11,0.11,0.2,0.16l48.87,28.21c1.75,1.01,3.71,1.51,5.67,1.51c1.96,0,3.92-0.52,5.67-1.51l48.87-28.21c3.51-2.03,5.67-5.76,5.67-9.81v-56.43c0-4.05-2.16-7.78-5.67-9.81L304.5,369.22z'
|
||||
fill='#1796E2'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function DiscordIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
@@ -1302,6 +1321,21 @@ export function GoogleCalendarIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function GoogleTasksIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 527.1 500' xmlns='http://www.w3.org/2000/svg'>
|
||||
<polygon
|
||||
fill='#0066DA'
|
||||
points='410.4,58.3 368.8,81.2 348.2,120.6 368.8,168.8 407.8,211 450,187.5 475.9,142.8 450,87.5'
|
||||
/>
|
||||
<path
|
||||
fill='#2684FC'
|
||||
d='M249.3,219.4l98.9-98.9c29.1,22.1,50.5,53.8,59.6,90.4L272.1,346.7c-12.2,12.2-32,12.2-44.2,0l-91.5-91.5 c-9.8-9.8-9.8-25.6,0-35.3l39-39c9.8-9.8,25.6-9.8,35.3,0L249.3,219.4z M519.8,63.6l-39.7-39.7c-9.7-9.7-25.6-9.7-35.3,0 l-34.4,34.4c27.5,23,49.9,51.8,65.5,84.5l43.9-43.9C529.6,89.2,529.6,73.3,519.8,63.6z M412.5,250c0,89.8-72.8,162.5-162.5,162.5 S87.5,339.8,87.5,250S160.2,87.5,250,87.5c36.9,0,70.9,12.3,98.2,33.1l62.2-62.2C367,21.9,311.1,0,250,0C111.9,0,0,111.9,0,250 s111.9,250,250,250s250-111.9,250-250c0-38.3-8.7-74.7-24.1-107.2L407.8,211C410.8,223.5,412.5,236.6,412.5,250z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function SupabaseIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
const gradient0 = `supabase_paint0_${id}`
|
||||
@@ -3430,6 +3464,23 @@ export const ResendIcon = (props: SVGProps<SVGSVGElement>) => (
|
||||
</svg>
|
||||
)
|
||||
|
||||
export const GoogleBigQueryIcon = (props: SVGProps<SVGSVGElement>) => (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 64 64'>
|
||||
<path
|
||||
d='M14.48 58.196L.558 34.082c-.744-1.288-.744-2.876 0-4.164L14.48 5.805c.743-1.287 2.115-2.08 3.6-2.082h27.857c1.48.007 2.845.8 3.585 2.082l13.92 24.113c.744 1.288.744 2.876 0 4.164L49.52 58.196c-.743 1.287-2.115 2.08-3.6 2.082H18.07c-1.483-.005-2.85-.798-3.593-2.082z'
|
||||
fill='#4386fa'
|
||||
/>
|
||||
<path
|
||||
d='M40.697 24.235s3.87 9.283-1.406 14.545-14.883 1.894-14.883 1.894L43.95 60.27h1.984c1.486-.002 2.858-.796 3.6-2.082L58.75 42.23z'
|
||||
opacity='.1'
|
||||
/>
|
||||
<path
|
||||
d='M45.267 43.23L41 38.953a.67.67 0 0 0-.158-.12 11.63 11.63 0 1 0-2.032 2.037.67.67 0 0 0 .113.15l4.277 4.277a.67.67 0 0 0 .947 0l1.12-1.12a.67.67 0 0 0 0-.947zM31.64 40.464a8.75 8.75 0 1 1 8.749-8.749 8.75 8.75 0 0 1-8.749 8.749zm-5.593-9.216v3.616c.557.983 1.363 1.803 2.338 2.375v-6.013zm4.375-2.998v9.772a6.45 6.45 0 0 0 2.338 0V28.25zm6.764 6.606v-2.142H34.85v4.5a6.43 6.43 0 0 0 2.338-2.368z'
|
||||
fill='#fff'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
|
||||
export const GoogleVaultIcon = (props: SVGProps<SVGSVGElement>) => (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 82 82'>
|
||||
<path
|
||||
@@ -3552,6 +3603,15 @@ export function TrelloIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function AttioIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 60.9 50' fill='currentColor'>
|
||||
<path d='M60.3,34.8l-5.1-8.1c0,0,0,0,0,0L54.7,26c-0.8-1.2-2.1-1.9-3.5-1.9L43,24L42.5,25l-9.8,15.7l-0.5,0.9l4.1,6.6c0.8,1.2,2.1,1.9,3.5,1.9h11.5c1.4,0,2.8-0.7,3.5-1.9l0.4-0.6c0,0,0,0,0,0l5.1-8.2C61.1,37.9,61.1,36.2,60.3,34.8L60.3,34.8z M58.7,38.3l-5.1,8.2c0,0,0,0.1-0.1,0.1c-0.2,0.2-0.4,0.2-0.5,0.2c-0.1,0-0.4,0-0.6-0.3l-5.1-8.2c-0.1-0.1-0.1-0.2-0.2-0.3c0-0.1-0.1-0.2-0.1-0.3c-0.1-0.4-0.1-0.8,0-1.3c0.1-0.2,0.1-0.4,0.3-0.6l5.1-8.1c0,0,0,0,0,0c0.1-0.2,0.3-0.3,0.4-0.3c0.1,0,0.1,0,0.1,0c0,0,0,0,0.1,0c0.1,0,0.4,0,0.6,0.3l5.1,8.1C59.2,36.6,59.2,37.5,58.7,38.3L58.7,38.3z' />
|
||||
<path d='M45.2,15.1c0.8-1.3,0.8-3.1,0-4.4l-5.1-8.1l-0.4-0.7C38.9,0.7,37.6,0,36.2,0H24.7c-1.4,0-2.7,0.7-3.5,1.9L0.6,34.9C0.2,35.5,0,36.3,0,37c0,0.8,0.2,1.5,0.6,2.2l5.5,8.8C6.9,49.3,8.2,50,9.7,50h11.5c1.4,0,2.8-0.7,3.5-1.9l0.4-0.7c0,0,0,0,0,0c0,0,0,0,0,0l4.1-6.6l12.1-19.4L45.2,15.1L45.2,15.1z M44,13c0,0.4-0.1,0.8-0.4,1.2L23.5,46.4c-0.2,0.3-0.5,0.3-0.6,0.3c-0.1,0-0.4,0-0.6-0.3l-5.1-8.2c-0.5-0.7-0.5-1.7,0-2.4L37.4,3.6c0.2-0.3,0.5-0.3,0.6-0.3c0.1,0,0.4,0,0.6,0.3l5.1,8.1C43.9,12.1,44,12.5,44,13z' />
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function AsanaIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none'>
|
||||
@@ -5436,6 +5496,34 @@ export function GoogleMapsIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function GoogleTranslateIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 998.1 998.3'>
|
||||
<path
|
||||
fill='#DBDBDB'
|
||||
d='M931.7 998.3c36.5 0 66.4-29.4 66.4-65.4V265.8c0-36-29.9-65.4-66.4-65.4H283.6l260.1 797.9h388z'
|
||||
/>
|
||||
<path
|
||||
fill='#DCDCDC'
|
||||
d='M931.7 230.4c9.7 0 18.9 3.8 25.8 10.6 6.8 6.7 10.6 15.5 10.6 24.8v667.1c0 9.3-3.7 18.1-10.6 24.8-6.9 6.8-16.1 10.6-25.8 10.6H565.5L324.9 230.4h606.8m0-30H283.6l260.1 797.9h388c36.5 0 66.4-29.4 66.4-65.4V265.8c0-36-29.9-65.4-66.4-65.4z'
|
||||
/>
|
||||
<polygon fill='#4352B8' points='482.3,809.8 543.7,998.3 714.4,809.8' />
|
||||
<path
|
||||
fill='#607988'
|
||||
d='M936.1 476.1V437H747.6v-63.2h-61.2V437H566.1v39.1h239.4c-12.8 45.1-41.1 87.7-68.7 120.8-48.9-57.9-49.1-76.7-49.1-76.7h-50.8s2.1 28.2 70.7 108.6c-22.3 22.8-39.2 36.3-39.2 36.3l15.6 48.8s23.6-20.3 53.1-51.6c29.6 32.1 67.8 70.7 117.2 116.7l32.1-32.1c-52.9-48-91.7-86.1-120.2-116.7 38.2-45.2 77-102.1 85.2-154.2H936v.1z'
|
||||
/>
|
||||
<path
|
||||
fill='#4285F4'
|
||||
d='M66.4 0C29.9 0 0 29.9 0 66.5v677c0 36.5 29.9 66.4 66.4 66.4h648.1L454.4 0h-388z'
|
||||
/>
|
||||
<path
|
||||
fill='#EEEEEE'
|
||||
d='M371.4 430.6c-2.5 30.3-28.4 75.2-91.1 75.2-54.3 0-98.3-44.9-98.3-100.2s44-100.2 98.3-100.2c30.9 0 51.5 13.4 63.3 24.3l41.2-39.6c-27.1-25-62.4-40.6-104.5-40.6-86.1 0-156 69.9-156 156s69.9 156 156 156c90.2 0 149.8-63.3 149.8-152.6 0-12.8-1.6-22.2-3.7-31.8h-146v53.4l91 .1z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function DsPyIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='30 28 185 175' fill='none'>
|
||||
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
ApolloIcon,
|
||||
ArxivIcon,
|
||||
AsanaIcon,
|
||||
AttioIcon,
|
||||
BrainIcon,
|
||||
BrowserUseIcon,
|
||||
CalComIcon,
|
||||
@@ -24,6 +25,7 @@ import {
|
||||
ConfluenceIcon,
|
||||
CursorIcon,
|
||||
DatadogIcon,
|
||||
DevinIcon,
|
||||
DiscordIcon,
|
||||
DocumentIcon,
|
||||
DropboxIcon,
|
||||
@@ -41,6 +43,7 @@ import {
|
||||
GitLabIcon,
|
||||
GmailIcon,
|
||||
GongIcon,
|
||||
GoogleBigQueryIcon,
|
||||
GoogleBooksIcon,
|
||||
GoogleCalendarIcon,
|
||||
GoogleDocsIcon,
|
||||
@@ -51,6 +54,8 @@ import {
|
||||
GoogleMapsIcon,
|
||||
GoogleSheetsIcon,
|
||||
GoogleSlidesIcon,
|
||||
GoogleTasksIcon,
|
||||
GoogleTranslateIcon,
|
||||
GoogleVaultIcon,
|
||||
GrafanaIcon,
|
||||
GrainIcon,
|
||||
@@ -159,6 +164,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
apollo: ApolloIcon,
|
||||
arxiv: ArxivIcon,
|
||||
asana: AsanaIcon,
|
||||
attio: AttioIcon,
|
||||
browser_use: BrowserUseIcon,
|
||||
calcom: CalComIcon,
|
||||
calendly: CalendlyIcon,
|
||||
@@ -169,6 +175,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
confluence_v2: ConfluenceIcon,
|
||||
cursor_v2: CursorIcon,
|
||||
datadog: DatadogIcon,
|
||||
devin: DevinIcon,
|
||||
discord: DiscordIcon,
|
||||
dropbox: DropboxIcon,
|
||||
dspy: DsPyIcon,
|
||||
@@ -185,6 +192,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
gitlab: GitLabIcon,
|
||||
gmail_v2: GmailIcon,
|
||||
gong: GongIcon,
|
||||
google_bigquery: GoogleBigQueryIcon,
|
||||
google_books: GoogleBooksIcon,
|
||||
google_calendar_v2: GoogleCalendarIcon,
|
||||
google_docs: GoogleDocsIcon,
|
||||
@@ -195,6 +203,8 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
google_search: GoogleIcon,
|
||||
google_sheets_v2: GoogleSheetsIcon,
|
||||
google_slides_v2: GoogleSlidesIcon,
|
||||
google_tasks: GoogleTasksIcon,
|
||||
google_translate: GoogleTranslateIcon,
|
||||
google_vault: GoogleVaultIcon,
|
||||
grafana: GrafanaIcon,
|
||||
grain: GrainIcon,
|
||||
|
||||
@@ -1,96 +0,0 @@
|
||||
---
|
||||
title: Umgebungsvariablen
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
Umgebungsvariablen bieten eine sichere Möglichkeit, Konfigurationswerte und Geheimnisse in Ihren Workflows zu verwalten, einschließlich API-Schlüssel und anderer sensibler Daten, auf die Ihre Workflows zugreifen müssen. Sie halten Geheimnisse aus Ihren Workflow-Definitionen heraus und machen sie während der Ausführung verfügbar.
|
||||
|
||||
## Variablentypen
|
||||
|
||||
Umgebungsvariablen in Sim funktionieren auf zwei Ebenen:
|
||||
|
||||
- **Persönliche Umgebungsvariablen**: Privat für Ihr Konto, nur Sie können sie sehen und verwenden
|
||||
- **Workspace-Umgebungsvariablen**: Werden im gesamten Workspace geteilt und sind für alle Teammitglieder verfügbar
|
||||
|
||||
<Callout type="info">
|
||||
Workspace-Umgebungsvariablen haben Vorrang vor persönlichen Variablen, wenn es einen Namenskonflikt gibt.
|
||||
</Callout>
|
||||
|
||||
## Einrichten von Umgebungsvariablen
|
||||
|
||||
Navigieren Sie zu den Einstellungen, um Ihre Umgebungsvariablen zu konfigurieren:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-1.png"
|
||||
alt="Umgebungsvariablen-Modal zum Erstellen neuer Variablen"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
In Ihren Workspace-Einstellungen können Sie sowohl persönliche als auch Workspace-Umgebungsvariablen erstellen und verwalten. Persönliche Variablen sind privat für Ihr Konto, während Workspace-Variablen mit allen Teammitgliedern geteilt werden.
|
||||
|
||||
### Variablen auf Workspace-Ebene setzen
|
||||
|
||||
Verwenden Sie den Workspace-Bereichsschalter, um Variablen für Ihr gesamtes Team verfügbar zu machen:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-2.png"
|
||||
alt="Workspace-Bereich für Umgebungsvariablen umschalten"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
Wenn Sie den Workspace-Bereich aktivieren, wird die Variable für alle Workspace-Mitglieder verfügbar und kann in jedem Workflow innerhalb dieses Workspaces verwendet werden.
|
||||
|
||||
### Ansicht der Workspace-Variablen
|
||||
|
||||
Sobald Sie Workspace-Variablen haben, erscheinen sie in Ihrer Liste der Umgebungsvariablen:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-3.png"
|
||||
alt="Workspace-Variablen in der Liste der Umgebungsvariablen"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## Verwendung von Variablen in Workflows
|
||||
|
||||
Um Umgebungsvariablen in Ihren Workflows zu referenzieren, verwenden Sie die `{{}}` Notation. Wenn Sie `{{` in ein beliebiges Eingabefeld eingeben, erscheint ein Dropdown-Menü mit Ihren persönlichen und Workspace-Umgebungsvariablen. Wählen Sie einfach die Variable aus, die Sie verwenden möchten.
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-4.png"
|
||||
alt="Verwendung von Umgebungsvariablen mit doppelter Klammernotation"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## Wie Variablen aufgelöst werden
|
||||
|
||||
**Workspace-Variablen haben immer Vorrang** vor persönlichen Variablen, unabhängig davon, wer den Workflow ausführt.
|
||||
|
||||
Wenn keine Workspace-Variable für einen Schlüssel existiert, werden persönliche Variablen verwendet:
|
||||
- **Manuelle Ausführungen (UI)**: Ihre persönlichen Variablen
|
||||
- **Automatisierte Ausführungen (API, Webhook, Zeitplan, bereitgestellter Chat)**: Persönliche Variablen des Workflow-Besitzers
|
||||
|
||||
<Callout type="info">
|
||||
Persönliche Variablen eignen sich am besten zum Testen. Verwenden Sie Workspace-Variablen für Produktions-Workflows.
|
||||
</Callout>
|
||||
|
||||
## Sicherheits-Best-Practices
|
||||
|
||||
### Für sensible Daten
|
||||
- Speichern Sie API-Schlüssel, Tokens und Passwörter als Umgebungsvariablen anstatt sie im Code festzuschreiben
|
||||
- Verwenden Sie Workspace-Variablen für gemeinsam genutzte Ressourcen, die mehrere Teammitglieder benötigen
|
||||
- Bewahren Sie persönliche Anmeldedaten in persönlichen Variablen auf
|
||||
|
||||
### Variablenbenennung
|
||||
- Verwenden Sie beschreibende Namen: `DATABASE_URL` anstatt `DB`
|
||||
- Folgen Sie einheitlichen Benennungskonventionen in Ihrem Team
|
||||
- Erwägen Sie Präfixe, um Konflikte zu vermeiden: `PROD_API_KEY`, `DEV_API_KEY`
|
||||
|
||||
### Zugriffskontrolle
|
||||
- Workspace-Umgebungsvariablen respektieren Workspace-Berechtigungen
|
||||
- Nur Benutzer mit Schreibzugriff oder höher können Workspace-Variablen erstellen/ändern
|
||||
- Persönliche Variablen sind immer privat für den einzelnen Benutzer
|
||||
@@ -95,11 +95,17 @@ const apiUrl = `https://api.example.com/users/${userId}/profile`;
|
||||
|
||||
### Request Retries
|
||||
|
||||
The API block automatically handles:
|
||||
- Network timeouts with exponential backoff
|
||||
- Rate limit responses (429 status codes)
|
||||
- Server errors (5xx status codes) with retry logic
|
||||
- Connection failures with reconnection attempts
|
||||
The API block supports **configurable retries** (see the block’s **Advanced** settings):
|
||||
|
||||
- **Retries**: Number of retry attempts (additional tries after the first request)
|
||||
- **Retry delay (ms)**: Initial delay before retrying (uses exponential backoff)
|
||||
- **Max retry delay (ms)**: Maximum delay between retries
|
||||
- **Retry non-idempotent methods**: Allow retries for **POST/PATCH** (may create duplicate requests)
|
||||
|
||||
Retries are attempted for:
|
||||
|
||||
- Network/connection failures and timeouts (with exponential backoff)
|
||||
- Rate limits (**429**) and server errors (**5xx**)
|
||||
|
||||
### Response Validation
|
||||
|
||||
|
||||
192
apps/docs/content/docs/en/credentials/index.mdx
Normal file
192
apps/docs/content/docs/en/credentials/index.mdx
Normal file
@@ -0,0 +1,192 @@
|
||||
---
|
||||
title: Credentials
|
||||
description: Manage secrets, API keys, and OAuth connections for your workflows
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Image } from '@/components/ui/image'
|
||||
import { Step, Steps } from 'fumadocs-ui/components/steps'
|
||||
|
||||
Credentials provide a secure way to manage API keys, tokens, and third-party service connections across your workflows. Instead of hardcoding sensitive values into your workflow, you store them as credentials and reference them at runtime.
|
||||
|
||||
Sim supports two categories of credentials: **secrets** for static values like API keys, and **OAuth accounts** for authenticated service connections like Google or Slack.
|
||||
|
||||
## Getting Started
|
||||
|
||||
To manage credentials, open your workspace **Settings** and navigate to the **Secrets** tab.
|
||||
|
||||
<Image
|
||||
src="/static/credentials/settings-secrets.png"
|
||||
alt="Settings modal showing the Secrets tab with a list of saved credentials"
|
||||
width={700}
|
||||
height={200}
|
||||
/>
|
||||
|
||||
From here you can search, create, and delete both secrets and OAuth connections.
|
||||
|
||||
## Secrets
|
||||
|
||||
Secrets are key-value pairs that store sensitive data like API keys, tokens, and passwords. Each secret has a **key** (used to reference it in workflows) and a **value** (the actual secret).
|
||||
|
||||
### Creating a Secret
|
||||
|
||||
<Image
|
||||
src="/static/credentials/create-secret.png"
|
||||
alt="Create Secret dialog with fields for key, value, description, and scope toggle"
|
||||
width={500}
|
||||
height={400}
|
||||
/>
|
||||
|
||||
<Steps>
|
||||
<Step>
|
||||
Click **+ Add** and select **Secret** as the type
|
||||
</Step>
|
||||
<Step>
|
||||
Enter a **Key** name (letters, numbers, and underscores only, e.g. `OPENAI_API_KEY`)
|
||||
</Step>
|
||||
<Step>
|
||||
Enter the **Value**
|
||||
</Step>
|
||||
<Step>
|
||||
Optionally add a **Description** to help your team understand what the secret is for
|
||||
</Step>
|
||||
<Step>
|
||||
Choose the **Scope** — Workspace or Personal
|
||||
</Step>
|
||||
<Step>
|
||||
Click **Create**
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
### Using Secrets in Workflows
|
||||
|
||||
To reference a secret in any input field, type `{{` to open the dropdown. It will show your available secrets grouped by scope.
|
||||
|
||||
<Image
|
||||
src="/static/credentials/secret-dropdown.png"
|
||||
alt="Typing {{ in a code block opens a dropdown showing available workspace secrets"
|
||||
width={400}
|
||||
height={250}
|
||||
/>
|
||||
|
||||
Select the secret you want to use. The reference will appear highlighted in blue, indicating it will be resolved at runtime.
|
||||
|
||||
<Image
|
||||
src="/static/credentials/secret-resolved.png"
|
||||
alt="A resolved secret reference shown in blue text as {{OPENAI_API_KEY}}"
|
||||
width={400}
|
||||
height={200}
|
||||
/>
|
||||
|
||||
<Callout type="warn">
|
||||
Secret values are never exposed in the workflow editor or logs. They are only resolved during execution.
|
||||
</Callout>
|
||||
|
||||
### Bulk Import
|
||||
|
||||
You can import multiple secrets at once by pasting `.env`-style content:
|
||||
|
||||
1. Click **+ Add**, then switch to **Bulk** mode
|
||||
2. Paste your environment variables in `KEY=VALUE` format
|
||||
3. Choose the scope for all imported secrets
|
||||
4. Click **Create**
|
||||
|
||||
The parser supports standard `KEY=VALUE` pairs, quoted values, comments (`#`), and blank lines.
|
||||
|
||||
## OAuth Accounts
|
||||
|
||||
OAuth accounts are authenticated connections to third-party services like Google, Slack, GitHub, and more. Sim handles the OAuth flow, token storage, and automatic refresh.
|
||||
|
||||
You can connect **multiple accounts per provider** — for example, two separate Gmail accounts for different workflows.
|
||||
|
||||
### Connecting an OAuth Account
|
||||
|
||||
<Image
|
||||
src="/static/credentials/create-oauth.png"
|
||||
alt="Create Secret dialog with OAuth Account type selected, showing display name and provider dropdown"
|
||||
width={500}
|
||||
height={400}
|
||||
/>
|
||||
|
||||
<Steps>
|
||||
<Step>
|
||||
Click **+ Add** and select **OAuth Account** as the type
|
||||
</Step>
|
||||
<Step>
|
||||
Enter a **Display name** to identify this connection (e.g. "Work Gmail" or "Marketing Slack")
|
||||
</Step>
|
||||
<Step>
|
||||
Optionally add a **Description**
|
||||
</Step>
|
||||
<Step>
|
||||
Select the **Account** provider from the dropdown
|
||||
</Step>
|
||||
<Step>
|
||||
Click **Connect** and complete the authorization flow
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
### Using OAuth Accounts in Workflows
|
||||
|
||||
Blocks that require authentication (e.g. Gmail, Slack, Google Sheets) display a credential selector dropdown. Select the OAuth account you want the block to use.
|
||||
|
||||
<Image
|
||||
src="/static/credentials/oauth-selector.png"
|
||||
alt="Gmail block showing the account selector dropdown with a connected account and option to connect another"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
You can also connect additional accounts directly from the block by selecting **Connect another account** at the bottom of the dropdown.
|
||||
|
||||
<Callout type="info">
|
||||
If a block requires an OAuth connection and none is selected, the workflow will fail at that step.
|
||||
</Callout>
|
||||
|
||||
## Workspace vs. Personal
|
||||
|
||||
Credentials can be scoped to your **workspace** (shared with your team) or kept **personal** (private to you).
|
||||
|
||||
| | Workspace | Personal |
|
||||
|---|---|---|
|
||||
| **Visibility** | All workspace members | Only you |
|
||||
| **Use in workflows** | Any member can use | Only you can use |
|
||||
| **Best for** | Production workflows, shared services | Testing, personal API keys |
|
||||
| **Who can edit** | Workspace admins | Only you |
|
||||
| **Auto-shared** | Yes — all members get access on creation | No — only you have access |
|
||||
|
||||
<Callout type="info">
|
||||
When a workspace and personal secret share the same key name, the **workspace secret takes precedence**.
|
||||
</Callout>
|
||||
|
||||
### Resolution Order
|
||||
|
||||
When a workflow runs, Sim resolves secrets in this order:
|
||||
|
||||
1. **Workspace secrets** are checked first
|
||||
2. **Personal secrets** are used as a fallback — from the user who triggered the run (manual) or the workflow owner (automated runs via API, webhook, or schedule)
|
||||
|
||||
## Access Control
|
||||
|
||||
Each credential has role-based access control:
|
||||
|
||||
- **Admin** — can view, edit, delete, and manage who has access
|
||||
- **Member** — can use the credential in workflows (read-only)
|
||||
|
||||
When you create a workspace secret, all current workspace members are automatically granted access. Personal secrets are only accessible to you by default.
|
||||
|
||||
### Sharing a Credential
|
||||
|
||||
To share a credential with specific team members:
|
||||
|
||||
1. Click **Details** on the credential
|
||||
2. Invite members by email
|
||||
3. Assign them an **Admin** or **Member** role
|
||||
|
||||
## Best Practices
|
||||
|
||||
- **Use workspace credentials for production** so workflows work regardless of who triggers them
|
||||
- **Use personal credentials for development** to keep your test keys separate
|
||||
- **Name keys descriptively** — `STRIPE_SECRET_KEY` over `KEY1`
|
||||
- **Connect multiple OAuth accounts** when you need different permissions or identities per workflow
|
||||
- **Never hardcode secrets** in workflow input fields — always use `{{KEY}}` references
|
||||
@@ -97,6 +97,7 @@ Understanding these core principles will help you build better workflows:
|
||||
3. **Smart Data Flow**: Outputs flow automatically to connected blocks
|
||||
4. **Error Handling**: Failed blocks stop their execution path but don't affect independent paths
|
||||
5. **State Persistence**: All block outputs and execution details are preserved for debugging
|
||||
6. **Cycle Protection**: Workflows that call other workflows (via Workflow blocks, MCP tools, or API blocks) are tracked with a call chain. If the chain exceeds 25 hops, execution is stopped to prevent infinite loops
|
||||
|
||||
## Next Steps
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
"skills",
|
||||
"knowledgebase",
|
||||
"variables",
|
||||
"credentials",
|
||||
"execution",
|
||||
"permissions",
|
||||
"sdks",
|
||||
|
||||
1046
apps/docs/content/docs/en/tools/attio.mdx
Normal file
1046
apps/docs/content/docs/en/tools/attio.mdx
Normal file
File diff suppressed because it is too large
Load Diff
@@ -326,6 +326,8 @@ Get details about a specific version of a Confluence page.
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `pageId` | string | ID of the page |
|
||||
| `title` | string | Page title at this version |
|
||||
| `content` | string | Page content with HTML tags stripped at this version |
|
||||
| `version` | object | Detailed version information |
|
||||
| ↳ `number` | number | Version number |
|
||||
| ↳ `message` | string | Version message |
|
||||
@@ -336,6 +338,9 @@ Get details about a specific version of a Confluence page.
|
||||
| ↳ `collaborators` | array | List of collaborator account IDs for this version |
|
||||
| ↳ `prevVersion` | number | Previous version number |
|
||||
| ↳ `nextVersion` | number | Next version number |
|
||||
| `body` | object | Raw page body content in storage format at this version |
|
||||
| ↳ `value` | string | The content value in the specified format |
|
||||
| ↳ `representation` | string | Content representation type |
|
||||
|
||||
### `confluence_list_page_properties`
|
||||
|
||||
@@ -1008,6 +1013,85 @@ Get details about a specific Confluence space.
|
||||
| ↳ `value` | string | Description text content |
|
||||
| ↳ `representation` | string | Content representation format \(e.g., plain, view, storage\) |
|
||||
|
||||
### `confluence_create_space`
|
||||
|
||||
Create a new Confluence space.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `name` | string | Yes | Name for the new space |
|
||||
| `key` | string | Yes | Unique key for the space \(uppercase, no spaces\) |
|
||||
| `description` | string | No | Description for the new space |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `spaceId` | string | Created space ID |
|
||||
| `name` | string | Space name |
|
||||
| `key` | string | Space key |
|
||||
| `type` | string | Space type |
|
||||
| `status` | string | Space status |
|
||||
| `url` | string | URL to view the space |
|
||||
| `homepageId` | string | Homepage ID |
|
||||
| `description` | object | Space description |
|
||||
| ↳ `value` | string | Description text content |
|
||||
| ↳ `representation` | string | Content representation format \(e.g., plain, view, storage\) |
|
||||
|
||||
### `confluence_update_space`
|
||||
|
||||
Update a Confluence space name or description.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `spaceId` | string | Yes | ID of the space to update |
|
||||
| `name` | string | No | New name for the space |
|
||||
| `description` | string | No | New description for the space |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `spaceId` | string | Updated space ID |
|
||||
| `name` | string | Space name |
|
||||
| `key` | string | Space key |
|
||||
| `type` | string | Space type |
|
||||
| `status` | string | Space status |
|
||||
| `url` | string | URL to view the space |
|
||||
| `description` | object | Space description |
|
||||
| ↳ `value` | string | Description text content |
|
||||
| ↳ `representation` | string | Content representation format \(e.g., plain, view, storage\) |
|
||||
|
||||
### `confluence_delete_space`
|
||||
|
||||
Delete a Confluence space.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `spaceId` | string | Yes | ID of the space to delete |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `spaceId` | string | Deleted space ID |
|
||||
| `deleted` | boolean | Deletion status |
|
||||
|
||||
### `confluence_list_spaces`
|
||||
|
||||
List all Confluence spaces accessible to the user.
|
||||
@@ -1040,4 +1124,311 @@ List all Confluence spaces accessible to the user.
|
||||
| ↳ `representation` | string | Content representation format \(e.g., plain, view, storage\) |
|
||||
| `nextCursor` | string | Cursor for fetching the next page of results |
|
||||
|
||||
### `confluence_list_space_properties`
|
||||
|
||||
List properties on a Confluence space.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `spaceId` | string | Yes | Space ID to list properties for |
|
||||
| `limit` | number | No | Maximum number of properties to return \(default: 50, max: 250\) |
|
||||
| `cursor` | string | No | Pagination cursor from previous response |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `properties` | array | Array of space properties |
|
||||
| ↳ `id` | string | Property ID |
|
||||
| ↳ `key` | string | Property key |
|
||||
| ↳ `value` | json | Property value |
|
||||
| `spaceId` | string | Space ID |
|
||||
| `nextCursor` | string | Cursor for fetching the next page of results |
|
||||
|
||||
### `confluence_create_space_property`
|
||||
|
||||
Create a property on a Confluence space.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `spaceId` | string | Yes | Space ID to create the property on |
|
||||
| `key` | string | Yes | Property key/name |
|
||||
| `value` | json | No | Property value \(JSON\) |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `propertyId` | string | Created property ID |
|
||||
| `key` | string | Property key |
|
||||
| `value` | json | Property value |
|
||||
| `spaceId` | string | Space ID |
|
||||
|
||||
### `confluence_delete_space_property`
|
||||
|
||||
Delete a property from a Confluence space.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `spaceId` | string | Yes | Space ID the property belongs to |
|
||||
| `propertyId` | string | Yes | Property ID to delete |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `spaceId` | string | Space ID |
|
||||
| `propertyId` | string | Deleted property ID |
|
||||
| `deleted` | boolean | Deletion status |
|
||||
|
||||
### `confluence_list_space_permissions`
|
||||
|
||||
List permissions for a Confluence space.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `spaceId` | string | Yes | Space ID to list permissions for |
|
||||
| `limit` | number | No | Maximum number of permissions to return \(default: 50, max: 250\) |
|
||||
| `cursor` | string | No | Pagination cursor from previous response |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `permissions` | array | Array of space permissions |
|
||||
| ↳ `id` | string | Permission ID |
|
||||
| ↳ `principalType` | string | Principal type \(user, group, role\) |
|
||||
| ↳ `principalId` | string | Principal ID |
|
||||
| ↳ `operationKey` | string | Operation key \(read, create, delete, etc.\) |
|
||||
| ↳ `operationTargetType` | string | Target type \(page, blogpost, space, etc.\) |
|
||||
| ↳ `anonymousAccess` | boolean | Whether anonymous access is allowed |
|
||||
| ↳ `unlicensedAccess` | boolean | Whether unlicensed access is allowed |
|
||||
| `spaceId` | string | Space ID |
|
||||
| `nextCursor` | string | Cursor for fetching the next page of results |
|
||||
|
||||
### `confluence_get_page_descendants`
|
||||
|
||||
Get all descendants of a Confluence page recursively.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `pageId` | string | Yes | Page ID to get descendants for |
|
||||
| `limit` | number | No | Maximum number of descendants to return \(default: 50, max: 250\) |
|
||||
| `cursor` | string | No | Pagination cursor from previous response |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `descendants` | array | Array of descendant pages |
|
||||
| ↳ `id` | string | Page ID |
|
||||
| ↳ `title` | string | Page title |
|
||||
| ↳ `type` | string | Content type \(page, whiteboard, database, etc.\) |
|
||||
| ↳ `status` | string | Page status |
|
||||
| ↳ `spaceId` | string | Space ID |
|
||||
| ↳ `parentId` | string | Parent page ID |
|
||||
| ↳ `childPosition` | number | Position among siblings |
|
||||
| ↳ `depth` | number | Depth in the hierarchy |
|
||||
| `pageId` | string | Parent page ID |
|
||||
| `nextCursor` | string | Cursor for fetching the next page of results |
|
||||
|
||||
### `confluence_list_tasks`
|
||||
|
||||
List inline tasks from Confluence. Optionally filter by page, space, assignee, or status.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `pageId` | string | No | Filter tasks by page ID |
|
||||
| `spaceId` | string | No | Filter tasks by space ID |
|
||||
| `assignedTo` | string | No | Filter tasks by assignee account ID |
|
||||
| `status` | string | No | Filter tasks by status \(complete or incomplete\) |
|
||||
| `limit` | number | No | Maximum number of tasks to return \(default: 50, max: 250\) |
|
||||
| `cursor` | string | No | Pagination cursor from previous response |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `tasks` | array | Array of Confluence tasks |
|
||||
| ↳ `id` | string | Task ID |
|
||||
| ↳ `localId` | string | Local task ID |
|
||||
| ↳ `spaceId` | string | Space ID |
|
||||
| ↳ `pageId` | string | Page ID |
|
||||
| ↳ `blogPostId` | string | Blog post ID |
|
||||
| ↳ `status` | string | Task status \(complete or incomplete\) |
|
||||
| ↳ `body` | string | Task body content in storage format |
|
||||
| ↳ `createdBy` | string | Creator account ID |
|
||||
| ↳ `assignedTo` | string | Assignee account ID |
|
||||
| ↳ `completedBy` | string | Completer account ID |
|
||||
| ↳ `createdAt` | string | Creation timestamp |
|
||||
| ↳ `updatedAt` | string | Last update timestamp |
|
||||
| ↳ `dueAt` | string | Due date |
|
||||
| ↳ `completedAt` | string | Completion timestamp |
|
||||
| `nextCursor` | string | Cursor for fetching the next page of results |
|
||||
|
||||
### `confluence_get_task`
|
||||
|
||||
Get a specific Confluence inline task by ID.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `taskId` | string | Yes | The ID of the task to retrieve |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `id` | string | Task ID |
|
||||
| `localId` | string | Local task ID |
|
||||
| `spaceId` | string | Space ID |
|
||||
| `pageId` | string | Page ID |
|
||||
| `blogPostId` | string | Blog post ID |
|
||||
| `status` | string | Task status \(complete or incomplete\) |
|
||||
| `body` | string | Task body content in storage format |
|
||||
| `createdBy` | string | Creator account ID |
|
||||
| `assignedTo` | string | Assignee account ID |
|
||||
| `completedBy` | string | Completer account ID |
|
||||
| `createdAt` | string | Creation timestamp |
|
||||
| `updatedAt` | string | Last update timestamp |
|
||||
| `dueAt` | string | Due date |
|
||||
| `completedAt` | string | Completion timestamp |
|
||||
|
||||
### `confluence_update_task`
|
||||
|
||||
Update the status of a Confluence inline task (complete or incomplete).
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `taskId` | string | Yes | The ID of the task to update |
|
||||
| `status` | string | Yes | New status for the task \(complete or incomplete\) |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `id` | string | Task ID |
|
||||
| `localId` | string | Local task ID |
|
||||
| `spaceId` | string | Space ID |
|
||||
| `pageId` | string | Page ID |
|
||||
| `blogPostId` | string | Blog post ID |
|
||||
| `status` | string | Updated task status |
|
||||
| `body` | string | Task body content in storage format |
|
||||
| `createdBy` | string | Creator account ID |
|
||||
| `assignedTo` | string | Assignee account ID |
|
||||
| `completedBy` | string | Completer account ID |
|
||||
| `createdAt` | string | Creation timestamp |
|
||||
| `updatedAt` | string | Last update timestamp |
|
||||
| `dueAt` | string | Due date |
|
||||
| `completedAt` | string | Completion timestamp |
|
||||
|
||||
### `confluence_update_blogpost`
|
||||
|
||||
Update an existing Confluence blog post title and/or content.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `blogPostId` | string | Yes | The ID of the blog post to update |
|
||||
| `title` | string | No | New title for the blog post |
|
||||
| `content` | string | No | New content for the blog post in storage format |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `blogPostId` | string | Updated blog post ID |
|
||||
| `title` | string | Blog post title |
|
||||
| `status` | string | Blog post status |
|
||||
| `spaceId` | string | Space ID |
|
||||
| `version` | json | Version information |
|
||||
| `url` | string | URL to view the blog post |
|
||||
|
||||
### `confluence_delete_blogpost`
|
||||
|
||||
Delete a Confluence blog post.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `blogPostId` | string | Yes | The ID of the blog post to delete |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `blogPostId` | string | Deleted blog post ID |
|
||||
| `deleted` | boolean | Deletion status |
|
||||
|
||||
### `confluence_get_user`
|
||||
|
||||
Get display name and profile info for a Confluence user by account ID.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
|
||||
| `accountId` | string | Yes | The Atlassian account ID of the user to look up |
|
||||
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ts` | string | ISO 8601 timestamp of the operation |
|
||||
| `accountId` | string | Atlassian account ID of the user |
|
||||
| `displayName` | string | Display name of the user |
|
||||
| `email` | string | Email address of the user |
|
||||
| `accountType` | string | Account type \(e.g., atlassian, app, customer\) |
|
||||
| `profilePicture` | string | Path to the user profile picture |
|
||||
| `publicName` | string | Public name of the user |
|
||||
|
||||
|
||||
|
||||
157
apps/docs/content/docs/en/tools/devin.mdx
Normal file
157
apps/docs/content/docs/en/tools/devin.mdx
Normal file
@@ -0,0 +1,157 @@
|
||||
---
|
||||
title: Devin
|
||||
description: Autonomous AI software engineer
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="devin"
|
||||
color="#12141A"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Devin](https://devin.ai/) is an autonomous AI software engineer by Cognition that can independently write, run, debug, and deploy code.
|
||||
|
||||
With Devin, you can:
|
||||
|
||||
- **Automate coding tasks**: Assign software engineering tasks and let Devin autonomously write, test, and iterate on code
|
||||
- **Manage sessions**: Create, monitor, and interact with Devin sessions to track progress on assigned tasks
|
||||
- **Guide active work**: Send messages to running sessions to provide additional context, redirect efforts, or answer questions
|
||||
- **Retrieve structured output**: Poll completed sessions for pull requests, structured results, and detailed status
|
||||
- **Control costs**: Set ACU (Autonomous Compute Unit) limits to cap spending on long-running tasks
|
||||
- **Standardize workflows**: Use playbook IDs to apply repeatable task patterns across sessions
|
||||
|
||||
In Sim, the Devin integration enables your agents to programmatically manage Devin sessions as part of their workflows:
|
||||
|
||||
- **Create sessions**: Kick off new Devin sessions with a prompt describing the task, optional playbook, ACU limits, and tags
|
||||
- **Get session details**: Retrieve the full state of a session including status, pull requests, structured output, and resource consumption
|
||||
- **List sessions**: Query all sessions in your organization with optional pagination
|
||||
- **Send messages**: Communicate with active or suspended sessions to provide guidance, and automatically resume suspended sessions
|
||||
|
||||
This allows for powerful automation scenarios such as triggering code generation from upstream events, polling for completion before consuming results, orchestrating multi-step development pipelines, and integrating Devin's output into broader agent workflows.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate Devin into your workflow. Create sessions to assign coding tasks, send messages to guide active sessions, and retrieve session status and results. Devin autonomously writes, runs, and tests code.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `devin_create_session`
|
||||
|
||||
Create a new Devin session with a prompt. Devin will autonomously work on the task described in the prompt.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Devin API key \(service user credential starting with cog_\) |
|
||||
| `prompt` | string | Yes | The task prompt for Devin to work on |
|
||||
| `playbookId` | string | No | Optional playbook ID to guide the session |
|
||||
| `maxAcuLimit` | number | No | Maximum ACU limit for the session |
|
||||
| `tags` | string | No | Comma-separated tags for the session |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `sessionId` | string | Unique identifier for the session |
|
||||
| `url` | string | URL to view the session in the Devin UI |
|
||||
| `status` | string | Session status \(new, claimed, running, exit, error, suspended, resuming\) |
|
||||
| `statusDetail` | string | Detailed status \(working, waiting_for_user, waiting_for_approval, finished, inactivity, etc.\) |
|
||||
| `title` | string | Session title |
|
||||
| `createdAt` | number | Unix timestamp when the session was created |
|
||||
| `updatedAt` | number | Unix timestamp when the session was last updated |
|
||||
| `acusConsumed` | number | ACUs consumed by the session |
|
||||
| `tags` | json | Tags associated with the session |
|
||||
| `pullRequests` | json | Pull requests created during the session |
|
||||
| `structuredOutput` | json | Structured output from the session |
|
||||
| `playbookId` | string | Associated playbook ID |
|
||||
|
||||
### `devin_get_session`
|
||||
|
||||
Retrieve details of an existing Devin session including status, tags, pull requests, and structured output.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Devin API key \(service user credential starting with cog_\) |
|
||||
| `sessionId` | string | Yes | The session ID to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `sessionId` | string | Unique identifier for the session |
|
||||
| `url` | string | URL to view the session in the Devin UI |
|
||||
| `status` | string | Session status \(new, claimed, running, exit, error, suspended, resuming\) |
|
||||
| `statusDetail` | string | Detailed status \(working, waiting_for_user, waiting_for_approval, finished, inactivity, etc.\) |
|
||||
| `title` | string | Session title |
|
||||
| `createdAt` | number | Unix timestamp when the session was created |
|
||||
| `updatedAt` | number | Unix timestamp when the session was last updated |
|
||||
| `acusConsumed` | number | ACUs consumed by the session |
|
||||
| `tags` | json | Tags associated with the session |
|
||||
| `pullRequests` | json | Pull requests created during the session |
|
||||
| `structuredOutput` | json | Structured output from the session |
|
||||
| `playbookId` | string | Associated playbook ID |
|
||||
|
||||
### `devin_list_sessions`
|
||||
|
||||
List Devin sessions in the organization. Returns up to 100 sessions by default.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Devin API key \(service user credential starting with cog_\) |
|
||||
| `limit` | number | No | Maximum number of sessions to return \(1-200, default: 100\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `sessions` | array | List of Devin sessions |
|
||||
| ↳ `sessionId` | string | Unique identifier for the session |
|
||||
| ↳ `url` | string | URL to view the session |
|
||||
| ↳ `status` | string | Session status |
|
||||
| ↳ `statusDetail` | string | Detailed status |
|
||||
| ↳ `title` | string | Session title |
|
||||
| ↳ `createdAt` | number | Creation timestamp \(Unix\) |
|
||||
| ↳ `updatedAt` | number | Last updated timestamp \(Unix\) |
|
||||
| ↳ `tags` | json | Session tags |
|
||||
|
||||
### `devin_send_message`
|
||||
|
||||
Send a message to a Devin session. If the session is suspended, it will be automatically resumed. Returns the updated session state.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Devin API key \(service user credential starting with cog_\) |
|
||||
| `sessionId` | string | Yes | The session ID to send the message to |
|
||||
| `message` | string | Yes | The message to send to Devin |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `sessionId` | string | Unique identifier for the session |
|
||||
| `url` | string | URL to view the session in the Devin UI |
|
||||
| `status` | string | Session status \(new, claimed, running, exit, error, suspended, resuming\) |
|
||||
| `statusDetail` | string | Detailed status \(working, waiting_for_user, waiting_for_approval, finished, inactivity, etc.\) |
|
||||
| `title` | string | Session title |
|
||||
| `createdAt` | number | Unix timestamp when the session was created |
|
||||
| `updatedAt` | number | Unix timestamp when the session was last updated |
|
||||
| `acusConsumed` | number | ACUs consumed by the session |
|
||||
| `tags` | json | Tags associated with the session |
|
||||
| `pullRequests` | json | Pull requests created during the session |
|
||||
| `structuredOutput` | json | Structured output from the session |
|
||||
| `playbookId` | string | Associated playbook ID |
|
||||
|
||||
|
||||
168
apps/docs/content/docs/en/tools/google_bigquery.mdx
Normal file
168
apps/docs/content/docs/en/tools/google_bigquery.mdx
Normal file
@@ -0,0 +1,168 @@
|
||||
---
|
||||
title: Google BigQuery
|
||||
description: Query, list, and insert data in Google BigQuery
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="google_bigquery"
|
||||
color="#E0E0E0"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Google BigQuery](https://cloud.google.com/bigquery) is Google Cloud's fully managed, serverless data warehouse designed for large-scale data analytics. BigQuery lets you run fast SQL queries on massive datasets, making it ideal for business intelligence, data exploration, and machine learning pipelines. It supports standard SQL, streaming inserts, and integrates with the broader Google Cloud ecosystem.
|
||||
|
||||
In Sim, the Google BigQuery integration allows your agents to query datasets, list tables, inspect schemas, and insert rows as part of automated workflows. This enables use cases such as automated reporting, data pipeline orchestration, real-time data ingestion, and analytics-driven decision making. By connecting Sim with BigQuery, your agents can pull insights from petabytes of data, write results back to tables, and keep your analytics workflows running without manual intervention.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Connect to Google BigQuery to run SQL queries, list datasets and tables, get table metadata, and insert rows.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `google_bigquery_query`
|
||||
|
||||
Run a SQL query against Google BigQuery and return the results
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `projectId` | string | Yes | Google Cloud project ID |
|
||||
| `query` | string | Yes | SQL query to execute |
|
||||
| `useLegacySql` | boolean | No | Whether to use legacy SQL syntax \(default: false\) |
|
||||
| `maxResults` | number | No | Maximum number of rows to return |
|
||||
| `defaultDatasetId` | string | No | Default dataset for unqualified table names |
|
||||
| `location` | string | No | Processing location \(e.g., "US", "EU"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `columns` | array | Array of column names from the query result |
|
||||
| `rows` | array | Array of row objects keyed by column name |
|
||||
| `totalRows` | string | Total number of rows in the complete result set |
|
||||
| `jobComplete` | boolean | Whether the query completed within the timeout |
|
||||
| `totalBytesProcessed` | string | Total bytes processed by the query |
|
||||
| `cacheHit` | boolean | Whether the query result was served from cache |
|
||||
| `jobReference` | object | Job reference \(useful when jobComplete is false\) |
|
||||
| ↳ `projectId` | string | Project ID containing the job |
|
||||
| ↳ `jobId` | string | Unique job identifier |
|
||||
| ↳ `location` | string | Geographic location of the job |
|
||||
| `pageToken` | string | Token for fetching additional result pages |
|
||||
|
||||
### `google_bigquery_list_datasets`
|
||||
|
||||
List all datasets in a Google BigQuery project
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `projectId` | string | Yes | Google Cloud project ID |
|
||||
| `maxResults` | number | No | Maximum number of datasets to return |
|
||||
| `pageToken` | string | No | Token for pagination |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `datasets` | array | Array of dataset objects |
|
||||
| ↳ `datasetId` | string | Unique dataset identifier |
|
||||
| ↳ `projectId` | string | Project ID containing this dataset |
|
||||
| ↳ `friendlyName` | string | Descriptive name for the dataset |
|
||||
| ↳ `location` | string | Geographic location where the data resides |
|
||||
| `nextPageToken` | string | Token for fetching next page of results |
|
||||
|
||||
### `google_bigquery_list_tables`
|
||||
|
||||
List all tables in a Google BigQuery dataset
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `projectId` | string | Yes | Google Cloud project ID |
|
||||
| `datasetId` | string | Yes | BigQuery dataset ID |
|
||||
| `maxResults` | number | No | Maximum number of tables to return |
|
||||
| `pageToken` | string | No | Token for pagination |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `tables` | array | Array of table objects |
|
||||
| ↳ `tableId` | string | Table identifier |
|
||||
| ↳ `datasetId` | string | Dataset ID containing this table |
|
||||
| ↳ `projectId` | string | Project ID containing this table |
|
||||
| ↳ `type` | string | Table type \(TABLE, VIEW, EXTERNAL, etc.\) |
|
||||
| ↳ `friendlyName` | string | User-friendly name for the table |
|
||||
| ↳ `creationTime` | string | Time when created, in milliseconds since epoch |
|
||||
| `totalItems` | number | Total number of tables in the dataset |
|
||||
| `nextPageToken` | string | Token for fetching next page of results |
|
||||
|
||||
### `google_bigquery_get_table`
|
||||
|
||||
Get metadata and schema for a Google BigQuery table
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `projectId` | string | Yes | Google Cloud project ID |
|
||||
| `datasetId` | string | Yes | BigQuery dataset ID |
|
||||
| `tableId` | string | Yes | BigQuery table ID |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `tableId` | string | Table ID |
|
||||
| `datasetId` | string | Dataset ID |
|
||||
| `projectId` | string | Project ID |
|
||||
| `type` | string | Table type \(TABLE, VIEW, SNAPSHOT, MATERIALIZED_VIEW, EXTERNAL\) |
|
||||
| `description` | string | Table description |
|
||||
| `numRows` | string | Total number of rows |
|
||||
| `numBytes` | string | Total size in bytes, excluding data in streaming buffer |
|
||||
| `schema` | array | Array of column definitions |
|
||||
| ↳ `name` | string | Column name |
|
||||
| ↳ `type` | string | Data type \(STRING, INTEGER, FLOAT, BOOLEAN, TIMESTAMP, RECORD, etc.\) |
|
||||
| ↳ `mode` | string | Column mode \(NULLABLE, REQUIRED, or REPEATED\) |
|
||||
| ↳ `description` | string | Column description |
|
||||
| `creationTime` | string | Table creation time \(milliseconds since epoch\) |
|
||||
| `lastModifiedTime` | string | Last modification time \(milliseconds since epoch\) |
|
||||
| `location` | string | Geographic location where the table resides |
|
||||
|
||||
### `google_bigquery_insert_rows`
|
||||
|
||||
Insert rows into a Google BigQuery table using streaming insert
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `projectId` | string | Yes | Google Cloud project ID |
|
||||
| `datasetId` | string | Yes | BigQuery dataset ID |
|
||||
| `tableId` | string | Yes | BigQuery table ID |
|
||||
| `rows` | string | Yes | JSON array of row objects to insert |
|
||||
| `skipInvalidRows` | boolean | No | Whether to insert valid rows even if some are invalid |
|
||||
| `ignoreUnknownValues` | boolean | No | Whether to ignore columns not in the table schema |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `insertedRows` | number | Number of rows successfully inserted |
|
||||
| `errors` | array | Array of per-row insertion errors \(empty if all succeeded\) |
|
||||
| ↳ `index` | number | Zero-based index of the row that failed |
|
||||
| ↳ `errors` | array | Error details for this row |
|
||||
| ↳ `reason` | string | Short error code summarizing the error |
|
||||
| ↳ `location` | string | Where the error occurred |
|
||||
| ↳ `message` | string | Human-readable error description |
|
||||
|
||||
|
||||
@@ -10,6 +10,13 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
color="#E0E0E0"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Google Books](https://books.google.com) is Google's comprehensive book discovery and metadata service, providing access to millions of books from publishers, libraries, and digitized collections worldwide. The Google Books API enables programmatic search and retrieval of detailed book information including titles, authors, descriptions, ratings, and publication details.
|
||||
|
||||
In Sim, the Google Books integration allows your agents to search for books and retrieve volume details as part of automated workflows. This enables use cases such as content research, reading list curation, bibliographic data enrichment, ISBN lookups, and knowledge gathering from published works. By connecting Sim with Google Books, your agents can discover and analyze book metadata, filter by availability or format, and incorporate literary references into their outputs—all without manual research.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Search for books using the Google Books API. Find volumes by title, author, ISBN, or keywords, and retrieve detailed information about specific books including descriptions, ratings, and publication details.
|
||||
|
||||
205
apps/docs/content/docs/en/tools/google_tasks.mdx
Normal file
205
apps/docs/content/docs/en/tools/google_tasks.mdx
Normal file
@@ -0,0 +1,205 @@
|
||||
---
|
||||
title: Google Tasks
|
||||
description: Manage Google Tasks
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="google_tasks"
|
||||
color="#E0E0E0"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Google Tasks](https://support.google.com/tasks) is Google's lightweight task management service, integrated into Gmail, Google Calendar, and the standalone Google Tasks app. It provides a simple way to create, organize, and track to-do items with support for due dates, subtasks, and task lists. As part of Google Workspace, Google Tasks keeps your action items synchronized across all your devices.
|
||||
|
||||
In Sim, the Google Tasks integration allows your agents to create, read, update, delete, and list tasks and task lists as part of automated workflows. This enables use cases such as automated task creation from incoming data, to-do list management based on workflow triggers, task status tracking, and deadline monitoring. By connecting Sim with Google Tasks, your agents can manage action items programmatically, keep teams organized, and ensure nothing falls through the cracks.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate Google Tasks into your workflow. Create, read, update, delete, and list tasks and task lists.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `google_tasks_create`
|
||||
|
||||
Create a new task in a Google Tasks list
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `taskListId` | string | No | Task list ID \(defaults to primary task list "@default"\) |
|
||||
| `title` | string | Yes | Title of the task \(max 1024 characters\) |
|
||||
| `notes` | string | No | Notes/description for the task \(max 8192 characters\) |
|
||||
| `due` | string | No | Due date in RFC 3339 format \(e.g., 2025-06-03T00:00:00.000Z\) |
|
||||
| `status` | string | No | Task status: "needsAction" or "completed" |
|
||||
| `parent` | string | No | Parent task ID to create this task as a subtask. Omit for top-level tasks. |
|
||||
| `previous` | string | No | Previous sibling task ID to position after. Omit to place first among siblings. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Task ID |
|
||||
| `title` | string | Task title |
|
||||
| `notes` | string | Task notes |
|
||||
| `status` | string | Task status \(needsAction or completed\) |
|
||||
| `due` | string | Due date |
|
||||
| `updated` | string | Last modification time |
|
||||
| `selfLink` | string | URL for the task |
|
||||
| `webViewLink` | string | Link to task in Google Tasks UI |
|
||||
| `parent` | string | Parent task ID |
|
||||
| `position` | string | Position among sibling tasks |
|
||||
| `completed` | string | Completion date |
|
||||
| `deleted` | boolean | Whether the task is deleted |
|
||||
|
||||
### `google_tasks_list`
|
||||
|
||||
List all tasks in a Google Tasks list
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `taskListId` | string | No | Task list ID \(defaults to primary task list "@default"\) |
|
||||
| `maxResults` | number | No | Maximum number of tasks to return \(default 20, max 100\) |
|
||||
| `pageToken` | string | No | Token for pagination |
|
||||
| `showCompleted` | boolean | No | Whether to show completed tasks \(default true\) |
|
||||
| `showDeleted` | boolean | No | Whether to show deleted tasks \(default false\) |
|
||||
| `showHidden` | boolean | No | Whether to show hidden tasks \(default false\) |
|
||||
| `dueMin` | string | No | Lower bound for due date filter \(RFC 3339 timestamp\) |
|
||||
| `dueMax` | string | No | Upper bound for due date filter \(RFC 3339 timestamp\) |
|
||||
| `completedMin` | string | No | Lower bound for task completion date \(RFC 3339 timestamp\) |
|
||||
| `completedMax` | string | No | Upper bound for task completion date \(RFC 3339 timestamp\) |
|
||||
| `updatedMin` | string | No | Lower bound for last modification time \(RFC 3339 timestamp\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `tasks` | array | List of tasks |
|
||||
| ↳ `id` | string | Task identifier |
|
||||
| ↳ `title` | string | Title of the task |
|
||||
| ↳ `notes` | string | Notes/description for the task |
|
||||
| ↳ `status` | string | Task status: "needsAction" or "completed" |
|
||||
| ↳ `due` | string | Due date \(RFC 3339 timestamp\) |
|
||||
| ↳ `completed` | string | Completion date \(RFC 3339 timestamp\) |
|
||||
| ↳ `updated` | string | Last modification time \(RFC 3339 timestamp\) |
|
||||
| ↳ `selfLink` | string | URL pointing to this task |
|
||||
| ↳ `webViewLink` | string | Link to task in Google Tasks UI |
|
||||
| ↳ `parent` | string | Parent task identifier |
|
||||
| ↳ `position` | string | Position among sibling tasks \(string-based ordering\) |
|
||||
| ↳ `hidden` | boolean | Whether the task is hidden |
|
||||
| ↳ `deleted` | boolean | Whether the task is deleted |
|
||||
| ↳ `links` | array | Collection of links associated with the task |
|
||||
| ↳ `type` | string | Link type \(e.g., "email", "generic", "chat_message"\) |
|
||||
| ↳ `description` | string | Link description |
|
||||
| ↳ `link` | string | The URL |
|
||||
| `nextPageToken` | string | Token for retrieving the next page of results |
|
||||
|
||||
### `google_tasks_get`
|
||||
|
||||
Retrieve a specific task by ID from a Google Tasks list
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `taskListId` | string | No | Task list ID \(defaults to primary task list "@default"\) |
|
||||
| `taskId` | string | Yes | The ID of the task to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Task ID |
|
||||
| `title` | string | Task title |
|
||||
| `notes` | string | Task notes |
|
||||
| `status` | string | Task status \(needsAction or completed\) |
|
||||
| `due` | string | Due date |
|
||||
| `updated` | string | Last modification time |
|
||||
| `selfLink` | string | URL for the task |
|
||||
| `webViewLink` | string | Link to task in Google Tasks UI |
|
||||
| `parent` | string | Parent task ID |
|
||||
| `position` | string | Position among sibling tasks |
|
||||
| `completed` | string | Completion date |
|
||||
| `deleted` | boolean | Whether the task is deleted |
|
||||
|
||||
### `google_tasks_update`
|
||||
|
||||
Update an existing task in a Google Tasks list
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `taskListId` | string | No | Task list ID \(defaults to primary task list "@default"\) |
|
||||
| `taskId` | string | Yes | The ID of the task to update |
|
||||
| `title` | string | No | New title for the task |
|
||||
| `notes` | string | No | New notes for the task |
|
||||
| `due` | string | No | New due date in RFC 3339 format |
|
||||
| `status` | string | No | New status: "needsAction" or "completed" |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Task ID |
|
||||
| `title` | string | Task title |
|
||||
| `notes` | string | Task notes |
|
||||
| `status` | string | Task status \(needsAction or completed\) |
|
||||
| `due` | string | Due date |
|
||||
| `updated` | string | Last modification time |
|
||||
| `selfLink` | string | URL for the task |
|
||||
| `webViewLink` | string | Link to task in Google Tasks UI |
|
||||
| `parent` | string | Parent task ID |
|
||||
| `position` | string | Position among sibling tasks |
|
||||
| `completed` | string | Completion date |
|
||||
| `deleted` | boolean | Whether the task is deleted |
|
||||
|
||||
### `google_tasks_delete`
|
||||
|
||||
Delete a task from a Google Tasks list
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `taskListId` | string | No | Task list ID \(defaults to primary task list "@default"\) |
|
||||
| `taskId` | string | Yes | The ID of the task to delete |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskId` | string | Deleted task ID |
|
||||
| `deleted` | boolean | Whether deletion was successful |
|
||||
|
||||
### `google_tasks_list_task_lists`
|
||||
|
||||
Retrieve all task lists for the authenticated user
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `maxResults` | number | No | Maximum number of task lists to return \(default 20, max 100\) |
|
||||
| `pageToken` | string | No | Token for pagination |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskLists` | array | List of task lists |
|
||||
| ↳ `id` | string | Task list identifier |
|
||||
| ↳ `title` | string | Title of the task list |
|
||||
| ↳ `updated` | string | Last modification time \(RFC 3339 timestamp\) |
|
||||
| ↳ `selfLink` | string | URL pointing to this task list |
|
||||
| `nextPageToken` | string | Token for retrieving the next page of results |
|
||||
|
||||
|
||||
60
apps/docs/content/docs/en/tools/google_translate.mdx
Normal file
60
apps/docs/content/docs/en/tools/google_translate.mdx
Normal file
@@ -0,0 +1,60 @@
|
||||
---
|
||||
title: Google Translate
|
||||
description: Translate text using Google Cloud Translation
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="google_translate"
|
||||
color="#E0E0E0"
|
||||
/>
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Translate and detect languages using the Google Cloud Translation API. Supports auto-detection of the source language.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `google_translate_text`
|
||||
|
||||
Translate text between languages using the Google Cloud Translation API. Supports auto-detection of the source language.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Google Cloud API key with Cloud Translation API enabled |
|
||||
| `text` | string | Yes | The text to translate |
|
||||
| `target` | string | Yes | Target language code \(e.g., "es", "fr", "de", "ja"\) |
|
||||
| `source` | string | No | Source language code. If omitted, the API will auto-detect the source language. |
|
||||
| `format` | string | No | Format of the text: "text" for plain text, "html" for HTML content |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `translatedText` | string | The translated text |
|
||||
| `detectedSourceLanguage` | string | The detected source language code \(if source was not specified\) |
|
||||
|
||||
### `google_translate_detect`
|
||||
|
||||
Detect the language of text using the Google Cloud Translation API.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Google Cloud API key with Cloud Translation API enabled |
|
||||
| `text` | string | Yes | The text to detect the language of |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `language` | string | The detected language code \(e.g., "en", "es", "fr"\) |
|
||||
| `confidence` | number | Confidence score of the detection |
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
"apollo",
|
||||
"arxiv",
|
||||
"asana",
|
||||
"attio",
|
||||
"browser_use",
|
||||
"calcom",
|
||||
"calendly",
|
||||
@@ -20,6 +21,7 @@
|
||||
"confluence",
|
||||
"cursor",
|
||||
"datadog",
|
||||
"devin",
|
||||
"discord",
|
||||
"dropbox",
|
||||
"dspy",
|
||||
@@ -36,6 +38,7 @@
|
||||
"gitlab",
|
||||
"gmail",
|
||||
"gong",
|
||||
"google_bigquery",
|
||||
"google_books",
|
||||
"google_calendar",
|
||||
"google_docs",
|
||||
@@ -46,6 +49,8 @@
|
||||
"google_search",
|
||||
"google_sheets",
|
||||
"google_slides",
|
||||
"google_tasks",
|
||||
"google_translate",
|
||||
"google_vault",
|
||||
"grafana",
|
||||
"grain",
|
||||
|
||||
@@ -5,11 +5,12 @@ description: User-defined data tables for storing and querying structured data
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
<BlockInfoCard
|
||||
type="table"
|
||||
color="#10B981"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
Tables allow you to create and manage custom data tables directly within Sim. Store, query, and manipulate structured data within your workflows without needing external database integrations.
|
||||
|
||||
**Why Use Tables?**
|
||||
@@ -26,6 +27,7 @@ Tables allow you to create and manage custom data tables directly within Sim. St
|
||||
- Batch operations for bulk inserts
|
||||
- Bulk updates and deletes by filter
|
||||
- Up to 10,000 rows per table, 100 tables per workspace
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
## Creating Tables
|
||||
|
||||
|
||||
@@ -1,96 +0,0 @@
|
||||
---
|
||||
title: Environment Variables
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
Environment variables provide a secure way to manage configuration values and secrets across your workflows, including API keys and other sensitive data that your workflows need to access. They keep secrets out of your workflow definitions while making them available during execution.
|
||||
|
||||
## Variable Types
|
||||
|
||||
Environment variables in Sim work at two levels:
|
||||
|
||||
- **Personal Environment Variables**: Private to your account, only you can see and use them
|
||||
- **Workspace Environment Variables**: Shared across the entire workspace, available to all team members
|
||||
|
||||
<Callout type="info">
|
||||
Workspace environment variables take precedence over personal ones when there's a naming conflict.
|
||||
</Callout>
|
||||
|
||||
## Setting up Environment Variables
|
||||
|
||||
Navigate to Settings to configure your environment variables:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-1.png"
|
||||
alt="Environment variables modal for creating new variables"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
From your workspace settings, you can create and manage both personal and workspace-level environment variables. Personal variables are private to your account, while workspace variables are shared with all team members.
|
||||
|
||||
### Making Variables Workspace-Scoped
|
||||
|
||||
Use the workspace scope toggle to make variables available to your entire team:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-2.png"
|
||||
alt="Toggle workspace scope for environment variables"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
When you enable workspace scope, the variable becomes available to all workspace members and can be used in any workflow within that workspace.
|
||||
|
||||
### Workspace Variables View
|
||||
|
||||
Once you have workspace-scoped variables, they appear in your environment variables list:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-3.png"
|
||||
alt="Workspace-scoped variables in the environment variables list"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## Using Variables in Workflows
|
||||
|
||||
To reference environment variables in your workflows, use the `{{}}` notation. When you type `{{` in any input field, a dropdown will appear showing both your personal and workspace-level environment variables. Simply select the variable you want to use.
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-4.png"
|
||||
alt="Using environment variables with double brace notation"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## How Variables are Resolved
|
||||
|
||||
**Workspace variables always take precedence** over personal variables, regardless of who runs the workflow.
|
||||
|
||||
When no workspace variable exists for a key, personal variables are used:
|
||||
- **Manual runs (UI)**: Your personal variables
|
||||
- **Automated runs (API, webhook, schedule, deployed chat)**: Workflow owner's personal variables
|
||||
|
||||
<Callout type="info">
|
||||
Personal variables are best for testing. Use workspace variables for production workflows.
|
||||
</Callout>
|
||||
|
||||
## Security Best Practices
|
||||
|
||||
### For Sensitive Data
|
||||
- Store API keys, tokens, and passwords as environment variables instead of hardcoding them
|
||||
- Use workspace variables for shared resources that multiple team members need
|
||||
- Keep personal credentials in personal variables
|
||||
|
||||
### Variable Naming
|
||||
- Use descriptive names: `DATABASE_URL` instead of `DB`
|
||||
- Follow consistent naming conventions across your team
|
||||
- Consider prefixes to avoid conflicts: `PROD_API_KEY`, `DEV_API_KEY`
|
||||
|
||||
### Access Control
|
||||
- Workspace environment variables respect workspace permissions
|
||||
- Only users with write access or higher can create/modify workspace variables
|
||||
- Personal variables are always private to the individual user
|
||||
@@ -1,96 +0,0 @@
|
||||
---
|
||||
title: Variables de entorno
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
Las variables de entorno proporcionan una forma segura de gestionar valores de configuración y secretos en tus flujos de trabajo, incluyendo claves API y otros datos sensibles que tus flujos de trabajo necesitan acceder. Mantienen los secretos fuera de las definiciones de tu flujo de trabajo mientras los hacen disponibles durante la ejecución.
|
||||
|
||||
## Tipos de variables
|
||||
|
||||
Las variables de entorno en Sim funcionan en dos niveles:
|
||||
|
||||
- **Variables de entorno personales**: Privadas para tu cuenta, solo tú puedes verlas y usarlas
|
||||
- **Variables de entorno del espacio de trabajo**: Compartidas en todo el espacio de trabajo, disponibles para todos los miembros del equipo
|
||||
|
||||
<Callout type="info">
|
||||
Las variables de entorno del espacio de trabajo tienen prioridad sobre las personales cuando hay un conflicto de nombres.
|
||||
</Callout>
|
||||
|
||||
## Configuración de variables de entorno
|
||||
|
||||
Navega a Configuración para configurar tus variables de entorno:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-1.png"
|
||||
alt="Modal de variables de entorno para crear nuevas variables"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
Desde la configuración de tu espacio de trabajo, puedes crear y gestionar variables de entorno tanto personales como a nivel de espacio de trabajo. Las variables personales son privadas para tu cuenta, mientras que las variables del espacio de trabajo se comparten con todos los miembros del equipo.
|
||||
|
||||
### Hacer variables con ámbito de espacio de trabajo
|
||||
|
||||
Usa el interruptor de ámbito del espacio de trabajo para hacer que las variables estén disponibles para todo tu equipo:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-2.png"
|
||||
alt="Interruptor de ámbito del espacio de trabajo para variables de entorno"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
Cuando habilitas el ámbito del espacio de trabajo, la variable se vuelve disponible para todos los miembros del espacio de trabajo y puede ser utilizada en cualquier flujo de trabajo dentro de ese espacio de trabajo.
|
||||
|
||||
### Vista de variables del espacio de trabajo
|
||||
|
||||
Una vez que tienes variables con ámbito de espacio de trabajo, aparecen en tu lista de variables de entorno:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-3.png"
|
||||
alt="Variables con ámbito de espacio de trabajo en la lista de variables de entorno"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## Uso de variables en flujos de trabajo
|
||||
|
||||
Para hacer referencia a variables de entorno en tus flujos de trabajo, utiliza la notación `{{}}`. Cuando escribas `{{` en cualquier campo de entrada, aparecerá un menú desplegable mostrando tanto tus variables de entorno personales como las del espacio de trabajo. Simplemente selecciona la variable que deseas utilizar.
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-4.png"
|
||||
alt="Uso de variables de entorno con notación de doble llave"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## Cómo se resuelven las variables
|
||||
|
||||
**Las variables del espacio de trabajo siempre tienen prioridad** sobre las variables personales, independientemente de quién ejecute el flujo de trabajo.
|
||||
|
||||
Cuando no existe una variable de espacio de trabajo para una clave, se utilizan las variables personales:
|
||||
- **Ejecuciones manuales (UI)**: Tus variables personales
|
||||
- **Ejecuciones automatizadas (API, webhook, programación, chat implementado)**: Variables personales del propietario del flujo de trabajo
|
||||
|
||||
<Callout type="info">
|
||||
Las variables personales son mejores para pruebas. Usa variables de espacio de trabajo para flujos de trabajo de producción.
|
||||
</Callout>
|
||||
|
||||
## Mejores prácticas de seguridad
|
||||
|
||||
### Para datos sensibles
|
||||
- Almacena claves API, tokens y contraseñas como variables de entorno en lugar de codificarlos directamente
|
||||
- Usa variables de espacio de trabajo para recursos compartidos que varios miembros del equipo necesitan
|
||||
- Mantén las credenciales personales en variables personales
|
||||
|
||||
### Nomenclatura de variables
|
||||
- Usa nombres descriptivos: `DATABASE_URL` en lugar de `DB`
|
||||
- Sigue convenciones de nomenclatura consistentes en todo tu equipo
|
||||
- Considera usar prefijos para evitar conflictos: `PROD_API_KEY`, `DEV_API_KEY`
|
||||
|
||||
### Control de acceso
|
||||
- Las variables de entorno del espacio de trabajo respetan los permisos del espacio de trabajo
|
||||
- Solo los usuarios con acceso de escritura o superior pueden crear/modificar variables del espacio de trabajo
|
||||
- Las variables personales siempre son privadas para el usuario individual
|
||||
@@ -1,96 +0,0 @@
|
||||
---
|
||||
title: Variables d'environnement
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
Les variables d'environnement offrent un moyen sécurisé de gérer les valeurs de configuration et les secrets dans vos workflows, y compris les clés API et autres données sensibles dont vos workflows ont besoin. Elles gardent les secrets en dehors de vos définitions de workflow tout en les rendant disponibles pendant l'exécution.
|
||||
|
||||
## Types de variables
|
||||
|
||||
Les variables d'environnement dans Sim fonctionnent à deux niveaux :
|
||||
|
||||
- **Variables d'environnement personnelles** : privées à votre compte, vous seul pouvez les voir et les utiliser
|
||||
- **Variables d'environnement d'espace de travail** : partagées dans tout l'espace de travail, disponibles pour tous les membres de l'équipe
|
||||
|
||||
<Callout type="info">
|
||||
Les variables d'environnement d'espace de travail ont priorité sur les variables personnelles en cas de conflit de noms.
|
||||
</Callout>
|
||||
|
||||
## Configuration des variables d'environnement
|
||||
|
||||
Accédez aux Paramètres pour configurer vos variables d'environnement :
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-1.png"
|
||||
alt="Fenêtre modale de variables d'environnement pour créer de nouvelles variables"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
Depuis les paramètres de votre espace de travail, vous pouvez créer et gérer des variables d'environnement personnelles et au niveau de l'espace de travail. Les variables personnelles sont privées à votre compte, tandis que les variables d'espace de travail sont partagées avec tous les membres de l'équipe.
|
||||
|
||||
### Définir des variables au niveau de l'espace de travail
|
||||
|
||||
Utilisez le bouton de portée d'espace de travail pour rendre les variables disponibles à toute votre équipe :
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-2.png"
|
||||
alt="Activer la portée d'espace de travail pour les variables d'environnement"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
Lorsque vous activez la portée d'espace de travail, la variable devient disponible pour tous les membres de l'espace de travail et peut être utilisée dans n'importe quel workflow au sein de cet espace de travail.
|
||||
|
||||
### Vue des variables d'espace de travail
|
||||
|
||||
Une fois que vous avez des variables à portée d'espace de travail, elles apparaissent dans votre liste de variables d'environnement :
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-3.png"
|
||||
alt="Variables à portée d'espace de travail dans la liste des variables d'environnement"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## Utilisation des variables dans les workflows
|
||||
|
||||
Pour référencer des variables d'environnement dans vos workflows, utilisez la notation `{{}}`. Lorsque vous tapez `{{` dans n'importe quel champ de saisie, un menu déroulant apparaîtra affichant à la fois vos variables d'environnement personnelles et celles au niveau de l'espace de travail. Sélectionnez simplement la variable que vous souhaitez utiliser.
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-4.png"
|
||||
alt="Utilisation des variables d'environnement avec la notation à double accolade"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## Comment les variables sont résolues
|
||||
|
||||
**Les variables d'espace de travail ont toujours la priorité** sur les variables personnelles, quel que soit l'utilisateur qui exécute le flux de travail.
|
||||
|
||||
Lorsqu'aucune variable d'espace de travail n'existe pour une clé, les variables personnelles sont utilisées :
|
||||
- **Exécutions manuelles (UI)** : Vos variables personnelles
|
||||
- **Exécutions automatisées (API, webhook, planification, chat déployé)** : Variables personnelles du propriétaire du flux de travail
|
||||
|
||||
<Callout type="info">
|
||||
Les variables personnelles sont idéales pour les tests. Utilisez les variables d'espace de travail pour les flux de travail en production.
|
||||
</Callout>
|
||||
|
||||
## Bonnes pratiques de sécurité
|
||||
|
||||
### Pour les données sensibles
|
||||
- Stockez les clés API, les jetons et les mots de passe comme variables d'environnement au lieu de les coder en dur
|
||||
- Utilisez des variables d'espace de travail pour les ressources partagées dont plusieurs membres de l'équipe ont besoin
|
||||
- Conservez vos identifiants personnels dans des variables personnelles
|
||||
|
||||
### Nommage des variables
|
||||
- Utilisez des noms descriptifs : `DATABASE_URL` au lieu de `DB`
|
||||
- Suivez des conventions de nommage cohérentes au sein de votre équipe
|
||||
- Envisagez des préfixes pour éviter les conflits : `PROD_API_KEY`, `DEV_API_KEY`
|
||||
|
||||
### Contrôle d'accès
|
||||
- Les variables d'environnement de l'espace de travail respectent les permissions de l'espace de travail
|
||||
- Seuls les utilisateurs disposant d'un accès en écriture ou supérieur peuvent créer/modifier les variables d'espace de travail
|
||||
- Les variables personnelles sont toujours privées pour l'utilisateur individuel
|
||||
@@ -1,96 +0,0 @@
|
||||
---
|
||||
title: 環境変数
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
環境変数は、APIキーやワークフローがアクセスする必要のあるその他の機密データなど、ワークフロー全体で設定値や機密情報を安全に管理する方法を提供します。これにより、実行中にそれらを利用可能にしながら、ワークフロー定義から機密情報を切り離すことができます。
|
||||
|
||||
## 変数タイプ
|
||||
|
||||
Simの環境変数は2つのレベルで機能します:
|
||||
|
||||
- **個人環境変数**:あなたのアカウントに限定され、あなただけが閲覧・使用できます
|
||||
- **ワークスペース環境変数**:ワークスペース全体で共有され、すべてのチームメンバーが利用できます
|
||||
|
||||
<Callout type="info">
|
||||
名前の競合がある場合、ワークスペース環境変数は個人環境変数よりも優先されます。
|
||||
</Callout>
|
||||
|
||||
## 環境変数の設定
|
||||
|
||||
設定に移動して環境変数を構成します:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-1.png"
|
||||
alt="新しい変数を作成するための環境変数モーダル"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
ワークスペース設定から、個人レベルとワークスペースレベルの両方の環境変数を作成・管理できます。個人変数はあなたのアカウントに限定されますが、ワークスペース変数はすべてのチームメンバーと共有されます。
|
||||
|
||||
### 変数をワークスペーススコープにする
|
||||
|
||||
ワークスペーススコープトグルを使用して、変数をチーム全体で利用可能にします:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-2.png"
|
||||
alt="環境変数のワークスペーススコープを切り替えるトグル"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
ワークスペーススコープを有効にすると、その変数はすべてのワークスペースメンバーが利用でき、そのワークスペース内のあらゆるワークフローで使用できるようになります。
|
||||
|
||||
### ワークスペース変数ビュー
|
||||
|
||||
ワークスペーススコープの変数を作成すると、環境変数リストに表示されます:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-3.png"
|
||||
alt="環境変数リスト内のワークスペーススコープ変数"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## ワークフローでの変数の使用
|
||||
|
||||
ワークフローで環境変数を参照するには、`{{}}`表記を使用します。任意の入力フィールドで`{{`と入力すると、個人用とワークスペースレベルの両方の環境変数を表示するドロップダウンが表示されます。使用したい変数を選択するだけです。
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-4.png"
|
||||
alt="二重括弧表記を使用した環境変数の使用方法"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## 変数の解決方法
|
||||
|
||||
**ワークスペース変数は常に優先されます**。誰がワークフローを実行するかに関わらず、個人変数よりも優先されます。
|
||||
|
||||
キーに対するワークスペース変数が存在しない場合、個人変数が使用されます:
|
||||
- **手動実行(UI)**:あなたの個人変数
|
||||
- **自動実行(API、ウェブフック、スケジュール、デプロイされたチャット)**:ワークフロー所有者の個人変数
|
||||
|
||||
<Callout type="info">
|
||||
個人変数はテストに最適です。本番環境のワークフローにはワークスペース変数を使用してください。
|
||||
</Callout>
|
||||
|
||||
## セキュリティのベストプラクティス
|
||||
|
||||
### 機密データについて
|
||||
- APIキー、トークン、パスワードはハードコーディングせず、環境変数として保存してください
|
||||
- 複数のチームメンバーが必要とする共有リソースにはワークスペース変数を使用してください
|
||||
- 個人の認証情報は個人変数に保管してください
|
||||
|
||||
### 変数の命名
|
||||
- 説明的な名前を使用する:`DB`ではなく`DATABASE_URL`
|
||||
- チーム全体で一貫した命名規則に従う
|
||||
- 競合を避けるために接頭辞を検討する:`PROD_API_KEY`、`DEV_API_KEY`
|
||||
|
||||
### アクセス制御
|
||||
- ワークスペース環境変数はワークスペースの権限を尊重します
|
||||
- 書き込みアクセス権以上を持つユーザーのみがワークスペース変数を作成/変更できます
|
||||
- 個人変数は常に個々のユーザーにプライベートです
|
||||
@@ -1,96 +0,0 @@
|
||||
---
|
||||
title: 环境变量
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
环境变量为管理工作流中的配置值和密钥(包括 API 密钥和其他敏感数据)提供了一种安全的方式。它们可以在执行期间使用,同时将敏感信息从工作流定义中隔离开来。
|
||||
|
||||
## 变量类型
|
||||
|
||||
Sim 中的环境变量分为两个级别:
|
||||
|
||||
- **个人环境变量**:仅限于您的账户,只有您可以查看和使用
|
||||
- **工作区环境变量**:在整个工作区内共享,所有团队成员都可以使用
|
||||
|
||||
<Callout type="info">
|
||||
当命名冲突时,工作区环境变量优先于个人环境变量。
|
||||
</Callout>
|
||||
|
||||
## 设置环境变量
|
||||
|
||||
前往设置页面配置您的环境变量:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-1.png"
|
||||
alt="用于创建新变量的环境变量弹窗"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
在工作区设置中,您可以创建和管理个人及工作区级别的环境变量。个人变量仅限于您的账户,而工作区变量会与所有团队成员共享。
|
||||
|
||||
### 将变量设为工作区范围
|
||||
|
||||
使用工作区范围切换按钮,使变量对整个团队可用:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-2.png"
|
||||
alt="切换环境变量的工作区范围"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
启用工作区范围后,该变量将对所有工作区成员可用,并可在该工作区内的任何工作流中使用。
|
||||
|
||||
### 工作区变量视图
|
||||
|
||||
一旦您拥有了工作区范围的变量,它们将显示在您的环境变量列表中:
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-3.png"
|
||||
alt="环境变量列表中的工作区范围变量"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## 在工作流中使用变量
|
||||
|
||||
要在工作流中引用环境变量,请使用 `{{}}` 表示法。当您在任何输入字段中键入 `{{` 时,将会出现一个下拉菜单,显示您的个人和工作区级别的环境变量。只需选择您想要使用的变量即可。
|
||||
|
||||
<Image
|
||||
src="/static/environment/environment-4.png"
|
||||
alt="使用双大括号表示法的环境变量"
|
||||
width={500}
|
||||
height={350}
|
||||
/>
|
||||
|
||||
## 变量的解析方式
|
||||
|
||||
**工作区变量始终优先于**个人变量,无论是谁运行工作流。
|
||||
|
||||
当某个键没有工作区变量时,将使用个人变量:
|
||||
- **手动运行(UI)**:使用您的个人变量
|
||||
- **自动运行(API、Webhook、计划任务、已部署的聊天)**:使用工作流所有者的个人变量
|
||||
|
||||
<Callout type="info">
|
||||
个人变量最适合用于测试。生产环境的工作流请使用工作区变量。
|
||||
</Callout>
|
||||
|
||||
## 安全最佳实践
|
||||
|
||||
### 针对敏感数据
|
||||
- 将 API 密钥、令牌和密码存储为环境变量,而不是硬编码它们
|
||||
- 对于多个团队成员需要的共享资源,使用工作区变量
|
||||
- 将个人凭据保存在个人变量中
|
||||
|
||||
### 变量命名
|
||||
- 使用描述性名称:`DATABASE_URL` 而不是 `DB`
|
||||
- 在团队中遵循一致的命名约定
|
||||
- 考虑使用前缀以避免冲突:`PROD_API_KEY`、`DEV_API_KEY`
|
||||
|
||||
### 访问控制
|
||||
- 工作区环境变量遵循工作区权限
|
||||
- 只有具有写入权限或更高权限的用户才能创建/修改工作区变量
|
||||
- 个人变量始终对个人用户私有
|
||||
BIN
apps/docs/public/static/credentials/create-oauth.png
Normal file
BIN
apps/docs/public/static/credentials/create-oauth.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 83 KiB |
BIN
apps/docs/public/static/credentials/create-secret.png
Normal file
BIN
apps/docs/public/static/credentials/create-secret.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 84 KiB |
BIN
apps/docs/public/static/credentials/oauth-selector.png
Normal file
BIN
apps/docs/public/static/credentials/oauth-selector.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 48 KiB |
BIN
apps/docs/public/static/credentials/secret-dropdown.png
Normal file
BIN
apps/docs/public/static/credentials/secret-dropdown.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 23 KiB |
BIN
apps/docs/public/static/credentials/secret-resolved.png
Normal file
BIN
apps/docs/public/static/credentials/secret-resolved.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 26 KiB |
BIN
apps/docs/public/static/credentials/settings-secrets.png
Normal file
BIN
apps/docs/public/static/credentials/settings-secrets.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 72 KiB |
@@ -3,6 +3,7 @@ import type { NextRequest } from 'next/server'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/plan'
|
||||
import { getExecutionTimeout } from '@/lib/core/execution-limits'
|
||||
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
|
||||
import { SIM_VIA_HEADER } from '@/lib/execution/call-chain'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { mcpService } from '@/lib/mcp/service'
|
||||
import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types'
|
||||
@@ -178,8 +179,14 @@ export const POST = withMcpAuth('read')(
|
||||
'sync'
|
||||
)
|
||||
|
||||
const simViaHeader = request.headers.get(SIM_VIA_HEADER)
|
||||
const extraHeaders: Record<string, string> = {}
|
||||
if (simViaHeader) {
|
||||
extraHeaders[SIM_VIA_HEADER] = simViaHeader
|
||||
}
|
||||
|
||||
const result = await Promise.race([
|
||||
mcpService.executeTool(userId, serverId, toolCall, workspaceId),
|
||||
mcpService.executeTool(userId, serverId, toolCall, workspaceId, extraHeaders),
|
||||
new Promise<never>((_, reject) =>
|
||||
setTimeout(() => reject(new Error('Tool execution timeout')), executionTimeout)
|
||||
),
|
||||
|
||||
@@ -283,3 +283,165 @@ export async function POST(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a blog post
|
||||
*/
|
||||
export async function PUT(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, blogPostId, title, content, cloudId: providedCloudId } = body
|
||||
|
||||
if (!domain || !accessToken || !blogPostId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Domain, access token, and blog post ID are required' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const blogPostIdValidation = validateAlphanumericId(blogPostId, 'blogPostId', 255)
|
||||
if (!blogPostIdValidation.isValid) {
|
||||
return NextResponse.json({ error: blogPostIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
// Fetch current blog post to get version number
|
||||
const currentUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/blogposts/${blogPostId}?body-format=storage`
|
||||
const currentResponse = await fetch(currentUrl, {
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!currentResponse.ok) {
|
||||
throw new Error(`Failed to fetch current blog post: ${currentResponse.status}`)
|
||||
}
|
||||
|
||||
const currentPost = await currentResponse.json()
|
||||
|
||||
if (!currentPost.version?.number) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Unable to determine current blog post version' },
|
||||
{ status: 422 }
|
||||
)
|
||||
}
|
||||
|
||||
const currentVersion = currentPost.version.number
|
||||
|
||||
const updateBody: Record<string, unknown> = {
|
||||
id: blogPostId,
|
||||
version: { number: currentVersion + 1 },
|
||||
status: 'current',
|
||||
title: title || currentPost.title,
|
||||
body: {
|
||||
representation: 'storage',
|
||||
value: content || currentPost.body?.storage?.value || '',
|
||||
},
|
||||
}
|
||||
|
||||
const response = await fetch(currentUrl, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
body: JSON.stringify(updateBody),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to update blog post (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
logger.error('Error updating blog post:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a blog post
|
||||
*/
|
||||
export async function DELETE(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, blogPostId, cloudId: providedCloudId } = body
|
||||
|
||||
if (!domain || !accessToken || !blogPostId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Domain, access token, and blog post ID are required' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const blogPostIdValidation = validateAlphanumericId(blogPostId, 'blogPostId', 255)
|
||||
if (!blogPostIdValidation.isValid) {
|
||||
return NextResponse.json({ error: blogPostIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/blogposts/${blogPostId}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to delete blog post (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
return NextResponse.json({ blogPostId, deleted: true })
|
||||
} catch (error) {
|
||||
logger.error('Error deleting blog post:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
115
apps/sim/app/api/tools/confluence/page-descendants/route.ts
Normal file
115
apps/sim/app/api/tools/confluence/page-descendants/route.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
validateAlphanumericId,
|
||||
validateJiraCloudId,
|
||||
validatePaginationCursor,
|
||||
} from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
const logger = createLogger('ConfluencePageDescendantsAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* Get all descendants of a Confluence page recursively.
|
||||
* Uses GET /wiki/api/v2/pages/{id}/descendants
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, pageId, cloudId: providedCloudId, limit = 50, cursor } = body
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!pageId) {
|
||||
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
||||
if (!pageIdValidation.isValid) {
|
||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const queryParams = new URLSearchParams()
|
||||
queryParams.append('limit', String(Math.min(limit, 250)))
|
||||
|
||||
if (cursor) {
|
||||
const cursorValidation = validatePaginationCursor(cursor, 'cursor')
|
||||
if (!cursorValidation.isValid) {
|
||||
return NextResponse.json({ error: cursorValidation.error }, { status: 400 })
|
||||
}
|
||||
queryParams.append('cursor', cursor)
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/descendants?${queryParams.toString()}`
|
||||
|
||||
logger.info(`Fetching descendants for page ${pageId}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage =
|
||||
errorData?.message || `Failed to get page descendants (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const descendants = (data.results || []).map((page: any) => ({
|
||||
id: page.id,
|
||||
title: page.title,
|
||||
type: page.type ?? null,
|
||||
status: page.status ?? null,
|
||||
spaceId: page.spaceId ?? null,
|
||||
parentId: page.parentId ?? null,
|
||||
childPosition: page.childPosition ?? null,
|
||||
depth: page.depth ?? null,
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
descendants,
|
||||
pageId,
|
||||
nextCursor: data._links?.next
|
||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
||||
: null,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error getting page descendants:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,13 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
import {
|
||||
validateAlphanumericId,
|
||||
validateJiraCloudId,
|
||||
validateNumericId,
|
||||
validatePaginationCursor,
|
||||
} from '@/lib/core/security/input-validation'
|
||||
import { cleanHtmlContent, getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
const logger = createLogger('ConfluencePageVersionsAPI')
|
||||
|
||||
@@ -55,42 +60,85 @@ export async function POST(request: NextRequest) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
// If versionNumber is provided, get specific version
|
||||
// If versionNumber is provided, get specific version with page content
|
||||
if (versionNumber !== undefined && versionNumber !== null) {
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/versions/${versionNumber}`
|
||||
const versionValidation = validateNumericId(versionNumber, 'versionNumber', { min: 1 })
|
||||
if (!versionValidation.isValid) {
|
||||
return NextResponse.json({ error: versionValidation.error }, { status: 400 })
|
||||
}
|
||||
const safeVersion = versionValidation.sanitized
|
||||
|
||||
const versionUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/versions/${safeVersion}`
|
||||
const pageUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}?version=${safeVersion}&body-format=storage`
|
||||
|
||||
logger.info(`Fetching version ${versionNumber} for page ${pageId}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
const [versionResponse, pageResponse] = await Promise.all([
|
||||
fetch(versionUrl, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
}),
|
||||
fetch(pageUrl, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
}),
|
||||
])
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
if (!versionResponse.ok) {
|
||||
const errorData = await versionResponse.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
status: versionResponse.status,
|
||||
statusText: versionResponse.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to get page version (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
const errorMessage =
|
||||
errorData?.message || `Failed to get page version (${versionResponse.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: versionResponse.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
const versionData = await versionResponse.json()
|
||||
|
||||
let title: string | null = null
|
||||
let content: string | null = null
|
||||
let body: Record<string, unknown> | null = null
|
||||
|
||||
if (pageResponse.ok) {
|
||||
const pageData = await pageResponse.json()
|
||||
title = pageData.title ?? null
|
||||
body = pageData.body ?? null
|
||||
|
||||
const rawContent =
|
||||
pageData.body?.storage?.value ||
|
||||
pageData.body?.view?.value ||
|
||||
pageData.body?.atlas_doc_format?.value ||
|
||||
''
|
||||
if (rawContent) {
|
||||
content = cleanHtmlContent(rawContent)
|
||||
}
|
||||
} else {
|
||||
logger.warn(
|
||||
`Could not fetch page content for version ${versionNumber}: ${pageResponse.status}`
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
version: {
|
||||
number: data.number,
|
||||
message: data.message ?? null,
|
||||
minorEdit: data.minorEdit ?? false,
|
||||
authorId: data.authorId ?? null,
|
||||
createdAt: data.createdAt ?? null,
|
||||
number: versionData.number,
|
||||
message: versionData.message ?? null,
|
||||
minorEdit: versionData.minorEdit ?? false,
|
||||
authorId: versionData.authorId ?? null,
|
||||
createdAt: versionData.createdAt ?? null,
|
||||
},
|
||||
pageId,
|
||||
title,
|
||||
content,
|
||||
body,
|
||||
})
|
||||
}
|
||||
// List all versions
|
||||
@@ -98,6 +146,10 @@ export async function POST(request: NextRequest) {
|
||||
queryParams.append('limit', String(Math.min(limit, 250)))
|
||||
|
||||
if (cursor) {
|
||||
const cursorValidation = validatePaginationCursor(cursor, 'cursor')
|
||||
if (!cursorValidation.isValid) {
|
||||
return NextResponse.json({ error: cursorValidation.error }, { status: 400 })
|
||||
}
|
||||
queryParams.append('cursor', cursor)
|
||||
}
|
||||
|
||||
|
||||
@@ -185,7 +185,7 @@ export async function PUT(request: NextRequest) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const currentPageUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}`
|
||||
const currentPageUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}?body-format=storage`
|
||||
const currentPageResponse = await fetch(currentPageUrl, {
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
|
||||
114
apps/sim/app/api/tools/confluence/space-permissions/route.ts
Normal file
114
apps/sim/app/api/tools/confluence/space-permissions/route.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
validateAlphanumericId,
|
||||
validateJiraCloudId,
|
||||
validatePaginationCursor,
|
||||
} from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
const logger = createLogger('ConfluenceSpacePermissionsAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* List permissions for a Confluence space.
|
||||
* Uses GET /wiki/api/v2/spaces/{id}/permissions
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, spaceId, cloudId: providedCloudId, limit = 50, cursor } = body
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!spaceId) {
|
||||
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
|
||||
if (!spaceIdValidation.isValid) {
|
||||
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const queryParams = new URLSearchParams()
|
||||
queryParams.append('limit', String(Math.min(limit, 250)))
|
||||
|
||||
if (cursor) {
|
||||
const cursorValidation = validatePaginationCursor(cursor, 'cursor')
|
||||
if (!cursorValidation.isValid) {
|
||||
return NextResponse.json({ error: cursorValidation.error }, { status: 400 })
|
||||
}
|
||||
queryParams.append('cursor', cursor)
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/permissions?${queryParams.toString()}`
|
||||
|
||||
logger.info(`Fetching permissions for space ${spaceId}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage =
|
||||
errorData?.message || `Failed to list space permissions (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const permissions = (data.results || []).map((perm: any) => ({
|
||||
id: perm.id,
|
||||
principalType: perm.principal?.type ?? null,
|
||||
principalId: perm.principal?.id ?? null,
|
||||
operationKey: perm.operation?.key ?? null,
|
||||
operationTargetType: perm.operation?.targetType ?? null,
|
||||
anonymousAccess: perm.anonymousAccess ?? false,
|
||||
unlicensedAccess: perm.unlicensedAccess ?? false,
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
permissions,
|
||||
spaceId,
|
||||
nextCursor: data._links?.next
|
||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
||||
: null,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error listing space permissions:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
209
apps/sim/app/api/tools/confluence/space-properties/route.ts
Normal file
209
apps/sim/app/api/tools/confluence/space-properties/route.ts
Normal file
@@ -0,0 +1,209 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
validateAlphanumericId,
|
||||
validateJiraCloudId,
|
||||
validatePaginationCursor,
|
||||
} from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
const logger = createLogger('ConfluenceSpacePropertiesAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* List, create, or delete space properties.
|
||||
* Uses GET/POST /wiki/api/v2/spaces/{id}/properties
|
||||
* and DELETE /wiki/api/v2/spaces/{id}/properties/{propertyId}
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
spaceId,
|
||||
cloudId: providedCloudId,
|
||||
action,
|
||||
key,
|
||||
value,
|
||||
propertyId,
|
||||
limit = 50,
|
||||
cursor,
|
||||
} = body
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!spaceId) {
|
||||
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
|
||||
if (!spaceIdValidation.isValid) {
|
||||
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/properties`
|
||||
|
||||
// Validate required params for specific actions
|
||||
if (action === 'delete' && !propertyId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Property ID is required for delete action' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (action === 'create' && !key) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Property key is required for create action' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Delete a property
|
||||
if (action === 'delete' && propertyId) {
|
||||
const propertyIdValidation = validateAlphanumericId(propertyId, 'propertyId', 255)
|
||||
if (!propertyIdValidation.isValid) {
|
||||
return NextResponse.json({ error: propertyIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `${baseUrl}/${encodeURIComponent(propertyId)}`
|
||||
|
||||
logger.info(`Deleting space property ${propertyId} from space ${spaceId}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage =
|
||||
errorData?.message || `Failed to delete space property (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
return NextResponse.json({ spaceId, propertyId, deleted: true })
|
||||
}
|
||||
|
||||
// Create a property
|
||||
if (action === 'create' && key) {
|
||||
logger.info(`Creating space property '${key}' on space ${spaceId}`)
|
||||
|
||||
const response = await fetch(baseUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
body: JSON.stringify({ key, value: value ?? {} }),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage =
|
||||
errorData?.message || `Failed to create space property (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json({
|
||||
propertyId: data.id,
|
||||
key: data.key,
|
||||
value: data.value ?? null,
|
||||
spaceId,
|
||||
})
|
||||
}
|
||||
|
||||
// List properties
|
||||
const queryParams = new URLSearchParams()
|
||||
queryParams.append('limit', String(Math.min(limit, 250)))
|
||||
|
||||
if (cursor) {
|
||||
const cursorValidation = validatePaginationCursor(cursor, 'cursor')
|
||||
if (!cursorValidation.isValid) {
|
||||
return NextResponse.json({ error: cursorValidation.error }, { status: 400 })
|
||||
}
|
||||
queryParams.append('cursor', cursor)
|
||||
}
|
||||
|
||||
const url = `${baseUrl}?${queryParams.toString()}`
|
||||
|
||||
logger.info(`Fetching properties for space ${spaceId}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage =
|
||||
errorData?.message || `Failed to list space properties (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const properties = (data.results || []).map((prop: any) => ({
|
||||
id: prop.id,
|
||||
key: prop.key,
|
||||
value: prop.value ?? null,
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
properties,
|
||||
spaceId,
|
||||
nextCursor: data._links?.next
|
||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
||||
: null,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error with space properties:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -78,3 +78,258 @@ export async function GET(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Confluence space.
|
||||
* Uses POST /wiki/api/v2/spaces
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, name, key, description, cloudId: providedCloudId } = body
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!name) {
|
||||
return NextResponse.json({ error: 'Space name is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!key) {
|
||||
return NextResponse.json({ error: 'Space key is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces`
|
||||
|
||||
const createBody: Record<string, unknown> = { name, key }
|
||||
if (description) {
|
||||
createBody.description = { value: description, representation: 'plain' }
|
||||
}
|
||||
|
||||
logger.info(`Creating space with key ${key}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
body: JSON.stringify(createBody),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to create space (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
logger.error('Error creating Confluence space:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a Confluence space.
|
||||
* Uses PUT /wiki/api/v2/spaces/{id}
|
||||
*/
|
||||
export async function PUT(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, spaceId, name, description, cloudId: providedCloudId } = body
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!spaceId) {
|
||||
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
|
||||
if (!spaceIdValidation.isValid) {
|
||||
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}`
|
||||
|
||||
if (!name && description === undefined) {
|
||||
return NextResponse.json(
|
||||
{ error: 'At least one of name or description is required for update' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const updateBody: Record<string, unknown> = {}
|
||||
|
||||
if (name) {
|
||||
updateBody.name = name
|
||||
} else {
|
||||
const currentResponse = await fetch(url, {
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
if (!currentResponse.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: `Failed to fetch current space: ${currentResponse.status}` },
|
||||
{ status: currentResponse.status }
|
||||
)
|
||||
}
|
||||
const currentSpace = await currentResponse.json()
|
||||
updateBody.name = currentSpace.name
|
||||
}
|
||||
|
||||
if (description !== undefined) {
|
||||
updateBody.description = { value: description, representation: 'plain' }
|
||||
}
|
||||
|
||||
logger.info(`Updating space ${spaceId}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
body: JSON.stringify(updateBody),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to update space (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
logger.error('Error updating Confluence space:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a Confluence space.
|
||||
* Uses DELETE /wiki/api/v2/spaces/{id}
|
||||
*/
|
||||
export async function DELETE(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, spaceId, cloudId: providedCloudId } = body
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!spaceId) {
|
||||
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
|
||||
if (!spaceIdValidation.isValid) {
|
||||
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}`
|
||||
|
||||
logger.info(`Deleting space ${spaceId}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to delete space (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
return NextResponse.json({ spaceId, deleted: true })
|
||||
} catch (error) {
|
||||
logger.error('Error deleting Confluence space:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
278
apps/sim/app/api/tools/confluence/tasks/route.ts
Normal file
278
apps/sim/app/api/tools/confluence/tasks/route.ts
Normal file
@@ -0,0 +1,278 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
validateAlphanumericId,
|
||||
validateJiraCloudId,
|
||||
validatePaginationCursor,
|
||||
validatePathSegment,
|
||||
} from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
const logger = createLogger('ConfluenceTasksAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* List, get, or update Confluence inline tasks.
|
||||
* Uses GET /wiki/api/v2/tasks, GET /wiki/api/v2/tasks/{id}, PUT /wiki/api/v2/tasks/{id}
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const {
|
||||
domain,
|
||||
accessToken,
|
||||
cloudId: providedCloudId,
|
||||
action,
|
||||
taskId,
|
||||
status: taskStatus,
|
||||
pageId,
|
||||
spaceId,
|
||||
assignedTo,
|
||||
limit = 50,
|
||||
cursor,
|
||||
} = body
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
// Update a task
|
||||
if (action === 'update' && taskId) {
|
||||
const taskIdValidation = validateAlphanumericId(taskId, 'taskId', 255)
|
||||
if (!taskIdValidation.isValid) {
|
||||
return NextResponse.json({ error: taskIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
// First fetch the current task to get required fields
|
||||
const getUrl = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/tasks/${taskId}`
|
||||
const getResponse = await fetch(getUrl, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!getResponse.ok) {
|
||||
const errorData = await getResponse.json().catch(() => null)
|
||||
const errorMessage = errorData?.message || `Failed to fetch task (${getResponse.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: getResponse.status })
|
||||
}
|
||||
|
||||
const currentTask = await getResponse.json()
|
||||
|
||||
const updateBody: Record<string, unknown> = {
|
||||
id: taskId,
|
||||
status: taskStatus || currentTask.status,
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/tasks/${taskId}`
|
||||
|
||||
logger.info(`Updating task ${taskId}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
body: JSON.stringify(updateBody),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to update task (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json({
|
||||
task: {
|
||||
id: data.id,
|
||||
localId: data.localId ?? null,
|
||||
spaceId: data.spaceId ?? null,
|
||||
pageId: data.pageId ?? null,
|
||||
blogPostId: data.blogPostId ?? null,
|
||||
status: data.status,
|
||||
body: data.body?.storage?.value ?? null,
|
||||
createdBy: data.createdBy ?? null,
|
||||
assignedTo: data.assignedTo ?? null,
|
||||
completedBy: data.completedBy ?? null,
|
||||
createdAt: data.createdAt ?? null,
|
||||
updatedAt: data.updatedAt ?? null,
|
||||
dueAt: data.dueAt ?? null,
|
||||
completedAt: data.completedAt ?? null,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// Get a specific task
|
||||
if (taskId) {
|
||||
const taskIdValidation = validateAlphanumericId(taskId, 'taskId', 255)
|
||||
if (!taskIdValidation.isValid) {
|
||||
return NextResponse.json({ error: taskIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/tasks/${taskId}`
|
||||
|
||||
logger.info(`Fetching task ${taskId}`)
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to get task (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json({
|
||||
task: {
|
||||
id: data.id,
|
||||
localId: data.localId ?? null,
|
||||
spaceId: data.spaceId ?? null,
|
||||
pageId: data.pageId ?? null,
|
||||
blogPostId: data.blogPostId ?? null,
|
||||
status: data.status,
|
||||
body: data.body?.storage?.value ?? null,
|
||||
createdBy: data.createdBy ?? null,
|
||||
assignedTo: data.assignedTo ?? null,
|
||||
completedBy: data.completedBy ?? null,
|
||||
createdAt: data.createdAt ?? null,
|
||||
updatedAt: data.updatedAt ?? null,
|
||||
dueAt: data.dueAt ?? null,
|
||||
completedAt: data.completedAt ?? null,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
// List tasks
|
||||
const queryParams = new URLSearchParams()
|
||||
queryParams.append('limit', String(Math.min(limit, 250)))
|
||||
|
||||
if (cursor) {
|
||||
const cursorValidation = validatePaginationCursor(cursor, 'cursor')
|
||||
if (!cursorValidation.isValid) {
|
||||
return NextResponse.json({ error: cursorValidation.error }, { status: 400 })
|
||||
}
|
||||
queryParams.append('cursor', cursor)
|
||||
}
|
||||
if (taskStatus) queryParams.append('status', taskStatus)
|
||||
if (pageId) {
|
||||
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
|
||||
if (!pageIdValidation.isValid) {
|
||||
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
|
||||
}
|
||||
queryParams.append('page-id', pageId)
|
||||
}
|
||||
if (spaceId) {
|
||||
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
|
||||
if (!spaceIdValidation.isValid) {
|
||||
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
|
||||
}
|
||||
queryParams.append('space-id', spaceId)
|
||||
}
|
||||
if (assignedTo) {
|
||||
// Atlassian account IDs: 5d5bd05c3aee0123abc or 557058:6b9c9931-4693-49c1-8b3a-931f1af98134
|
||||
const assignedToValidation = validatePathSegment(assignedTo, {
|
||||
paramName: 'assignedTo',
|
||||
maxLength: 128,
|
||||
customPattern: /^[a-zA-Z0-9_|:-]+$/,
|
||||
})
|
||||
if (!assignedToValidation.isValid) {
|
||||
return NextResponse.json({ error: assignedToValidation.error }, { status: 400 })
|
||||
}
|
||||
queryParams.append('assigned-to', assignedTo)
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/tasks?${queryParams.toString()}`
|
||||
|
||||
logger.info('Fetching tasks')
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage = errorData?.message || `Failed to list tasks (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
const tasks = (data.results || []).map((task: any) => ({
|
||||
id: task.id,
|
||||
localId: task.localId ?? null,
|
||||
spaceId: task.spaceId ?? null,
|
||||
pageId: task.pageId ?? null,
|
||||
blogPostId: task.blogPostId ?? null,
|
||||
status: task.status,
|
||||
body: task.body?.storage?.value ?? null,
|
||||
createdBy: task.createdBy ?? null,
|
||||
assignedTo: task.assignedTo ?? null,
|
||||
completedBy: task.completedBy ?? null,
|
||||
createdAt: task.createdAt ?? null,
|
||||
updatedAt: task.updatedAt ?? null,
|
||||
dueAt: task.dueAt ?? null,
|
||||
completedAt: task.completedAt ?? null,
|
||||
}))
|
||||
|
||||
return NextResponse.json({
|
||||
tasks,
|
||||
nextCursor: data._links?.next
|
||||
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
|
||||
: null,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error with tasks:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
85
apps/sim/app/api/tools/confluence/user/route.ts
Normal file
85
apps/sim/app/api/tools/confluence/user/route.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { validateJiraCloudId, validatePathSegment } from '@/lib/core/security/input-validation'
|
||||
import { getConfluenceCloudId } from '@/tools/confluence/utils'
|
||||
|
||||
const logger = createLogger('ConfluenceUserAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* Get a Confluence user by account ID.
|
||||
* Uses GET /wiki/rest/api/user?accountId={accountId}
|
||||
*/
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { domain, accessToken, accountId, cloudId: providedCloudId } = body
|
||||
|
||||
if (!domain) {
|
||||
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accessToken) {
|
||||
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!accountId) {
|
||||
return NextResponse.json({ error: 'Account ID is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Atlassian account IDs: 5d5bd05c3aee0123abc or 557058:6b9c9931-4693-49c1-8b3a-931f1af98134
|
||||
const accountIdValidation = validatePathSegment(accountId, {
|
||||
paramName: 'accountId',
|
||||
maxLength: 128,
|
||||
customPattern: /^[a-zA-Z0-9_|:-]+$/,
|
||||
})
|
||||
if (!accountIdValidation.isValid) {
|
||||
return NextResponse.json({ error: accountIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
|
||||
|
||||
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
|
||||
if (!cloudIdValidation.isValid) {
|
||||
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/user?accountId=${encodeURIComponent(accountId)}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => null)
|
||||
logger.error('Confluence API error response:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: JSON.stringify(errorData, null, 2),
|
||||
})
|
||||
const errorMessage =
|
||||
errorData?.message || `Failed to get Confluence user (${response.status})`
|
||||
return NextResponse.json({ error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
logger.error('Error getting Confluence user:', error)
|
||||
return NextResponse.json(
|
||||
{ error: (error as Error).message || 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
44
apps/sim/app/api/v1/admin/audit-logs/[id]/route.ts
Normal file
44
apps/sim/app/api/v1/admin/audit-logs/[id]/route.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
/**
|
||||
* GET /api/v1/admin/audit-logs/[id]
|
||||
*
|
||||
* Get a single audit log entry by ID.
|
||||
*
|
||||
* Response: AdminSingleResponse<AdminAuditLog>
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { auditLog } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
|
||||
import {
|
||||
internalErrorResponse,
|
||||
notFoundResponse,
|
||||
singleResponse,
|
||||
} from '@/app/api/v1/admin/responses'
|
||||
import { toAdminAuditLog } from '@/app/api/v1/admin/types'
|
||||
|
||||
const logger = createLogger('AdminAuditLogDetailAPI')
|
||||
|
||||
interface RouteParams {
|
||||
id: string
|
||||
}
|
||||
|
||||
export const GET = withAdminAuthParams<RouteParams>(async (request, context) => {
|
||||
const { id } = await context.params
|
||||
|
||||
try {
|
||||
const [log] = await db.select().from(auditLog).where(eq(auditLog.id, id)).limit(1)
|
||||
|
||||
if (!log) {
|
||||
return notFoundResponse('AuditLog')
|
||||
}
|
||||
|
||||
logger.info(`Admin API: Retrieved audit log ${id}`)
|
||||
|
||||
return singleResponse(toAdminAuditLog(log))
|
||||
} catch (error) {
|
||||
logger.error('Admin API: Failed to get audit log', { error, id })
|
||||
return internalErrorResponse('Failed to get audit log')
|
||||
}
|
||||
})
|
||||
96
apps/sim/app/api/v1/admin/audit-logs/route.ts
Normal file
96
apps/sim/app/api/v1/admin/audit-logs/route.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
/**
|
||||
* GET /api/v1/admin/audit-logs
|
||||
*
|
||||
* List all audit logs with pagination and filtering.
|
||||
*
|
||||
* Query Parameters:
|
||||
* - limit: number (default: 50, max: 250)
|
||||
* - offset: number (default: 0)
|
||||
* - action: string (optional) - Filter by action (e.g., "workflow.created")
|
||||
* - resourceType: string (optional) - Filter by resource type (e.g., "workflow")
|
||||
* - resourceId: string (optional) - Filter by resource ID
|
||||
* - workspaceId: string (optional) - Filter by workspace ID
|
||||
* - actorId: string (optional) - Filter by actor user ID
|
||||
* - actorEmail: string (optional) - Filter by actor email
|
||||
* - startDate: string (optional) - ISO 8601 date, filter createdAt >= startDate
|
||||
* - endDate: string (optional) - ISO 8601 date, filter createdAt <= endDate
|
||||
*
|
||||
* Response: AdminListResponse<AdminAuditLog>
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { auditLog } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, count, desc, eq, gte, lte, type SQL } from 'drizzle-orm'
|
||||
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
|
||||
import {
|
||||
badRequestResponse,
|
||||
internalErrorResponse,
|
||||
listResponse,
|
||||
} from '@/app/api/v1/admin/responses'
|
||||
import {
|
||||
type AdminAuditLog,
|
||||
createPaginationMeta,
|
||||
parsePaginationParams,
|
||||
toAdminAuditLog,
|
||||
} from '@/app/api/v1/admin/types'
|
||||
|
||||
const logger = createLogger('AdminAuditLogsAPI')
|
||||
|
||||
export const GET = withAdminAuth(async (request) => {
|
||||
const url = new URL(request.url)
|
||||
const { limit, offset } = parsePaginationParams(url)
|
||||
|
||||
const actionFilter = url.searchParams.get('action')
|
||||
const resourceTypeFilter = url.searchParams.get('resourceType')
|
||||
const resourceIdFilter = url.searchParams.get('resourceId')
|
||||
const workspaceIdFilter = url.searchParams.get('workspaceId')
|
||||
const actorIdFilter = url.searchParams.get('actorId')
|
||||
const actorEmailFilter = url.searchParams.get('actorEmail')
|
||||
const startDateFilter = url.searchParams.get('startDate')
|
||||
const endDateFilter = url.searchParams.get('endDate')
|
||||
|
||||
if (startDateFilter && Number.isNaN(Date.parse(startDateFilter))) {
|
||||
return badRequestResponse('Invalid startDate format. Use ISO 8601.')
|
||||
}
|
||||
if (endDateFilter && Number.isNaN(Date.parse(endDateFilter))) {
|
||||
return badRequestResponse('Invalid endDate format. Use ISO 8601.')
|
||||
}
|
||||
|
||||
try {
|
||||
const conditions: SQL<unknown>[] = []
|
||||
|
||||
if (actionFilter) conditions.push(eq(auditLog.action, actionFilter))
|
||||
if (resourceTypeFilter) conditions.push(eq(auditLog.resourceType, resourceTypeFilter))
|
||||
if (resourceIdFilter) conditions.push(eq(auditLog.resourceId, resourceIdFilter))
|
||||
if (workspaceIdFilter) conditions.push(eq(auditLog.workspaceId, workspaceIdFilter))
|
||||
if (actorIdFilter) conditions.push(eq(auditLog.actorId, actorIdFilter))
|
||||
if (actorEmailFilter) conditions.push(eq(auditLog.actorEmail, actorEmailFilter))
|
||||
if (startDateFilter) conditions.push(gte(auditLog.createdAt, new Date(startDateFilter)))
|
||||
if (endDateFilter) conditions.push(lte(auditLog.createdAt, new Date(endDateFilter)))
|
||||
|
||||
const whereClause = conditions.length > 0 ? and(...conditions) : undefined
|
||||
|
||||
const [countResult, logs] = await Promise.all([
|
||||
db.select({ total: count() }).from(auditLog).where(whereClause),
|
||||
db
|
||||
.select()
|
||||
.from(auditLog)
|
||||
.where(whereClause)
|
||||
.orderBy(desc(auditLog.createdAt))
|
||||
.limit(limit)
|
||||
.offset(offset),
|
||||
])
|
||||
|
||||
const total = countResult[0].total
|
||||
const data: AdminAuditLog[] = logs.map(toAdminAuditLog)
|
||||
const pagination = createPaginationMeta(total, limit, offset)
|
||||
|
||||
logger.info(`Admin API: Listed ${data.length} audit logs (total: ${total})`)
|
||||
|
||||
return listResponse(data, pagination)
|
||||
} catch (error) {
|
||||
logger.error('Admin API: Failed to list audit logs', { error })
|
||||
return internalErrorResponse('Failed to list audit logs')
|
||||
}
|
||||
})
|
||||
@@ -6,6 +6,7 @@
|
||||
*/
|
||||
|
||||
import type {
|
||||
auditLog,
|
||||
member,
|
||||
organization,
|
||||
referralCampaigns,
|
||||
@@ -694,3 +695,45 @@ export function toAdminReferralCampaign(
|
||||
updatedAt: dbCampaign.updatedAt.toISOString(),
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Audit Log Types
|
||||
// =============================================================================
|
||||
|
||||
export type DbAuditLog = InferSelectModel<typeof auditLog>
|
||||
|
||||
export interface AdminAuditLog {
|
||||
id: string
|
||||
workspaceId: string | null
|
||||
actorId: string | null
|
||||
actorName: string | null
|
||||
actorEmail: string | null
|
||||
action: string
|
||||
resourceType: string
|
||||
resourceId: string | null
|
||||
resourceName: string | null
|
||||
description: string | null
|
||||
metadata: unknown
|
||||
ipAddress: string | null
|
||||
userAgent: string | null
|
||||
createdAt: string
|
||||
}
|
||||
|
||||
export function toAdminAuditLog(dbLog: DbAuditLog): AdminAuditLog {
|
||||
return {
|
||||
id: dbLog.id,
|
||||
workspaceId: dbLog.workspaceId,
|
||||
actorId: dbLog.actorId,
|
||||
actorName: dbLog.actorName,
|
||||
actorEmail: dbLog.actorEmail,
|
||||
action: dbLog.action,
|
||||
resourceType: dbLog.resourceType,
|
||||
resourceId: dbLog.resourceId,
|
||||
resourceName: dbLog.resourceName,
|
||||
description: dbLog.description,
|
||||
metadata: dbLog.metadata,
|
||||
ipAddress: dbLog.ipAddress,
|
||||
userAgent: dbLog.userAgent,
|
||||
createdAt: dbLog.createdAt.toISOString(),
|
||||
}
|
||||
}
|
||||
|
||||
78
apps/sim/app/api/v1/audit-logs/[id]/route.ts
Normal file
78
apps/sim/app/api/v1/audit-logs/[id]/route.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
/**
|
||||
* GET /api/v1/audit-logs/[id]
|
||||
*
|
||||
* Get a single audit log entry by ID, scoped to the authenticated user's organization.
|
||||
* Requires enterprise subscription and org admin/owner role.
|
||||
*
|
||||
* Scope includes logs from current org members AND logs within org workspaces
|
||||
* (including those from departed members or system actions with null actorId).
|
||||
*
|
||||
* Response: { data: AuditLogEntry, limits: UserLimits }
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { auditLog, workspace } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, inArray, or } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { validateEnterpriseAuditAccess } from '@/app/api/v1/audit-logs/auth'
|
||||
import { formatAuditLogEntry } from '@/app/api/v1/audit-logs/format'
|
||||
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
|
||||
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
|
||||
|
||||
const logger = createLogger('V1AuditLogDetailAPI')
|
||||
|
||||
export const revalidate = 0
|
||||
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const rateLimit = await checkRateLimit(request, 'audit-logs')
|
||||
if (!rateLimit.allowed) {
|
||||
return createRateLimitResponse(rateLimit)
|
||||
}
|
||||
|
||||
const userId = rateLimit.userId!
|
||||
const { id } = await params
|
||||
|
||||
const authResult = await validateEnterpriseAuditAccess(userId)
|
||||
if (!authResult.success) {
|
||||
return authResult.response
|
||||
}
|
||||
|
||||
const { orgMemberIds } = authResult.context
|
||||
|
||||
const orgWorkspaceIds = db
|
||||
.select({ id: workspace.id })
|
||||
.from(workspace)
|
||||
.where(inArray(workspace.ownerId, orgMemberIds))
|
||||
|
||||
const [log] = await db
|
||||
.select()
|
||||
.from(auditLog)
|
||||
.where(
|
||||
and(
|
||||
eq(auditLog.id, id),
|
||||
or(
|
||||
inArray(auditLog.actorId, orgMemberIds),
|
||||
inArray(auditLog.workspaceId, orgWorkspaceIds)
|
||||
)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!log) {
|
||||
return NextResponse.json({ error: 'Audit log not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const limits = await getUserLimits(userId)
|
||||
const response = createApiResponse({ data: formatAuditLogEntry(log) }, limits, rateLimit)
|
||||
|
||||
return NextResponse.json(response.body, { headers: response.headers })
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Audit log detail fetch error`, { error: message })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
103
apps/sim/app/api/v1/audit-logs/auth.ts
Normal file
103
apps/sim/app/api/v1/audit-logs/auth.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
/**
|
||||
* Enterprise audit log authorization.
|
||||
*
|
||||
* Validates that the authenticated user is an admin/owner of an enterprise organization
|
||||
* and returns the organization context needed for scoped queries.
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { member, subscription } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { NextResponse } from 'next/server'
|
||||
|
||||
const logger = createLogger('V1AuditLogsAuth')
|
||||
|
||||
export interface EnterpriseAuditContext {
|
||||
organizationId: string
|
||||
orgMemberIds: string[]
|
||||
}
|
||||
|
||||
type AuthResult =
|
||||
| { success: true; context: EnterpriseAuditContext }
|
||||
| { success: false; response: NextResponse }
|
||||
|
||||
/**
|
||||
* Validates enterprise audit log access for the given user.
|
||||
*
|
||||
* Checks:
|
||||
* 1. User belongs to an organization
|
||||
* 2. User has admin or owner role
|
||||
* 3. Organization has an active enterprise subscription
|
||||
*
|
||||
* Returns the organization ID and all member user IDs on success,
|
||||
* or an error response on failure.
|
||||
*/
|
||||
export async function validateEnterpriseAuditAccess(userId: string): Promise<AuthResult> {
|
||||
const [membership] = await db
|
||||
.select({ organizationId: member.organizationId, role: member.role })
|
||||
.from(member)
|
||||
.where(eq(member.userId, userId))
|
||||
.limit(1)
|
||||
|
||||
if (!membership) {
|
||||
return {
|
||||
success: false,
|
||||
response: NextResponse.json({ error: 'Not a member of any organization' }, { status: 403 }),
|
||||
}
|
||||
}
|
||||
|
||||
if (membership.role !== 'admin' && membership.role !== 'owner') {
|
||||
return {
|
||||
success: false,
|
||||
response: NextResponse.json(
|
||||
{ error: 'Organization admin or owner role required' },
|
||||
{ status: 403 }
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
const [orgSub, orgMembers] = await Promise.all([
|
||||
db
|
||||
.select({ id: subscription.id })
|
||||
.from(subscription)
|
||||
.where(
|
||||
and(
|
||||
eq(subscription.referenceId, membership.organizationId),
|
||||
eq(subscription.plan, 'enterprise'),
|
||||
eq(subscription.status, 'active')
|
||||
)
|
||||
)
|
||||
.limit(1),
|
||||
db
|
||||
.select({ userId: member.userId })
|
||||
.from(member)
|
||||
.where(eq(member.organizationId, membership.organizationId)),
|
||||
])
|
||||
|
||||
if (orgSub.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
response: NextResponse.json(
|
||||
{ error: 'Active enterprise subscription required' },
|
||||
{ status: 403 }
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
const orgMemberIds = orgMembers.map((m) => m.userId)
|
||||
|
||||
logger.info('Enterprise audit access validated', {
|
||||
userId,
|
||||
organizationId: membership.organizationId,
|
||||
memberCount: orgMemberIds.length,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
context: {
|
||||
organizationId: membership.organizationId,
|
||||
orgMemberIds,
|
||||
},
|
||||
}
|
||||
}
|
||||
43
apps/sim/app/api/v1/audit-logs/format.ts
Normal file
43
apps/sim/app/api/v1/audit-logs/format.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
/**
|
||||
* Enterprise audit log response formatting.
|
||||
*
|
||||
* Defines the shape returned by the enterprise audit log API.
|
||||
* Excludes `ipAddress` and `userAgent` for privacy.
|
||||
*/
|
||||
|
||||
import type { auditLog } from '@sim/db/schema'
|
||||
import type { InferSelectModel } from 'drizzle-orm'
|
||||
|
||||
type DbAuditLog = InferSelectModel<typeof auditLog>
|
||||
|
||||
export interface EnterpriseAuditLogEntry {
|
||||
id: string
|
||||
workspaceId: string | null
|
||||
actorId: string | null
|
||||
actorName: string | null
|
||||
actorEmail: string | null
|
||||
action: string
|
||||
resourceType: string
|
||||
resourceId: string | null
|
||||
resourceName: string | null
|
||||
description: string | null
|
||||
metadata: unknown
|
||||
createdAt: string
|
||||
}
|
||||
|
||||
export function formatAuditLogEntry(log: DbAuditLog): EnterpriseAuditLogEntry {
|
||||
return {
|
||||
id: log.id,
|
||||
workspaceId: log.workspaceId,
|
||||
actorId: log.actorId,
|
||||
actorName: log.actorName,
|
||||
actorEmail: log.actorEmail,
|
||||
action: log.action,
|
||||
resourceType: log.resourceType,
|
||||
resourceId: log.resourceId,
|
||||
resourceName: log.resourceName,
|
||||
description: log.description,
|
||||
metadata: log.metadata,
|
||||
createdAt: log.createdAt.toISOString(),
|
||||
}
|
||||
}
|
||||
191
apps/sim/app/api/v1/audit-logs/route.ts
Normal file
191
apps/sim/app/api/v1/audit-logs/route.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
/**
|
||||
* GET /api/v1/audit-logs
|
||||
*
|
||||
* List audit logs scoped to the authenticated user's organization.
|
||||
* Requires enterprise subscription and org admin/owner role.
|
||||
*
|
||||
* Query Parameters:
|
||||
* - action: string (optional) - Filter by action (e.g., "workflow.created")
|
||||
* - resourceType: string (optional) - Filter by resource type (e.g., "workflow")
|
||||
* - resourceId: string (optional) - Filter by resource ID
|
||||
* - workspaceId: string (optional) - Filter by workspace ID
|
||||
* - actorId: string (optional) - Filter by actor user ID (must be an org member)
|
||||
* - startDate: string (optional) - ISO 8601 date, filter createdAt >= startDate
|
||||
* - endDate: string (optional) - ISO 8601 date, filter createdAt <= endDate
|
||||
* - includeDeparted: boolean (optional, default: false) - Include logs from departed members
|
||||
* - limit: number (optional, default: 50, max: 100)
|
||||
* - cursor: string (optional) - Opaque cursor for pagination
|
||||
*
|
||||
* Response: { data: AuditLogEntry[], nextCursor?: string, limits: UserLimits }
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { auditLog, workspace } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, desc, eq, gte, inArray, lt, lte, or, type SQL } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { validateEnterpriseAuditAccess } from '@/app/api/v1/audit-logs/auth'
|
||||
import { formatAuditLogEntry } from '@/app/api/v1/audit-logs/format'
|
||||
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
|
||||
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
|
||||
|
||||
const logger = createLogger('V1AuditLogsAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const revalidate = 0
|
||||
|
||||
const isoDateString = z.string().refine((val) => !Number.isNaN(Date.parse(val)), {
|
||||
message: 'Invalid date format. Use ISO 8601.',
|
||||
})
|
||||
|
||||
const QueryParamsSchema = z.object({
|
||||
action: z.string().optional(),
|
||||
resourceType: z.string().optional(),
|
||||
resourceId: z.string().optional(),
|
||||
workspaceId: z.string().optional(),
|
||||
actorId: z.string().optional(),
|
||||
startDate: isoDateString.optional(),
|
||||
endDate: isoDateString.optional(),
|
||||
includeDeparted: z
|
||||
.enum(['true', 'false'])
|
||||
.transform((val) => val === 'true')
|
||||
.optional()
|
||||
.default('false'),
|
||||
limit: z.coerce.number().min(1).max(100).optional().default(50),
|
||||
cursor: z.string().optional(),
|
||||
})
|
||||
|
||||
interface CursorData {
|
||||
createdAt: string
|
||||
id: string
|
||||
}
|
||||
|
||||
function encodeCursor(data: CursorData): string {
|
||||
return Buffer.from(JSON.stringify(data)).toString('base64')
|
||||
}
|
||||
|
||||
function decodeCursor(cursor: string): CursorData | null {
|
||||
try {
|
||||
return JSON.parse(Buffer.from(cursor, 'base64').toString())
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const rateLimit = await checkRateLimit(request, 'audit-logs')
|
||||
if (!rateLimit.allowed) {
|
||||
return createRateLimitResponse(rateLimit)
|
||||
}
|
||||
|
||||
const userId = rateLimit.userId!
|
||||
|
||||
const authResult = await validateEnterpriseAuditAccess(userId)
|
||||
if (!authResult.success) {
|
||||
return authResult.response
|
||||
}
|
||||
|
||||
const { orgMemberIds } = authResult.context
|
||||
|
||||
const { searchParams } = new URL(request.url)
|
||||
const rawParams = Object.fromEntries(searchParams.entries())
|
||||
const validationResult = QueryParamsSchema.safeParse(rawParams)
|
||||
|
||||
if (!validationResult.success) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid parameters', details: validationResult.error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const params = validationResult.data
|
||||
|
||||
if (params.actorId && !orgMemberIds.includes(params.actorId)) {
|
||||
return NextResponse.json(
|
||||
{ error: 'actorId is not a member of your organization' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
let scopeCondition: SQL<unknown>
|
||||
|
||||
if (params.includeDeparted) {
|
||||
const orgWorkspaces = await db
|
||||
.select({ id: workspace.id })
|
||||
.from(workspace)
|
||||
.where(inArray(workspace.ownerId, orgMemberIds))
|
||||
|
||||
const orgWorkspaceIds = orgWorkspaces.map((w) => w.id)
|
||||
|
||||
if (orgWorkspaceIds.length > 0) {
|
||||
scopeCondition = or(
|
||||
inArray(auditLog.actorId, orgMemberIds),
|
||||
inArray(auditLog.workspaceId, orgWorkspaceIds)
|
||||
)!
|
||||
} else {
|
||||
scopeCondition = inArray(auditLog.actorId, orgMemberIds)
|
||||
}
|
||||
} else {
|
||||
scopeCondition = inArray(auditLog.actorId, orgMemberIds)
|
||||
}
|
||||
|
||||
const conditions: SQL<unknown>[] = [scopeCondition]
|
||||
|
||||
if (params.action) conditions.push(eq(auditLog.action, params.action))
|
||||
if (params.resourceType) conditions.push(eq(auditLog.resourceType, params.resourceType))
|
||||
if (params.resourceId) conditions.push(eq(auditLog.resourceId, params.resourceId))
|
||||
if (params.workspaceId) conditions.push(eq(auditLog.workspaceId, params.workspaceId))
|
||||
if (params.actorId) conditions.push(eq(auditLog.actorId, params.actorId))
|
||||
if (params.startDate) conditions.push(gte(auditLog.createdAt, new Date(params.startDate)))
|
||||
if (params.endDate) conditions.push(lte(auditLog.createdAt, new Date(params.endDate)))
|
||||
|
||||
if (params.cursor) {
|
||||
const cursorData = decodeCursor(params.cursor)
|
||||
if (cursorData?.createdAt && cursorData.id) {
|
||||
const cursorDate = new Date(cursorData.createdAt)
|
||||
if (!Number.isNaN(cursorDate.getTime())) {
|
||||
conditions.push(
|
||||
or(
|
||||
lt(auditLog.createdAt, cursorDate),
|
||||
and(eq(auditLog.createdAt, cursorDate), lt(auditLog.id, cursorData.id))
|
||||
)!
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(auditLog)
|
||||
.where(and(...conditions))
|
||||
.orderBy(desc(auditLog.createdAt), desc(auditLog.id))
|
||||
.limit(params.limit + 1)
|
||||
|
||||
const hasMore = rows.length > params.limit
|
||||
const data = rows.slice(0, params.limit)
|
||||
|
||||
let nextCursor: string | undefined
|
||||
if (hasMore && data.length > 0) {
|
||||
const last = data[data.length - 1]
|
||||
nextCursor = encodeCursor({
|
||||
createdAt: last.createdAt.toISOString(),
|
||||
id: last.id,
|
||||
})
|
||||
}
|
||||
|
||||
const formattedLogs = data.map(formatAuditLogEntry)
|
||||
|
||||
const limits = await getUserLimits(userId)
|
||||
const response = createApiResponse({ data: formattedLogs, nextCursor }, limits, rateLimit)
|
||||
|
||||
return NextResponse.json(response.body, { headers: response.headers })
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Audit logs fetch error`, { error: message })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -19,7 +19,7 @@ export interface RateLimitResult {
|
||||
|
||||
export async function checkRateLimit(
|
||||
request: NextRequest,
|
||||
endpoint: 'logs' | 'logs-detail' | 'workflows' | 'workflow-detail' = 'logs'
|
||||
endpoint: 'logs' | 'logs-detail' | 'workflows' | 'workflow-detail' | 'audit-logs' = 'logs'
|
||||
): Promise<RateLimitResult> {
|
||||
try {
|
||||
const auth = await authenticateV1Request(request)
|
||||
|
||||
@@ -987,7 +987,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
const onChildWorkflowInstanceReady = (
|
||||
blockId: string,
|
||||
childWorkflowInstanceId: string,
|
||||
iterationContext?: IterationContext
|
||||
iterationContext?: IterationContext,
|
||||
executionOrder?: number
|
||||
) => {
|
||||
sendEvent({
|
||||
type: 'block:childWorkflowStarted',
|
||||
@@ -1001,6 +1002,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
iterationCurrent: iterationContext.iterationCurrent,
|
||||
iterationContainerId: iterationContext.iterationContainerId,
|
||||
}),
|
||||
...(executionOrder !== undefined && { executionOrder }),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
import type React from 'react'
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import { motion } from 'framer-motion'
|
||||
import { AlertCircle, Paperclip, Send, Square, X } from 'lucide-react'
|
||||
import { Tooltip } from '@/components/emcn'
|
||||
import { Paperclip, Send, Square, X } from 'lucide-react'
|
||||
import { Badge, Tooltip } from '@/components/emcn'
|
||||
import { VoiceInput } from '@/app/chat/components/input/voice-input'
|
||||
|
||||
const logger = createLogger('ChatInput')
|
||||
@@ -218,24 +218,12 @@ export const ChatInput: React.FC<{
|
||||
<div ref={wrapperRef} className='w-full max-w-3xl md:max-w-[748px]'>
|
||||
{/* Error Messages */}
|
||||
{uploadErrors.length > 0 && (
|
||||
<div className='mb-3'>
|
||||
<div className='rounded-lg border border-red-200 bg-red-50 p-3 dark:border-red-800/50 dark:bg-red-950/20'>
|
||||
<div className='flex items-start gap-2'>
|
||||
<AlertCircle className='mt-0.5 h-4 w-4 shrink-0 text-red-600 dark:text-red-400' />
|
||||
<div className='flex-1'>
|
||||
<div className='mb-1 font-medium text-red-800 text-sm dark:text-red-300'>
|
||||
File upload error
|
||||
</div>
|
||||
<div className='space-y-1'>
|
||||
{uploadErrors.map((error, idx) => (
|
||||
<div key={idx} className='text-red-700 text-sm dark:text-red-400'>
|
||||
{error}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className='mb-3 flex flex-col gap-2'>
|
||||
{uploadErrors.map((error, idx) => (
|
||||
<Badge key={idx} variant='red' size='lg' dot className='max-w-full'>
|
||||
{error}
|
||||
</Badge>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ interface TemplateCardProps {
|
||||
blocks?: string[]
|
||||
className?: string
|
||||
state?: WorkflowState
|
||||
description?: string | null
|
||||
isStarred?: boolean
|
||||
isVerified?: boolean
|
||||
}
|
||||
@@ -124,6 +125,7 @@ function TemplateCardInner({
|
||||
blocks = [],
|
||||
className,
|
||||
state,
|
||||
description,
|
||||
isStarred = false,
|
||||
isVerified = false,
|
||||
}: TemplateCardProps) {
|
||||
@@ -270,6 +272,12 @@ function TemplateCardInner({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{description && (
|
||||
<p className='mt-[4px] truncate pl-[2px] text-[12px] text-[var(--text-tertiary)]'>
|
||||
{description}
|
||||
</p>
|
||||
)}
|
||||
|
||||
<div className='mt-[10px] flex items-center justify-between'>
|
||||
<div className='flex min-w-0 items-center gap-[8px]'>
|
||||
{authorImageUrl ? (
|
||||
|
||||
@@ -196,6 +196,7 @@ export default function Templates({
|
||||
key={template.id}
|
||||
id={template.id}
|
||||
title={template.name}
|
||||
description={template.details?.tagline}
|
||||
author={template.creator?.name || 'Unknown'}
|
||||
authorImageUrl={template.creator?.profileImageUrl || null}
|
||||
usageCount={template.views.toString()}
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
Combobox,
|
||||
DatePicker,
|
||||
@@ -706,12 +707,10 @@ export function DocumentTagsModal({
|
||||
(def) =>
|
||||
def.displayName.toLowerCase() === editTagForm.displayName.toLowerCase()
|
||||
) && (
|
||||
<div className='rounded-[4px] border border-amber-500/50 bg-amber-500/10 p-[8px]'>
|
||||
<p className='text-[11px] text-amber-600 dark:text-amber-400'>
|
||||
Maximum tag definitions reached. You can still use existing tag
|
||||
definitions, but cannot create new ones.
|
||||
</p>
|
||||
</div>
|
||||
<Badge variant='amber' size='lg' dot className='max-w-full'>
|
||||
Maximum tag definitions reached. You can still use existing tag definitions,
|
||||
but cannot create new ones.
|
||||
</Badge>
|
||||
)}
|
||||
|
||||
<div className='flex gap-[8px]'>
|
||||
|
||||
@@ -18,6 +18,7 @@ interface TemplateCardProps {
|
||||
blocks?: string[]
|
||||
className?: string
|
||||
state?: WorkflowState
|
||||
description?: string | null
|
||||
isStarred?: boolean
|
||||
isVerified?: boolean
|
||||
}
|
||||
@@ -127,6 +128,7 @@ function TemplateCardInner({
|
||||
blocks = [],
|
||||
className,
|
||||
state,
|
||||
description,
|
||||
isStarred = false,
|
||||
isVerified = false,
|
||||
}: TemplateCardProps) {
|
||||
@@ -277,6 +279,12 @@ function TemplateCardInner({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{description && (
|
||||
<p className='mt-[4px] truncate pl-[2px] text-[12px] text-[var(--text-tertiary)]'>
|
||||
{description}
|
||||
</p>
|
||||
)}
|
||||
|
||||
<div className='mt-[10px] flex items-center justify-between'>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[6px]'>
|
||||
{authorImageUrl ? (
|
||||
|
||||
@@ -222,6 +222,7 @@ export default function Templates({
|
||||
key={template.id}
|
||||
id={template.id}
|
||||
title={template.name}
|
||||
description={template.details?.tagline}
|
||||
author={template.creator?.name || 'Unknown'}
|
||||
authorImageUrl={template.creator?.profileImageUrl || null}
|
||||
usageCount={template.views.toString()}
|
||||
|
||||
@@ -26,16 +26,21 @@ export interface CanvasMenuProps {
|
||||
onOpenLogs: () => void
|
||||
onToggleVariables: () => void
|
||||
onToggleChat: () => void
|
||||
onToggleWorkflowLock?: () => void
|
||||
isVariablesOpen?: boolean
|
||||
isChatOpen?: boolean
|
||||
hasClipboard?: boolean
|
||||
disableEdit?: boolean
|
||||
disableAdmin?: boolean
|
||||
canAdmin?: boolean
|
||||
canUndo?: boolean
|
||||
canRedo?: boolean
|
||||
isInvitationsDisabled?: boolean
|
||||
/** Whether the workflow has locked blocks (disables auto-layout) */
|
||||
hasLockedBlocks?: boolean
|
||||
/** Whether all blocks in the workflow are locked */
|
||||
allBlocksLocked?: boolean
|
||||
/** Whether the workflow has any blocks */
|
||||
hasBlocks?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -56,13 +61,17 @@ export function CanvasMenu({
|
||||
onOpenLogs,
|
||||
onToggleVariables,
|
||||
onToggleChat,
|
||||
onToggleWorkflowLock,
|
||||
isVariablesOpen = false,
|
||||
isChatOpen = false,
|
||||
hasClipboard = false,
|
||||
disableEdit = false,
|
||||
canAdmin = false,
|
||||
canUndo = false,
|
||||
canRedo = false,
|
||||
hasLockedBlocks = false,
|
||||
allBlocksLocked = false,
|
||||
hasBlocks = false,
|
||||
}: CanvasMenuProps) {
|
||||
return (
|
||||
<Popover
|
||||
@@ -142,6 +151,17 @@ export function CanvasMenu({
|
||||
<span>Auto-layout</span>
|
||||
<span className='ml-auto opacity-70 group-hover:opacity-100'>⇧L</span>
|
||||
</PopoverItem>
|
||||
{canAdmin && onToggleWorkflowLock && (
|
||||
<PopoverItem
|
||||
disabled={!hasBlocks}
|
||||
onClick={() => {
|
||||
onToggleWorkflowLock()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
<span>{allBlocksLocked ? 'Unlock workflow' : 'Lock workflow'}</span>
|
||||
</PopoverItem>
|
||||
)}
|
||||
<PopoverItem
|
||||
onClick={() => {
|
||||
onFitToView()
|
||||
|
||||
@@ -61,6 +61,9 @@ export const Notifications = memo(function Notifications() {
|
||||
case 'refresh':
|
||||
window.location.reload()
|
||||
break
|
||||
case 'unlock-workflow':
|
||||
window.dispatchEvent(new CustomEvent('unlock-workflow'))
|
||||
break
|
||||
default:
|
||||
logger.warn('Unknown action type', { notificationId, actionType: action.type })
|
||||
}
|
||||
@@ -175,7 +178,9 @@ export const Notifications = memo(function Notifications() {
|
||||
? 'Fix in Copilot'
|
||||
: notification.action!.type === 'refresh'
|
||||
? 'Refresh'
|
||||
: 'Take action'}
|
||||
: notification.action!.type === 'unlock-workflow'
|
||||
? 'Unlock Workflow'
|
||||
: 'Take action'}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -40,10 +40,12 @@ const SCOPE_DESCRIPTIONS: Record<string, string> = {
|
||||
'https://www.googleapis.com/auth/drive.file': 'View and manage Google Drive files',
|
||||
'https://www.googleapis.com/auth/drive': 'Access all Google Drive files',
|
||||
'https://www.googleapis.com/auth/calendar': 'View and manage calendar',
|
||||
'https://www.googleapis.com/auth/tasks': 'Create, read, update, and delete Google Tasks',
|
||||
'https://www.googleapis.com/auth/userinfo.email': 'View email address',
|
||||
'https://www.googleapis.com/auth/userinfo.profile': 'View basic profile info',
|
||||
'https://www.googleapis.com/auth/forms.body': 'View and manage Google Forms',
|
||||
'https://www.googleapis.com/auth/forms.responses.readonly': 'View responses to Google Forms',
|
||||
'https://www.googleapis.com/auth/bigquery': 'View and manage data in Google BigQuery',
|
||||
'https://www.googleapis.com/auth/ediscovery': 'Access Google Vault for eDiscovery',
|
||||
'https://www.googleapis.com/auth/devstorage.read_only': 'Read files from Google Cloud Storage',
|
||||
'https://www.googleapis.com/auth/admin.directory.group': 'Manage Google Workspace groups',
|
||||
@@ -81,6 +83,15 @@ const SCOPE_DESCRIPTIONS: Record<string, string> = {
|
||||
'write:content.property:confluence': 'Create and manage content properties',
|
||||
'read:hierarchical-content:confluence': 'View page hierarchy (children and ancestors)',
|
||||
'read:content.metadata:confluence': 'View content metadata (required for ancestors)',
|
||||
'read:user:confluence': 'View Confluence user profiles',
|
||||
'read:task:confluence': 'View Confluence inline tasks',
|
||||
'write:task:confluence': 'Update Confluence inline tasks',
|
||||
'delete:blogpost:confluence': 'Delete Confluence blog posts',
|
||||
'write:space:confluence': 'Create and update Confluence spaces',
|
||||
'delete:space:confluence': 'Delete Confluence spaces',
|
||||
'read:space.property:confluence': 'View Confluence space properties',
|
||||
'write:space.property:confluence': 'Create and manage space properties',
|
||||
'read:space.permission:confluence': 'View Confluence space permissions',
|
||||
'read:me': 'Read profile information',
|
||||
'database.read': 'Read database',
|
||||
'database.write': 'Write to database',
|
||||
@@ -301,6 +312,16 @@ const SCOPE_DESCRIPTIONS: Record<string, string> = {
|
||||
'user-follow-modify': 'Follow and unfollow artists and users',
|
||||
'user-read-playback-position': 'View playback position in podcasts',
|
||||
'ugc-image-upload': 'Upload images to Spotify playlists',
|
||||
// Attio
|
||||
'record_permission:read-write': 'Read and write CRM records',
|
||||
'object_configuration:read-write': 'Read and manage object schemas',
|
||||
'list_configuration:read-write': 'Read and manage list configurations',
|
||||
'list_entry:read-write': 'Read and write list entries',
|
||||
'note:read-write': 'Read and write notes',
|
||||
'task:read-write': 'Read and write tasks',
|
||||
'comment:read-write': 'Read and write comments and threads',
|
||||
'user_management:read': 'View workspace members',
|
||||
'webhook:read-write': 'Manage webhooks',
|
||||
}
|
||||
|
||||
function getScopeDescription(scope: string): string {
|
||||
|
||||
@@ -379,7 +379,7 @@ export function CredentialSelector({
|
||||
filterOptions={true}
|
||||
isLoading={credentialsLoading}
|
||||
overlayContent={overlayContent}
|
||||
className={selectedId || isCredentialSetSelected ? 'pl-[28px]' : ''}
|
||||
className={overlayContent ? 'pl-[28px]' : ''}
|
||||
/>
|
||||
|
||||
{needsUpdate && (
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { memo, useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ArrowUp, Square } from 'lucide-react'
|
||||
import { ArrowUp, Lock, Square, Unlock } from 'lucide-react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { useShallow } from 'zustand/react/shallow'
|
||||
import {
|
||||
@@ -41,8 +41,11 @@ import {
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/hooks'
|
||||
import { Variables } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/variables/variables'
|
||||
import { useAutoLayout } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-auto-layout'
|
||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
|
||||
import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution'
|
||||
import { getWorkflowLockToggleIds } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
|
||||
import { useDeleteWorkflow, useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
import { useChatStore } from '@/stores/chat/store'
|
||||
import { useNotificationStore } from '@/stores/notifications/store'
|
||||
@@ -126,6 +129,15 @@ export const Panel = memo(function Panel() {
|
||||
Object.values(state.blocks).some((block) => block.locked)
|
||||
)
|
||||
|
||||
const allBlocksLocked = useWorkflowStore((state) => {
|
||||
const blockList = Object.values(state.blocks)
|
||||
return blockList.length > 0 && blockList.every((block) => block.locked)
|
||||
})
|
||||
|
||||
const hasBlocks = useWorkflowStore((state) => Object.keys(state.blocks).length > 0)
|
||||
|
||||
const { collaborativeBatchToggleLocked } = useCollaborativeWorkflow()
|
||||
|
||||
// Delete workflow hook
|
||||
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
|
||||
workspaceId,
|
||||
@@ -192,6 +204,7 @@ export const Panel = memo(function Panel() {
|
||||
)
|
||||
|
||||
const currentWorkflow = activeWorkflowId ? workflows[activeWorkflowId] : null
|
||||
const { isSnapshotView } = useCurrentWorkflow()
|
||||
|
||||
/**
|
||||
* Mark hydration as complete on mount
|
||||
@@ -329,6 +342,17 @@ export const Panel = memo(function Panel() {
|
||||
workspaceId,
|
||||
])
|
||||
|
||||
/**
|
||||
* Toggles the locked state of all blocks in the workflow
|
||||
*/
|
||||
const handleToggleWorkflowLock = useCallback(() => {
|
||||
const blocks = useWorkflowStore.getState().blocks
|
||||
const allLocked = Object.values(blocks).every((b) => b.locked)
|
||||
const ids = getWorkflowLockToggleIds(blocks, !allLocked)
|
||||
if (ids.length > 0) collaborativeBatchToggleLocked(ids)
|
||||
setIsMenuOpen(false)
|
||||
}, [collaborativeBatchToggleLocked])
|
||||
|
||||
// Compute run button state
|
||||
const canRun = userPermissions.canRead // Running only requires read permissions
|
||||
const isLoadingPermissions = userPermissions.isLoading
|
||||
@@ -399,6 +423,16 @@ export const Panel = memo(function Panel() {
|
||||
<Layout className='h-3 w-3' animate={isAutoLayouting} variant='clockwise' />
|
||||
<span>Auto layout</span>
|
||||
</PopoverItem>
|
||||
{userPermissions.canAdmin && !isSnapshotView && (
|
||||
<PopoverItem onClick={handleToggleWorkflowLock} disabled={!hasBlocks}>
|
||||
{allBlocksLocked ? (
|
||||
<Unlock className='h-3 w-3' />
|
||||
) : (
|
||||
<Lock className='h-3 w-3' />
|
||||
)}
|
||||
<span>{allBlocksLocked ? 'Unlock workflow' : 'Lock workflow'}</span>
|
||||
</PopoverItem>
|
||||
)}
|
||||
{
|
||||
<PopoverItem onClick={() => setVariablesOpen(!isVariablesOpen)}>
|
||||
<VariableIcon className='h-3 w-3' />
|
||||
|
||||
@@ -160,12 +160,16 @@ const IterationNodeRow = memo(function IterationNodeRow({
|
||||
onSelectEntry,
|
||||
isExpanded,
|
||||
onToggle,
|
||||
expandedNodes,
|
||||
onToggleNode,
|
||||
}: {
|
||||
node: EntryNode
|
||||
selectedEntryId: string | null
|
||||
onSelectEntry: (entry: ConsoleEntry) => void
|
||||
isExpanded: boolean
|
||||
onToggle: () => void
|
||||
expandedNodes: Set<string>
|
||||
onToggleNode: (nodeId: string) => void
|
||||
}) {
|
||||
const { entry, children, iterationInfo } = node
|
||||
const hasError = Boolean(entry.error) || children.some((c) => c.entry.error)
|
||||
@@ -226,11 +230,13 @@ const IterationNodeRow = memo(function IterationNodeRow({
|
||||
{isExpanded && hasChildren && (
|
||||
<div className={ROW_STYLES.nested}>
|
||||
{children.map((child) => (
|
||||
<BlockRow
|
||||
<EntryNodeRow
|
||||
key={child.entry.id}
|
||||
entry={child.entry}
|
||||
isSelected={selectedEntryId === child.entry.id}
|
||||
onSelect={onSelectEntry}
|
||||
node={child}
|
||||
selectedEntryId={selectedEntryId}
|
||||
onSelectEntry={onSelectEntry}
|
||||
expandedNodes={expandedNodes}
|
||||
onToggleNode={onToggleNode}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
@@ -346,6 +352,8 @@ const SubflowNodeRow = memo(function SubflowNodeRow({
|
||||
onSelectEntry={onSelectEntry}
|
||||
isExpanded={expandedNodes.has(iterNode.entry.id)}
|
||||
onToggle={() => onToggleNode(iterNode.entry.id)}
|
||||
expandedNodes={expandedNodes}
|
||||
onToggleNode={onToggleNode}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
@@ -520,6 +528,8 @@ const EntryNodeRow = memo(function EntryNodeRow({
|
||||
onSelectEntry={onSelectEntry}
|
||||
isExpanded={expandedNodes.has(node.entry.id)}
|
||||
onToggle={() => onToggleNode(node.entry.id)}
|
||||
expandedNodes={expandedNodes}
|
||||
onToggleNode={onToggleNode}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1298,7 +1298,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
|
||||
{!isEnabled && !isLocked && <Badge variant='gray-secondary'>disabled</Badge>}
|
||||
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
|
||||
|
||||
{type === 'schedule' && shouldShowScheduleBadge && scheduleInfo?.isDisabled && (
|
||||
|
||||
@@ -554,6 +554,7 @@ export function useWorkflowExecution() {
|
||||
childWorkflowInstanceId: string
|
||||
iterationCurrent?: number
|
||||
iterationContainerId?: string
|
||||
executionOrder?: number
|
||||
}) => {
|
||||
if (isStaleExecution()) return
|
||||
updateConsole(
|
||||
@@ -564,6 +565,7 @@ export function useWorkflowExecution() {
|
||||
...(data.iterationContainerId !== undefined && {
|
||||
iterationContainerId: data.iterationContainerId,
|
||||
}),
|
||||
...(data.executionOrder !== undefined && { executionOrder: data.executionOrder }),
|
||||
},
|
||||
executionIdRef.current
|
||||
)
|
||||
|
||||
@@ -71,3 +71,38 @@ export function filterProtectedBlocks(
|
||||
allProtected: protectedIds.length === blockIds.length && blockIds.length > 0,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns block IDs ordered so that `batchToggleLocked` will target the desired state.
|
||||
*
|
||||
* `batchToggleLocked` determines its target locked state from `!firstBlock.locked`.
|
||||
* When `targetLocked` is true (lock all), an unlocked block must come first.
|
||||
* When `targetLocked` is false (unlock all), a locked block must come first.
|
||||
*
|
||||
* Returns an empty array when there are no blocks or all blocks already match `targetLocked`.
|
||||
*
|
||||
* @param blocks - Record of all blocks in the workflow
|
||||
* @param targetLocked - The desired locked state for all blocks
|
||||
* @returns Sorted block IDs, or empty array if no toggle is needed
|
||||
*/
|
||||
export function getWorkflowLockToggleIds(
|
||||
blocks: Record<string, BlockState>,
|
||||
targetLocked: boolean
|
||||
): string[] {
|
||||
const ids = Object.keys(blocks)
|
||||
if (ids.length === 0) return []
|
||||
|
||||
// No-op if all blocks already match the desired state
|
||||
const allMatch = Object.values(blocks).every((b) => Boolean(b.locked) === targetLocked)
|
||||
if (allMatch) return []
|
||||
|
||||
ids.sort((a, b) => {
|
||||
const aVal = blocks[a].locked ? 1 : 0
|
||||
const bVal = blocks[b].locked ? 1 : 0
|
||||
// To lock all (targetLocked=true): unlocked first (aVal - bVal)
|
||||
// To unlock all (targetLocked=false): locked first (bVal - aVal)
|
||||
return targetLocked ? aVal - bVal : bVal - aVal
|
||||
})
|
||||
|
||||
return ids
|
||||
}
|
||||
|
||||
@@ -57,6 +57,7 @@ import {
|
||||
estimateBlockDimensions,
|
||||
filterProtectedBlocks,
|
||||
getClampedPositionForNode,
|
||||
getWorkflowLockToggleIds,
|
||||
isBlockProtected,
|
||||
isEdgeProtected,
|
||||
isInEditableElement,
|
||||
@@ -393,6 +394,15 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
const { blocks, edges, lastSaved } = currentWorkflow
|
||||
|
||||
const allBlocksLocked = useMemo(() => {
|
||||
const blockList = Object.values(blocks)
|
||||
return blockList.length > 0 && blockList.every((b) => b.locked)
|
||||
}, [blocks])
|
||||
|
||||
const hasBlocks = useMemo(() => Object.keys(blocks).length > 0, [blocks])
|
||||
|
||||
const hasLockedBlocks = useMemo(() => Object.values(blocks).some((b) => b.locked), [blocks])
|
||||
|
||||
const isWorkflowReady = useMemo(
|
||||
() =>
|
||||
hydration.phase === 'ready' &&
|
||||
@@ -1175,6 +1185,91 @@ const WorkflowContent = React.memo(() => {
|
||||
collaborativeBatchToggleLocked(blockIds)
|
||||
}, [contextMenuBlocks, collaborativeBatchToggleLocked])
|
||||
|
||||
const handleToggleWorkflowLock = useCallback(() => {
|
||||
const currentBlocks = useWorkflowStore.getState().blocks
|
||||
const allLocked = Object.values(currentBlocks).every((b) => b.locked)
|
||||
const ids = getWorkflowLockToggleIds(currentBlocks, !allLocked)
|
||||
if (ids.length > 0) collaborativeBatchToggleLocked(ids)
|
||||
}, [collaborativeBatchToggleLocked])
|
||||
|
||||
// Show notification when all blocks in the workflow are locked
|
||||
const lockNotificationIdRef = useRef<string | null>(null)
|
||||
|
||||
const clearLockNotification = useCallback(() => {
|
||||
if (lockNotificationIdRef.current) {
|
||||
useNotificationStore.getState().removeNotification(lockNotificationIdRef.current)
|
||||
lockNotificationIdRef.current = null
|
||||
}
|
||||
}, [])
|
||||
|
||||
// Clear persisted lock notifications on mount/workflow change (prevents duplicates after reload)
|
||||
useEffect(() => {
|
||||
// Reset ref so the main effect creates a fresh notification for the new workflow
|
||||
clearLockNotification()
|
||||
|
||||
if (!activeWorkflowId) return
|
||||
const store = useNotificationStore.getState()
|
||||
const stale = store.notifications.filter(
|
||||
(n) =>
|
||||
n.workflowId === activeWorkflowId &&
|
||||
(n.action?.type === 'unlock-workflow' || n.message.startsWith('This workflow is locked'))
|
||||
)
|
||||
for (const n of stale) {
|
||||
store.removeNotification(n.id)
|
||||
}
|
||||
}, [activeWorkflowId, clearLockNotification])
|
||||
|
||||
const prevCanAdminRef = useRef(effectivePermissions.canAdmin)
|
||||
useEffect(() => {
|
||||
if (!isWorkflowReady) return
|
||||
|
||||
const canAdminChanged = prevCanAdminRef.current !== effectivePermissions.canAdmin
|
||||
prevCanAdminRef.current = effectivePermissions.canAdmin
|
||||
|
||||
// Clear stale notification when admin status changes so it recreates with correct message
|
||||
if (canAdminChanged) {
|
||||
clearLockNotification()
|
||||
}
|
||||
|
||||
if (allBlocksLocked) {
|
||||
if (lockNotificationIdRef.current) return
|
||||
|
||||
const isAdmin = effectivePermissions.canAdmin
|
||||
lockNotificationIdRef.current = addNotification({
|
||||
level: 'info',
|
||||
message: isAdmin
|
||||
? 'This workflow is locked'
|
||||
: 'This workflow is locked. Ask an admin to unlock it.',
|
||||
workflowId: activeWorkflowId || undefined,
|
||||
...(isAdmin ? { action: { type: 'unlock-workflow' as const, message: '' } } : {}),
|
||||
})
|
||||
} else {
|
||||
clearLockNotification()
|
||||
}
|
||||
}, [
|
||||
allBlocksLocked,
|
||||
isWorkflowReady,
|
||||
effectivePermissions.canAdmin,
|
||||
addNotification,
|
||||
activeWorkflowId,
|
||||
clearLockNotification,
|
||||
])
|
||||
|
||||
// Clean up notification on unmount
|
||||
useEffect(() => clearLockNotification, [clearLockNotification])
|
||||
|
||||
// Listen for unlock-workflow events from notification action button
|
||||
useEffect(() => {
|
||||
const handleUnlockWorkflow = () => {
|
||||
const currentBlocks = useWorkflowStore.getState().blocks
|
||||
const ids = getWorkflowLockToggleIds(currentBlocks, false)
|
||||
if (ids.length > 0) collaborativeBatchToggleLocked(ids)
|
||||
}
|
||||
|
||||
window.addEventListener('unlock-workflow', handleUnlockWorkflow)
|
||||
return () => window.removeEventListener('unlock-workflow', handleUnlockWorkflow)
|
||||
}, [collaborativeBatchToggleLocked])
|
||||
|
||||
const handleContextRemoveFromSubflow = useCallback(() => {
|
||||
const blocksToRemove = contextMenuBlocks.filter(
|
||||
(block) => block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')
|
||||
@@ -2439,6 +2534,16 @@ const WorkflowContent = React.memo(() => {
|
||||
window.removeEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener)
|
||||
}, [blocks, edgesForDisplay, getNodeAbsolutePosition, collaborativeBatchUpdateParent])
|
||||
|
||||
useEffect(() => {
|
||||
const handleToggleWorkflowLock = (e: CustomEvent<{ blockIds: string[] }>) => {
|
||||
collaborativeBatchToggleLocked(e.detail.blockIds)
|
||||
}
|
||||
|
||||
window.addEventListener('toggle-workflow-lock', handleToggleWorkflowLock as EventListener)
|
||||
return () =>
|
||||
window.removeEventListener('toggle-workflow-lock', handleToggleWorkflowLock as EventListener)
|
||||
}, [collaborativeBatchToggleLocked])
|
||||
|
||||
/**
|
||||
* Updates container dimensions in displayNodes during drag or keyboard movement.
|
||||
*/
|
||||
@@ -3699,7 +3804,11 @@ const WorkflowContent = React.memo(() => {
|
||||
disableEdit={!effectivePermissions.canEdit}
|
||||
canUndo={canUndo}
|
||||
canRedo={canRedo}
|
||||
hasLockedBlocks={Object.values(blocks).some((b) => b.locked)}
|
||||
hasLockedBlocks={hasLockedBlocks}
|
||||
onToggleWorkflowLock={handleToggleWorkflowLock}
|
||||
allBlocksLocked={allBlocksLocked}
|
||||
canAdmin={effectivePermissions.canAdmin}
|
||||
hasBlocks={hasBlocks}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
|
||||
@@ -142,7 +142,7 @@ export function ApiKeys({ onOpenChange }: ApiKeysProps) {
|
||||
strokeWidth={2}
|
||||
/>
|
||||
<Input
|
||||
placeholder='Search API keys...'
|
||||
placeholder='Search Sim keys...'
|
||||
value={searchTerm}
|
||||
onChange={(e) => setSearchTerm(e.target.value)}
|
||||
className='h-auto flex-1 border-0 bg-transparent p-0 font-base leading-none placeholder:text-[var(--text-tertiary)] focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
@@ -195,7 +195,7 @@ export function ApiKeys({ onOpenChange }: ApiKeysProps) {
|
||||
</div>
|
||||
{workspaceKeys.length === 0 ? (
|
||||
<div className='text-[13px] text-[var(--text-muted)]'>
|
||||
No workspace API keys yet
|
||||
No workspace Sim keys yet
|
||||
</div>
|
||||
) : (
|
||||
workspaceKeys.map((key) => (
|
||||
@@ -301,7 +301,7 @@ export function ApiKeys({ onOpenChange }: ApiKeysProps) {
|
||||
</div>
|
||||
{isConflict && (
|
||||
<div className='text-[12px] text-[var(--text-error)] leading-tight'>
|
||||
Workspace API key with the same name overrides this. Rename your
|
||||
Workspace Sim key with the same name overrides this. Rename your
|
||||
personal key to use it.
|
||||
</div>
|
||||
)}
|
||||
@@ -317,7 +317,7 @@ export function ApiKeys({ onOpenChange }: ApiKeysProps) {
|
||||
filteredWorkspaceKeys.length === 0 &&
|
||||
(personalKeys.length > 0 || workspaceKeys.length > 0) && (
|
||||
<div className='py-[16px] text-center text-[13px] text-[var(--text-muted)]'>
|
||||
No API keys found matching "{searchTerm}"
|
||||
No Sim keys found matching "{searchTerm}"
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
@@ -331,7 +331,7 @@ export function ApiKeys({ onOpenChange }: ApiKeysProps) {
|
||||
<div className='mt-auto flex items-center justify-between'>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
Allow personal API keys
|
||||
Allow personal Sim keys
|
||||
</span>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
@@ -383,7 +383,7 @@ export function ApiKeys({ onOpenChange }: ApiKeysProps) {
|
||||
{/* Delete Confirmation Dialog */}
|
||||
<Modal open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
|
||||
<ModalContent size='sm'>
|
||||
<ModalHeader>Delete API key</ModalHeader>
|
||||
<ModalHeader>Delete Sim key</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[12px] text-[var(--text-secondary)]'>
|
||||
Deleting{' '}
|
||||
|
||||
@@ -62,8 +62,8 @@ export function CreateApiKeyModal({
|
||||
if (isDuplicate) {
|
||||
setCreateError(
|
||||
keyType === 'workspace'
|
||||
? `A workspace API key named "${trimmedName}" already exists. Please choose a different name.`
|
||||
: `A personal API key named "${trimmedName}" already exists. Please choose a different name.`
|
||||
? `A workspace Sim key named "${trimmedName}" already exists. Please choose a different name.`
|
||||
: `A personal Sim key named "${trimmedName}" already exists. Please choose a different name.`
|
||||
)
|
||||
return
|
||||
}
|
||||
@@ -86,11 +86,11 @@ export function CreateApiKeyModal({
|
||||
} catch (error: unknown) {
|
||||
logger.error('API key creation failed:', { error })
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : 'Failed to create API key. Please try again.'
|
||||
error instanceof Error ? error.message : 'Failed to create Sim key. Please try again.'
|
||||
if (errorMessage.toLowerCase().includes('already exists')) {
|
||||
setCreateError(errorMessage)
|
||||
} else {
|
||||
setCreateError('Failed to create API key. Please check your connection and try again.')
|
||||
setCreateError('Failed to create Sim key. Please check your connection and try again.')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -113,7 +113,7 @@ export function CreateApiKeyModal({
|
||||
{/* Create API Key Dialog */}
|
||||
<Modal open={open} onOpenChange={onOpenChange}>
|
||||
<ModalContent size='sm'>
|
||||
<ModalHeader>Create new API key</ModalHeader>
|
||||
<ModalHeader>Create new Sim key</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[12px] text-[var(--text-secondary)]'>
|
||||
{keyType === 'workspace'
|
||||
@@ -125,7 +125,7 @@ export function CreateApiKeyModal({
|
||||
{canManageWorkspaceKeys && (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<p className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
API Key Type
|
||||
Sim Key Type
|
||||
</p>
|
||||
<ButtonGroup
|
||||
value={keyType}
|
||||
@@ -143,7 +143,7 @@ export function CreateApiKeyModal({
|
||||
)}
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<p className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
Enter a name for your API key to help you identify it later.
|
||||
Enter a name for your Sim key to help you identify it later.
|
||||
</p>
|
||||
{/* Hidden decoy fields to prevent browser autofill */}
|
||||
<input
|
||||
@@ -216,10 +216,10 @@ export function CreateApiKeyModal({
|
||||
}}
|
||||
>
|
||||
<ModalContent size='sm'>
|
||||
<ModalHeader>Your API key has been created</ModalHeader>
|
||||
<ModalHeader>Your Sim key has been created</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[12px] text-[var(--text-secondary)]'>
|
||||
This is the only time you will see your API key.{' '}
|
||||
This is the only time you will see your Sim key.{' '}
|
||||
<span className='font-semibold text-[var(--text-primary)]'>
|
||||
Copy it now and store it securely.
|
||||
</span>
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -6,10 +6,10 @@ interface CredentialsProps {
|
||||
onOpenChange?: (open: boolean) => void
|
||||
}
|
||||
|
||||
export function Credentials(_props: CredentialsProps) {
|
||||
export function Credentials({ onOpenChange }: CredentialsProps) {
|
||||
return (
|
||||
<div className='h-full min-h-0'>
|
||||
<CredentialsManager />
|
||||
<CredentialsManager onOpenChange={onOpenChange} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ChevronDown, Plus, Search, X } from 'lucide-react'
|
||||
import { Braces, ChevronDown, List, Plus, Search, X } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
Textarea,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { Input } from '@/components/ui'
|
||||
@@ -438,6 +439,9 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
const [showAddForm, setShowAddForm] = useState(false)
|
||||
const [formData, setFormData] = useState<McpServerFormData>(DEFAULT_FORM_DATA)
|
||||
const [isAddingServer, setIsAddingServer] = useState(false)
|
||||
const [addFormMode, setAddFormMode] = useState<'form' | 'json'>('form')
|
||||
const [jsonInput, setJsonInput] = useState('')
|
||||
const [jsonError, setJsonError] = useState<string | null>(null)
|
||||
|
||||
const [searchTerm, setSearchTerm] = useState('')
|
||||
const [deletingServers, setDeletingServers] = useState<Set<string>>(new Set())
|
||||
@@ -501,6 +505,9 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
const resetForm = useCallback(() => {
|
||||
setFormData(DEFAULT_FORM_DATA)
|
||||
setShowAddForm(false)
|
||||
setAddFormMode('form')
|
||||
setJsonInput('')
|
||||
setJsonError(null)
|
||||
resetEnvVarState()
|
||||
clearTestResult()
|
||||
}, [clearTestResult, resetEnvVarState])
|
||||
@@ -650,6 +657,138 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
}
|
||||
}, [formData, testConnection, createServerMutation, workspaceId, headersToRecord, resetForm])
|
||||
|
||||
/**
|
||||
* Extracts string-only headers from an unknown value.
|
||||
*/
|
||||
const extractStringHeaders = useCallback((headers: unknown): Record<string, string> => {
|
||||
if (typeof headers !== 'object' || headers === null) return {}
|
||||
return Object.fromEntries(
|
||||
Object.entries(headers).filter(
|
||||
(entry): entry is [string, string] => typeof entry[1] === 'string'
|
||||
)
|
||||
)
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Parses MCP JSON config into form data.
|
||||
* Accepts both `{ mcpServers: { name: { url, headers } } }` and `{ url, headers }` formats.
|
||||
*/
|
||||
const parseJsonConfig = useCallback(
|
||||
(json: string): { name: string; url: string; headers: Record<string, string> } | null => {
|
||||
try {
|
||||
const parsed = JSON.parse(json)
|
||||
|
||||
if (parsed.mcpServers && typeof parsed.mcpServers === 'object') {
|
||||
const entries = Object.entries(parsed.mcpServers)
|
||||
if (entries.length === 0) {
|
||||
setJsonError('No servers found in mcpServers')
|
||||
return null
|
||||
}
|
||||
const [name, config] = entries[0] as [string, Record<string, unknown>]
|
||||
if (!config.url || typeof config.url !== 'string') {
|
||||
setJsonError('Server config must include a "url" field')
|
||||
return null
|
||||
}
|
||||
setJsonError(null)
|
||||
return {
|
||||
name,
|
||||
url: config.url,
|
||||
headers: extractStringHeaders(config.headers),
|
||||
}
|
||||
}
|
||||
|
||||
if (parsed.url && typeof parsed.url === 'string') {
|
||||
setJsonError(null)
|
||||
return {
|
||||
name: '',
|
||||
url: parsed.url,
|
||||
headers: extractStringHeaders(parsed.headers),
|
||||
}
|
||||
}
|
||||
|
||||
setJsonError('JSON must contain "mcpServers" or a "url" field')
|
||||
return null
|
||||
} catch {
|
||||
setJsonError('Invalid JSON')
|
||||
return null
|
||||
}
|
||||
},
|
||||
[extractStringHeaders]
|
||||
)
|
||||
|
||||
/**
|
||||
* Validates parsed JSON config for name and domain requirements.
|
||||
* Returns the config if valid, null otherwise (sets jsonError on failure).
|
||||
*/
|
||||
const validateJsonConfig = useCallback((): {
|
||||
name: string
|
||||
url: string
|
||||
headers: Record<string, string>
|
||||
} | null => {
|
||||
const config = parseJsonConfig(jsonInput)
|
||||
if (!config) return null
|
||||
|
||||
if (!config.name) {
|
||||
setJsonError(
|
||||
'Server name is required. Use the mcpServers format: { "mcpServers": { "name": { ... } } }'
|
||||
)
|
||||
return null
|
||||
}
|
||||
|
||||
if (!isDomainAllowed(config.url, allowedMcpDomains)) {
|
||||
setJsonError('Domain not permitted by server policy')
|
||||
return null
|
||||
}
|
||||
|
||||
return config
|
||||
}, [jsonInput, parseJsonConfig, allowedMcpDomains])
|
||||
|
||||
/**
|
||||
* Adds an MCP server from parsed JSON config.
|
||||
*/
|
||||
const handleAddServerFromJson = useCallback(async () => {
|
||||
const config = validateJsonConfig()
|
||||
if (!config) return
|
||||
|
||||
setIsAddingServer(true)
|
||||
try {
|
||||
const serverConfig = {
|
||||
name: config.name,
|
||||
transport: 'streamable-http' as const,
|
||||
url: config.url,
|
||||
headers: config.headers,
|
||||
timeout: 30000,
|
||||
workspaceId,
|
||||
}
|
||||
|
||||
const connectionResult = await testConnection(serverConfig)
|
||||
|
||||
if (!connectionResult.success) {
|
||||
logger.error('Connection test failed, server not added:', connectionResult.error)
|
||||
return
|
||||
}
|
||||
|
||||
await createServerMutation.mutateAsync({
|
||||
workspaceId,
|
||||
config: {
|
||||
name: config.name,
|
||||
transport: 'streamable-http',
|
||||
url: config.url,
|
||||
timeout: 30000,
|
||||
headers: config.headers,
|
||||
enabled: true,
|
||||
},
|
||||
})
|
||||
|
||||
logger.info(`Added MCP server from JSON: ${config.name}`)
|
||||
resetForm()
|
||||
} catch (error) {
|
||||
logger.error('Failed to add MCP server from JSON:', error)
|
||||
} finally {
|
||||
setIsAddingServer(false)
|
||||
}
|
||||
}, [validateJsonConfig, testConnection, createServerMutation, workspaceId, resetForm])
|
||||
|
||||
/**
|
||||
* Opens the delete confirmation dialog for an MCP server.
|
||||
*/
|
||||
@@ -1458,102 +1597,184 @@ export function MCP({ initialServerId }: MCPProps) {
|
||||
{shouldShowForm && !serversLoading && (
|
||||
<div className='rounded-[8px] border p-[10px]'>
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<FormField label='Server Name'>
|
||||
<EmcnInput
|
||||
placeholder='e.g., My MCP Server'
|
||||
value={formData.name}
|
||||
onChange={(e) => {
|
||||
if (testResult) clearTestResult()
|
||||
handleNameChange(e.target.value)
|
||||
}}
|
||||
className='h-9'
|
||||
/>
|
||||
</FormField>
|
||||
<div className='flex items-center justify-end'>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
onClick={() => {
|
||||
if (testResult) clearTestResult()
|
||||
setAddFormMode(addFormMode === 'form' ? 'json' : 'form')
|
||||
setJsonError(null)
|
||||
}}
|
||||
className='h-6 w-6 p-0'
|
||||
>
|
||||
{addFormMode === 'form' ? (
|
||||
<Braces className='h-3.5 w-3.5' />
|
||||
) : (
|
||||
<List className='h-3.5 w-3.5' />
|
||||
)}
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
{addFormMode === 'form' ? 'Switch to JSON' : 'Switch to form'}
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
|
||||
<FormField label='Server URL'>
|
||||
<FormattedInput
|
||||
ref={urlInputRef}
|
||||
placeholder='https://mcp.server.dev/{{YOUR_API_KEY}}/sse'
|
||||
value={formData.url || ''}
|
||||
scrollLeft={urlScrollLeft}
|
||||
showEnvVars={showEnvVars && activeInputField === 'url'}
|
||||
envVarProps={{
|
||||
searchTerm: envSearchTerm,
|
||||
cursorPosition,
|
||||
workspaceId,
|
||||
onSelect: handleEnvVarSelect,
|
||||
onClose: resetEnvVarState,
|
||||
}}
|
||||
availableEnvVars={availableEnvVars}
|
||||
onChange={(e) => handleInputChange('url', e.target.value)}
|
||||
onScroll={(scrollLeft) => handleUrlScroll(scrollLeft)}
|
||||
/>
|
||||
{isAddDomainBlocked && (
|
||||
<p className='mt-[4px] text-[12px] text-[var(--text-error)]'>
|
||||
Domain not permitted by server policy
|
||||
</p>
|
||||
)}
|
||||
</FormField>
|
||||
{addFormMode === 'json' ? (
|
||||
<>
|
||||
<Textarea
|
||||
placeholder={`{\n "mcpServers": {\n "server-name": {\n "url": "https://...",\n "headers": {\n "X-API-Key": "..."\n }\n }\n }\n}`}
|
||||
value={jsonInput}
|
||||
onChange={(e) => {
|
||||
setJsonInput(e.target.value)
|
||||
if (jsonError) setJsonError(null)
|
||||
if (testResult) clearTestResult()
|
||||
}}
|
||||
className='min-h-[160px] resize-none font-mono text-[13px]'
|
||||
/>
|
||||
{jsonError && <p className='text-[12px] text-[var(--text-error)]'>{jsonError}</p>}
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
Headers
|
||||
</span>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
onClick={handleAddHeader}
|
||||
className='h-6 w-6 p-0'
|
||||
>
|
||||
<Plus className='h-3 w-3' />
|
||||
</Button>
|
||||
</div>
|
||||
<div className='flex items-center justify-between pt-[12px]'>
|
||||
<Button
|
||||
variant='default'
|
||||
onClick={() => {
|
||||
const config = validateJsonConfig()
|
||||
if (!config) return
|
||||
testConnection({
|
||||
name: config.name,
|
||||
transport: 'streamable-http',
|
||||
url: config.url,
|
||||
headers: config.headers,
|
||||
timeout: 30000,
|
||||
workspaceId,
|
||||
})
|
||||
}}
|
||||
disabled={isTestingConnection || !jsonInput.trim()}
|
||||
>
|
||||
{testButtonLabel}
|
||||
</Button>
|
||||
|
||||
<div className='flex max-h-[140px] flex-col gap-[8px] overflow-y-auto'>
|
||||
{(formData.headers || []).map((header, index) => (
|
||||
<HeaderRow
|
||||
key={index}
|
||||
header={header}
|
||||
index={index}
|
||||
headerScrollLeft={headerScrollLeft}
|
||||
showEnvVars={showEnvVars}
|
||||
activeInputField={activeInputField}
|
||||
activeHeaderIndex={activeHeaderIndex}
|
||||
envSearchTerm={envSearchTerm}
|
||||
cursorPosition={cursorPosition}
|
||||
workspaceId={workspaceId}
|
||||
availableEnvVars={availableEnvVars}
|
||||
onInputChange={handleInputChange}
|
||||
onHeaderScroll={handleHeaderScroll}
|
||||
onEnvVarSelect={handleEnvVarSelect}
|
||||
onEnvVarClose={resetEnvVarState}
|
||||
onRemove={() => handleRemoveHeader(index)}
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Button variant='ghost' onClick={handleCancelForm}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleAddServerFromJson}
|
||||
disabled={isAddingServer || !jsonInput.trim()}
|
||||
variant='tertiary'
|
||||
>
|
||||
{isAddingServer ? 'Adding...' : 'Add Server'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<FormField label='Server Name'>
|
||||
<EmcnInput
|
||||
placeholder='e.g., My MCP Server'
|
||||
value={formData.name}
|
||||
onChange={(e) => {
|
||||
if (testResult) clearTestResult()
|
||||
handleNameChange(e.target.value)
|
||||
}}
|
||||
className='h-9'
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</FormField>
|
||||
|
||||
<div className='flex items-center justify-between pt-[12px]'>
|
||||
<Button
|
||||
variant='default'
|
||||
onClick={handleTestConnection}
|
||||
disabled={isTestingConnection || !isFormValid || isAddDomainBlocked}
|
||||
>
|
||||
{testButtonLabel}
|
||||
</Button>
|
||||
<FormField label='Server URL'>
|
||||
<FormattedInput
|
||||
ref={urlInputRef}
|
||||
placeholder='https://mcp.server.dev/{{YOUR_API_KEY}}/sse'
|
||||
value={formData.url || ''}
|
||||
scrollLeft={urlScrollLeft}
|
||||
showEnvVars={showEnvVars && activeInputField === 'url'}
|
||||
envVarProps={{
|
||||
searchTerm: envSearchTerm,
|
||||
cursorPosition,
|
||||
workspaceId,
|
||||
onSelect: handleEnvVarSelect,
|
||||
onClose: resetEnvVarState,
|
||||
}}
|
||||
availableEnvVars={availableEnvVars}
|
||||
onChange={(e) => handleInputChange('url', e.target.value)}
|
||||
onScroll={(scrollLeft) => handleUrlScroll(scrollLeft)}
|
||||
/>
|
||||
{isAddDomainBlocked && (
|
||||
<p className='mt-[4px] text-[12px] text-[var(--text-error)]'>
|
||||
Domain not permitted by server policy
|
||||
</p>
|
||||
)}
|
||||
</FormField>
|
||||
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Button variant='ghost' onClick={handleCancelForm}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button onClick={handleAddServer} disabled={isSubmitDisabled} variant='tertiary'>
|
||||
{isSubmitDisabled && isFormValid && !isAddDomainBlocked
|
||||
? 'Adding...'
|
||||
: 'Add Server'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<div className='flex items-center justify-between'>
|
||||
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||
Headers
|
||||
</span>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
onClick={handleAddHeader}
|
||||
className='h-6 w-6 p-0'
|
||||
>
|
||||
<Plus className='h-3 w-3' />
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className='flex max-h-[140px] flex-col gap-[8px] overflow-y-auto'>
|
||||
{(formData.headers || []).map((header, index) => (
|
||||
<HeaderRow
|
||||
key={index}
|
||||
header={header}
|
||||
index={index}
|
||||
headerScrollLeft={headerScrollLeft}
|
||||
showEnvVars={showEnvVars}
|
||||
activeInputField={activeInputField}
|
||||
activeHeaderIndex={activeHeaderIndex}
|
||||
envSearchTerm={envSearchTerm}
|
||||
cursorPosition={cursorPosition}
|
||||
workspaceId={workspaceId}
|
||||
availableEnvVars={availableEnvVars}
|
||||
onInputChange={handleInputChange}
|
||||
onHeaderScroll={handleHeaderScroll}
|
||||
onEnvVarSelect={handleEnvVarSelect}
|
||||
onEnvVarClose={resetEnvVarState}
|
||||
onRemove={() => handleRemoveHeader(index)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex items-center justify-between pt-[12px]'>
|
||||
<Button
|
||||
variant='default'
|
||||
onClick={handleTestConnection}
|
||||
disabled={isTestingConnection || !isFormValid || isAddDomainBlocked}
|
||||
>
|
||||
{testButtonLabel}
|
||||
</Button>
|
||||
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Button variant='ghost' onClick={handleCancelForm}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleAddServer}
|
||||
disabled={isSubmitDisabled}
|
||||
variant='tertiary'
|
||||
>
|
||||
{isSubmitDisabled && isFormValid && !isAddDomainBlocked
|
||||
? 'Adding...'
|
||||
: 'Add Server'}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Check, Clipboard, Plus, Search } from 'lucide-react'
|
||||
import { Check, Clipboard, Plus, Search, Server } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Badge,
|
||||
@@ -31,6 +31,7 @@ import { Input, Skeleton } from '@/components/ui'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useApiKeys } from '@/hooks/queries/api-keys'
|
||||
import { useCreateMcpServer } from '@/hooks/queries/mcp'
|
||||
import {
|
||||
useAddWorkflowMcpTool,
|
||||
useCreateWorkflowMcpServer,
|
||||
@@ -56,7 +57,7 @@ interface ServerDetailViewProps {
|
||||
onBack: () => void
|
||||
}
|
||||
|
||||
type McpClientType = 'cursor' | 'claude-code' | 'claude-desktop' | 'vscode'
|
||||
type McpClientType = 'sim' | 'cursor' | 'claude-code' | 'claude-desktop' | 'vscode'
|
||||
|
||||
function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewProps) {
|
||||
const { data, isLoading, error } = useWorkflowMcpServer(workspaceId, serverId)
|
||||
@@ -82,6 +83,18 @@ function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewPro
|
||||
const canManageWorkspaceKeys = userPermissions.canAdmin
|
||||
const defaultKeyType = allowPersonalApiKeys ? 'personal' : 'workspace'
|
||||
|
||||
const addToWorkspaceMutation = useCreateMcpServer()
|
||||
const [addedToWorkspace, setAddedToWorkspace] = useState(false)
|
||||
const addedToWorkspaceTimerRef = useRef<ReturnType<typeof setTimeout>>(null)
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (addedToWorkspaceTimerRef.current) {
|
||||
clearTimeout(addedToWorkspaceTimerRef.current)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
const [copiedConfig, setCopiedConfig] = useState(false)
|
||||
const [activeConfigTab, setActiveConfigTab] = useState<McpClientType>('cursor')
|
||||
const [toolToDelete, setToolToDelete] = useState<WorkflowMcpTool | null>(null)
|
||||
@@ -178,6 +191,10 @@ function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewPro
|
||||
.replace(/\s+/g, '-')
|
||||
.replace(/[^a-z0-9-]/g, '')
|
||||
|
||||
if (client === 'sim') {
|
||||
return ''
|
||||
}
|
||||
|
||||
if (client === 'claude-code') {
|
||||
if (isPublic) {
|
||||
return `claude mcp add "${safeName}" --url "${mcpServerUrl}"`
|
||||
@@ -450,6 +467,7 @@ function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewPro
|
||||
value={activeConfigTab}
|
||||
onValueChange={(v) => setActiveConfigTab(v as McpClientType)}
|
||||
>
|
||||
<ButtonGroupItem value='sim'>Sim</ButtonGroupItem>
|
||||
<ButtonGroupItem value='cursor'>Cursor</ButtonGroupItem>
|
||||
<ButtonGroupItem value='claude-code'>Claude Code</ButtonGroupItem>
|
||||
<ButtonGroupItem value='claude-desktop'>Claude Desktop</ButtonGroupItem>
|
||||
@@ -457,56 +475,127 @@ function ServerDetailView({ workspaceId, serverId, onBack }: ServerDetailViewPro
|
||||
</ButtonGroup>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div className='mb-[6.5px] flex items-center justify-between'>
|
||||
<span className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Configuration
|
||||
</span>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={() => handleCopyConfig(server.isPublic, server.name)}
|
||||
className='!p-1.5 -my-1.5'
|
||||
>
|
||||
{copiedConfig ? (
|
||||
<Check className='h-3 w-3' />
|
||||
) : (
|
||||
<Clipboard className='h-3 w-3' />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
<div className='relative'>
|
||||
<Code.Viewer
|
||||
code={getConfigSnippet(activeConfigTab, server.isPublic, server.name)}
|
||||
language={activeConfigTab === 'claude-code' ? 'javascript' : 'json'}
|
||||
wrapText
|
||||
className='!min-h-0 rounded-[4px] border border-[var(--border-1)]'
|
||||
/>
|
||||
{activeConfigTab === 'cursor' && (
|
||||
<a
|
||||
href={getCursorInstallUrl(server.isPublic, server.name)}
|
||||
className='absolute top-[6px] right-2'
|
||||
{activeConfigTab === 'sim' ? (
|
||||
<div className='rounded-[8px] border border-[var(--border-1)] p-[16px]'>
|
||||
<div className='flex flex-col gap-[12px]'>
|
||||
<p className='text-[13px] text-[var(--text-secondary)]'>
|
||||
Add this MCP server to your workspace so you can use its tools in other
|
||||
workflows via the MCP block.
|
||||
</p>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
disabled={addToWorkspaceMutation.isPending || addedToWorkspace}
|
||||
onClick={async () => {
|
||||
try {
|
||||
const headers: Record<string, string> = server.isPublic
|
||||
? {}
|
||||
: { 'X-API-Key': '{{SIM_API_KEY}}' }
|
||||
await addToWorkspaceMutation.mutateAsync({
|
||||
workspaceId,
|
||||
config: {
|
||||
name: server.name,
|
||||
transport: 'streamable-http',
|
||||
url: mcpServerUrl,
|
||||
timeout: 30000,
|
||||
headers,
|
||||
enabled: true,
|
||||
},
|
||||
})
|
||||
setAddedToWorkspace(true)
|
||||
addedToWorkspaceTimerRef.current = setTimeout(
|
||||
() => setAddedToWorkspace(false),
|
||||
3000
|
||||
)
|
||||
} catch (err) {
|
||||
logger.error('Failed to add server to workspace:', err)
|
||||
}
|
||||
}}
|
||||
>
|
||||
<img
|
||||
src='https://cursor.com/deeplink/mcp-install-dark.svg'
|
||||
alt='Add to Cursor'
|
||||
className='h-[26px]'
|
||||
/>
|
||||
</a>
|
||||
{addToWorkspaceMutation.isPending ? (
|
||||
'Adding...'
|
||||
) : addedToWorkspace ? (
|
||||
<>
|
||||
<Check className='mr-[6px] h-[13px] w-[13px]' />
|
||||
Added to Workspace
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Server className='mr-[6px] h-[13px] w-[13px]' />
|
||||
Add to Workspace
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
{!server.isPublic && (
|
||||
<p className='text-[11px] text-[var(--text-muted)]'>
|
||||
Set the SIM_API_KEY environment variable, or{' '}
|
||||
<button
|
||||
type='button'
|
||||
onClick={() => setShowCreateApiKeyModal(true)}
|
||||
className='underline hover:text-[var(--text-secondary)]'
|
||||
>
|
||||
create one now
|
||||
</button>
|
||||
</p>
|
||||
)}
|
||||
{addToWorkspaceMutation.isError && (
|
||||
<p className='text-[11px] text-[var(--text-error)]'>
|
||||
{addToWorkspaceMutation.error?.message || 'Failed to add server'}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div>
|
||||
<div className='mb-[6.5px] flex items-center justify-between'>
|
||||
<span className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
Configuration
|
||||
</span>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={() => handleCopyConfig(server.isPublic, server.name)}
|
||||
className='!p-1.5 -my-1.5'
|
||||
>
|
||||
{copiedConfig ? (
|
||||
<Check className='h-3 w-3' />
|
||||
) : (
|
||||
<Clipboard className='h-3 w-3' />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
<div className='relative'>
|
||||
<Code.Viewer
|
||||
code={getConfigSnippet(activeConfigTab, server.isPublic, server.name)}
|
||||
language={activeConfigTab === 'claude-code' ? 'javascript' : 'json'}
|
||||
wrapText
|
||||
className='!min-h-0 rounded-[4px] border border-[var(--border-1)]'
|
||||
/>
|
||||
{activeConfigTab === 'cursor' && (
|
||||
<a
|
||||
href={getCursorInstallUrl(server.isPublic, server.name)}
|
||||
className='absolute top-[6px] right-2 inline-flex rounded-[6px] bg-[var(--surface-5)] ring-1 ring-[var(--border-1)] focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-[var(--brand-primary)] focus-visible:ring-offset-2 focus-visible:ring-offset-[var(--surface-2)]'
|
||||
>
|
||||
<img
|
||||
src='https://cursor.com/deeplink/mcp-install-dark.svg'
|
||||
alt='Add to Cursor'
|
||||
className='h-[26px] rounded-[6px] align-middle'
|
||||
/>
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
{!server.isPublic && (
|
||||
<p className='mt-[8px] text-[11px] text-[var(--text-muted)]'>
|
||||
Replace $SIM_API_KEY with your API key, or{' '}
|
||||
<button
|
||||
type='button'
|
||||
onClick={() => setShowCreateApiKeyModal(true)}
|
||||
className='underline hover:text-[var(--text-secondary)]'
|
||||
>
|
||||
create one now
|
||||
</button>
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
{!server.isPublic && (
|
||||
<p className='mt-[8px] text-[11px] text-[var(--text-muted)]'>
|
||||
Replace $SIM_API_KEY with your API key, or{' '}
|
||||
<button
|
||||
type='button'
|
||||
onClick={() => setShowCreateApiKeyModal(true)}
|
||||
className='underline hover:text-[var(--text-secondary)]'
|
||||
>
|
||||
create one now
|
||||
</button>
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</SModalTabsContent>
|
||||
</SModalTabsBody>
|
||||
|
||||
@@ -19,7 +19,6 @@ import {
|
||||
} from 'lucide-react'
|
||||
import {
|
||||
Card,
|
||||
Connections,
|
||||
HexSimple,
|
||||
Key,
|
||||
SModal,
|
||||
@@ -32,6 +31,7 @@ import {
|
||||
SModalSidebarItem,
|
||||
SModalSidebarSection,
|
||||
SModalSidebarSectionTitle,
|
||||
TerminalWindow,
|
||||
} from '@/components/emcn'
|
||||
import { AgentSkillsIcon, McpIcon } from '@/components/icons'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
@@ -153,11 +153,11 @@ const allNavigationItems: NavigationItem[] = [
|
||||
requiresHosted: true,
|
||||
requiresTeam: true,
|
||||
},
|
||||
{ id: 'credentials', label: 'Credentials', icon: Connections, section: 'account' },
|
||||
{ id: 'credentials', label: 'Secrets', icon: Key, section: 'account' },
|
||||
{ id: 'custom-tools', label: 'Custom Tools', icon: Wrench, section: 'tools' },
|
||||
{ id: 'skills', label: 'Skills', icon: AgentSkillsIcon, section: 'tools' },
|
||||
{ id: 'mcp', label: 'MCP Tools', icon: McpIcon, section: 'tools' },
|
||||
{ id: 'apikeys', label: 'API Keys', icon: Key, section: 'system' },
|
||||
{ id: 'apikeys', label: 'Sim Keys', icon: TerminalWindow, section: 'system' },
|
||||
{ id: 'workflow-mcp-servers', label: 'MCP Servers', icon: Server, section: 'system' },
|
||||
{
|
||||
id: 'byok',
|
||||
@@ -449,7 +449,18 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
}
|
||||
}
|
||||
|
||||
const { hasUnsavedChanges, onCloseAttempt, setHasUnsavedChanges, setOnCloseAttempt } =
|
||||
useSettingsModalStore()
|
||||
|
||||
const handleDialogOpenChange = (newOpen: boolean) => {
|
||||
if (!newOpen && hasUnsavedChanges && onCloseAttempt) {
|
||||
onCloseAttempt()
|
||||
return
|
||||
}
|
||||
if (!newOpen) {
|
||||
setHasUnsavedChanges(false)
|
||||
setOnCloseAttempt(null)
|
||||
}
|
||||
onOpenChange(newOpen)
|
||||
}
|
||||
|
||||
@@ -461,7 +472,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
|
||||
</VisuallyHidden.Root>
|
||||
<VisuallyHidden.Root>
|
||||
<DialogPrimitive.Description>
|
||||
Configure your workspace settings, credentials, and preferences
|
||||
Configure your workspace settings, secrets, and preferences
|
||||
</DialogPrimitive.Description>
|
||||
</VisuallyHidden.Root>
|
||||
|
||||
|
||||
@@ -281,6 +281,24 @@ interface ContextMenuProps {
|
||||
* Set to true when user cannot leave (e.g., last admin)
|
||||
*/
|
||||
disableLeave?: boolean
|
||||
/**
|
||||
* Callback when lock/unlock is clicked
|
||||
*/
|
||||
onToggleLock?: () => void
|
||||
/**
|
||||
* Whether to show the lock option (default: false)
|
||||
* Set to true for workflows that support locking
|
||||
*/
|
||||
showLock?: boolean
|
||||
/**
|
||||
* Whether the lock option is disabled (default: false)
|
||||
* Set to true when user lacks permissions
|
||||
*/
|
||||
disableLock?: boolean
|
||||
/**
|
||||
* Whether the workflow is currently locked (all blocks locked)
|
||||
*/
|
||||
isLocked?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -321,6 +339,10 @@ export function ContextMenu({
|
||||
onLeave,
|
||||
showLeave = false,
|
||||
disableLeave = false,
|
||||
onToggleLock,
|
||||
showLock = false,
|
||||
disableLock = false,
|
||||
isLocked = false,
|
||||
}: ContextMenuProps) {
|
||||
const [hexInput, setHexInput] = useState(currentColor || '#ffffff')
|
||||
|
||||
@@ -372,7 +394,8 @@ export function ContextMenu({
|
||||
(showRename && onRename) ||
|
||||
(showCreate && onCreate) ||
|
||||
(showCreateFolder && onCreateFolder) ||
|
||||
(showColorChange && onColorChange)
|
||||
(showColorChange && onColorChange) ||
|
||||
(showLock && onToggleLock)
|
||||
const hasCopySection = (showDuplicate && onDuplicate) || (showExport && onExport)
|
||||
|
||||
return (
|
||||
@@ -495,6 +518,19 @@ export function ContextMenu({
|
||||
</PopoverFolder>
|
||||
)}
|
||||
|
||||
{showLock && onToggleLock && (
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
disabled={disableLock}
|
||||
onClick={() => {
|
||||
onToggleLock()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
{isLocked ? 'Unlock' : 'Lock'}
|
||||
</PopoverItem>
|
||||
)}
|
||||
|
||||
{/* Copy and export actions */}
|
||||
{hasEditSection && hasCopySection && <PopoverDivider rootOnly />}
|
||||
{showDuplicate && onDuplicate && (
|
||||
|
||||
@@ -6,6 +6,7 @@ import { MoreHorizontal } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { getWorkflowLockToggleIds } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
|
||||
import { ContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/context-menu/context-menu'
|
||||
import { DeleteModal } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/delete-modal/delete-modal'
|
||||
import { Avatars } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/workflow-item/avatars/avatars'
|
||||
@@ -27,6 +28,7 @@ import {
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
interface WorkflowItemProps {
|
||||
workflow: WorkflowMetadata
|
||||
@@ -169,6 +171,29 @@ export function WorkflowItem({
|
||||
[workflow.id, updateWorkflow]
|
||||
)
|
||||
|
||||
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
|
||||
const isActiveWorkflow = workflow.id === activeWorkflowId
|
||||
|
||||
const isWorkflowLocked = useWorkflowStore(
|
||||
useCallback(
|
||||
(state) => {
|
||||
if (!isActiveWorkflow) return false
|
||||
const blockValues = Object.values(state.blocks)
|
||||
if (blockValues.length === 0) return false
|
||||
return blockValues.every((block) => block.locked)
|
||||
},
|
||||
[isActiveWorkflow]
|
||||
)
|
||||
)
|
||||
|
||||
const handleToggleLock = useCallback(() => {
|
||||
if (!isActiveWorkflow) return
|
||||
const blocks = useWorkflowStore.getState().blocks
|
||||
const blockIds = getWorkflowLockToggleIds(blocks, !isWorkflowLocked)
|
||||
if (blockIds.length === 0) return
|
||||
window.dispatchEvent(new CustomEvent('toggle-workflow-lock', { detail: { blockIds } }))
|
||||
}, [isActiveWorkflow, isWorkflowLocked])
|
||||
|
||||
const isEditingRef = useRef(false)
|
||||
|
||||
const {
|
||||
@@ -461,6 +486,10 @@ export function WorkflowItem({
|
||||
disableExport={!userPermissions.canEdit}
|
||||
disableColorChange={!userPermissions.canEdit}
|
||||
disableDelete={!userPermissions.canEdit || !canDeleteSelection}
|
||||
onToggleLock={handleToggleLock}
|
||||
showLock={isActiveWorkflow && !isMixedSelection && selectedWorkflows.size <= 1}
|
||||
disableLock={!userPermissions.canAdmin}
|
||||
isLocked={isWorkflowLocked}
|
||||
/>
|
||||
|
||||
<DeleteModal
|
||||
|
||||
@@ -89,6 +89,38 @@ Example:
|
||||
'Request timeout in milliseconds (default: 300000 = 5 minutes, max: 600000 = 10 minutes)',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'retries',
|
||||
title: 'Retries',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
description:
|
||||
'Number of retry attempts for timeouts, 429 responses, and 5xx errors (default: 0, no retries)',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'retryDelayMs',
|
||||
title: 'Retry delay (ms)',
|
||||
type: 'short-input',
|
||||
placeholder: '500',
|
||||
description: 'Initial retry delay in milliseconds (exponential backoff)',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'retryMaxDelayMs',
|
||||
title: 'Max retry delay (ms)',
|
||||
type: 'short-input',
|
||||
placeholder: '30000',
|
||||
description: 'Maximum delay between retries in milliseconds',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'retryNonIdempotent',
|
||||
title: 'Retry non-idempotent methods',
|
||||
type: 'switch',
|
||||
description: 'Allow retries for POST/PATCH requests (may create duplicate requests)',
|
||||
mode: 'advanced',
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: ['http_request'],
|
||||
@@ -100,6 +132,16 @@ Example:
|
||||
body: { type: 'json', description: 'Request body data' },
|
||||
params: { type: 'json', description: 'URL query parameters' },
|
||||
timeout: { type: 'number', description: 'Request timeout in milliseconds' },
|
||||
retries: { type: 'number', description: 'Number of retry attempts for retryable failures' },
|
||||
retryDelayMs: { type: 'number', description: 'Initial retry delay in milliseconds' },
|
||||
retryMaxDelayMs: {
|
||||
type: 'number',
|
||||
description: 'Maximum delay between retries in milliseconds',
|
||||
},
|
||||
retryNonIdempotent: {
|
||||
type: 'boolean',
|
||||
description: 'Allow retries for non-idempotent methods like POST/PATCH',
|
||||
},
|
||||
},
|
||||
outputs: {
|
||||
data: { type: 'json', description: 'API response data (JSON, text, or other formats)' },
|
||||
|
||||
1243
apps/sim/blocks/blocks/attio.ts
Normal file
1243
apps/sim/blocks/blocks/attio.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -84,6 +84,7 @@ export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
|
||||
'write:content.property:confluence',
|
||||
'read:hierarchical-content:confluence',
|
||||
'read:content.metadata:confluence',
|
||||
'read:user:confluence',
|
||||
],
|
||||
placeholder: 'Select Confluence account',
|
||||
required: true,
|
||||
@@ -414,6 +415,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
{ label: 'List Blog Posts', id: 'list_blogposts' },
|
||||
{ label: 'Get Blog Post', id: 'get_blogpost' },
|
||||
{ label: 'Create Blog Post', id: 'create_blogpost' },
|
||||
{ label: 'Update Blog Post', id: 'update_blogpost' },
|
||||
{ label: 'Delete Blog Post', id: 'delete_blogpost' },
|
||||
{ label: 'List Blog Posts in Space', id: 'list_blogposts_in_space' },
|
||||
// Comment Operations
|
||||
{ label: 'Create Comment', id: 'create_comment' },
|
||||
@@ -432,7 +435,24 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
{ label: 'List Space Labels', id: 'list_space_labels' },
|
||||
// Space Operations
|
||||
{ label: 'Get Space', id: 'get_space' },
|
||||
{ label: 'Create Space', id: 'create_space' },
|
||||
{ label: 'Update Space', id: 'update_space' },
|
||||
{ label: 'Delete Space', id: 'delete_space' },
|
||||
{ label: 'List Spaces', id: 'list_spaces' },
|
||||
// Space Property Operations
|
||||
{ label: 'List Space Properties', id: 'list_space_properties' },
|
||||
{ label: 'Create Space Property', id: 'create_space_property' },
|
||||
{ label: 'Delete Space Property', id: 'delete_space_property' },
|
||||
// Space Permission Operations
|
||||
{ label: 'List Space Permissions', id: 'list_space_permissions' },
|
||||
// Page Descendant Operations
|
||||
{ label: 'Get Page Descendants', id: 'get_page_descendants' },
|
||||
// Task Operations
|
||||
{ label: 'List Tasks', id: 'list_tasks' },
|
||||
{ label: 'Get Task', id: 'get_task' },
|
||||
{ label: 'Update Task', id: 'update_task' },
|
||||
// User Operations
|
||||
{ label: 'Get User', id: 'get_user' },
|
||||
],
|
||||
value: () => 'read',
|
||||
},
|
||||
@@ -472,6 +492,15 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'write:content.property:confluence',
|
||||
'read:hierarchical-content:confluence',
|
||||
'read:content.metadata:confluence',
|
||||
'read:user:confluence',
|
||||
'read:task:confluence',
|
||||
'write:task:confluence',
|
||||
'delete:blogpost:confluence',
|
||||
'write:space:confluence',
|
||||
'delete:space:confluence',
|
||||
'read:space.property:confluence',
|
||||
'write:space.property:confluence',
|
||||
'read:space.permission:confluence',
|
||||
],
|
||||
placeholder: 'Select Confluence account',
|
||||
required: true,
|
||||
@@ -507,13 +536,26 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_pages_in_space',
|
||||
'list_blogposts',
|
||||
'get_blogpost',
|
||||
'update_blogpost',
|
||||
'delete_blogpost',
|
||||
'list_blogposts_in_space',
|
||||
'search',
|
||||
'search_in_space',
|
||||
'get_space',
|
||||
'create_space',
|
||||
'update_space',
|
||||
'delete_space',
|
||||
'list_spaces',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
'list_space_permissions',
|
||||
'list_space_properties',
|
||||
'create_space_property',
|
||||
'delete_space_property',
|
||||
'list_tasks',
|
||||
'get_task',
|
||||
'update_task',
|
||||
'get_user',
|
||||
],
|
||||
not: true,
|
||||
},
|
||||
@@ -537,6 +579,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'get_page_version',
|
||||
'list_page_properties',
|
||||
'create_page_property',
|
||||
'get_page_descendants',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -553,13 +596,26 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_pages_in_space',
|
||||
'list_blogposts',
|
||||
'get_blogpost',
|
||||
'update_blogpost',
|
||||
'delete_blogpost',
|
||||
'list_blogposts_in_space',
|
||||
'search',
|
||||
'search_in_space',
|
||||
'get_space',
|
||||
'create_space',
|
||||
'update_space',
|
||||
'delete_space',
|
||||
'list_spaces',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
'list_space_permissions',
|
||||
'list_space_properties',
|
||||
'create_space_property',
|
||||
'delete_space_property',
|
||||
'list_tasks',
|
||||
'get_task',
|
||||
'update_task',
|
||||
'get_user',
|
||||
],
|
||||
not: true,
|
||||
},
|
||||
@@ -583,6 +639,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'get_page_version',
|
||||
'list_page_properties',
|
||||
'create_page_property',
|
||||
'get_page_descendants',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -597,11 +654,17 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
value: [
|
||||
'create',
|
||||
'get_space',
|
||||
'update_space',
|
||||
'delete_space',
|
||||
'list_pages_in_space',
|
||||
'search_in_space',
|
||||
'create_blogpost',
|
||||
'list_blogposts_in_space',
|
||||
'list_space_labels',
|
||||
'list_space_permissions',
|
||||
'list_space_properties',
|
||||
'create_space_property',
|
||||
'delete_space_property',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -611,7 +674,10 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter blog post ID',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'get_blogpost' },
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['get_blogpost', 'update_blogpost', 'delete_blogpost'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'versionNumber',
|
||||
@@ -621,6 +687,86 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'get_page_version' },
|
||||
},
|
||||
{
|
||||
id: 'accountId',
|
||||
title: 'Account ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter Atlassian account ID',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'get_user' },
|
||||
},
|
||||
{
|
||||
id: 'taskId',
|
||||
title: 'Task ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter task ID',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: ['get_task', 'update_task'] },
|
||||
},
|
||||
{
|
||||
id: 'taskStatus',
|
||||
title: 'Task Status',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Complete', id: 'complete' },
|
||||
{ label: 'Incomplete', id: 'incomplete' },
|
||||
],
|
||||
value: () => 'complete',
|
||||
condition: { field: 'operation', value: 'update_task' },
|
||||
},
|
||||
{
|
||||
id: 'taskAssignedTo',
|
||||
title: 'Assigned To',
|
||||
type: 'short-input',
|
||||
placeholder: 'Filter by assignee account ID (optional)',
|
||||
condition: { field: 'operation', value: 'list_tasks' },
|
||||
},
|
||||
{
|
||||
id: 'spaceName',
|
||||
title: 'Space Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter space name',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'create_space' },
|
||||
},
|
||||
{
|
||||
id: 'spaceKey',
|
||||
title: 'Space Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter space key (e.g., MYSPACE)',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'create_space' },
|
||||
},
|
||||
{
|
||||
id: 'spaceDescription',
|
||||
title: 'Description',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter space description (optional)',
|
||||
condition: { field: 'operation', value: ['create_space', 'update_space'] },
|
||||
},
|
||||
{
|
||||
id: 'spacePropertyKey',
|
||||
title: 'Property Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter property key/name',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'create_space_property' },
|
||||
},
|
||||
{
|
||||
id: 'spacePropertyValue',
|
||||
title: 'Property Value',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter property value (JSON supported)',
|
||||
condition: { field: 'operation', value: 'create_space_property' },
|
||||
},
|
||||
{
|
||||
id: 'spacePropertyId',
|
||||
title: 'Property ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter property ID to delete',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'delete_space_property' },
|
||||
},
|
||||
{
|
||||
id: 'propertyKey',
|
||||
title: 'Property Key',
|
||||
@@ -650,14 +796,20 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
title: 'Title',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter title',
|
||||
condition: { field: 'operation', value: ['create', 'update', 'create_blogpost'] },
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create', 'update', 'create_blogpost', 'update_blogpost', 'update_space'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'content',
|
||||
title: 'Content',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter content',
|
||||
condition: { field: 'operation', value: ['create', 'update', 'create_blogpost'] },
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create', 'update', 'create_blogpost', 'update_blogpost'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'parentId',
|
||||
@@ -813,6 +965,10 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_labels',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
'get_page_descendants',
|
||||
'list_space_permissions',
|
||||
'list_space_properties',
|
||||
'list_tasks',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -836,6 +992,10 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_labels',
|
||||
'get_pages_by_label',
|
||||
'list_space_labels',
|
||||
'get_page_descendants',
|
||||
'list_space_permissions',
|
||||
'list_space_properties',
|
||||
'list_tasks',
|
||||
],
|
||||
},
|
||||
},
|
||||
@@ -921,7 +1081,27 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'confluence_list_space_labels',
|
||||
// Space Tools
|
||||
'confluence_get_space',
|
||||
'confluence_create_space',
|
||||
'confluence_update_space',
|
||||
'confluence_delete_space',
|
||||
'confluence_list_spaces',
|
||||
// Space Property Tools
|
||||
'confluence_list_space_properties',
|
||||
'confluence_create_space_property',
|
||||
'confluence_delete_space_property',
|
||||
// Space Permission Tools
|
||||
'confluence_list_space_permissions',
|
||||
// Page Descendant Tools
|
||||
'confluence_get_page_descendants',
|
||||
// Task Tools
|
||||
'confluence_list_tasks',
|
||||
'confluence_get_task',
|
||||
'confluence_update_task',
|
||||
// Blog Post Update/Delete
|
||||
'confluence_update_blogpost',
|
||||
'confluence_delete_blogpost',
|
||||
// User Tools
|
||||
'confluence_get_user',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
@@ -965,6 +1145,10 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
return 'confluence_get_blogpost'
|
||||
case 'create_blogpost':
|
||||
return 'confluence_create_blogpost'
|
||||
case 'update_blogpost':
|
||||
return 'confluence_update_blogpost'
|
||||
case 'delete_blogpost':
|
||||
return 'confluence_delete_blogpost'
|
||||
case 'list_blogposts_in_space':
|
||||
return 'confluence_list_blogposts_in_space'
|
||||
// Comment Operations
|
||||
@@ -997,8 +1181,37 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
// Space Operations
|
||||
case 'get_space':
|
||||
return 'confluence_get_space'
|
||||
case 'create_space':
|
||||
return 'confluence_create_space'
|
||||
case 'update_space':
|
||||
return 'confluence_update_space'
|
||||
case 'delete_space':
|
||||
return 'confluence_delete_space'
|
||||
case 'list_spaces':
|
||||
return 'confluence_list_spaces'
|
||||
// Space Property Operations
|
||||
case 'list_space_properties':
|
||||
return 'confluence_list_space_properties'
|
||||
case 'create_space_property':
|
||||
return 'confluence_create_space_property'
|
||||
case 'delete_space_property':
|
||||
return 'confluence_delete_space_property'
|
||||
// Space Permission Operations
|
||||
case 'list_space_permissions':
|
||||
return 'confluence_list_space_permissions'
|
||||
// Page Descendant Operations
|
||||
case 'get_page_descendants':
|
||||
return 'confluence_get_page_descendants'
|
||||
// Task Operations
|
||||
case 'list_tasks':
|
||||
return 'confluence_list_tasks'
|
||||
case 'get_task':
|
||||
return 'confluence_get_task'
|
||||
case 'update_task':
|
||||
return 'confluence_update_task'
|
||||
// User Operations
|
||||
case 'get_user':
|
||||
return 'confluence_get_user'
|
||||
default:
|
||||
return 'confluence_retrieve'
|
||||
}
|
||||
@@ -1013,6 +1226,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
attachmentComment,
|
||||
blogPostId,
|
||||
versionNumber,
|
||||
accountId,
|
||||
propertyKey,
|
||||
propertyValue,
|
||||
propertyId,
|
||||
@@ -1022,6 +1236,15 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
purge,
|
||||
bodyFormat,
|
||||
cursor,
|
||||
taskId,
|
||||
taskStatus,
|
||||
taskAssignedTo,
|
||||
spaceName,
|
||||
spaceKey,
|
||||
spaceDescription,
|
||||
spacePropertyKey,
|
||||
spacePropertyValue,
|
||||
spacePropertyId,
|
||||
...rest
|
||||
} = params
|
||||
|
||||
@@ -1069,8 +1292,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
}
|
||||
|
||||
// Operations that support generic cursor pagination.
|
||||
// get_pages_by_label and list_space_labels have dedicated handlers
|
||||
// below that pass cursor along with their required params (labelId, spaceId).
|
||||
// get_pages_by_label, list_space_labels, and list_tasks have dedicated handlers
|
||||
// below that pass cursor along with their required params.
|
||||
const supportsCursor = [
|
||||
'list_attachments',
|
||||
'list_spaces',
|
||||
@@ -1081,6 +1304,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
'list_page_versions',
|
||||
'list_page_properties',
|
||||
'list_labels',
|
||||
'get_page_descendants',
|
||||
'list_space_permissions',
|
||||
'list_space_properties',
|
||||
]
|
||||
|
||||
if (supportsCursor.includes(operation) && cursor) {
|
||||
@@ -1152,6 +1378,122 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'get_user') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
accountId: accountId ? String(accountId).trim() : undefined,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'update_blogpost' || operation === 'delete_blogpost') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
blogPostId: blogPostId || undefined,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'create_space') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
name: spaceName,
|
||||
key: spaceKey,
|
||||
description: spaceDescription,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'update_space') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
name: spaceName || rest.title,
|
||||
description: spaceDescription,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'delete_space') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'create_space_property') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
key: spacePropertyKey,
|
||||
value: spacePropertyValue,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'delete_space_property') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
propertyId: spacePropertyId,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'list_space_permissions' || operation === 'list_space_properties') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
cursor: cursor || undefined,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'get_page_descendants') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
pageId: effectivePageId,
|
||||
operation,
|
||||
cursor: cursor || undefined,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'get_task') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
taskId,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'update_task') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
taskId,
|
||||
status: taskStatus,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'list_tasks') {
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
operation,
|
||||
pageId: effectivePageId || undefined,
|
||||
assignedTo: taskAssignedTo || undefined,
|
||||
status: taskStatus || undefined,
|
||||
cursor: cursor || undefined,
|
||||
...rest,
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
credential: oauthCredential,
|
||||
pageId: effectivePageId || undefined,
|
||||
@@ -1171,6 +1513,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
spaceId: { type: 'string', description: 'Space identifier' },
|
||||
blogPostId: { type: 'string', description: 'Blog post identifier' },
|
||||
versionNumber: { type: 'number', description: 'Page version number' },
|
||||
accountId: { type: 'string', description: 'Atlassian account ID' },
|
||||
propertyKey: { type: 'string', description: 'Property key/name' },
|
||||
propertyValue: { type: 'json', description: 'Property value (JSON)' },
|
||||
title: { type: 'string', description: 'Page or blog post title' },
|
||||
@@ -1192,6 +1535,15 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
bodyFormat: { type: 'string', description: 'Body format for comments' },
|
||||
limit: { type: 'number', description: 'Maximum number of results' },
|
||||
cursor: { type: 'string', description: 'Pagination cursor from previous response' },
|
||||
taskId: { type: 'string', description: 'Task identifier' },
|
||||
taskStatus: { type: 'string', description: 'Task status (complete or incomplete)' },
|
||||
taskAssignedTo: { type: 'string', description: 'Filter tasks by assignee account ID' },
|
||||
spaceName: { type: 'string', description: 'Space name for create/update' },
|
||||
spaceKey: { type: 'string', description: 'Space key for create' },
|
||||
spaceDescription: { type: 'string', description: 'Space description' },
|
||||
spacePropertyKey: { type: 'string', description: 'Space property key' },
|
||||
spacePropertyValue: { type: 'json', description: 'Space property value' },
|
||||
spacePropertyId: { type: 'string', description: 'Space property identifier' },
|
||||
},
|
||||
outputs: {
|
||||
ts: { type: 'string', description: 'Timestamp' },
|
||||
@@ -1242,6 +1594,23 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
||||
propertyId: { type: 'string', description: 'Property identifier' },
|
||||
propertyKey: { type: 'string', description: 'Property key' },
|
||||
propertyValue: { type: 'json', description: 'Property value' },
|
||||
// User Results
|
||||
accountId: { type: 'string', description: 'Atlassian account ID' },
|
||||
displayName: { type: 'string', description: 'User display name' },
|
||||
email: { type: 'string', description: 'User email address' },
|
||||
accountType: { type: 'string', description: 'Account type (atlassian, app, customer)' },
|
||||
profilePicture: { type: 'string', description: 'Path to user profile picture' },
|
||||
publicName: { type: 'string', description: 'User public name' },
|
||||
// Task Results
|
||||
tasks: { type: 'array', description: 'List of tasks' },
|
||||
taskId: { type: 'string', description: 'Task identifier' },
|
||||
// Descendant Results
|
||||
descendants: { type: 'array', description: 'List of descendant pages' },
|
||||
// Permission Results
|
||||
permissions: { type: 'array', description: 'List of space permissions' },
|
||||
// Space Property Results
|
||||
homepageId: { type: 'string', description: 'Space homepage ID' },
|
||||
description: { type: 'json', description: 'Space description' },
|
||||
// Pagination
|
||||
nextCursor: { type: 'string', description: 'Cursor for fetching next page of results' },
|
||||
},
|
||||
|
||||
187
apps/sim/blocks/blocks/devin.ts
Normal file
187
apps/sim/blocks/blocks/devin.ts
Normal file
@@ -0,0 +1,187 @@
|
||||
import { DevinIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
|
||||
export const DevinBlock: BlockConfig = {
|
||||
type: 'devin',
|
||||
name: 'Devin',
|
||||
description: 'Autonomous AI software engineer',
|
||||
longDescription:
|
||||
'Integrate Devin into your workflow. Create sessions to assign coding tasks, send messages to guide active sessions, and retrieve session status and results. Devin autonomously writes, runs, and tests code.',
|
||||
bestPractices: `
|
||||
- Write clear, specific prompts describing the task, expected outcome, and any constraints.
|
||||
- Use playbook IDs to standardize recurring task patterns across sessions.
|
||||
- Set ACU limits to control cost for long-running tasks.
|
||||
- Use Get Session to poll for completion status before consuming structured output.
|
||||
- Send Message auto-resumes suspended sessions — no need to resume separately.
|
||||
`,
|
||||
docsLink: 'https://docs.sim.ai/tools/devin',
|
||||
category: 'tools',
|
||||
bgColor: '#12141A',
|
||||
icon: DevinIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Create Session', id: 'create_session' },
|
||||
{ label: 'Get Session', id: 'get_session' },
|
||||
{ label: 'List Sessions', id: 'list_sessions' },
|
||||
{ label: 'Send Message', id: 'send_message' },
|
||||
],
|
||||
value: () => 'create_session',
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your Devin API key (cog_...)',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'prompt',
|
||||
title: 'Prompt',
|
||||
type: 'long-input',
|
||||
placeholder: 'Describe the task for Devin...',
|
||||
required: { field: 'operation', value: 'create_session' },
|
||||
condition: { field: 'operation', value: 'create_session' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `You are an expert at writing clear, actionable prompts for Devin, an autonomous AI software engineer. Generate or refine a task prompt based on the user's request.
|
||||
|
||||
Current prompt: {context}
|
||||
|
||||
RULES:
|
||||
1. Be specific about the expected outcome and deliverables
|
||||
2. Include relevant technical context (languages, frameworks, repos)
|
||||
3. Specify any constraints (don't modify certain files, follow certain patterns)
|
||||
4. Break complex tasks into clear steps when helpful
|
||||
5. Return ONLY the prompt text, no markdown formatting or explanations`,
|
||||
placeholder: 'Describe what you want Devin to do...',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'playbookId',
|
||||
title: 'Playbook ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Optional playbook ID to guide the session',
|
||||
condition: { field: 'operation', value: 'create_session' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'maxAcuLimit',
|
||||
title: 'Max ACU Limit',
|
||||
type: 'short-input',
|
||||
placeholder: 'Maximum ACU budget for this session',
|
||||
condition: { field: 'operation', value: 'create_session' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'tags',
|
||||
title: 'Tags',
|
||||
type: 'short-input',
|
||||
placeholder: 'Comma-separated tags',
|
||||
condition: { field: 'operation', value: 'create_session' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'sessionId',
|
||||
title: 'Session ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter session ID',
|
||||
required: { field: 'operation', value: ['get_session', 'send_message'] },
|
||||
condition: { field: 'operation', value: ['get_session', 'send_message'] },
|
||||
},
|
||||
{
|
||||
id: 'message',
|
||||
title: 'Message',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter message to send to Devin...',
|
||||
required: { field: 'operation', value: 'send_message' },
|
||||
condition: { field: 'operation', value: 'send_message' },
|
||||
},
|
||||
{
|
||||
id: 'limit',
|
||||
title: 'Limit',
|
||||
type: 'short-input',
|
||||
placeholder: 'Number of sessions (1-200, default: 100)',
|
||||
condition: { field: 'operation', value: 'list_sessions' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
'devin_create_session',
|
||||
'devin_get_session',
|
||||
'devin_list_sessions',
|
||||
'devin_send_message',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => `devin_${params.operation}`,
|
||||
params: (params) => {
|
||||
if (params.maxAcuLimit != null && params.maxAcuLimit !== '') {
|
||||
params.maxAcuLimit = Number(params.maxAcuLimit)
|
||||
}
|
||||
if (params.limit != null && params.limit !== '') {
|
||||
params.limit = Number(params.limit)
|
||||
}
|
||||
return params
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
prompt: { type: 'string', description: 'Task prompt for Devin' },
|
||||
sessionId: { type: 'string', description: 'Session ID' },
|
||||
message: { type: 'string', description: 'Message to send to the session' },
|
||||
apiKey: { type: 'string', description: 'Devin API key' },
|
||||
playbookId: { type: 'string', description: 'Playbook ID to guide the session' },
|
||||
maxAcuLimit: { type: 'number', description: 'Maximum ACU limit' },
|
||||
tags: { type: 'string', description: 'Comma-separated tags' },
|
||||
limit: { type: 'number', description: 'Number of sessions to return' },
|
||||
},
|
||||
outputs: {
|
||||
sessionId: { type: 'string', description: 'Session identifier' },
|
||||
url: { type: 'string', description: 'URL to view the session in Devin UI' },
|
||||
status: {
|
||||
type: 'string',
|
||||
description: 'Session status (new, claimed, running, exit, error, suspended, resuming)',
|
||||
},
|
||||
statusDetail: {
|
||||
type: 'string',
|
||||
description: 'Detailed status (working, waiting_for_user, finished, etc.)',
|
||||
condition: { field: 'operation', value: 'list_sessions', not: true },
|
||||
},
|
||||
title: { type: 'string', description: 'Session title' },
|
||||
createdAt: { type: 'number', description: 'Creation timestamp (Unix)' },
|
||||
updatedAt: { type: 'number', description: 'Last updated timestamp (Unix)' },
|
||||
acusConsumed: {
|
||||
type: 'number',
|
||||
description: 'ACUs consumed',
|
||||
condition: { field: 'operation', value: 'list_sessions', not: true },
|
||||
},
|
||||
tags: { type: 'json', description: 'Session tags' },
|
||||
pullRequests: {
|
||||
type: 'json',
|
||||
description: 'Pull requests created during the session',
|
||||
condition: { field: 'operation', value: 'list_sessions', not: true },
|
||||
},
|
||||
structuredOutput: {
|
||||
type: 'json',
|
||||
description: 'Structured output from the session',
|
||||
condition: { field: 'operation', value: 'list_sessions', not: true },
|
||||
},
|
||||
playbookId: {
|
||||
type: 'string',
|
||||
description: 'Associated playbook ID',
|
||||
condition: { field: 'operation', value: 'list_sessions', not: true },
|
||||
},
|
||||
sessions: {
|
||||
type: 'json',
|
||||
description: 'List of sessions',
|
||||
condition: { field: 'operation', value: 'list_sessions' },
|
||||
},
|
||||
},
|
||||
}
|
||||
256
apps/sim/blocks/blocks/google_bigquery.ts
Normal file
256
apps/sim/blocks/blocks/google_bigquery.ts
Normal file
@@ -0,0 +1,256 @@
|
||||
import { GoogleBigQueryIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
|
||||
export const GoogleBigQueryBlock: BlockConfig = {
|
||||
type: 'google_bigquery',
|
||||
name: 'Google BigQuery',
|
||||
description: 'Query, list, and insert data in Google BigQuery',
|
||||
longDescription:
|
||||
'Connect to Google BigQuery to run SQL queries, list datasets and tables, get table metadata, and insert rows.',
|
||||
docsLink: 'https://docs.sim.ai/tools/google_bigquery',
|
||||
category: 'tools',
|
||||
bgColor: '#E0E0E0',
|
||||
icon: GoogleBigQueryIcon,
|
||||
authMode: AuthMode.OAuth,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Run Query', id: 'query' },
|
||||
{ label: 'List Datasets', id: 'list_datasets' },
|
||||
{ label: 'List Tables', id: 'list_tables' },
|
||||
{ label: 'Get Table', id: 'get_table' },
|
||||
{ label: 'Insert Rows', id: 'insert_rows' },
|
||||
],
|
||||
value: () => 'query',
|
||||
},
|
||||
|
||||
{
|
||||
id: 'credential',
|
||||
title: 'Google Account',
|
||||
type: 'oauth-input',
|
||||
canonicalParamId: 'oauthCredential',
|
||||
mode: 'basic',
|
||||
required: true,
|
||||
serviceId: 'google-bigquery',
|
||||
requiredScopes: ['https://www.googleapis.com/auth/bigquery'],
|
||||
placeholder: 'Select Google account',
|
||||
},
|
||||
{
|
||||
id: 'manualCredential',
|
||||
title: 'Google Account',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'oauthCredential',
|
||||
mode: 'advanced',
|
||||
placeholder: 'Enter credential ID',
|
||||
required: true,
|
||||
},
|
||||
|
||||
{
|
||||
id: 'projectId',
|
||||
title: 'Project ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter Google Cloud project ID',
|
||||
required: true,
|
||||
},
|
||||
|
||||
{
|
||||
id: 'query',
|
||||
title: 'SQL Query',
|
||||
type: 'long-input',
|
||||
placeholder: 'SELECT * FROM `project.dataset.table` LIMIT 100',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
required: { field: 'operation', value: 'query' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a BigQuery Standard SQL query based on the user's description.
|
||||
The query should:
|
||||
- Use Standard SQL syntax (not Legacy SQL)
|
||||
- Be well-formatted and efficient
|
||||
- Include appropriate LIMIT clauses when applicable
|
||||
|
||||
Examples:
|
||||
- "get all users" -> SELECT * FROM \`project.dataset.users\` LIMIT 1000
|
||||
- "count orders by status" -> SELECT status, COUNT(*) as count FROM \`project.dataset.orders\` GROUP BY status
|
||||
- "recent events" -> SELECT * FROM \`project.dataset.events\` ORDER BY created_at DESC LIMIT 100
|
||||
|
||||
Return ONLY the SQL query - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Describe the query you want to run...',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'useLegacySql',
|
||||
title: 'Use Legacy SQL',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
},
|
||||
{
|
||||
id: 'maxResults',
|
||||
title: 'Max Results',
|
||||
type: 'short-input',
|
||||
placeholder: 'Maximum rows to return',
|
||||
condition: { field: 'operation', value: ['query', 'list_datasets', 'list_tables'] },
|
||||
},
|
||||
{
|
||||
id: 'defaultDatasetId',
|
||||
title: 'Default Dataset',
|
||||
type: 'short-input',
|
||||
placeholder: 'Default dataset for unqualified table names',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
},
|
||||
{
|
||||
id: 'location',
|
||||
title: 'Location',
|
||||
type: 'short-input',
|
||||
placeholder: 'Processing location (e.g., US, EU)',
|
||||
condition: { field: 'operation', value: 'query' },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'datasetId',
|
||||
title: 'Dataset ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter BigQuery dataset ID',
|
||||
condition: { field: 'operation', value: ['list_tables', 'get_table', 'insert_rows'] },
|
||||
required: { field: 'operation', value: ['list_tables', 'get_table', 'insert_rows'] },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'tableId',
|
||||
title: 'Table ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter BigQuery table ID',
|
||||
condition: { field: 'operation', value: ['get_table', 'insert_rows'] },
|
||||
required: { field: 'operation', value: ['get_table', 'insert_rows'] },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'rows',
|
||||
title: 'Rows',
|
||||
type: 'long-input',
|
||||
placeholder: '[{"column1": "value1", "column2": 42}]',
|
||||
condition: { field: 'operation', value: 'insert_rows' },
|
||||
required: { field: 'operation', value: 'insert_rows' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON array of row objects for BigQuery insertion based on the user's description.
|
||||
Each row should be a JSON object where keys are column names and values match the expected types.
|
||||
|
||||
Examples:
|
||||
- "3 users" -> [{"name": "Alice", "email": "alice@example.com"}, {"name": "Bob", "email": "bob@example.com"}, {"name": "Charlie", "email": "charlie@example.com"}]
|
||||
- "order record" -> [{"order_id": "ORD-001", "amount": 99.99, "status": "pending"}]
|
||||
|
||||
Return ONLY the JSON array - no explanations, no wrapping, no extra text.`,
|
||||
placeholder: 'Describe the rows to insert...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'skipInvalidRows',
|
||||
title: 'Skip Invalid Rows',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'insert_rows' },
|
||||
},
|
||||
{
|
||||
id: 'ignoreUnknownValues',
|
||||
title: 'Ignore Unknown Values',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'insert_rows' },
|
||||
},
|
||||
|
||||
{
|
||||
id: 'pageToken',
|
||||
title: 'Page Token',
|
||||
type: 'short-input',
|
||||
placeholder: 'Pagination token',
|
||||
condition: { field: 'operation', value: ['list_datasets', 'list_tables'] },
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
'google_bigquery_query',
|
||||
'google_bigquery_list_datasets',
|
||||
'google_bigquery_list_tables',
|
||||
'google_bigquery_get_table',
|
||||
'google_bigquery_insert_rows',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'query':
|
||||
return 'google_bigquery_query'
|
||||
case 'list_datasets':
|
||||
return 'google_bigquery_list_datasets'
|
||||
case 'list_tables':
|
||||
return 'google_bigquery_list_tables'
|
||||
case 'get_table':
|
||||
return 'google_bigquery_get_table'
|
||||
case 'insert_rows':
|
||||
return 'google_bigquery_insert_rows'
|
||||
default:
|
||||
throw new Error(`Invalid Google BigQuery operation: ${params.operation}`)
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const { oauthCredential, rows, maxResults, ...rest } = params
|
||||
return {
|
||||
...rest,
|
||||
oauthCredential,
|
||||
...(rows && { rows: typeof rows === 'string' ? rows : JSON.stringify(rows) }),
|
||||
...(maxResults !== undefined && maxResults !== '' && { maxResults: Number(maxResults) }),
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
oauthCredential: { type: 'string', description: 'Google BigQuery OAuth credential' },
|
||||
projectId: { type: 'string', description: 'Google Cloud project ID' },
|
||||
query: { type: 'string', description: 'SQL query to execute' },
|
||||
useLegacySql: { type: 'boolean', description: 'Whether to use legacy SQL syntax' },
|
||||
maxResults: { type: 'number', description: 'Maximum number of results to return' },
|
||||
defaultDatasetId: {
|
||||
type: 'string',
|
||||
description: 'Default dataset for unqualified table names',
|
||||
},
|
||||
location: { type: 'string', description: 'Processing location' },
|
||||
datasetId: { type: 'string', description: 'BigQuery dataset ID' },
|
||||
tableId: { type: 'string', description: 'BigQuery table ID' },
|
||||
rows: { type: 'string', description: 'JSON array of row objects to insert' },
|
||||
skipInvalidRows: { type: 'boolean', description: 'Whether to skip invalid rows during insert' },
|
||||
ignoreUnknownValues: {
|
||||
type: 'boolean',
|
||||
description: 'Whether to ignore unknown column values',
|
||||
},
|
||||
pageToken: { type: 'string', description: 'Pagination token' },
|
||||
},
|
||||
outputs: {
|
||||
columns: { type: 'json', description: 'Array of column names (query)' },
|
||||
rows: { type: 'json', description: 'Array of row objects (query)' },
|
||||
totalRows: { type: 'string', description: 'Total number of rows (query)' },
|
||||
jobComplete: { type: 'boolean', description: 'Whether the query completed (query)' },
|
||||
totalBytesProcessed: { type: 'string', description: 'Bytes processed (query)' },
|
||||
cacheHit: { type: 'boolean', description: 'Whether result was cached (query)' },
|
||||
jobReference: { type: 'json', description: 'Job reference for incomplete queries (query)' },
|
||||
pageToken: { type: 'string', description: 'Token for additional result pages (query)' },
|
||||
datasets: { type: 'json', description: 'Array of dataset objects (list_datasets)' },
|
||||
tables: { type: 'json', description: 'Array of table objects (list_tables)' },
|
||||
totalItems: { type: 'number', description: 'Total items count (list_tables)' },
|
||||
tableId: { type: 'string', description: 'Table ID (get_table)' },
|
||||
datasetId: { type: 'string', description: 'Dataset ID (get_table)' },
|
||||
type: { type: 'string', description: 'Table type (get_table)' },
|
||||
description: { type: 'string', description: 'Table description (get_table)' },
|
||||
numRows: { type: 'string', description: 'Row count (get_table)' },
|
||||
numBytes: { type: 'string', description: 'Size in bytes (get_table)' },
|
||||
schema: { type: 'json', description: 'Column definitions (get_table)' },
|
||||
creationTime: { type: 'string', description: 'Creation time (get_table)' },
|
||||
lastModifiedTime: { type: 'string', description: 'Last modified time (get_table)' },
|
||||
location: { type: 'string', description: 'Data location (get_table)' },
|
||||
insertedRows: { type: 'number', description: 'Rows inserted (insert_rows)' },
|
||||
errors: { type: 'json', description: 'Insert errors (insert_rows)' },
|
||||
nextPageToken: { type: 'string', description: 'Token for next page of results' },
|
||||
},
|
||||
}
|
||||
@@ -440,6 +440,36 @@ Return ONLY the range string - no sheet name, no explanations, no quotes.`,
|
||||
placeholder: 'Describe the range (e.g., "first 50 rows" or "column A")...',
|
||||
},
|
||||
},
|
||||
// Read Filter Fields (advanced mode only)
|
||||
{
|
||||
id: 'filterColumn',
|
||||
title: 'Filter Column',
|
||||
type: 'short-input',
|
||||
placeholder: 'Column header name to filter on (e.g., Email, Status)',
|
||||
condition: { field: 'operation', value: 'read' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'filterValue',
|
||||
title: 'Filter Value',
|
||||
type: 'short-input',
|
||||
placeholder: 'Value to match against',
|
||||
condition: { field: 'operation', value: 'read' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'filterMatchType',
|
||||
title: 'Match Type',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Contains', id: 'contains' },
|
||||
{ label: 'Exact Match', id: 'exact' },
|
||||
{ label: 'Starts With', id: 'starts_with' },
|
||||
{ label: 'Ends With', id: 'ends_with' },
|
||||
],
|
||||
condition: { field: 'operation', value: 'read' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Write-specific Fields
|
||||
{
|
||||
id: 'values',
|
||||
@@ -748,6 +778,9 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
|
||||
batchData,
|
||||
sheetId,
|
||||
destinationSpreadsheetId,
|
||||
filterColumn,
|
||||
filterValue,
|
||||
filterMatchType,
|
||||
...rest
|
||||
} = params
|
||||
|
||||
@@ -836,6 +869,11 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
|
||||
cellRange: cellRange ? (cellRange as string).trim() : undefined,
|
||||
values: parsedValues,
|
||||
oauthCredential,
|
||||
...(filterColumn ? { filterColumn: (filterColumn as string).trim() } : {}),
|
||||
...(filterValue !== undefined && filterValue !== ''
|
||||
? { filterValue: filterValue as string }
|
||||
: {}),
|
||||
...(filterMatchType ? { filterMatchType: filterMatchType as string } : {}),
|
||||
}
|
||||
},
|
||||
},
|
||||
@@ -858,6 +896,12 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
|
||||
type: 'string',
|
||||
description: 'Destination spreadsheet ID for copy',
|
||||
},
|
||||
filterColumn: { type: 'string', description: 'Column header name to filter on' },
|
||||
filterValue: { type: 'string', description: 'Value to match against the filter column' },
|
||||
filterMatchType: {
|
||||
type: 'string',
|
||||
description: 'Match type: contains, exact, starts_with, or ends_with',
|
||||
},
|
||||
},
|
||||
outputs: {
|
||||
// Read outputs
|
||||
|
||||
262
apps/sim/blocks/blocks/google_tasks.ts
Normal file
262
apps/sim/blocks/blocks/google_tasks.ts
Normal file
@@ -0,0 +1,262 @@
|
||||
import { GoogleTasksIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type { GoogleTasksResponse } from '@/tools/google_tasks/types'
|
||||
|
||||
export const GoogleTasksBlock: BlockConfig<GoogleTasksResponse> = {
|
||||
type: 'google_tasks',
|
||||
name: 'Google Tasks',
|
||||
description: 'Manage Google Tasks',
|
||||
longDescription:
|
||||
'Integrate Google Tasks into your workflow. Create, read, update, delete, and list tasks and task lists.',
|
||||
docsLink: 'https://docs.sim.ai/tools/google_tasks',
|
||||
category: 'tools',
|
||||
bgColor: '#E0E0E0',
|
||||
icon: GoogleTasksIcon,
|
||||
authMode: AuthMode.OAuth,
|
||||
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Create Task', id: 'create' },
|
||||
{ label: 'List Tasks', id: 'list' },
|
||||
{ label: 'Get Task', id: 'get' },
|
||||
{ label: 'Update Task', id: 'update' },
|
||||
{ label: 'Delete Task', id: 'delete' },
|
||||
{ label: 'List Task Lists', id: 'list_task_lists' },
|
||||
],
|
||||
value: () => 'create',
|
||||
},
|
||||
{
|
||||
id: 'credential',
|
||||
title: 'Google Tasks Account',
|
||||
type: 'oauth-input',
|
||||
canonicalParamId: 'oauthCredential',
|
||||
mode: 'basic',
|
||||
required: true,
|
||||
serviceId: 'google-tasks',
|
||||
requiredScopes: ['https://www.googleapis.com/auth/tasks'],
|
||||
placeholder: 'Select Google Tasks account',
|
||||
},
|
||||
{
|
||||
id: 'manualCredential',
|
||||
title: 'Google Tasks Account',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'oauthCredential',
|
||||
mode: 'advanced',
|
||||
placeholder: 'Enter credential ID',
|
||||
required: true,
|
||||
},
|
||||
|
||||
// Task List ID - shown for all task operations (not list_task_lists)
|
||||
{
|
||||
id: 'taskListId',
|
||||
title: 'Task List ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Task list ID (leave empty for default list)',
|
||||
condition: { field: 'operation', value: 'list_task_lists', not: true },
|
||||
},
|
||||
|
||||
// Create Task Fields
|
||||
{
|
||||
id: 'title',
|
||||
title: 'Title',
|
||||
type: 'short-input',
|
||||
placeholder: 'Buy groceries',
|
||||
condition: { field: 'operation', value: 'create' },
|
||||
required: { field: 'operation', value: 'create' },
|
||||
},
|
||||
{
|
||||
id: 'notes',
|
||||
title: 'Notes',
|
||||
type: 'long-input',
|
||||
placeholder: 'Task notes or description',
|
||||
condition: { field: 'operation', value: 'create' },
|
||||
},
|
||||
{
|
||||
id: 'due',
|
||||
title: 'Due Date',
|
||||
type: 'short-input',
|
||||
placeholder: '2025-06-03T00:00:00.000Z',
|
||||
condition: { field: 'operation', value: 'create' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate an RFC 3339 timestamp in UTC based on the user's description.
|
||||
The timestamp should be in the format: YYYY-MM-DDTHH:MM:SS.000Z (UTC timezone).
|
||||
Examples:
|
||||
- "tomorrow" -> Calculate tomorrow's date at 00:00:00.000Z
|
||||
- "next Friday" -> Calculate the next Friday's date at 00:00:00.000Z
|
||||
- "June 15" -> 2025-06-15T00:00:00.000Z
|
||||
|
||||
Return ONLY the timestamp - no explanations, no extra text.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'status',
|
||||
title: 'Status',
|
||||
type: 'dropdown',
|
||||
condition: { field: 'operation', value: 'create' },
|
||||
options: [
|
||||
{ label: 'Needs Action', id: 'needsAction' },
|
||||
{ label: 'Completed', id: 'completed' },
|
||||
],
|
||||
},
|
||||
|
||||
// Get/Update/Delete Task Fields - Task ID
|
||||
{
|
||||
id: 'taskId',
|
||||
title: 'Task ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Task ID',
|
||||
condition: { field: 'operation', value: ['get', 'update', 'delete'] },
|
||||
required: { field: 'operation', value: ['get', 'update', 'delete'] },
|
||||
},
|
||||
|
||||
// Update Task Fields
|
||||
{
|
||||
id: 'title',
|
||||
title: 'New Title',
|
||||
type: 'short-input',
|
||||
placeholder: 'Updated task title',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
},
|
||||
{
|
||||
id: 'notes',
|
||||
title: 'New Notes',
|
||||
type: 'long-input',
|
||||
placeholder: 'Updated task notes',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
},
|
||||
{
|
||||
id: 'due',
|
||||
title: 'New Due Date',
|
||||
type: 'short-input',
|
||||
placeholder: '2025-06-03T00:00:00.000Z',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate an RFC 3339 timestamp in UTC based on the user's description.
|
||||
The timestamp should be in the format: YYYY-MM-DDTHH:MM:SS.000Z (UTC timezone).
|
||||
Examples:
|
||||
- "tomorrow" -> Calculate tomorrow's date at 00:00:00.000Z
|
||||
- "next Friday" -> Calculate the next Friday's date at 00:00:00.000Z
|
||||
- "June 15" -> 2025-06-15T00:00:00.000Z
|
||||
|
||||
Return ONLY the timestamp - no explanations, no extra text.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'status',
|
||||
title: 'New Status',
|
||||
type: 'dropdown',
|
||||
condition: { field: 'operation', value: 'update' },
|
||||
options: [
|
||||
{ label: 'Needs Action', id: 'needsAction' },
|
||||
{ label: 'Completed', id: 'completed' },
|
||||
],
|
||||
},
|
||||
|
||||
// List Tasks Fields
|
||||
{
|
||||
id: 'maxResults',
|
||||
title: 'Max Results',
|
||||
type: 'short-input',
|
||||
placeholder: '20',
|
||||
condition: { field: 'operation', value: ['list', 'list_task_lists'] },
|
||||
},
|
||||
{
|
||||
id: 'showCompleted',
|
||||
title: 'Show Completed',
|
||||
type: 'dropdown',
|
||||
condition: { field: 'operation', value: 'list' },
|
||||
options: [
|
||||
{ label: 'Yes', id: 'true' },
|
||||
{ label: 'No', id: 'false' },
|
||||
],
|
||||
},
|
||||
],
|
||||
|
||||
tools: {
|
||||
access: [
|
||||
'google_tasks_create',
|
||||
'google_tasks_list',
|
||||
'google_tasks_get',
|
||||
'google_tasks_update',
|
||||
'google_tasks_delete',
|
||||
'google_tasks_list_task_lists',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'create':
|
||||
return 'google_tasks_create'
|
||||
case 'list':
|
||||
return 'google_tasks_list'
|
||||
case 'get':
|
||||
return 'google_tasks_get'
|
||||
case 'update':
|
||||
return 'google_tasks_update'
|
||||
case 'delete':
|
||||
return 'google_tasks_delete'
|
||||
case 'list_task_lists':
|
||||
return 'google_tasks_list_task_lists'
|
||||
default:
|
||||
throw new Error(`Invalid Google Tasks operation: ${params.operation}`)
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const { oauthCredential, operation, showCompleted, maxResults, ...rest } = params
|
||||
|
||||
const processedParams: Record<string, unknown> = { ...rest }
|
||||
|
||||
if (maxResults && typeof maxResults === 'string') {
|
||||
processedParams.maxResults = Number.parseInt(maxResults, 10)
|
||||
}
|
||||
|
||||
if (showCompleted !== undefined) {
|
||||
processedParams.showCompleted = showCompleted === 'true'
|
||||
}
|
||||
|
||||
return {
|
||||
oauthCredential,
|
||||
...processedParams,
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
oauthCredential: { type: 'string', description: 'Google Tasks access token' },
|
||||
taskListId: { type: 'string', description: 'Task list identifier' },
|
||||
title: { type: 'string', description: 'Task title' },
|
||||
notes: { type: 'string', description: 'Task notes' },
|
||||
due: { type: 'string', description: 'Task due date' },
|
||||
status: { type: 'string', description: 'Task status' },
|
||||
taskId: { type: 'string', description: 'Task identifier' },
|
||||
maxResults: { type: 'string', description: 'Maximum number of results' },
|
||||
showCompleted: { type: 'string', description: 'Whether to show completed tasks' },
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Task ID' },
|
||||
title: { type: 'string', description: 'Task title' },
|
||||
notes: { type: 'string', description: 'Task notes' },
|
||||
status: { type: 'string', description: 'Task status' },
|
||||
due: { type: 'string', description: 'Due date' },
|
||||
updated: { type: 'string', description: 'Last modification time' },
|
||||
selfLink: { type: 'string', description: 'URL for the task' },
|
||||
webViewLink: { type: 'string', description: 'Link to task in Google Tasks UI' },
|
||||
parent: { type: 'string', description: 'Parent task ID' },
|
||||
position: { type: 'string', description: 'Position among sibling tasks' },
|
||||
completed: { type: 'string', description: 'Completion date' },
|
||||
deleted: { type: 'boolean', description: 'Whether the task is deleted' },
|
||||
tasks: { type: 'json', description: 'Array of tasks (list operation)' },
|
||||
taskLists: { type: 'json', description: 'Array of task lists (list_task_lists operation)' },
|
||||
taskId: { type: 'string', description: 'Deleted task ID (delete operation)' },
|
||||
nextPageToken: { type: 'string', description: 'Token for next page of results' },
|
||||
},
|
||||
}
|
||||
215
apps/sim/blocks/blocks/google_translate.ts
Normal file
215
apps/sim/blocks/blocks/google_translate.ts
Normal file
@@ -0,0 +1,215 @@
|
||||
import { GoogleTranslateIcon } from '@/components/icons'
|
||||
import { AuthMode, type BlockConfig } from '@/blocks/types'
|
||||
|
||||
const SUPPORTED_LANGUAGES = [
|
||||
{ label: 'Afrikaans', id: 'af' },
|
||||
{ label: 'Albanian', id: 'sq' },
|
||||
{ label: 'Amharic', id: 'am' },
|
||||
{ label: 'Arabic', id: 'ar' },
|
||||
{ label: 'Armenian', id: 'hy' },
|
||||
{ label: 'Assamese', id: 'as' },
|
||||
{ label: 'Aymara', id: 'ay' },
|
||||
{ label: 'Azerbaijani', id: 'az' },
|
||||
{ label: 'Bambara', id: 'bm' },
|
||||
{ label: 'Basque', id: 'eu' },
|
||||
{ label: 'Belarusian', id: 'be' },
|
||||
{ label: 'Bengali', id: 'bn' },
|
||||
{ label: 'Bhojpuri', id: 'bho' },
|
||||
{ label: 'Bosnian', id: 'bs' },
|
||||
{ label: 'Bulgarian', id: 'bg' },
|
||||
{ label: 'Catalan', id: 'ca' },
|
||||
{ label: 'Cebuano', id: 'ceb' },
|
||||
{ label: 'Chinese (Simplified)', id: 'zh-CN' },
|
||||
{ label: 'Chinese (Traditional)', id: 'zh-TW' },
|
||||
{ label: 'Corsican', id: 'co' },
|
||||
{ label: 'Croatian', id: 'hr' },
|
||||
{ label: 'Czech', id: 'cs' },
|
||||
{ label: 'Danish', id: 'da' },
|
||||
{ label: 'Dhivehi', id: 'dv' },
|
||||
{ label: 'Dogri', id: 'doi' },
|
||||
{ label: 'Dutch', id: 'nl' },
|
||||
{ label: 'English', id: 'en' },
|
||||
{ label: 'Esperanto', id: 'eo' },
|
||||
{ label: 'Estonian', id: 'et' },
|
||||
{ label: 'Ewe', id: 'ee' },
|
||||
{ label: 'Filipino', id: 'tl' },
|
||||
{ label: 'Finnish', id: 'fi' },
|
||||
{ label: 'French', id: 'fr' },
|
||||
{ label: 'Frisian', id: 'fy' },
|
||||
{ label: 'Galician', id: 'gl' },
|
||||
{ label: 'Georgian', id: 'ka' },
|
||||
{ label: 'German', id: 'de' },
|
||||
{ label: 'Greek', id: 'el' },
|
||||
{ label: 'Guarani', id: 'gn' },
|
||||
{ label: 'Gujarati', id: 'gu' },
|
||||
{ label: 'Haitian Creole', id: 'ht' },
|
||||
{ label: 'Hausa', id: 'ha' },
|
||||
{ label: 'Hawaiian', id: 'haw' },
|
||||
{ label: 'Hebrew', id: 'he' },
|
||||
{ label: 'Hindi', id: 'hi' },
|
||||
{ label: 'Hmong', id: 'hmn' },
|
||||
{ label: 'Hungarian', id: 'hu' },
|
||||
{ label: 'Icelandic', id: 'is' },
|
||||
{ label: 'Igbo', id: 'ig' },
|
||||
{ label: 'Ilocano', id: 'ilo' },
|
||||
{ label: 'Indonesian', id: 'id' },
|
||||
{ label: 'Irish', id: 'ga' },
|
||||
{ label: 'Italian', id: 'it' },
|
||||
{ label: 'Japanese', id: 'ja' },
|
||||
{ label: 'Javanese', id: 'jv' },
|
||||
{ label: 'Kannada', id: 'kn' },
|
||||
{ label: 'Kazakh', id: 'kk' },
|
||||
{ label: 'Khmer', id: 'km' },
|
||||
{ label: 'Kinyarwanda', id: 'rw' },
|
||||
{ label: 'Konkani', id: 'gom' },
|
||||
{ label: 'Korean', id: 'ko' },
|
||||
{ label: 'Krio', id: 'kri' },
|
||||
{ label: 'Kurdish', id: 'ku' },
|
||||
{ label: 'Kurdish (Sorani)', id: 'ckb' },
|
||||
{ label: 'Kyrgyz', id: 'ky' },
|
||||
{ label: 'Lao', id: 'lo' },
|
||||
{ label: 'Latin', id: 'la' },
|
||||
{ label: 'Latvian', id: 'lv' },
|
||||
{ label: 'Lingala', id: 'ln' },
|
||||
{ label: 'Lithuanian', id: 'lt' },
|
||||
{ label: 'Luganda', id: 'lg' },
|
||||
{ label: 'Luxembourgish', id: 'lb' },
|
||||
{ label: 'Macedonian', id: 'mk' },
|
||||
{ label: 'Maithili', id: 'mai' },
|
||||
{ label: 'Malagasy', id: 'mg' },
|
||||
{ label: 'Malay', id: 'ms' },
|
||||
{ label: 'Malayalam', id: 'ml' },
|
||||
{ label: 'Maltese', id: 'mt' },
|
||||
{ label: 'Maori', id: 'mi' },
|
||||
{ label: 'Marathi', id: 'mr' },
|
||||
{ label: 'Meiteilon (Manipuri)', id: 'mni-Mtei' },
|
||||
{ label: 'Mizo', id: 'lus' },
|
||||
{ label: 'Mongolian', id: 'mn' },
|
||||
{ label: 'Myanmar (Burmese)', id: 'my' },
|
||||
{ label: 'Nepali', id: 'ne' },
|
||||
{ label: 'Norwegian', id: 'no' },
|
||||
{ label: 'Nyanja (Chichewa)', id: 'ny' },
|
||||
{ label: 'Odia (Oriya)', id: 'or' },
|
||||
{ label: 'Oromo', id: 'om' },
|
||||
{ label: 'Pashto', id: 'ps' },
|
||||
{ label: 'Persian', id: 'fa' },
|
||||
{ label: 'Polish', id: 'pl' },
|
||||
{ label: 'Portuguese', id: 'pt' },
|
||||
{ label: 'Punjabi', id: 'pa' },
|
||||
{ label: 'Quechua', id: 'qu' },
|
||||
{ label: 'Romanian', id: 'ro' },
|
||||
{ label: 'Russian', id: 'ru' },
|
||||
{ label: 'Samoan', id: 'sm' },
|
||||
{ label: 'Sanskrit', id: 'sa' },
|
||||
{ label: 'Scots Gaelic', id: 'gd' },
|
||||
{ label: 'Sepedi', id: 'nso' },
|
||||
{ label: 'Serbian', id: 'sr' },
|
||||
{ label: 'Sesotho', id: 'st' },
|
||||
{ label: 'Shona', id: 'sn' },
|
||||
{ label: 'Sindhi', id: 'sd' },
|
||||
{ label: 'Sinhala', id: 'si' },
|
||||
{ label: 'Slovak', id: 'sk' },
|
||||
{ label: 'Slovenian', id: 'sl' },
|
||||
{ label: 'Somali', id: 'so' },
|
||||
{ label: 'Spanish', id: 'es' },
|
||||
{ label: 'Sundanese', id: 'su' },
|
||||
{ label: 'Swahili', id: 'sw' },
|
||||
{ label: 'Swedish', id: 'sv' },
|
||||
{ label: 'Tajik', id: 'tg' },
|
||||
{ label: 'Tamil', id: 'ta' },
|
||||
{ label: 'Tatar', id: 'tt' },
|
||||
{ label: 'Telugu', id: 'te' },
|
||||
{ label: 'Thai', id: 'th' },
|
||||
{ label: 'Tigrinya', id: 'ti' },
|
||||
{ label: 'Tsonga', id: 'ts' },
|
||||
{ label: 'Turkish', id: 'tr' },
|
||||
{ label: 'Turkmen', id: 'tk' },
|
||||
{ label: 'Twi (Akan)', id: 'ak' },
|
||||
{ label: 'Ukrainian', id: 'uk' },
|
||||
{ label: 'Urdu', id: 'ur' },
|
||||
{ label: 'Uyghur', id: 'ug' },
|
||||
{ label: 'Uzbek', id: 'uz' },
|
||||
{ label: 'Vietnamese', id: 'vi' },
|
||||
{ label: 'Welsh', id: 'cy' },
|
||||
{ label: 'Xhosa', id: 'xh' },
|
||||
{ label: 'Yiddish', id: 'yi' },
|
||||
{ label: 'Yoruba', id: 'yo' },
|
||||
{ label: 'Zulu', id: 'zu' },
|
||||
] satisfies { label: string; id: string }[]
|
||||
|
||||
export const GoogleTranslateBlock: BlockConfig = {
|
||||
type: 'google_translate',
|
||||
name: 'Google Translate',
|
||||
description: 'Translate text using Google Cloud Translation',
|
||||
longDescription:
|
||||
'Translate and detect languages using the Google Cloud Translation API. Supports auto-detection of the source language.',
|
||||
docsLink: 'https://docs.sim.ai/tools/google_translate',
|
||||
category: 'tools',
|
||||
bgColor: '#E0E0E0',
|
||||
icon: GoogleTranslateIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Translate Text', id: 'text' },
|
||||
{ label: 'Detect Language', id: 'detect' },
|
||||
],
|
||||
value: () => 'text',
|
||||
},
|
||||
{
|
||||
id: 'text',
|
||||
title: 'Text',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter text...',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'target',
|
||||
title: 'Target Language',
|
||||
type: 'dropdown',
|
||||
condition: { field: 'operation', value: 'text' },
|
||||
searchable: true,
|
||||
options: SUPPORTED_LANGUAGES,
|
||||
value: () => 'es',
|
||||
required: { field: 'operation', value: 'text' },
|
||||
},
|
||||
{
|
||||
id: 'source',
|
||||
title: 'Source Language',
|
||||
type: 'dropdown',
|
||||
condition: { field: 'operation', value: 'text' },
|
||||
searchable: true,
|
||||
options: [{ label: 'Auto-detect', id: '' }, ...SUPPORTED_LANGUAGES],
|
||||
value: () => '',
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your Google Cloud API key',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: ['google_translate_text', 'google_translate_detect'],
|
||||
config: {
|
||||
tool: (params) => `google_translate_${params.operation}`,
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
text: { type: 'string', description: 'Text to translate or detect language of' },
|
||||
target: { type: 'string', description: 'Target language code' },
|
||||
source: { type: 'string', description: 'Source language code (optional, auto-detected)' },
|
||||
apiKey: { type: 'string', description: 'Google Cloud API key' },
|
||||
},
|
||||
outputs: {
|
||||
translatedText: { type: 'string', description: 'Translated text' },
|
||||
detectedSourceLanguage: { type: 'string', description: 'Detected source language code' },
|
||||
language: { type: 'string', description: 'Detected language code' },
|
||||
confidence: { type: 'number', description: 'Detection confidence score' },
|
||||
},
|
||||
}
|
||||
@@ -10,6 +10,7 @@ import { ApifyBlock } from '@/blocks/blocks/apify'
|
||||
import { ApolloBlock } from '@/blocks/blocks/apollo'
|
||||
import { ArxivBlock } from '@/blocks/blocks/arxiv'
|
||||
import { AsanaBlock } from '@/blocks/blocks/asana'
|
||||
import { AttioBlock } from '@/blocks/blocks/attio'
|
||||
import { BrowserUseBlock } from '@/blocks/blocks/browser_use'
|
||||
import { CalComBlock } from '@/blocks/blocks/calcom'
|
||||
import { CalendlyBlock } from '@/blocks/blocks/calendly'
|
||||
@@ -22,6 +23,7 @@ import { ConditionBlock } from '@/blocks/blocks/condition'
|
||||
import { ConfluenceBlock, ConfluenceV2Block } from '@/blocks/blocks/confluence'
|
||||
import { CursorBlock, CursorV2Block } from '@/blocks/blocks/cursor'
|
||||
import { DatadogBlock } from '@/blocks/blocks/datadog'
|
||||
import { DevinBlock } from '@/blocks/blocks/devin'
|
||||
import { DiscordBlock } from '@/blocks/blocks/discord'
|
||||
import { DropboxBlock } from '@/blocks/blocks/dropbox'
|
||||
import { DSPyBlock } from '@/blocks/blocks/dspy'
|
||||
@@ -42,6 +44,7 @@ import { GitLabBlock } from '@/blocks/blocks/gitlab'
|
||||
import { GmailBlock, GmailV2Block } from '@/blocks/blocks/gmail'
|
||||
import { GongBlock } from '@/blocks/blocks/gong'
|
||||
import { GoogleSearchBlock } from '@/blocks/blocks/google'
|
||||
import { GoogleBigQueryBlock } from '@/blocks/blocks/google_bigquery'
|
||||
import { GoogleBooksBlock } from '@/blocks/blocks/google_books'
|
||||
import { GoogleCalendarBlock, GoogleCalendarV2Block } from '@/blocks/blocks/google_calendar'
|
||||
import { GoogleDocsBlock } from '@/blocks/blocks/google_docs'
|
||||
@@ -51,6 +54,8 @@ import { GoogleGroupsBlock } from '@/blocks/blocks/google_groups'
|
||||
import { GoogleMapsBlock } from '@/blocks/blocks/google_maps'
|
||||
import { GoogleSheetsBlock, GoogleSheetsV2Block } from '@/blocks/blocks/google_sheets'
|
||||
import { GoogleSlidesBlock, GoogleSlidesV2Block } from '@/blocks/blocks/google_slides'
|
||||
import { GoogleTasksBlock } from '@/blocks/blocks/google_tasks'
|
||||
import { GoogleTranslateBlock } from '@/blocks/blocks/google_translate'
|
||||
import { GoogleVaultBlock } from '@/blocks/blocks/google_vault'
|
||||
import { GrafanaBlock } from '@/blocks/blocks/grafana'
|
||||
import { GrainBlock } from '@/blocks/blocks/grain'
|
||||
@@ -187,6 +192,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
apollo: ApolloBlock,
|
||||
arxiv: ArxivBlock,
|
||||
asana: AsanaBlock,
|
||||
attio: AttioBlock,
|
||||
browser_use: BrowserUseBlock,
|
||||
calcom: CalComBlock,
|
||||
calendly: CalendlyBlock,
|
||||
@@ -201,6 +207,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
cursor: CursorBlock,
|
||||
cursor_v2: CursorV2Block,
|
||||
datadog: DatadogBlock,
|
||||
devin: DevinBlock,
|
||||
discord: DiscordBlock,
|
||||
dropbox: DropboxBlock,
|
||||
dspy: DSPyBlock,
|
||||
@@ -232,12 +239,15 @@ export const registry: Record<string, BlockConfig> = {
|
||||
google_forms: GoogleFormsBlock,
|
||||
google_groups: GoogleGroupsBlock,
|
||||
google_maps: GoogleMapsBlock,
|
||||
google_tasks: GoogleTasksBlock,
|
||||
google_translate: GoogleTranslateBlock,
|
||||
gong: GongBlock,
|
||||
google_search: GoogleSearchBlock,
|
||||
google_sheets: GoogleSheetsBlock,
|
||||
google_sheets_v2: GoogleSheetsV2Block,
|
||||
google_slides: GoogleSlidesBlock,
|
||||
google_slides_v2: GoogleSlidesV2Block,
|
||||
google_bigquery: GoogleBigQueryBlock,
|
||||
google_vault: GoogleVaultBlock,
|
||||
grafana: GrafanaBlock,
|
||||
grain: GrainBlock,
|
||||
|
||||
@@ -24,6 +24,7 @@ export { PanelLeft } from './panel-left'
|
||||
export { Play, PlayOutline } from './play'
|
||||
export { Redo } from './redo'
|
||||
export { Rocket } from './rocket'
|
||||
export { TerminalWindow } from './terminal-window'
|
||||
export { Trash } from './trash'
|
||||
export { Trash2 } from './trash2'
|
||||
export { Undo } from './undo'
|
||||
|
||||
26
apps/sim/components/emcn/icons/terminal-window.tsx
Normal file
26
apps/sim/components/emcn/icons/terminal-window.tsx
Normal file
@@ -0,0 +1,26 @@
|
||||
import type { SVGProps } from 'react'
|
||||
|
||||
/**
|
||||
* Terminal window icon component
|
||||
* @param props - SVG properties including className, fill, etc.
|
||||
*/
|
||||
export function TerminalWindow(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
width='16'
|
||||
height='14'
|
||||
viewBox='0 0 16 14'
|
||||
fill='none'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
{...props}
|
||||
>
|
||||
<path
|
||||
d='M3 0C1.34315 0 0 1.34315 0 3V11C0 12.6569 1.34315 14 3 14H13C14.6569 14 16 12.6569 16 11V3C16 1.34315 14.6569 0 13 0H3ZM1 3C1 1.89543 1.89543 1 3 1H13C14.1046 1 15 1.89543 15 3V4H1V3ZM1 5H15V11C15 12.1046 14.1046 13 13 13H3C1.89543 13 1 12.1046 1 11V5Z'
|
||||
fill='currentColor'
|
||||
/>
|
||||
<circle cx='3.5' cy='2.5' r='0.75' fill='currentColor' />
|
||||
<circle cx='5.75' cy='2.5' r='0.75' fill='currentColor' />
|
||||
<circle cx='8' cy='2.5' r='0.75' fill='currentColor' />
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
@@ -939,6 +939,25 @@ export function GoogleIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function DevinIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 500 500' fill='none' xmlns='http://www.w3.org/2000/svg'>
|
||||
<path
|
||||
d='M59.29,209.39l48.87,28.21c1.75,1.01,3.71,1.51,5.67,1.51c1.95,0,3.92-0.52,5.67-1.51l48.87-28.21c0,0,0.14-0.11,0.2-0.16c0.74-0.45,1.44-0.99,2.07-1.6c0.09-0.09,0.18-0.2,0.27-0.29c0.54-0.58,1.03-1.21,1.44-1.89c0.06-0.11,0.16-0.2,0.2-0.32c0.43-0.74,0.74-1.53,0.99-2.37c0.05-0.18,0.09-0.36,0.14-0.54c0.2-0.86,0.36-1.74,0.36-2.66v-28.21c0-10.89,5.87-21.03,15.3-26.48c9.42-5.45,21.15-5.44,30.59,0l24.43,14.11c0.79,0.45,1.62,0.77,2.47,1.01c0.18,0.05,0.37,0.11,0.54,0.16c0.83,0.2,1.69,0.32,2.54,0.34c0.05,0,0.09,0,0.11,0c0.09,0,0.18-0.05,0.26-0.05c0.79,0,1.58-0.11,2.34-0.32c0.14-0.03,0.27-0.05,0.4-0.09c0.83-0.23,1.64-0.57,2.41-0.99c0.06-0.05,0.16-0.05,0.23-0.09l48.87-28.21c3.51-2.03,5.67-5.76,5.67-9.81V64.52c0-4.05-2.16-7.78-5.67-9.81l-48.91-28.19c-3.51-2.03-7.81-2.03-11.32,0l-48.87,28.21c0,0-0.14,0.11-0.2,0.16c-0.74,0.45-1.44,0.99-2.07,1.6c-0.09,0.09-0.18,0.2-0.27,0.29c-0.54,0.58-1.03,1.21-1.44,1.89c-0.06,0.11-0.16,0.2-0.2,0.31c-0.43,0.74-0.74,1.53-0.99,2.37c-0.05,0.18-0.09,0.36-0.14,0.54c-0.2,0.86-0.36,1.74-0.36,2.66v28.21c0,10.89-5.87,21.03-15.3,26.5c-9.42,5.44-21.15,5.44-30.59,0l-24.42-14.1c-0.79-0.45-1.63-0.77-2.47-1.01c-0.18-0.05-0.36-0.11-0.54-0.16c-0.84-0.2-1.69-0.31-2.55-0.34c-0.14,0-0.25,0-0.38,0c-0.81,0-1.6,0.11-2.37,0.31c-0.14,0.02-0.25,0.05-0.38,0.09c-0.82,0.23-1.63,0.57-2.4,1c-0.06,0.05-0.16,0.05-0.23,0.09l-48.84,28.24c-3.51,2.03-5.67,5.76-5.67,9.81v56.42c0,4.05,2.16,7.78,5.67,9.81C59.29,209.41,59.29,209.39,59.29,209.39z'
|
||||
fill='#2A6DCE'
|
||||
/>
|
||||
<path
|
||||
d='M325.46,223.49c9.42-5.44,21.15-5.44,30.59,0l24.43,14.11c0.79,0.45,1.62,0.77,2.47,1.01c0.18,0.05,0.36,0.11,0.54,0.16c0.83,0.2,1.69,0.31,2.54,0.34c0.05,0,0.09,0,0.11,0c0.09,0,0.18-0.03,0.26-0.05c0.79,0,1.58-0.11,2.34-0.31c0.14-0.03,0.27-0.05,0.4-0.09c0.83-0.23,1.62-0.57,2.41-0.99c0.06-0.05,0.16-0.05,0.25-0.09l48.87-28.21c3.51-2.03,5.67-5.76,5.67-9.81v-56.43c0-4.05-2.16-7.78-5.67-9.81l-48.84-28.22c-3.51-2.03-7.81-2.03-11.32,0l-48.87,28.21c0,0-0.14,0.11-0.2,0.16c-0.74,0.45-1.44,0.99-2.07,1.6c-0.09,0.09-0.18,0.2-0.26,0.29c-0.54,0.58-1.03,1.21-1.44,1.89c-0.06,0.11-0.16,0.2-0.2,0.32c-0.43,0.74-0.74,1.53-0.99,2.37c-0.05,0.18-0.09,0.36-0.14,0.54c-0.2,0.86-0.36,1.74-0.36,2.66v28.21c0,10.89-5.87,21.03-15.3,26.5c-9.42,5.44-21.15,5.44-30.59,0l-24.43-14.11c-0.79-0.45-1.62-0.77-2.47-1.01c-0.18-0.05-0.36-0.11-0.54-0.16c-0.83-0.2-1.69-0.32-2.54-0.34c-0.14,0-0.25,0-0.38,0c-0.81,0-1.6,0.11-2.37,0.32c-0.14,0.03-0.25,0.05-0.38,0.09c-0.83,0.23-1.64,0.57-2.41,0.99c-0.06,0.05-0.16,0.05-0.23,0.09l-48.87,28.21c-3.51,2.03-5.67,5.76-5.67,9.81v56.43c0,4.05,2.16,7.78,5.67,9.81l48.87,28.21c0,0,0.16,0.05,0.23,0.09c0.77,0.43,1.58,0.77,2.41,0.99c0.14,0.05,0.27,0.05,0.4,0.09c0.77,0.18,1.55,0.29,2.34,0.32c0.09,0,0.18,0.05,0.27,0.05c0.05,0,0.09,0,0.11,0c0.86,0,1.69-0.14,2.54-0.34c0.18-0.05,0.36-0.09,0.54-0.16c0.86-0.25,1.69-0.57,2.47-1.01l24.43-14.11c9.42-5.44,21.15-5.44,30.59,0c9.42,5.44,15.3,15.59,15.3,26.48v28.21c0,0.92,0.14,1.8,0.36,2.66c0.05,0.18,0.09,0.36,0.14,0.54c0.25,0.83,0.56,1.62,0.99,2.37c0.06,0.11,0.14,0.2,0.2,0.31c0.4,0.68,0.9,1.31,1.44,1.89c0.09,0.09,0.18,0.2,0.26,0.29c0.61,0.6,1.31,1.12,2.07,1.6c0.06,0.05,0.11,0.11,0.2,0.16l48.87,28.21c1.75,1.01,3.72,1.51,5.67,1.51s3.92-0.52,5.67-1.51l48.87-28.21c3.51-2.03,5.67-5.76,5.67-9.81v-56.43c0-4.05-2.16-7.78-5.67-9.81l-48.87-28.21c0,0-0.16-0.05-0.23-0.09c-0.77-0.43-1.58-0.77-2.41-0.99c-0.14-0.05-0.25-0.05-0.38-0.09c-0.79-0.18-1.57-0.29-2.38-0.32c-0.11,0-0.25,0-0.36,0c-0.86,0-1.71,0.14-2.54,0.34c-0.18,0.05-0.34,0.09-0.52,0.16c-0.86,0.25-1.69,0
.57-2.47,1.01l-24.43,14.11c-9.42,5.44-21.15,5.44-30.58,0c-9.42-5.44-15.3-15.59-15.3-26.5c0-10.91,5.87-21.03,15.3-26.48C325.55,223.49,325.46,223.49,325.46,223.49z'
|
||||
fill='#1DC19C'
|
||||
/>
|
||||
<path
|
||||
d='M304.5,369.22l-48.87-28.21c0,0-0.16-0.05-0.23-0.09c-0.77-0.43-1.57-0.77-2.41-0.99c-0.14-0.05-0.27-0.05-0.4-0.09c-0.79-0.18-1.57-0.29-2.37-0.32c-0.14,0-0.25,0-0.38,0c-0.86,0-1.71,0.14-2.54,0.34c-0.18,0.05-0.34,0.09-0.52,0.16c-0.86,0.25-1.69,0.57-2.47,1.01l-24.43,14.11c-9.42,5.44-21.15,5.44-30.58,0c-9.42-5.44-15.3-15.59-15.3-26.5v-28.22c0-0.92-0.14-1.8-0.36-2.66c-0.05-0.18-0.09-0.36-0.14-0.54c-0.25-0.83-0.57-1.62-0.99-2.37c-0.06-0.11-0.14-0.2-0.2-0.32c-0.4-0.68-0.9-1.31-1.44-1.89c-0.09-0.09-0.18-0.2-0.27-0.29c-0.6-0.6-1.31-1.12-2.07-1.6c-0.06-0.05-0.11-0.11-0.2-0.16l-48.87-28.21c-3.51-2.03-7.81-2.03-11.32,0L59.28,290.6c-3.51,2.03-5.67,5.76-5.67,9.81v56.43c0,4.05,2.16,7.78,5.67,9.81l48.87,28.21c0,0,0.16,0.06,0.23,0.09c0.77,0.43,1.55,0.77,2.38,0.99c0.14,0.05,0.27,0.06,0.4,0.09c0.77,0.18,1.55,0.29,2.34,0.32c0.09,0,0.18,0.05,0.29,0.05c0.05,0,0.09,0,0.14,0c0.86,0,1.69-0.14,2.52-0.34c0.18-0.05,0.36-0.09,0.54-0.16c0.86-0.25,1.69-0.57,2.47-1.01l24.43-14.11c9.42-5.44,21.15-5.44,30.59,0c9.42,5.44,15.3,15.59,15.3,26.48v28.21c0,0.92,0.14,1.8,0.36,2.66c0.05,0.18,0.09,0.36,0.14,0.54c0.25,0.83,0.57,1.62,0.99,2.37c0.06,0.11,0.14,0.2,0.2,0.32c0.4,0.68,0.9,1.31,1.44,1.89c0.09,0.09,0.18,0.2,0.27,0.29c0.61,0.61,1.31,1.12,2.07,1.6c0.06,0.05,0.11,0.11,0.2,0.16l48.87,28.21c1.75,1.01,3.71,1.51,5.67,1.51c1.96,0,3.92-0.52,5.67-1.51l48.87-28.21c3.51-2.03,5.67-5.76,5.67-9.81v-56.43c0-4.05-2.16-7.78-5.67-9.81L304.5,369.22z'
|
||||
fill='#1796E2'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function DiscordIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
@@ -1302,6 +1321,21 @@ export function GoogleCalendarIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function GoogleTasksIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 527.1 500' xmlns='http://www.w3.org/2000/svg'>
|
||||
<polygon
|
||||
fill='#0066DA'
|
||||
points='410.4,58.3 368.8,81.2 348.2,120.6 368.8,168.8 407.8,211 450,187.5 475.9,142.8 450,87.5'
|
||||
/>
|
||||
<path
|
||||
fill='#2684FC'
|
||||
d='M249.3,219.4l98.9-98.9c29.1,22.1,50.5,53.8,59.6,90.4L272.1,346.7c-12.2,12.2-32,12.2-44.2,0l-91.5-91.5 c-9.8-9.8-9.8-25.6,0-35.3l39-39c9.8-9.8,25.6-9.8,35.3,0L249.3,219.4z M519.8,63.6l-39.7-39.7c-9.7-9.7-25.6-9.7-35.3,0 l-34.4,34.4c27.5,23,49.9,51.8,65.5,84.5l43.9-43.9C529.6,89.2,529.6,73.3,519.8,63.6z M412.5,250c0,89.8-72.8,162.5-162.5,162.5 S87.5,339.8,87.5,250S160.2,87.5,250,87.5c36.9,0,70.9,12.3,98.2,33.1l62.2-62.2C367,21.9,311.1,0,250,0C111.9,0,0,111.9,0,250 s111.9,250,250,250s250-111.9,250-250c0-38.3-8.7-74.7-24.1-107.2L407.8,211C410.8,223.5,412.5,236.6,412.5,250z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function SupabaseIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
const gradient0 = `supabase_paint0_${id}`
|
||||
@@ -3430,6 +3464,23 @@ export const ResendIcon = (props: SVGProps<SVGSVGElement>) => (
|
||||
</svg>
|
||||
)
|
||||
|
||||
export const GoogleBigQueryIcon = (props: SVGProps<SVGSVGElement>) => (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 64 64'>
|
||||
<path
|
||||
d='M14.48 58.196L.558 34.082c-.744-1.288-.744-2.876 0-4.164L14.48 5.805c.743-1.287 2.115-2.08 3.6-2.082h27.857c1.48.007 2.845.8 3.585 2.082l13.92 24.113c.744 1.288.744 2.876 0 4.164L49.52 58.196c-.743 1.287-2.115 2.08-3.6 2.082H18.07c-1.483-.005-2.85-.798-3.593-2.082z'
|
||||
fill='#4386fa'
|
||||
/>
|
||||
<path
|
||||
d='M40.697 24.235s3.87 9.283-1.406 14.545-14.883 1.894-14.883 1.894L43.95 60.27h1.984c1.486-.002 2.858-.796 3.6-2.082L58.75 42.23z'
|
||||
opacity='.1'
|
||||
/>
|
||||
<path
|
||||
d='M45.267 43.23L41 38.953a.67.67 0 0 0-.158-.12 11.63 11.63 0 1 0-2.032 2.037.67.67 0 0 0 .113.15l4.277 4.277a.67.67 0 0 0 .947 0l1.12-1.12a.67.67 0 0 0 0-.947zM31.64 40.464a8.75 8.75 0 1 1 8.749-8.749 8.75 8.75 0 0 1-8.749 8.749zm-5.593-9.216v3.616c.557.983 1.363 1.803 2.338 2.375v-6.013zm4.375-2.998v9.772a6.45 6.45 0 0 0 2.338 0V28.25zm6.764 6.606v-2.142H34.85v4.5a6.43 6.43 0 0 0 2.338-2.368z'
|
||||
fill='#fff'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
|
||||
export const GoogleVaultIcon = (props: SVGProps<SVGSVGElement>) => (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 82 82'>
|
||||
<path
|
||||
@@ -3552,6 +3603,15 @@ export function TrelloIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function AttioIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 60.9 50' fill='currentColor'>
|
||||
<path d='M60.3,34.8l-5.1-8.1c0,0,0,0,0,0L54.7,26c-0.8-1.2-2.1-1.9-3.5-1.9L43,24L42.5,25l-9.8,15.7l-0.5,0.9l4.1,6.6c0.8,1.2,2.1,1.9,3.5,1.9h11.5c1.4,0,2.8-0.7,3.5-1.9l0.4-0.6c0,0,0,0,0,0l5.1-8.2C61.1,37.9,61.1,36.2,60.3,34.8L60.3,34.8z M58.7,38.3l-5.1,8.2c0,0,0,0.1-0.1,0.1c-0.2,0.2-0.4,0.2-0.5,0.2c-0.1,0-0.4,0-0.6-0.3l-5.1-8.2c-0.1-0.1-0.1-0.2-0.2-0.3c0-0.1-0.1-0.2-0.1-0.3c-0.1-0.4-0.1-0.8,0-1.3c0.1-0.2,0.1-0.4,0.3-0.6l5.1-8.1c0,0,0,0,0,0c0.1-0.2,0.3-0.3,0.4-0.3c0.1,0,0.1,0,0.1,0c0,0,0,0,0.1,0c0.1,0,0.4,0,0.6,0.3l5.1,8.1C59.2,36.6,59.2,37.5,58.7,38.3L58.7,38.3z' />
|
||||
<path d='M45.2,15.1c0.8-1.3,0.8-3.1,0-4.4l-5.1-8.1l-0.4-0.7C38.9,0.7,37.6,0,36.2,0H24.7c-1.4,0-2.7,0.7-3.5,1.9L0.6,34.9C0.2,35.5,0,36.3,0,37c0,0.8,0.2,1.5,0.6,2.2l5.5,8.8C6.9,49.3,8.2,50,9.7,50h11.5c1.4,0,2.8-0.7,3.5-1.9l0.4-0.7c0,0,0,0,0,0c0,0,0,0,0,0l4.1-6.6l12.1-19.4L45.2,15.1L45.2,15.1z M44,13c0,0.4-0.1,0.8-0.4,1.2L23.5,46.4c-0.2,0.3-0.5,0.3-0.6,0.3c-0.1,0-0.4,0-0.6-0.3l-5.1-8.2c-0.5-0.7-0.5-1.7,0-2.4L37.4,3.6c0.2-0.3,0.5-0.3,0.6-0.3c0.1,0,0.4,0,0.6,0.3l5.1,8.1C43.9,12.1,44,12.5,44,13z' />
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function AsanaIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none'>
|
||||
@@ -5436,6 +5496,34 @@ export function GoogleMapsIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function GoogleTranslateIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 998.1 998.3'>
|
||||
<path
|
||||
fill='#DBDBDB'
|
||||
d='M931.7 998.3c36.5 0 66.4-29.4 66.4-65.4V265.8c0-36-29.9-65.4-66.4-65.4H283.6l260.1 797.9h388z'
|
||||
/>
|
||||
<path
|
||||
fill='#DCDCDC'
|
||||
d='M931.7 230.4c9.7 0 18.9 3.8 25.8 10.6 6.8 6.7 10.6 15.5 10.6 24.8v667.1c0 9.3-3.7 18.1-10.6 24.8-6.9 6.8-16.1 10.6-25.8 10.6H565.5L324.9 230.4h606.8m0-30H283.6l260.1 797.9h388c36.5 0 66.4-29.4 66.4-65.4V265.8c0-36-29.9-65.4-66.4-65.4z'
|
||||
/>
|
||||
<polygon fill='#4352B8' points='482.3,809.8 543.7,998.3 714.4,809.8' />
|
||||
<path
|
||||
fill='#607988'
|
||||
d='M936.1 476.1V437H747.6v-63.2h-61.2V437H566.1v39.1h239.4c-12.8 45.1-41.1 87.7-68.7 120.8-48.9-57.9-49.1-76.7-49.1-76.7h-50.8s2.1 28.2 70.7 108.6c-22.3 22.8-39.2 36.3-39.2 36.3l15.6 48.8s23.6-20.3 53.1-51.6c29.6 32.1 67.8 70.7 117.2 116.7l32.1-32.1c-52.9-48-91.7-86.1-120.2-116.7 38.2-45.2 77-102.1 85.2-154.2H936v.1z'
|
||||
/>
|
||||
<path
|
||||
fill='#4285F4'
|
||||
d='M66.4 0C29.9 0 0 29.9 0 66.5v677c0 36.5 29.9 66.4 66.4 66.4h648.1L454.4 0h-388z'
|
||||
/>
|
||||
<path
|
||||
fill='#EEEEEE'
|
||||
d='M371.4 430.6c-2.5 30.3-28.4 75.2-91.1 75.2-54.3 0-98.3-44.9-98.3-100.2s44-100.2 98.3-100.2c30.9 0 51.5 13.4 63.3 24.3l41.2-39.6c-27.1-25-62.4-40.6-104.5-40.6-86.1 0-156 69.9-156 156s69.9 156 156 156c90.2 0 149.8-63.3 149.8-152.6 0-12.8-1.6-22.2-3.7-31.8h-146v53.4l91 .1z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function DsPyIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='30 28 185 175' fill='none'>
|
||||
|
||||
@@ -158,7 +158,6 @@ export const DEFAULTS = {
|
||||
MAX_LOOP_ITERATIONS: 1000,
|
||||
MAX_FOREACH_ITEMS: 1000,
|
||||
MAX_PARALLEL_BRANCHES: 20,
|
||||
MAX_WORKFLOW_DEPTH: 10,
|
||||
MAX_SSE_CHILD_DEPTH: 3,
|
||||
EXECUTION_TIME: 0,
|
||||
TOKENS: {
|
||||
|
||||
@@ -80,7 +80,10 @@ export class BlockExecutor {
|
||||
const startTime = performance.now()
|
||||
let resolvedInputs: Record<string, any> = {}
|
||||
|
||||
const nodeMetadata = this.buildNodeMetadata(node)
|
||||
const nodeMetadata = {
|
||||
...this.buildNodeMetadata(node),
|
||||
executionOrder: blockLog?.executionOrder,
|
||||
}
|
||||
let cleanupSelfReference: (() => void) | undefined
|
||||
|
||||
if (block.metadata?.id === BlockType.HUMAN_IN_THE_LOOP) {
|
||||
|
||||
@@ -89,7 +89,8 @@ export interface ExecutionCallbacks {
|
||||
onChildWorkflowInstanceReady?: (
|
||||
blockId: string,
|
||||
childWorkflowInstanceId: string,
|
||||
iterationContext?: IterationContext
|
||||
iterationContext?: IterationContext,
|
||||
executionOrder?: number
|
||||
) => void
|
||||
}
|
||||
|
||||
@@ -155,7 +156,8 @@ export interface ContextExtensions {
|
||||
onChildWorkflowInstanceReady?: (
|
||||
blockId: string,
|
||||
childWorkflowInstanceId: string,
|
||||
iterationContext?: IterationContext
|
||||
iterationContext?: IterationContext,
|
||||
executionOrder?: number
|
||||
) => void
|
||||
|
||||
/**
|
||||
|
||||
@@ -123,7 +123,6 @@ describe('AgentBlockHandler', () => {
|
||||
let handler: AgentBlockHandler
|
||||
let mockBlock: SerializedBlock
|
||||
let mockContext: ExecutionContext
|
||||
let originalPromiseAll: any
|
||||
|
||||
beforeEach(() => {
|
||||
handler = new AgentBlockHandler()
|
||||
@@ -135,8 +134,6 @@ describe('AgentBlockHandler', () => {
|
||||
configurable: true,
|
||||
})
|
||||
|
||||
originalPromiseAll = Promise.all
|
||||
|
||||
mockBlock = {
|
||||
id: 'test-agent-block',
|
||||
metadata: { id: BlockType.AGENT, name: 'Test Agent' },
|
||||
@@ -209,8 +206,6 @@ describe('AgentBlockHandler', () => {
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
Promise.all = originalPromiseAll
|
||||
|
||||
try {
|
||||
Object.defineProperty(global, 'window', {
|
||||
value: undefined,
|
||||
@@ -271,38 +266,7 @@ describe('AgentBlockHandler', () => {
|
||||
expect(result).toEqual(expectedOutput)
|
||||
})
|
||||
|
||||
it('should preserve executeFunction for custom tools with different usageControl settings', async () => {
|
||||
let capturedTools: any[] = []
|
||||
|
||||
Promise.all = vi.fn().mockImplementation((promises: Promise<any>[]) => {
|
||||
const result = originalPromiseAll.call(Promise, promises)
|
||||
|
||||
result.then((tools: any[]) => {
|
||||
if (tools?.length) {
|
||||
capturedTools = tools.filter((t) => t !== null)
|
||||
}
|
||||
})
|
||||
|
||||
return result
|
||||
})
|
||||
|
||||
mockExecuteProviderRequest.mockResolvedValueOnce({
|
||||
content: 'Using tools to respond',
|
||||
model: 'mock-model',
|
||||
tokens: { input: 10, output: 20, total: 30 },
|
||||
toolCalls: [
|
||||
{
|
||||
name: 'auto_tool',
|
||||
arguments: { input: 'test input for auto tool' },
|
||||
},
|
||||
{
|
||||
name: 'force_tool',
|
||||
arguments: { input: 'test input for force tool' },
|
||||
},
|
||||
],
|
||||
timing: { total: 100 },
|
||||
})
|
||||
|
||||
it('should preserve usageControl for custom tools and filter out "none"', async () => {
|
||||
const inputs = {
|
||||
model: 'gpt-4o',
|
||||
userPrompt: 'Test custom tools with different usageControl settings',
|
||||
@@ -372,13 +336,14 @@ describe('AgentBlockHandler', () => {
|
||||
|
||||
await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
expect(Promise.all).toHaveBeenCalled()
|
||||
const providerCall = mockExecuteProviderRequest.mock.calls[0]
|
||||
const tools = providerCall[1].tools
|
||||
|
||||
expect(capturedTools.length).toBe(2)
|
||||
expect(tools.length).toBe(2)
|
||||
|
||||
const autoTool = capturedTools.find((t) => t.name === 'auto_tool')
|
||||
const forceTool = capturedTools.find((t) => t.name === 'force_tool')
|
||||
const noneTool = capturedTools.find((t) => t.name === 'none_tool')
|
||||
const autoTool = tools.find((t: any) => t.name === 'auto_tool')
|
||||
const forceTool = tools.find((t: any) => t.name === 'force_tool')
|
||||
const noneTool = tools.find((t: any) => t.name === 'none_tool')
|
||||
|
||||
expect(autoTool).toBeDefined()
|
||||
expect(forceTool).toBeDefined()
|
||||
@@ -386,37 +351,6 @@ describe('AgentBlockHandler', () => {
|
||||
|
||||
expect(autoTool.usageControl).toBe('auto')
|
||||
expect(forceTool.usageControl).toBe('force')
|
||||
|
||||
expect(typeof autoTool.executeFunction).toBe('function')
|
||||
expect(typeof forceTool.executeFunction).toBe('function')
|
||||
|
||||
await autoTool.executeFunction({ input: 'test input' })
|
||||
expect(mockExecuteTool).toHaveBeenCalledWith(
|
||||
'function_execute',
|
||||
expect.objectContaining({
|
||||
code: 'return { result: "auto tool executed", input }',
|
||||
input: 'test input',
|
||||
}),
|
||||
false, // skipPostProcess
|
||||
expect.any(Object) // execution context
|
||||
)
|
||||
|
||||
await forceTool.executeFunction({ input: 'another test' })
|
||||
expect(mockExecuteTool).toHaveBeenNthCalledWith(
|
||||
2, // Check the 2nd call
|
||||
'function_execute',
|
||||
expect.objectContaining({
|
||||
code: 'return { result: "force tool executed", input }',
|
||||
input: 'another test',
|
||||
}),
|
||||
false, // skipPostProcess
|
||||
expect.any(Object) // execution context
|
||||
)
|
||||
|
||||
const providerCall = mockExecuteProviderRequest.mock.calls[0]
|
||||
const requestBody = providerCall[1]
|
||||
|
||||
expect(requestBody.tools.length).toBe(2)
|
||||
})
|
||||
|
||||
it('should filter out tools with usageControl set to "none"', async () => {
|
||||
@@ -1763,6 +1697,52 @@ describe('AgentBlockHandler', () => {
|
||||
expect(providerCallArgs[1].tools[0].name).toBe('search_files')
|
||||
})
|
||||
|
||||
it('should pass callChain to executeProviderRequest for MCP cycle detection', async () => {
|
||||
mockFetch.mockImplementation(() =>
|
||||
Promise.resolve({ ok: true, json: () => Promise.resolve({}) })
|
||||
)
|
||||
|
||||
const inputs = {
|
||||
model: 'gpt-4o',
|
||||
userPrompt: 'Search for files',
|
||||
apiKey: 'test-api-key',
|
||||
tools: [
|
||||
{
|
||||
type: 'mcp',
|
||||
title: 'search_files',
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
query: { type: 'string', description: 'Search query' },
|
||||
},
|
||||
required: ['query'],
|
||||
},
|
||||
params: {
|
||||
serverId: 'mcp-search-server',
|
||||
toolName: 'search_files',
|
||||
serverName: 'search',
|
||||
},
|
||||
usageControl: 'auto' as const,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const contextWithCallChain = {
|
||||
...mockContext,
|
||||
workspaceId: 'test-workspace-123',
|
||||
workflowId: 'test-workflow-456',
|
||||
callChain: ['wf-parent', 'test-workflow-456'],
|
||||
}
|
||||
|
||||
mockGetProviderFromModel.mockReturnValue('openai')
|
||||
|
||||
await handler.execute(contextWithCallChain, mockBlock, inputs)
|
||||
|
||||
expect(mockExecuteProviderRequest).toHaveBeenCalled()
|
||||
const providerCallArgs = mockExecuteProviderRequest.mock.calls[0][1]
|
||||
expect(providerCallArgs.callChain).toEqual(['wf-parent', 'test-workflow-456'])
|
||||
})
|
||||
|
||||
it('should handle multiple MCP tools from the same server efficiently', async () => {
|
||||
const fetchCalls: any[] = []
|
||||
|
||||
@@ -2139,21 +2119,10 @@ describe('AgentBlockHandler', () => {
|
||||
expect(tools.length).toBe(0)
|
||||
})
|
||||
|
||||
it('should use DB code for executeFunction when customToolId resolves', async () => {
|
||||
it('should use DB schema when customToolId resolves', async () => {
|
||||
const toolId = 'custom-tool-123'
|
||||
mockFetchForCustomTool(toolId)
|
||||
|
||||
let capturedTools: any[] = []
|
||||
Promise.all = vi.fn().mockImplementation((promises: Promise<any>[]) => {
|
||||
const result = originalPromiseAll.call(Promise, promises)
|
||||
result.then((tools: any[]) => {
|
||||
if (tools?.length) {
|
||||
capturedTools = tools.filter((t) => t !== null)
|
||||
}
|
||||
})
|
||||
return result
|
||||
})
|
||||
|
||||
const inputs = {
|
||||
model: 'gpt-4o',
|
||||
userPrompt: 'Format a report',
|
||||
@@ -2174,19 +2143,12 @@ describe('AgentBlockHandler', () => {
|
||||
|
||||
await handler.execute(mockContext, mockBlock, inputs)
|
||||
|
||||
expect(capturedTools.length).toBe(1)
|
||||
expect(typeof capturedTools[0].executeFunction).toBe('function')
|
||||
expect(mockExecuteProviderRequest).toHaveBeenCalled()
|
||||
const providerCall = mockExecuteProviderRequest.mock.calls[0]
|
||||
const tools = providerCall[1].tools
|
||||
|
||||
await capturedTools[0].executeFunction({ title: 'Q1', format: 'pdf' })
|
||||
|
||||
expect(mockExecuteTool).toHaveBeenCalledWith(
|
||||
'function_execute',
|
||||
expect.objectContaining({
|
||||
code: dbCode,
|
||||
}),
|
||||
false,
|
||||
expect.any(Object)
|
||||
)
|
||||
expect(tools.length).toBe(1)
|
||||
expect(tools[0].name).toBe('formatReport')
|
||||
})
|
||||
|
||||
it('should not fetch from DB when no customToolId is present', async () => {
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, mcpServers } from '@sim/db/schema'
|
||||
import { mcpServers } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, inArray, isNull } from 'drizzle-orm'
|
||||
import { createMcpToolId } from '@/lib/mcp/utils'
|
||||
import { refreshTokenIfNeeded, resolveOAuthAccountId } from '@/app/api/auth/oauth/utils'
|
||||
import { getAllBlocks } from '@/blocks'
|
||||
import type { BlockOutput } from '@/blocks/types'
|
||||
import {
|
||||
@@ -30,10 +29,10 @@ import type { BlockHandler, ExecutionContext, StreamingExecution } from '@/execu
|
||||
import { collectBlockData } from '@/executor/utils/block-data'
|
||||
import { buildAPIUrl, buildAuthHeaders } from '@/executor/utils/http'
|
||||
import { stringifyJSON } from '@/executor/utils/json'
|
||||
import { resolveVertexCredential } from '@/executor/utils/vertex-credential'
|
||||
import { executeProviderRequest } from '@/providers'
|
||||
import { getProviderFromModel, transformBlockTool } from '@/providers/utils'
|
||||
import type { SerializedBlock } from '@/serializer/types'
|
||||
import { executeTool } from '@/tools'
|
||||
import { getTool, getToolAsync } from '@/tools/utils'
|
||||
|
||||
const logger = createLogger('AgentBlockHandler')
|
||||
@@ -276,14 +275,12 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
const userProvidedParams = tool.params || {}
|
||||
|
||||
let schema = tool.schema
|
||||
let code = tool.code
|
||||
let title = tool.title
|
||||
|
||||
if (tool.customToolId) {
|
||||
const resolved = await this.fetchCustomToolById(ctx, tool.customToolId)
|
||||
if (resolved) {
|
||||
schema = resolved.schema
|
||||
code = resolved.code
|
||||
title = resolved.title
|
||||
} else if (!schema) {
|
||||
logger.error(`Custom tool not found: ${tool.customToolId}`)
|
||||
@@ -296,7 +293,7 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
return null
|
||||
}
|
||||
|
||||
const { filterSchemaForLLM, mergeToolParameters } = await import('@/tools/params')
|
||||
const { filterSchemaForLLM } = await import('@/tools/params')
|
||||
|
||||
const filteredSchema = filterSchemaForLLM(schema.function.parameters, userProvidedParams)
|
||||
|
||||
@@ -313,43 +310,6 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
usageControl: tool.usageControl || 'auto',
|
||||
}
|
||||
|
||||
if (code) {
|
||||
base.executeFunction = async (callParams: Record<string, any>) => {
|
||||
const mergedParams = mergeToolParameters(userProvidedParams, callParams)
|
||||
|
||||
const { blockData, blockNameMapping, blockOutputSchemas } = collectBlockData(ctx)
|
||||
|
||||
const result = await executeTool(
|
||||
'function_execute',
|
||||
{
|
||||
code,
|
||||
...mergedParams,
|
||||
timeout: tool.timeout ?? AGENT.DEFAULT_FUNCTION_TIMEOUT,
|
||||
envVars: ctx.environmentVariables || {},
|
||||
workflowVariables: ctx.workflowVariables || {},
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
blockOutputSchemas,
|
||||
isCustomTool: true,
|
||||
_context: {
|
||||
workflowId: ctx.workflowId,
|
||||
workspaceId: ctx.workspaceId,
|
||||
userId: ctx.userId,
|
||||
isDeployedContext: ctx.isDeployedContext,
|
||||
enforceCredentialAccess: ctx.enforceCredentialAccess,
|
||||
},
|
||||
},
|
||||
false,
|
||||
ctx
|
||||
)
|
||||
|
||||
if (!result.success) {
|
||||
throw new Error(result.error || 'Function execution failed')
|
||||
}
|
||||
return result.output
|
||||
}
|
||||
}
|
||||
|
||||
return base
|
||||
}
|
||||
|
||||
@@ -359,7 +319,7 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
private async fetchCustomToolById(
|
||||
ctx: ExecutionContext,
|
||||
customToolId: string
|
||||
): Promise<{ schema: any; code: string; title: string } | null> {
|
||||
): Promise<{ schema: any; title: string } | null> {
|
||||
if (typeof window !== 'undefined') {
|
||||
try {
|
||||
const { getCustomTool } = await import('@/hooks/queries/custom-tools')
|
||||
@@ -367,7 +327,6 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
if (tool) {
|
||||
return {
|
||||
schema: tool.schema,
|
||||
code: tool.code || '',
|
||||
title: tool.title,
|
||||
}
|
||||
}
|
||||
@@ -416,7 +375,6 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
|
||||
return {
|
||||
schema: tool.schema,
|
||||
code: tool.code || '',
|
||||
title: tool.title,
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -481,65 +439,15 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
tool: ToolInput
|
||||
): Promise<any> {
|
||||
const { serverId, toolName, serverName, ...userProvidedParams } = tool.params || {}
|
||||
|
||||
const { filterSchemaForLLM } = await import('@/tools/params')
|
||||
const filteredSchema = filterSchemaForLLM(
|
||||
tool.schema || { type: 'object', properties: {} },
|
||||
userProvidedParams
|
||||
)
|
||||
|
||||
const toolId = createMcpToolId(serverId, toolName)
|
||||
|
||||
return {
|
||||
id: toolId,
|
||||
name: toolName,
|
||||
return this.buildMcpTool({
|
||||
serverId,
|
||||
toolName,
|
||||
description:
|
||||
tool.schema?.description || `MCP tool ${toolName} from ${serverName || serverId}`,
|
||||
parameters: filteredSchema,
|
||||
params: userProvidedParams,
|
||||
usageControl: tool.usageControl || 'auto',
|
||||
executeFunction: async (callParams: Record<string, any>) => {
|
||||
const headers = await buildAuthHeaders()
|
||||
const execParams: Record<string, string> = {}
|
||||
if (ctx.userId) execParams.userId = ctx.userId
|
||||
const execUrl = buildAPIUrl('/api/mcp/tools/execute', execParams)
|
||||
|
||||
const execResponse = await fetch(execUrl.toString(), {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: stringifyJSON({
|
||||
serverId,
|
||||
toolName,
|
||||
arguments: callParams,
|
||||
workspaceId: ctx.workspaceId,
|
||||
workflowId: ctx.workflowId,
|
||||
toolSchema: tool.schema,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!execResponse.ok) {
|
||||
throw new Error(
|
||||
`MCP tool execution failed: ${execResponse.status} ${execResponse.statusText}`
|
||||
)
|
||||
}
|
||||
|
||||
const result = await execResponse.json()
|
||||
if (!result.success) {
|
||||
throw new Error(result.error || 'MCP tool execution failed')
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: result.data.output || {},
|
||||
metadata: {
|
||||
source: 'mcp',
|
||||
serverId,
|
||||
serverName: serverName || serverId,
|
||||
toolName,
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
schema: tool.schema || { type: 'object', properties: {} },
|
||||
userProvidedParams,
|
||||
usageControl: tool.usageControl,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -668,63 +576,35 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
serverId: string
|
||||
): Promise<any> {
|
||||
const { toolName, ...userProvidedParams } = tool.params || {}
|
||||
return this.buildMcpTool({
|
||||
serverId,
|
||||
toolName,
|
||||
description: mcpTool.description || `MCP tool ${toolName} from ${mcpTool.serverName}`,
|
||||
schema: mcpTool.inputSchema || { type: 'object', properties: {} },
|
||||
userProvidedParams,
|
||||
usageControl: tool.usageControl,
|
||||
})
|
||||
}
|
||||
|
||||
private async buildMcpTool(config: {
|
||||
serverId: string
|
||||
toolName: string
|
||||
description: string
|
||||
schema: any
|
||||
userProvidedParams: Record<string, any>
|
||||
usageControl?: string
|
||||
}): Promise<any> {
|
||||
const { filterSchemaForLLM } = await import('@/tools/params')
|
||||
const filteredSchema = filterSchemaForLLM(
|
||||
mcpTool.inputSchema || { type: 'object', properties: {} },
|
||||
userProvidedParams
|
||||
)
|
||||
|
||||
const toolId = createMcpToolId(serverId, toolName)
|
||||
const filteredSchema = filterSchemaForLLM(config.schema, config.userProvidedParams)
|
||||
const toolId = createMcpToolId(config.serverId, config.toolName)
|
||||
|
||||
return {
|
||||
id: toolId,
|
||||
name: toolName,
|
||||
description: mcpTool.description || `MCP tool ${toolName} from ${mcpTool.serverName}`,
|
||||
name: config.toolName,
|
||||
description: config.description,
|
||||
parameters: filteredSchema,
|
||||
params: userProvidedParams,
|
||||
usageControl: tool.usageControl || 'auto',
|
||||
executeFunction: async (callParams: Record<string, any>) => {
|
||||
const headers = await buildAuthHeaders()
|
||||
const discoverExecParams: Record<string, string> = {}
|
||||
if (ctx.userId) discoverExecParams.userId = ctx.userId
|
||||
const execUrl = buildAPIUrl('/api/mcp/tools/execute', discoverExecParams)
|
||||
|
||||
const execResponse = await fetch(execUrl.toString(), {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: stringifyJSON({
|
||||
serverId,
|
||||
toolName,
|
||||
arguments: callParams,
|
||||
workspaceId: ctx.workspaceId,
|
||||
workflowId: ctx.workflowId,
|
||||
toolSchema: mcpTool.inputSchema,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!execResponse.ok) {
|
||||
throw new Error(
|
||||
`MCP tool execution failed: ${execResponse.status} ${execResponse.statusText}`
|
||||
)
|
||||
}
|
||||
|
||||
const result = await execResponse.json()
|
||||
if (!result.success) {
|
||||
throw new Error(result.error || 'MCP tool execution failed')
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: result.data.output || {},
|
||||
metadata: {
|
||||
source: 'mcp',
|
||||
serverId,
|
||||
serverName: mcpTool.serverName,
|
||||
toolName,
|
||||
},
|
||||
}
|
||||
},
|
||||
params: config.userProvidedParams,
|
||||
usageControl: config.usageControl || 'auto',
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1048,9 +928,9 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
let finalApiKey: string | undefined = providerRequest.apiKey
|
||||
|
||||
if (providerId === 'vertex' && providerRequest.vertexCredential) {
|
||||
finalApiKey = await this.resolveVertexCredential(
|
||||
finalApiKey = await resolveVertexCredential(
|
||||
providerRequest.vertexCredential,
|
||||
ctx.workflowId
|
||||
'vertex-agent'
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1082,10 +962,12 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
isDeployedContext: ctx.isDeployedContext,
|
||||
callChain: ctx.callChain,
|
||||
reasoningEffort: providerRequest.reasoningEffort,
|
||||
verbosity: providerRequest.verbosity,
|
||||
thinkingLevel: providerRequest.thinkingLevel,
|
||||
previousInteractionId: providerRequest.previousInteractionId,
|
||||
abortSignal: ctx.abortSignal,
|
||||
})
|
||||
|
||||
return this.processProviderResponse(response, block, responseFormat)
|
||||
@@ -1095,37 +977,6 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a Vertex AI OAuth credential to an access token
|
||||
*/
|
||||
private async resolveVertexCredential(credentialId: string, workflowId: string): Promise<string> {
|
||||
const requestId = `vertex-${Date.now()}`
|
||||
|
||||
logger.info(`[${requestId}] Resolving Vertex AI credential: ${credentialId}`)
|
||||
|
||||
const resolved = await resolveOAuthAccountId(credentialId)
|
||||
if (!resolved) {
|
||||
throw new Error(`Vertex AI credential is not a valid OAuth credential: ${credentialId}`)
|
||||
}
|
||||
|
||||
const credential = await db.query.account.findFirst({
|
||||
where: eq(account.id, resolved.accountId),
|
||||
})
|
||||
|
||||
if (!credential) {
|
||||
throw new Error(`Vertex AI credential not found: ${credentialId}`)
|
||||
}
|
||||
|
||||
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, resolved.accountId)
|
||||
|
||||
if (!accessToken) {
|
||||
throw new Error('Failed to get Vertex AI access token')
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully resolved Vertex AI credential`)
|
||||
return accessToken
|
||||
}
|
||||
|
||||
private handleExecutionError(
|
||||
error: any,
|
||||
startTime: number,
|
||||
@@ -1309,7 +1160,7 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
},
|
||||
toolCalls: {
|
||||
list: result.toolCalls?.map(this.formatToolCall.bind(this)) || [],
|
||||
count: result.toolCalls?.length || DEFAULTS.EXECUTION_TIME,
|
||||
count: result.toolCalls?.length ?? 0,
|
||||
},
|
||||
providerTiming: result.timing,
|
||||
cost: result.cost,
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { refreshTokenIfNeeded, resolveOAuthAccountId } from '@/app/api/auth/oauth/utils'
|
||||
import type { BlockOutput } from '@/blocks/types'
|
||||
import { validateModelProvider } from '@/ee/access-control/utils/permission-check'
|
||||
import { BlockType, DEFAULTS, EVALUATOR } from '@/executor/constants'
|
||||
import type { BlockHandler, ExecutionContext } from '@/executor/types'
|
||||
import { buildAPIUrl, buildAuthHeaders, extractAPIErrorMessage } from '@/executor/utils/http'
|
||||
import { isJSONString, parseJSON, stringifyJSON } from '@/executor/utils/json'
|
||||
import { resolveVertexCredential } from '@/executor/utils/vertex-credential'
|
||||
import { calculateCost, getProviderFromModel } from '@/providers/utils'
|
||||
import type { SerializedBlock } from '@/serializer/types'
|
||||
|
||||
@@ -44,7 +41,10 @@ export class EvaluatorBlockHandler implements BlockHandler {
|
||||
|
||||
let finalApiKey: string | undefined = evaluatorConfig.apiKey
|
||||
if (providerId === 'vertex' && evaluatorConfig.vertexCredential) {
|
||||
finalApiKey = await this.resolveVertexCredential(evaluatorConfig.vertexCredential)
|
||||
finalApiKey = await resolveVertexCredential(
|
||||
evaluatorConfig.vertexCredential,
|
||||
'vertex-evaluator'
|
||||
)
|
||||
}
|
||||
|
||||
const processedContent = this.processContent(inputs.content)
|
||||
@@ -234,7 +234,7 @@ export class EvaluatorBlockHandler implements BlockHandler {
|
||||
if (Object.keys(parsedContent).length === 0) {
|
||||
validMetrics.forEach((metric: any) => {
|
||||
if (metric?.name) {
|
||||
metricScores[metric.name.toLowerCase()] = DEFAULTS.EXECUTION_TIME
|
||||
metricScores[metric.name.toLowerCase()] = 0
|
||||
}
|
||||
})
|
||||
return metricScores
|
||||
@@ -273,37 +273,6 @@ export class EvaluatorBlockHandler implements BlockHandler {
|
||||
}
|
||||
|
||||
logger.warn(`Metric "${metricName}" not found in LLM response`)
|
||||
return DEFAULTS.EXECUTION_TIME
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a Vertex AI OAuth credential to an access token
|
||||
*/
|
||||
private async resolveVertexCredential(credentialId: string): Promise<string> {
|
||||
const requestId = `vertex-evaluator-${Date.now()}`
|
||||
|
||||
logger.info(`[${requestId}] Resolving Vertex AI credential: ${credentialId}`)
|
||||
|
||||
const resolved = await resolveOAuthAccountId(credentialId)
|
||||
if (!resolved) {
|
||||
throw new Error(`Vertex AI credential is not a valid OAuth credential: ${credentialId}`)
|
||||
}
|
||||
|
||||
const credential = await db.query.account.findFirst({
|
||||
where: eq(account.id, resolved.accountId),
|
||||
})
|
||||
|
||||
if (!credential) {
|
||||
throw new Error(`Vertex AI credential not found: ${credentialId}`)
|
||||
}
|
||||
|
||||
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, resolved.accountId)
|
||||
|
||||
if (!accessToken) {
|
||||
throw new Error('Failed to get Vertex AI access token')
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully resolved Vertex AI credential`)
|
||||
return accessToken
|
||||
return 0
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,6 @@ import {
|
||||
HTTP,
|
||||
normalizeName,
|
||||
PAUSE_RESUME,
|
||||
REFERENCE,
|
||||
} from '@/executor/constants'
|
||||
import {
|
||||
generatePauseContextId,
|
||||
@@ -17,6 +16,7 @@ import {
|
||||
} from '@/executor/human-in-the-loop/utils'
|
||||
import type { BlockHandler, ExecutionContext, PauseMetadata } from '@/executor/types'
|
||||
import { collectBlockData } from '@/executor/utils/block-data'
|
||||
import { convertBuilderDataToJson, convertPropertyValue } from '@/executor/utils/builder-data'
|
||||
import { parseObjectStrings } from '@/executor/utils/json'
|
||||
import type { SerializedBlock } from '@/serializer/types'
|
||||
import { executeTool } from '@/tools'
|
||||
@@ -265,7 +265,7 @@ export class HumanInTheLoopBlockHandler implements BlockHandler {
|
||||
}
|
||||
|
||||
if (dataMode === 'structured' && inputs.builderData) {
|
||||
const convertedData = this.convertBuilderDataToJson(inputs.builderData)
|
||||
const convertedData = convertBuilderDataToJson(inputs.builderData)
|
||||
return parseObjectStrings(convertedData)
|
||||
}
|
||||
|
||||
@@ -296,7 +296,7 @@ export class HumanInTheLoopBlockHandler implements BlockHandler {
|
||||
}
|
||||
}
|
||||
|
||||
const value = this.convertPropertyValue(prop)
|
||||
const value = convertPropertyValue(prop)
|
||||
|
||||
entries.push({
|
||||
name: path,
|
||||
@@ -352,140 +352,6 @@ export class HumanInTheLoopBlockHandler implements BlockHandler {
|
||||
.filter((field): field is NormalizedInputField => field !== null)
|
||||
}
|
||||
|
||||
private convertBuilderDataToJson(builderData: JSONProperty[]): any {
|
||||
if (!Array.isArray(builderData)) {
|
||||
return {}
|
||||
}
|
||||
|
||||
const result: any = {}
|
||||
|
||||
for (const prop of builderData) {
|
||||
if (!prop.name || !prop.name.trim()) {
|
||||
continue
|
||||
}
|
||||
|
||||
const value = this.convertPropertyValue(prop)
|
||||
result[prop.name] = value
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
static convertBuilderDataToJsonString(builderData: JSONProperty[]): string {
|
||||
if (!Array.isArray(builderData) || builderData.length === 0) {
|
||||
return '{\n \n}'
|
||||
}
|
||||
|
||||
const result: any = {}
|
||||
|
||||
for (const prop of builderData) {
|
||||
if (!prop.name || !prop.name.trim()) {
|
||||
continue
|
||||
}
|
||||
|
||||
result[prop.name] = prop.value
|
||||
}
|
||||
|
||||
let jsonString = JSON.stringify(result, null, 2)
|
||||
|
||||
jsonString = jsonString.replace(/"(<[^>]+>)"/g, '$1')
|
||||
|
||||
return jsonString
|
||||
}
|
||||
|
||||
private convertPropertyValue(prop: JSONProperty): any {
|
||||
switch (prop.type) {
|
||||
case 'object':
|
||||
return this.convertObjectValue(prop.value)
|
||||
case 'array':
|
||||
return this.convertArrayValue(prop.value)
|
||||
case 'number':
|
||||
return this.convertNumberValue(prop.value)
|
||||
case 'boolean':
|
||||
return this.convertBooleanValue(prop.value)
|
||||
case 'files':
|
||||
return prop.value
|
||||
default:
|
||||
return prop.value
|
||||
}
|
||||
}
|
||||
|
||||
private convertObjectValue(value: any): any {
|
||||
if (Array.isArray(value)) {
|
||||
return this.convertBuilderDataToJson(value)
|
||||
}
|
||||
|
||||
if (typeof value === 'string' && !this.isVariableReference(value)) {
|
||||
return this.tryParseJson(value, value)
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
private convertArrayValue(value: any): any {
|
||||
if (Array.isArray(value)) {
|
||||
return value.map((item: any) => this.convertArrayItem(item))
|
||||
}
|
||||
|
||||
if (typeof value === 'string' && !this.isVariableReference(value)) {
|
||||
const parsed = this.tryParseJson(value, value)
|
||||
return Array.isArray(parsed) ? parsed : value
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
private convertArrayItem(item: any): any {
|
||||
if (typeof item !== 'object' || !item.type) {
|
||||
return item
|
||||
}
|
||||
|
||||
if (item.type === 'object' && Array.isArray(item.value)) {
|
||||
return this.convertBuilderDataToJson(item.value)
|
||||
}
|
||||
|
||||
if (item.type === 'array' && Array.isArray(item.value)) {
|
||||
return item.value.map((subItem: any) =>
|
||||
typeof subItem === 'object' && subItem.type ? subItem.value : subItem
|
||||
)
|
||||
}
|
||||
|
||||
return item.value
|
||||
}
|
||||
|
||||
private convertNumberValue(value: any): any {
|
||||
if (this.isVariableReference(value)) {
|
||||
return value
|
||||
}
|
||||
|
||||
const numValue = Number(value)
|
||||
return Number.isNaN(numValue) ? value : numValue
|
||||
}
|
||||
|
||||
private convertBooleanValue(value: any): any {
|
||||
if (this.isVariableReference(value)) {
|
||||
return value
|
||||
}
|
||||
|
||||
return value === 'true' || value === true
|
||||
}
|
||||
|
||||
private tryParseJson(jsonString: string, fallback: any): any {
|
||||
try {
|
||||
return JSON.parse(jsonString)
|
||||
} catch {
|
||||
return fallback
|
||||
}
|
||||
}
|
||||
|
||||
private isVariableReference(value: any): boolean {
|
||||
return (
|
||||
typeof value === 'string' &&
|
||||
value.trim().startsWith(REFERENCE.START) &&
|
||||
value.trim().includes(REFERENCE.END)
|
||||
)
|
||||
}
|
||||
|
||||
private parseStatus(status?: string): number {
|
||||
if (!status) return HTTP.STATUS.OK
|
||||
const parsed = Number(status)
|
||||
|
||||
@@ -1,19 +1,15 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { BlockType, HTTP, REFERENCE } from '@/executor/constants'
|
||||
import { BlockType, HTTP } from '@/executor/constants'
|
||||
import type { BlockHandler, ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
|
||||
import {
|
||||
convertBuilderDataToJson,
|
||||
convertBuilderDataToJsonString,
|
||||
} from '@/executor/utils/builder-data'
|
||||
import { parseObjectStrings } from '@/executor/utils/json'
|
||||
import type { SerializedBlock } from '@/serializer/types'
|
||||
|
||||
const logger = createLogger('ResponseBlockHandler')
|
||||
|
||||
interface JSONProperty {
|
||||
id: string
|
||||
name: string
|
||||
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
|
||||
value: any
|
||||
collapsed?: boolean
|
||||
}
|
||||
|
||||
export class ResponseBlockHandler implements BlockHandler {
|
||||
canHandle(block: SerializedBlock): boolean {
|
||||
return block.metadata?.id === BlockType.RESPONSE
|
||||
@@ -73,154 +69,15 @@ export class ResponseBlockHandler implements BlockHandler {
|
||||
}
|
||||
|
||||
if (dataMode === 'structured' && inputs.builderData) {
|
||||
const convertedData = this.convertBuilderDataToJson(inputs.builderData)
|
||||
const convertedData = convertBuilderDataToJson(inputs.builderData)
|
||||
return parseObjectStrings(convertedData)
|
||||
}
|
||||
|
||||
return inputs.data || {}
|
||||
}
|
||||
|
||||
private convertBuilderDataToJson(builderData: JSONProperty[]): any {
|
||||
if (!Array.isArray(builderData)) {
|
||||
return {}
|
||||
}
|
||||
|
||||
const result: any = {}
|
||||
|
||||
for (const prop of builderData) {
|
||||
if (!prop.name || !prop.name.trim()) {
|
||||
continue
|
||||
}
|
||||
|
||||
const value = this.convertPropertyValue(prop)
|
||||
result[prop.name] = value
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
static convertBuilderDataToJsonString(builderData: JSONProperty[]): string {
|
||||
if (!Array.isArray(builderData) || builderData.length === 0) {
|
||||
return '{\n \n}'
|
||||
}
|
||||
|
||||
const result: any = {}
|
||||
|
||||
for (const prop of builderData) {
|
||||
if (!prop.name || !prop.name.trim()) {
|
||||
continue
|
||||
}
|
||||
|
||||
result[prop.name] = prop.value
|
||||
}
|
||||
|
||||
let jsonString = JSON.stringify(result, null, 2)
|
||||
|
||||
jsonString = jsonString.replace(/"(<[^>]+>)"/g, '$1')
|
||||
|
||||
return jsonString
|
||||
}
|
||||
|
||||
private convertPropertyValue(prop: JSONProperty): any {
|
||||
switch (prop.type) {
|
||||
case 'object':
|
||||
return this.convertObjectValue(prop.value)
|
||||
case 'array':
|
||||
return this.convertArrayValue(prop.value)
|
||||
case 'number':
|
||||
return this.convertNumberValue(prop.value)
|
||||
case 'boolean':
|
||||
return this.convertBooleanValue(prop.value)
|
||||
case 'files':
|
||||
return prop.value
|
||||
default:
|
||||
return prop.value
|
||||
}
|
||||
}
|
||||
|
||||
private convertObjectValue(value: any): any {
|
||||
if (Array.isArray(value)) {
|
||||
return this.convertBuilderDataToJson(value)
|
||||
}
|
||||
|
||||
if (typeof value === 'string' && !this.isVariableReference(value)) {
|
||||
return this.tryParseJson(value, value)
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
private convertArrayValue(value: any): any {
|
||||
if (Array.isArray(value)) {
|
||||
return value.map((item: any) => this.convertArrayItem(item))
|
||||
}
|
||||
|
||||
if (typeof value === 'string' && !this.isVariableReference(value)) {
|
||||
const parsed = this.tryParseJson(value, value)
|
||||
if (Array.isArray(parsed)) {
|
||||
return parsed
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
private convertArrayItem(item: any): any {
|
||||
if (typeof item !== 'object' || !item.type) {
|
||||
return item
|
||||
}
|
||||
|
||||
if (item.type === 'object' && Array.isArray(item.value)) {
|
||||
return this.convertBuilderDataToJson(item.value)
|
||||
}
|
||||
|
||||
if (item.type === 'array' && Array.isArray(item.value)) {
|
||||
return item.value.map((subItem: any) => {
|
||||
if (typeof subItem === 'object' && subItem.type) {
|
||||
return subItem.value
|
||||
}
|
||||
return subItem
|
||||
})
|
||||
}
|
||||
|
||||
return item.value
|
||||
}
|
||||
|
||||
private convertNumberValue(value: any): any {
|
||||
if (this.isVariableReference(value)) {
|
||||
return value
|
||||
}
|
||||
|
||||
const numValue = Number(value)
|
||||
if (Number.isNaN(numValue)) {
|
||||
return value
|
||||
}
|
||||
return numValue
|
||||
}
|
||||
|
||||
private convertBooleanValue(value: any): any {
|
||||
if (this.isVariableReference(value)) {
|
||||
return value
|
||||
}
|
||||
|
||||
return value === 'true' || value === true
|
||||
}
|
||||
|
||||
private tryParseJson(jsonString: string, fallback: any): any {
|
||||
try {
|
||||
return JSON.parse(jsonString)
|
||||
} catch {
|
||||
return fallback
|
||||
}
|
||||
}
|
||||
|
||||
private isVariableReference(value: any): boolean {
|
||||
return (
|
||||
typeof value === 'string' &&
|
||||
value.trim().startsWith(REFERENCE.START) &&
|
||||
value.trim().includes(REFERENCE.END)
|
||||
)
|
||||
static convertBuilderDataToJsonString(builderData: any[]): string {
|
||||
return convertBuilderDataToJsonString(builderData)
|
||||
}
|
||||
|
||||
private parseStatus(status?: string): number {
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { refreshTokenIfNeeded, resolveOAuthAccountId } from '@/app/api/auth/oauth/utils'
|
||||
import { generateRouterPrompt, generateRouterV2Prompt } from '@/blocks/blocks/router'
|
||||
import type { BlockOutput } from '@/blocks/types'
|
||||
import { validateModelProvider } from '@/ee/access-control/utils/permission-check'
|
||||
@@ -16,6 +12,7 @@ import {
|
||||
} from '@/executor/constants'
|
||||
import type { BlockHandler, ExecutionContext } from '@/executor/types'
|
||||
import { buildAuthHeaders } from '@/executor/utils/http'
|
||||
import { resolveVertexCredential } from '@/executor/utils/vertex-credential'
|
||||
import { calculateCost, getProviderFromModel } from '@/providers/utils'
|
||||
import type { SerializedBlock } from '@/serializer/types'
|
||||
|
||||
@@ -87,7 +84,7 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
|
||||
let finalApiKey: string | undefined = routerConfig.apiKey
|
||||
if (providerId === 'vertex' && routerConfig.vertexCredential) {
|
||||
finalApiKey = await this.resolveVertexCredential(routerConfig.vertexCredential)
|
||||
finalApiKey = await resolveVertexCredential(routerConfig.vertexCredential, 'vertex-router')
|
||||
}
|
||||
|
||||
const providerRequest: Record<string, any> = {
|
||||
@@ -217,7 +214,7 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
|
||||
let finalApiKey: string | undefined = routerConfig.apiKey
|
||||
if (providerId === 'vertex' && routerConfig.vertexCredential) {
|
||||
finalApiKey = await this.resolveVertexCredential(routerConfig.vertexCredential)
|
||||
finalApiKey = await resolveVertexCredential(routerConfig.vertexCredential, 'vertex-router')
|
||||
}
|
||||
|
||||
const providerRequest: Record<string, any> = {
|
||||
@@ -416,35 +413,4 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves a Vertex AI OAuth credential to an access token
|
||||
*/
|
||||
private async resolveVertexCredential(credentialId: string): Promise<string> {
|
||||
const requestId = `vertex-router-${Date.now()}`
|
||||
|
||||
logger.info(`[${requestId}] Resolving Vertex AI credential: ${credentialId}`)
|
||||
|
||||
const resolved = await resolveOAuthAccountId(credentialId)
|
||||
if (!resolved) {
|
||||
throw new Error(`Vertex AI credential is not a valid OAuth credential: ${credentialId}`)
|
||||
}
|
||||
|
||||
const credential = await db.query.account.findFirst({
|
||||
where: eq(account.id, resolved.accountId),
|
||||
})
|
||||
|
||||
if (!credential) {
|
||||
throw new Error(`Vertex AI credential not found: ${credentialId}`)
|
||||
}
|
||||
|
||||
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, resolved.accountId)
|
||||
|
||||
if (!accessToken) {
|
||||
throw new Error('Failed to get Vertex AI access token')
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully resolved Vertex AI credential`)
|
||||
return accessToken
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,18 +108,16 @@ describe('WorkflowBlockHandler', () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('should enforce maximum depth limit', async () => {
|
||||
it('should enforce maximum call chain depth limit', async () => {
|
||||
const inputs = { workflowId: 'child-workflow-id' }
|
||||
|
||||
// Create a deeply nested context (simulate 11 levels deep to exceed the limit of 10)
|
||||
const deepContext = {
|
||||
...mockContext,
|
||||
workflowId:
|
||||
'level1_sub_level2_sub_level3_sub_level4_sub_level5_sub_level6_sub_level7_sub_level8_sub_level9_sub_level10_sub_level11',
|
||||
callChain: Array.from({ length: 25 }, (_, i) => `wf-${i}`),
|
||||
}
|
||||
|
||||
await expect(handler.execute(deepContext, mockBlock, inputs)).rejects.toThrow(
|
||||
'"child-workflow-id" failed: Maximum workflow nesting depth of 10 exceeded'
|
||||
'Maximum workflow call chain depth (25) exceeded'
|
||||
)
|
||||
})
|
||||
|
||||
|
||||
@@ -62,6 +62,7 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
branchTotal?: number
|
||||
originalBlockId?: string
|
||||
isLoopNode?: boolean
|
||||
executionOrder?: number
|
||||
}
|
||||
): Promise<BlockOutput | StreamingExecution> {
|
||||
return this._executeCore(ctx, block, inputs, nodeMetadata)
|
||||
@@ -79,6 +80,7 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
branchTotal?: number
|
||||
originalBlockId?: string
|
||||
isLoopNode?: boolean
|
||||
executionOrder?: number
|
||||
}
|
||||
): Promise<BlockOutput | StreamingExecution> {
|
||||
logger.info(`Executing workflow block: ${block.id}`)
|
||||
@@ -98,13 +100,17 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
// workflow block execution, preventing cross-iteration child mixing in loop contexts.
|
||||
const instanceId = crypto.randomUUID()
|
||||
|
||||
const childCallChain = buildNextCallChain(ctx.callChain || [], workflowId)
|
||||
const depthError = validateCallChain(childCallChain)
|
||||
if (depthError) {
|
||||
throw new ChildWorkflowError({
|
||||
message: depthError,
|
||||
childWorkflowName,
|
||||
})
|
||||
}
|
||||
|
||||
let childWorkflowSnapshotId: string | undefined
|
||||
try {
|
||||
const currentDepth = (ctx.workflowId?.split('_sub_').length || 1) - 1
|
||||
if (currentDepth >= DEFAULTS.MAX_WORKFLOW_DEPTH) {
|
||||
throw new Error(`Maximum workflow nesting depth of ${DEFAULTS.MAX_WORKFLOW_DEPTH} exceeded`)
|
||||
}
|
||||
|
||||
if (ctx.isDeployedContext) {
|
||||
const hasActiveDeployment = await this.checkChildDeployment(workflowId)
|
||||
if (!hasActiveDeployment) {
|
||||
@@ -126,7 +132,7 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
childWorkflowName = workflowMetadata?.name || childWorkflow.name || 'Unknown Workflow'
|
||||
|
||||
logger.info(
|
||||
`Executing child workflow: ${childWorkflowName} (${workflowId}) at depth ${currentDepth}`
|
||||
`Executing child workflow: ${childWorkflowName} (${workflowId}), call chain depth ${ctx.callChain?.length || 0}`
|
||||
)
|
||||
|
||||
let childWorkflowInput: Record<string, any> = {}
|
||||
@@ -165,16 +171,12 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
const iterationContext = nodeMetadata
|
||||
? this.getIterationContext(ctx, nodeMetadata)
|
||||
: undefined
|
||||
ctx.onChildWorkflowInstanceReady?.(effectiveBlockId, instanceId, iterationContext)
|
||||
}
|
||||
|
||||
const childCallChain = buildNextCallChain(ctx.callChain || [], workflowId)
|
||||
const depthError = validateCallChain(childCallChain)
|
||||
if (depthError) {
|
||||
throw new ChildWorkflowError({
|
||||
message: depthError,
|
||||
childWorkflowName,
|
||||
})
|
||||
ctx.onChildWorkflowInstanceReady?.(
|
||||
effectiveBlockId,
|
||||
instanceId,
|
||||
iterationContext,
|
||||
nodeMetadata?.executionOrder
|
||||
)
|
||||
}
|
||||
|
||||
const subExecutor = new Executor({
|
||||
@@ -584,45 +586,6 @@ export class WorkflowBlockHandler implements BlockHandler {
|
||||
return processed
|
||||
}
|
||||
|
||||
private flattenChildWorkflowSpans(spans: TraceSpan[]): WorkflowTraceSpan[] {
|
||||
const flattened: WorkflowTraceSpan[] = []
|
||||
|
||||
spans.forEach((span) => {
|
||||
if (this.isSyntheticWorkflowWrapper(span)) {
|
||||
if (span.children && Array.isArray(span.children)) {
|
||||
flattened.push(...this.flattenChildWorkflowSpans(span.children))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
const workflowSpan: WorkflowTraceSpan = {
|
||||
...span,
|
||||
}
|
||||
|
||||
if (Array.isArray(workflowSpan.children)) {
|
||||
const childSpans = workflowSpan.children as TraceSpan[]
|
||||
workflowSpan.children = this.flattenChildWorkflowSpans(childSpans)
|
||||
}
|
||||
|
||||
if (workflowSpan.output && typeof workflowSpan.output === 'object') {
|
||||
const { childTraceSpans: nestedChildSpans, ...outputRest } = workflowSpan.output as {
|
||||
childTraceSpans?: TraceSpan[]
|
||||
} & Record<string, unknown>
|
||||
|
||||
if (Array.isArray(nestedChildSpans) && nestedChildSpans.length > 0) {
|
||||
const flattenedNestedChildren = this.flattenChildWorkflowSpans(nestedChildSpans)
|
||||
workflowSpan.children = [...(workflowSpan.children || []), ...flattenedNestedChildren]
|
||||
}
|
||||
|
||||
workflowSpan.output = outputRest
|
||||
}
|
||||
|
||||
flattened.push(workflowSpan)
|
||||
})
|
||||
|
||||
return flattened
|
||||
}
|
||||
|
||||
private toExecutionResult(result: ExecutionResult | StreamingExecution): ExecutionResult {
|
||||
return 'execution' in result ? result.execution : result
|
||||
}
|
||||
|
||||
@@ -264,7 +264,8 @@ export interface ExecutionContext {
|
||||
onChildWorkflowInstanceReady?: (
|
||||
blockId: string,
|
||||
childWorkflowInstanceId: string,
|
||||
iterationContext?: IterationContext
|
||||
iterationContext?: IterationContext,
|
||||
executionOrder?: number
|
||||
) => void
|
||||
|
||||
/**
|
||||
@@ -377,6 +378,7 @@ export interface BlockHandler {
|
||||
branchTotal?: number
|
||||
originalBlockId?: string
|
||||
isLoopNode?: boolean
|
||||
executionOrder?: number
|
||||
}
|
||||
) => Promise<BlockOutput | StreamingExecution>
|
||||
}
|
||||
|
||||
149
apps/sim/executor/utils/builder-data.ts
Normal file
149
apps/sim/executor/utils/builder-data.ts
Normal file
@@ -0,0 +1,149 @@
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
|
||||
export interface JSONProperty {
|
||||
id: string
|
||||
name: string
|
||||
type: string
|
||||
value: any
|
||||
collapsed?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts builder data (structured JSON properties) into a plain JSON object.
|
||||
*/
|
||||
export function convertBuilderDataToJson(builderData: JSONProperty[]): any {
|
||||
if (!Array.isArray(builderData)) {
|
||||
return {}
|
||||
}
|
||||
|
||||
const result: any = {}
|
||||
|
||||
for (const prop of builderData) {
|
||||
if (!prop.name || !prop.name.trim()) {
|
||||
continue
|
||||
}
|
||||
|
||||
const value = convertPropertyValue(prop)
|
||||
result[prop.name] = value
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts builder data into a JSON string with variable references unquoted.
|
||||
*/
|
||||
export function convertBuilderDataToJsonString(builderData: JSONProperty[]): string {
|
||||
if (!Array.isArray(builderData) || builderData.length === 0) {
|
||||
return '{\n \n}'
|
||||
}
|
||||
|
||||
const result: any = {}
|
||||
|
||||
for (const prop of builderData) {
|
||||
if (!prop.name || !prop.name.trim()) {
|
||||
continue
|
||||
}
|
||||
|
||||
result[prop.name] = prop.value
|
||||
}
|
||||
|
||||
let jsonString = JSON.stringify(result, null, 2)
|
||||
|
||||
jsonString = jsonString.replace(/"(<[^>]+>)"/g, '$1')
|
||||
|
||||
return jsonString
|
||||
}
|
||||
|
||||
export function convertPropertyValue(prop: JSONProperty): any {
|
||||
switch (prop.type) {
|
||||
case 'object':
|
||||
return convertObjectValue(prop.value)
|
||||
case 'array':
|
||||
return convertArrayValue(prop.value)
|
||||
case 'number':
|
||||
return convertNumberValue(prop.value)
|
||||
case 'boolean':
|
||||
return convertBooleanValue(prop.value)
|
||||
case 'files':
|
||||
return prop.value
|
||||
default:
|
||||
return prop.value
|
||||
}
|
||||
}
|
||||
|
||||
function convertObjectValue(value: any): any {
|
||||
if (Array.isArray(value)) {
|
||||
return convertBuilderDataToJson(value)
|
||||
}
|
||||
|
||||
if (typeof value === 'string' && !isVariableReference(value)) {
|
||||
return tryParseJson(value, value)
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
function convertArrayValue(value: any): any {
|
||||
if (Array.isArray(value)) {
|
||||
return value.map((item: any) => convertArrayItem(item))
|
||||
}
|
||||
|
||||
if (typeof value === 'string' && !isVariableReference(value)) {
|
||||
const parsed = tryParseJson(value, value)
|
||||
return Array.isArray(parsed) ? parsed : value
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
function convertArrayItem(item: any): any {
|
||||
if (typeof item !== 'object' || !item.type) {
|
||||
return item
|
||||
}
|
||||
|
||||
if (item.type === 'object' && Array.isArray(item.value)) {
|
||||
return convertBuilderDataToJson(item.value)
|
||||
}
|
||||
|
||||
if (item.type === 'array' && Array.isArray(item.value)) {
|
||||
return item.value.map((subItem: any) =>
|
||||
typeof subItem === 'object' && subItem.type ? subItem.value : subItem
|
||||
)
|
||||
}
|
||||
|
||||
return item.value
|
||||
}
|
||||
|
||||
function convertNumberValue(value: any): any {
|
||||
if (isVariableReference(value)) {
|
||||
return value
|
||||
}
|
||||
|
||||
const numValue = Number(value)
|
||||
return Number.isNaN(numValue) ? value : numValue
|
||||
}
|
||||
|
||||
function convertBooleanValue(value: any): any {
|
||||
if (isVariableReference(value)) {
|
||||
return value
|
||||
}
|
||||
|
||||
return value === 'true' || value === true
|
||||
}
|
||||
|
||||
function tryParseJson(jsonString: string, fallback: any): any {
|
||||
try {
|
||||
return JSON.parse(jsonString)
|
||||
} catch {
|
||||
return fallback
|
||||
}
|
||||
}
|
||||
|
||||
function isVariableReference(value: any): boolean {
|
||||
return (
|
||||
typeof value === 'string' &&
|
||||
value.trim().startsWith(REFERENCE.START) &&
|
||||
value.trim().includes(REFERENCE.END)
|
||||
)
|
||||
}
|
||||
@@ -215,5 +215,115 @@ describe('start-block utilities', () => {
|
||||
|
||||
expect(output.customField).toBe('defaultValue')
|
||||
})
|
||||
|
||||
it.concurrent('preserves coerced types for unified start payload', () => {
|
||||
const block = createBlock('start_trigger', 'start', {
|
||||
subBlocks: {
|
||||
inputFormat: {
|
||||
value: [
|
||||
{ name: 'conversation_id', type: 'number' },
|
||||
{ name: 'sender', type: 'object' },
|
||||
{ name: 'is_active', type: 'boolean' },
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const resolution = {
|
||||
blockId: 'start',
|
||||
block,
|
||||
path: StartBlockPath.UNIFIED,
|
||||
} as const
|
||||
|
||||
const output = buildStartBlockOutput({
|
||||
resolution,
|
||||
workflowInput: {
|
||||
conversation_id: '149',
|
||||
sender: '{"id":10,"email":"user@example.com"}',
|
||||
is_active: 'true',
|
||||
},
|
||||
})
|
||||
|
||||
expect(output.conversation_id).toBe(149)
|
||||
expect(output.sender).toEqual({ id: 10, email: 'user@example.com' })
|
||||
expect(output.is_active).toBe(true)
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
'prefers coerced inputFormat values over duplicated top-level workflowInput keys',
|
||||
() => {
|
||||
const block = createBlock('start_trigger', 'start', {
|
||||
subBlocks: {
|
||||
inputFormat: {
|
||||
value: [
|
||||
{ name: 'conversation_id', type: 'number' },
|
||||
{ name: 'sender', type: 'object' },
|
||||
{ name: 'is_active', type: 'boolean' },
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const resolution = {
|
||||
blockId: 'start',
|
||||
block,
|
||||
path: StartBlockPath.UNIFIED,
|
||||
} as const
|
||||
|
||||
const output = buildStartBlockOutput({
|
||||
resolution,
|
||||
workflowInput: {
|
||||
input: {
|
||||
conversation_id: '149',
|
||||
sender: '{"id":10,"email":"user@example.com"}',
|
||||
is_active: 'false',
|
||||
},
|
||||
conversation_id: '150',
|
||||
sender: '{"id":99,"email":"wrong@example.com"}',
|
||||
is_active: 'true',
|
||||
extra: 'keep-me',
|
||||
},
|
||||
})
|
||||
|
||||
expect(output.conversation_id).toBe(149)
|
||||
expect(output.sender).toEqual({ id: 10, email: 'user@example.com' })
|
||||
expect(output.is_active).toBe(false)
|
||||
expect(output.extra).toBe('keep-me')
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('EXTERNAL_TRIGGER path', () => {
|
||||
it.concurrent('preserves coerced types for integration trigger payload', () => {
|
||||
const block = createBlock('webhook', 'start', {
|
||||
subBlocks: {
|
||||
inputFormat: {
|
||||
value: [
|
||||
{ name: 'count', type: 'number' },
|
||||
{ name: 'payload', type: 'object' },
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const resolution = {
|
||||
blockId: 'start',
|
||||
block,
|
||||
path: StartBlockPath.EXTERNAL_TRIGGER,
|
||||
} as const
|
||||
|
||||
const output = buildStartBlockOutput({
|
||||
resolution,
|
||||
workflowInput: {
|
||||
count: '5',
|
||||
payload: '{"event":"push"}',
|
||||
extra: 'untouched',
|
||||
},
|
||||
})
|
||||
|
||||
expect(output.count).toBe(5)
|
||||
expect(output.payload).toEqual({ event: 'push' })
|
||||
expect(output.extra).toBe('untouched')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -262,6 +262,7 @@ function buildUnifiedStartOutput(
|
||||
hasStructured: boolean
|
||||
): NormalizedBlockOutput {
|
||||
const output: NormalizedBlockOutput = {}
|
||||
const structuredKeys = hasStructured ? new Set(Object.keys(structuredInput)) : null
|
||||
|
||||
if (hasStructured) {
|
||||
for (const [key, value] of Object.entries(structuredInput)) {
|
||||
@@ -272,6 +273,9 @@ function buildUnifiedStartOutput(
|
||||
if (isPlainObject(workflowInput)) {
|
||||
for (const [key, value] of Object.entries(workflowInput)) {
|
||||
if (key === 'onUploadError') continue
|
||||
// Skip keys already set by schema-coerced structuredInput to
|
||||
// prevent raw workflowInput strings from overwriting typed values.
|
||||
if (structuredKeys?.has(key)) continue
|
||||
// Runtime values override defaults (except undefined/null which mean "not provided")
|
||||
if (value !== undefined && value !== null) {
|
||||
output[key] = value
|
||||
@@ -384,6 +388,7 @@ function buildIntegrationTriggerOutput(
|
||||
hasStructured: boolean
|
||||
): NormalizedBlockOutput {
|
||||
const output: NormalizedBlockOutput = {}
|
||||
const structuredKeys = hasStructured ? new Set(Object.keys(structuredInput)) : null
|
||||
|
||||
if (hasStructured) {
|
||||
for (const [key, value] of Object.entries(structuredInput)) {
|
||||
@@ -393,6 +398,7 @@ function buildIntegrationTriggerOutput(
|
||||
|
||||
if (isPlainObject(workflowInput)) {
|
||||
for (const [key, value] of Object.entries(workflowInput)) {
|
||||
if (structuredKeys?.has(key)) continue
|
||||
if (value !== undefined && value !== null) {
|
||||
output[key] = value
|
||||
} else if (!Object.hasOwn(output, key)) {
|
||||
|
||||
42
apps/sim/executor/utils/vertex-credential.ts
Normal file
42
apps/sim/executor/utils/vertex-credential.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { refreshTokenIfNeeded, resolveOAuthAccountId } from '@/app/api/auth/oauth/utils'
|
||||
|
||||
const logger = createLogger('VertexCredential')
|
||||
|
||||
/**
|
||||
* Resolves a Vertex AI OAuth credential to an access token.
|
||||
* Shared across agent, evaluator, and router handlers.
|
||||
*/
|
||||
export async function resolveVertexCredential(
|
||||
credentialId: string,
|
||||
callerLabel = 'vertex'
|
||||
): Promise<string> {
|
||||
const requestId = `${callerLabel}-${Date.now()}`
|
||||
|
||||
logger.info(`[${requestId}] Resolving Vertex AI credential: ${credentialId}`)
|
||||
|
||||
const resolved = await resolveOAuthAccountId(credentialId)
|
||||
if (!resolved) {
|
||||
throw new Error(`Vertex AI credential is not a valid OAuth credential: ${credentialId}`)
|
||||
}
|
||||
|
||||
const credential = await db.query.account.findFirst({
|
||||
where: eq(account.id, resolved.accountId),
|
||||
})
|
||||
|
||||
if (!credential) {
|
||||
throw new Error(`Vertex AI credential not found: ${credentialId}`)
|
||||
}
|
||||
|
||||
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, resolved.accountId)
|
||||
|
||||
if (!accessToken) {
|
||||
throw new Error('Failed to get Vertex AI access token')
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully resolved Vertex AI credential`)
|
||||
return accessToken
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user