mirror of
https://github.com/simstudioai/sim.git
synced 2026-04-28 03:00:29 -04:00
Compare commits
18 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c400e59ea6 | ||
|
|
2e3de9ac8a | ||
|
|
154b9d0883 | ||
|
|
c95ac3bc23 | ||
|
|
ca814f021d | ||
|
|
2502369122 | ||
|
|
8266f0afdb | ||
|
|
896a00ae31 | ||
|
|
74946fb162 | ||
|
|
f62d274478 | ||
|
|
65e17de065 | ||
|
|
79ff5d80b3 | ||
|
|
2a52141d2f | ||
|
|
76ad59fd7d | ||
|
|
c32c1cb917 | ||
|
|
50e74f75ef | ||
|
|
60652e621c | ||
|
|
8863f1132a |
@@ -4045,6 +4045,7 @@ export function AsanaIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function PipedriveIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const pathId = useId()
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
@@ -4058,7 +4059,7 @@ export function PipedriveIcon(props: SVGProps<SVGSVGElement>) {
|
||||
<defs>
|
||||
<path
|
||||
d='M59.6807,81.1772 C59.6807,101.5343 70.0078,123.4949 92.7336,123.4949 C109.5872,123.4949 126.6277,110.3374 126.6277,80.8785 C126.6277,55.0508 113.232,37.7119 93.2944,37.7119 C77.0483,37.7119 59.6807,49.1244 59.6807,81.1772 Z M101.3006,0 C142.0482,0 169.4469,32.2728 169.4469,80.3126 C169.4469,127.5978 140.584,160.60942 99.3224,160.60942 C79.6495,160.60942 67.0483,152.1836 60.4595,146.0843 C60.5063,147.5305 60.5374,149.1497 60.5374,150.8788 L60.5374,215 L18.32565,215 L18.32565,44.157 C18.32565,41.6732 17.53126,40.8873 15.07021,40.8873 L0.5531,40.8873 L0.5531,3.4741 L35.9736,3.4741 C52.282,3.4741 56.4564,11.7741 57.2508,18.1721 C63.8708,10.7524 77.5935,0 101.3006,0 Z'
|
||||
id='path-1'
|
||||
id={pathId}
|
||||
/>
|
||||
</defs>
|
||||
<g
|
||||
@@ -4069,10 +4070,7 @@ export function PipedriveIcon(props: SVGProps<SVGSVGElement>) {
|
||||
fillRule='evenodd'
|
||||
>
|
||||
<g transform='translate(67.000000, 44.000000)'>
|
||||
<mask id='mask-2' fill='white'>
|
||||
<use href='#path-1' />
|
||||
</mask>
|
||||
<use id='Clip-5' fill='#FFFFFF' xlinkHref='#path-1' />
|
||||
<use fill='#FFFFFF' xlinkHref={`#${pathId}`} />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
@@ -4098,6 +4096,40 @@ export function SalesforceIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function SapS4HanaIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 412.38 204'>
|
||||
<defs>
|
||||
<linearGradient
|
||||
id={id}
|
||||
x1='206.19'
|
||||
y1='0'
|
||||
x2='206.19'
|
||||
y2='204'
|
||||
gradientUnits='userSpaceOnUse'
|
||||
>
|
||||
<stop offset='0' stopColor='#00b1eb' />
|
||||
<stop offset='.212' stopColor='#009ad9' />
|
||||
<stop offset='.519' stopColor='#007fc4' />
|
||||
<stop offset='.792' stopColor='#006eb8' />
|
||||
<stop offset='1' stopColor='#0069b4' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<polyline
|
||||
fill={`url(#${id})`}
|
||||
fillRule='evenodd'
|
||||
points='0 204 208.413 204 412.38 0 0 0 0 204'
|
||||
/>
|
||||
<path
|
||||
fill='#fff'
|
||||
fillRule='evenodd'
|
||||
d='m244.727,38.359l-40.593-.025v96.518l-35.46-96.518h-35.16l-30.277,80.716c-3.224-20.352-24.277-27.38-40.84-32.649-10.937-3.512-22.541-8.678-22.434-14.387.091-4.687,6.225-9.04,18.377-8.385,8.17.433,15.373,1.092,29.71,8.006l14.102-24.557c-13.088-6.658-31.169-10.867-45.985-10.883h-.086c-17.277,0-31.677,5.598-40.602,14.824-6.221,6.443-9.572,14.626-9.712,23.679-.227,12.454,4.341,21.292,13.938,28.338,8.104,5.944,18.468,9.794,27.603,12.626,11.27,3.492,20.467,6.526,20.36,13.002-.083,2.355-.977,4.552-2.671,6.337-2.807,2.897-7.124,3.986-13.084,4.098-11.497.243-20.026-1.559-33.61-9.585l-12.536,24.903c13.546,7.705,29.586,12.223,45.952,12.223l2.106-.024c14.247-.256,25.745-4.316,34.929-11.712.527-.416,1.001-.845,1.488-1.277l-4.073,10.874h36.875l6.189-18.822c6.477,2.214,13.847,3.437,21.676,3.437,7.618,0,14.795-1.17,21.156-3.252l5.965,18.637h60.137v-38.969h13.113c31.706,0,50.456-16.147,50.456-43.202,0-30.139-18.219-43.969-57.011-43.969Zm-93.816,82.587c-4.737,0-9.177-.828-13.006-2.275l12.866-40.593h.244l12.643,40.708c-3.801,1.349-8.138,2.16-12.746,2.16Zm96.199-23.324h-8.941v-32.711h8.941c11.927,0,21.437,3.961,21.437,16.139,0,12.602-9.51,16.572-21.437,16.572'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function ServiceNowIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 71.1 63.6'>
|
||||
@@ -4694,15 +4726,16 @@ export function DynamoDBIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function IAMIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='iamGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#iamGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M14,59 L66,59 L66,21 L14,21 L14,59 Z M68,20 L68,60 C68,60.552 67.553,61 67,61 L13,61 C12.447,61 12,60.552 12,60 L12,20 C12,19.448 12.447,19 13,19 L67,19 C67.553,19 68,19.448 68,20 L68,20 Z M44,48 L59,48 L59,46 L44,46 L44,48 Z M57,42 L62,42 L62,40 L57,40 L57,42 Z M44,42 L52,42 L52,40 L44,40 L44,42 Z M29,46 C29,45.449 28.552,45 28,45 C27.448,45 27,45.449 27,46 C27,46.551 27.448,47 28,47 C28.552,47 29,46.551 29,46 L29,46 Z M31,46 C31,47.302 30.161,48.401 29,48.816 L29,51 L27,51 L27,48.815 C25.839,48.401 25,47.302 25,46 C25,44.346 26.346,43 28,43 C29.654,43 31,44.346 31,46 L31,46 Z M19,53.993 L36.994,54 L36.996,50 L33,50 L33,48 L36.996,48 L36.998,45 L33,45 L33,43 L36.999,43 L37,40.007 L19.006,40 L19,53.993 Z M22,38.001 L34,38.006 L34,31 C34.001,28.697 31.197,26.677 28,26.675 L27.996,26.675 C24.804,26.675 22.004,28.696 22.002,31 L22,38.001 Z M17,54.992 L17.006,39 C17.006,38.734 17.111,38.48 17.299,38.292 C17.486,38.105 17.741,38 18.006,38 L20,38.001 L20.002,31 C20.004,27.512 23.59,24.675 27.996,24.675 L28,24.675 C32.412,24.677 36.001,27.515 36,31 L36,38.007 L38,38.008 C38.553,38.008 39,38.456 39,39.008 L38.994,55 C38.994,55.266 38.889,55.52 38.701,55.708 C38.514,55.895 38.259,56 37.994,56 L18,55.992 C17.447,55.992 17,55.544 17,54.992 L17,54.992 Z M60,36 L62,36 L62,34 L60,34 L60,36 Z M44,36 L55,36 L55,34 L44,34 L44,36 Z'
|
||||
fill='#FFFFFF'
|
||||
@@ -4712,15 +4745,16 @@ export function IAMIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function IdentityCenterIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='identityCenterGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#identityCenterGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M46.694,46.8194562 C47.376,46.1374562 47.376,45.0294562 46.694,44.3474562 C46.353,44.0074562 45.906,43.8374562 45.459,43.8374562 C45.01,43.8374562 44.563,44.0074562 44.222,44.3474562 C43.542,45.0284562 43.542,46.1384562 44.222,46.8194562 C44.905,47.5014562 46.013,47.4994562 46.694,46.8194562 M47.718,47.1374562 L51.703,51.1204562 L50.996,51.8274562 L49.868,50.6994562 L48.793,51.7754562 L48.086,51.0684562 L49.161,49.9924562 L47.011,47.8444562 C46.545,48.1654562 46.003,48.3294562 45.458,48.3294562 C44.755,48.3294562 44.051,48.0624562 43.515,47.5264562 C42.445,46.4554562 42.445,44.7124562 43.515,43.6404562 C44.586,42.5714562 46.329,42.5694562 47.401,43.6404562 C48.351,44.5904562 48.455,46.0674562 47.718,47.1374562 M53,44.1014562 C53,46.1684562 51.505,47.0934562 50.023,47.0934562 L50.023,46.0934562 C50.487,46.0934562 52,45.9494562 52,44.1014562 C52,43.0044562 51.353,42.3894562 49.905,42.1084562 C49.68,42.0654562 49.514,41.8754562 49.501,41.6484562 C49.446,40.7444562 48.987,40.1124562 48.384,40.1124562 C48.084,40.1124562 47.854,40.2424562 47.616,40.5464562 C47.506,40.6884562 47.324,40.7594562 47.147,40.7324562 C46.968,40.7054562 46.818,40.5844562 46.755,40.4144562 C46.577,39.9434562 46.211,39.4334562 45.723,38.9774562 C45.231,38.5094562 43.883,37.5074562 41.972,38.2734562 C40.885,38.7054562 40.034,39.9494562 40.034,41.1074562 C40.034,41.2354562 40.043,41.3624562 40.058,41.4884562 C40.061,41.5094562 40.062,41.5304562 40.062,41.5514562 C40.062,41.7994562 39.882,42.0064562 39.645,42.0464562 C38.886,42.2394562 38,42.7454562 38,44.0554562 L38.005,44.2104562 C38.069,45.3254562 39.252,45.9954562 40.358,45.9984562 L41,45.9984562 L41,46.9984562 L40.357,46.9984562 C38.536,46.9944562 37.095,45.8194562 37.006,44.2644562 C37.003,44.1944562 37,44.1244562 37,44.0554562 C37,42.6944562 37.752,41.6484562 39.035,41.1884562 C39.034,41.1614562 39.034,41.1344562 39.034,41.1074562 C39.034,39.5434562 40.138,37.9254562 41.602,37.3434562 C43.298,36.6654562 45.095,37.0034562 46.409,38.2494562 
C46.706,38.5274562 47.076,38.9264562 47.372,39.4134562 C47.673,39.2124562 48.008,39.1124562 48.384,39.1124562 C49.257,39.1124562 50.231,39.7714562 50.458,41.2074562 C52.145,41.6324562 53,42.6054562 53,44.1014562 M27,53 L27,27 L53,27 L53,34 L51,34 L51,29 L29,29 L29,51 L51,51 L51,46 L53,46 L53,53 Z'
|
||||
fill='#FFFFFF'
|
||||
@@ -4730,15 +4764,16 @@ export function IdentityCenterIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function STSIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='stsGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#stsGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M14,59 L66,59 L66,21 L14,21 L14,59 Z M68,20 L68,60 C68,60.552 67.553,61 67,61 L13,61 C12.447,61 12,60.552 12,60 L12,20 C12,19.448 12.447,19 13,19 L67,19 C67.553,19 68,19.448 68,20 L68,20 Z M44,48 L59,48 L59,46 L44,46 L44,48 Z M57,42 L62,42 L62,40 L57,40 L57,42 Z M44,42 L52,42 L52,40 L44,40 L44,42 Z M29,46 C29,45.449 28.552,45 28,45 C27.448,45 27,45.449 27,46 C27,46.551 27.448,47 28,47 C28.552,47 29,46.551 29,46 L29,46 Z M31,46 C31,47.302 30.161,48.401 29,48.816 L29,51 L27,51 L27,48.815 C25.839,48.401 25,47.302 25,46 C25,44.346 26.346,43 28,43 C29.654,43 31,44.346 31,46 L31,46 Z M19,53.993 L36.994,54 L36.996,50 L33,50 L33,48 L36.996,48 L36.998,45 L33,45 L33,43 L36.999,43 L37,40.007 L19.006,40 L19,53.993 Z M22,38.001 L34,38.006 L34,31 C34.001,28.697 31.197,26.677 28,26.675 L27.996,26.675 C24.804,26.675 22.004,28.696 22.002,31 L22,38.001 Z M17,54.992 L17.006,39 C17.006,38.734 17.111,38.48 17.299,38.292 C17.486,38.105 17.741,38 18.006,38 L20,38.001 L20.002,31 C20.004,27.512 23.59,24.675 27.996,24.675 L28,24.675 C32.412,24.677 36.001,27.515 36,31 L36,38.007 L38,38.008 C38.553,38.008 39,38.456 39,39.008 L38.994,55 C38.994,55.266 38.889,55.52 38.701,55.708 C38.514,55.895 38.259,56 37.994,56 L18,55.992 C17.447,55.992 17,55.544 17,54.992 L17,54.992 Z M60,36 L62,36 L62,34 L60,34 L60,36 Z M44,36 L55,36 L55,34 L44,34 L44,36 Z'
|
||||
fill='#FFFFFF'
|
||||
@@ -4748,15 +4783,16 @@ export function STSIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function SESIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='sesGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#sesGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M57,60.999875 C57,59.373846 55.626,57.9998214 54,57.9998214 C52.374,57.9998214 51,59.373846 51,60.999875 C51,62.625904 52.374,63.9999286 54,63.9999286 C55.626,63.9999286 57,62.625904 57,60.999875 L57,60.999875 Z M40,59.9998571 C38.374,59.9998571 37,61.3738817 37,62.9999107 C37,64.6259397 38.374,65.9999643 40,65.9999643 C41.626,65.9999643 43,64.6259397 43,62.9999107 C43,61.3738817 41.626,59.9998571 40,59.9998571 L40,59.9998571 Z M26,57.9998214 C24.374,57.9998214 23,59.373846 23,60.999875 C23,62.625904 24.374,63.9999286 26,63.9999286 C27.626,63.9999286 29,62.625904 29,60.999875 C29,59.373846 27.626,57.9998214 26,57.9998214 L26,57.9998214 Z M28.605,42.9995536 L51.395,42.9995536 L43.739,36.1104305 L40.649,38.7584778 C40.463,38.9194807 40.23,38.9994821 39.999,38.9994821 C39.768,38.9994821 39.535,38.9194807 39.349,38.7584778 L36.26,36.1104305 L28.605,42.9995536 Z M27,28.1732888 L27,41.7545313 L34.729,34.7984071 L27,28.1732888 Z M51.297,26.9992678 L28.703,26.9992678 L39.999,36.6824408 L51.297,26.9992678 Z M53,41.7545313 L53,28.1732888 L45.271,34.7974071 L53,41.7545313 Z M59,60.999875 C59,63.7099234 56.71,65.9999643 54,65.9999643 C51.29,65.9999643 49,63.7099234 49,60.999875 C49,58.6308327 50.75,56.5837961 53,56.1057876 L53,52.9997321 L41,52.9997321 L41,58.1058233 C43.25,58.5838319 45,60.6308684 45,62.9999107 C45,65.7099591 42.71,68 40,68 C37.29,68 35,65.7099591 35,62.9999107 C35,60.6308684 36.75,58.5838319 39,58.1058233 L39,52.9997321 L27,52.9997321 L27,56.1057876 C29.25,56.5837961 31,58.6308327 31,60.999875 C31,63.7099234 28.71,65.9999643 26,65.9999643 C23.29,65.9999643 21,63.7099234 21,60.999875 C21,58.6308327 22.75,56.5837961 25,56.1057876 L25,51.9997143 C25,51.4477044 25.447,50.9996964 26,50.9996964 L39,50.9996964 L39,44.9995893 L26,44.9995893 C25.447,44.9995893 25,44.5515813 25,43.9995714 L25,25.99925 C25,25.4472401 25.447,24.9992321 26,24.9992321 L54,24.9992321 C54.553,24.9992321 55,25.4472401 55,25.99925 L55,43.9995714 C55,44.5515813 54.553,44.9995893 
54,44.9995893 L41,44.9995893 L41,50.9996964 L54,50.9996964 C54.553,50.9996964 55,51.4477044 55,51.9997143 L55,56.1057876 C57.25,56.5837961 59,58.6308327 59,60.999875 L59,60.999875 Z M68,39.9995 C68,45.9066055 66.177,51.5597064 62.727,56.3447919 L61.104,55.174771 C64.307,50.7316916 66,45.4845979 66,39.9995 C66,25.664244 54.337,14.0000357 40.001,14.0000357 C25.664,14.0000357 14,25.664244 14,39.9995 C14,45.4845979 15.693,50.7316916 18.896,55.174771 L17.273,56.3447919 C13.823,51.5597064 12,45.9066055 12,39.9995 C12,24.5612243 24.561,12 39.999,12 C55.438,12 68,24.5612243 68,39.9995 L68,39.9995 Z'
|
||||
fill='#FFFFFF'
|
||||
@@ -4766,15 +4802,16 @@ export function SESIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function SecretsManagerIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='secretsManagerGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#secretsManagerGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M38.76,43.36 C38.76,44.044 39.317,44.6 40,44.6 C40.684,44.6 41.24,44.044 41.24,43.36 C41.24,42.676 40.684,42.12 40,42.12 C39.317,42.12 38.76,42.676 38.76,43.36 L38.76,43.36 Z M36.76,43.36 C36.76,41.573 38.213,40.12 40,40.12 C41.787,40.12 43.24,41.573 43.24,43.36 C43.24,44.796 42.296,46.002 41,46.426 L41,49 L39,49 L39,46.426 C37.704,46.002 36.76,44.796 36.76,43.36 L36.76,43.36 Z M49,38 L31,38 L31,51 L49,51 L49,48 L46,48 L46,46 L49,46 L49,43 L46,43 L46,41 L49,41 L49,38 Z M34,36 L45.999,36 L46,31 C46.001,28.384 43.143,26.002 40.004,26 L40.001,26 C38.472,26 36.928,26.574 35.763,27.575 C34.643,28.537 34,29.786 34,31.001 L34,36 Z M48,31.001 L47.999,36 L50,36 C50.553,36 51,36.448 51,37 L51,52 C51,52.552 50.553,53 50,53 L30,53 C29.447,53 29,52.552 29,52 L29,37 C29,36.448 29.447,36 30,36 L32,36 L32,31 C32.001,29.202 32.897,27.401 34.459,26.058 C35.982,24.75 38.001,24 40.001,24 L40.004,24 C44.265,24.002 48.001,27.273 48,31.001 L48,31.001 Z M19.207,55.049 L20.828,53.877 C18.093,50.097 16.581,45.662 16.396,41 L19,41 L19,39 L16.399,39 C16.598,34.366 18.108,29.957 20.828,26.198 L19.207,25.025 C16.239,29.128 14.599,33.942 14.399,39 L12,39 L12,41 L14.396,41 C14.582,46.086 16.224,50.926 19.207,55.049 L19.207,55.049 Z M53.838,59.208 C50.069,61.936 45.648,63.446 41,63.639 L41,61 L39,61 L39,63.639 C34.352,63.447 29.93,61.937 26.159,59.208 L24.988,60.828 C29.1,63.805 33.928,65.445 39,65.639 L39,68 L41,68 L41,65.639 C46.072,65.445 50.898,63.805 55.01,60.828 L53.838,59.208 Z M26.159,20.866 C29.93,18.138 34.352,16.628 39,16.436 L39,19 L41,19 L41,16.436 C45.648,16.628 50.069,18.138 53.838,20.866 L55.01,19.246 C50.898,16.27 46.072,14.63 41,14.436 L41,12 L39,12 L39,14.436 C33.928,14.629 29.1,16.269 24.988,19.246 L26.159,20.866 Z M65.599,39 C65.399,33.942 63.759,29.128 60.79,25.025 L59.169,26.198 C61.89,29.957 63.4,34.366 63.599,39 L61,39 L61,41 L63.602,41 C63.416,45.662 61.905,50.097 59.169,53.877 L60.79,55.049 C63.774,50.926 65.415,46.086 65.602,41 L68,41 L68,39 L65.599,39 Z 
M56.386,25.064 L64.226,17.224 L62.812,15.81 L54.972,23.65 L56.386,25.064 Z M23.612,55.01 L15.772,62.85 L17.186,64.264 L25.026,56.424 L23.612,55.01 Z M28.666,27.253 L13.825,12.413 L12.411,13.827 L27.252,28.667 L28.666,27.253 Z M54.193,52.78 L67.586,66.173 L66.172,67.587 L52.779,54.194 L54.193,52.78 Z'
|
||||
fill='#FFFFFF'
|
||||
|
||||
@@ -154,6 +154,7 @@ import {
|
||||
RootlyIcon,
|
||||
S3Icon,
|
||||
SalesforceIcon,
|
||||
SapS4HanaIcon,
|
||||
SESIcon,
|
||||
SearchIcon,
|
||||
SecretsManagerIcon,
|
||||
@@ -369,6 +370,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
rootly: RootlyIcon,
|
||||
s3: S3Icon,
|
||||
salesforce: SalesforceIcon,
|
||||
sap_s4hana: SapS4HanaIcon,
|
||||
search: SearchIcon,
|
||||
secrets_manager: SecretsManagerIcon,
|
||||
sendgrid: SendgridIcon,
|
||||
|
||||
@@ -25,6 +25,8 @@ Secrets are organized into two sections:
|
||||
- **Workspace** — shared with all members of your workspace
|
||||
- **Personal** — private to you
|
||||
|
||||
External workspace members count as workspace members for workspace-scoped secrets. They can use workspace secrets according to their workspace permission level, even though they are not members of your organization.
|
||||
|
||||
### Adding a Secret
|
||||
|
||||
Type a key name (e.g. `OPENAI_API_KEY`) into the **Key** column and its value into the **Value** column in the last empty row. A new empty row appears automatically as you type. Existing values are masked by default.
|
||||
@@ -89,7 +91,7 @@ Click **Save** to apply changes, or **Back** to return to the list.
|
||||
|
||||
| | Workspace | Personal |
|
||||
|---|---|---|
|
||||
| **Visibility** | All workspace members | Only you |
|
||||
| **Visibility** | All workspace members, including external workspace members | Only you |
|
||||
| **Use in workflows** | Any member can use | Only you can use |
|
||||
| **Best for** | Production workflows, shared services | Testing, personal API keys |
|
||||
| **Who can edit** | Workspace admins | Only you |
|
||||
|
||||
@@ -130,6 +130,8 @@ Controls visibility of platform features and modules.
|
||||
|
||||
Open the group's **Details** view and add members by searching for users by name or email. Only users who already have workspace-level access can be added. A user can only belong to one group per workspace — adding a user to a new group within the same workspace removes them from their current group for that workspace.
|
||||
|
||||
External workspace members are treated like other workspace members for access-control purposes. They can be assigned to permission groups in any workspace they have access to, but they do not become organization members or appear in the organization roster.
|
||||
|
||||
---
|
||||
|
||||
## Enforcement
|
||||
@@ -159,6 +161,7 @@ When a user opens Mothership, their permission group is read before any block or
|
||||
- Moving a user to a new group within a workspace automatically removes them from their previous group in that workspace.
|
||||
- Users not assigned to any group in a workspace have no restrictions applied in that workspace (all blocks, providers, and features are available to them there).
|
||||
- If **Auto-add new members** is enabled on a group, new members of that workspace are automatically placed in the group. Only one group per workspace can have this setting active.
|
||||
- External workspace members follow the same per-workspace permission group rules as internal members.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -44,7 +44,7 @@ Authorization: Bearer <api-key>
|
||||
| `resourceType` | string | Filter by resource type (e.g. `workflow`) |
|
||||
| `resourceId` | string | Filter by a specific resource ID |
|
||||
| `workspaceId` | string | Filter by workspace |
|
||||
| `actorId` | string | Filter by user ID (must be an org member) |
|
||||
| `actorId` | string | Filter by user ID. For organization-wide filters, the actor must be a current or former org member; workspace-scoped logs can also include external workspace members. |
|
||||
| `startDate` | string | ISO 8601 date — return logs on or after this date |
|
||||
| `endDate` | string | ISO 8601 date — return logs on or before this date |
|
||||
| `includeDeparted` | boolean | Include logs from members who have since left the organization (default `false`) |
|
||||
@@ -98,6 +98,8 @@ Audit log events follow a `resource.action` naming pattern. The table below list
|
||||
| **Credentials** | `credential.created`, `credential.deleted`, `oauth.disconnected` |
|
||||
| **Organization** | `organization.updated`, `org_member.added`, `org_member.role_changed` |
|
||||
|
||||
Workspace invitation events include whether the invite is for an internal organization member or an external workspace member in their metadata. External workspace members can appear as actors on workspace-scoped events, but they are not organization members and do not appear in the organization roster.
|
||||
|
||||
---
|
||||
|
||||
<FAQ items={[
|
||||
@@ -123,7 +125,7 @@ Audit log events follow a `resource.action` naming pattern. The table below list
|
||||
},
|
||||
{
|
||||
question: "Can I filter logs by a specific user?",
|
||||
answer: "Yes. Pass the actorId query parameter to filter logs by a specific user. The actor must be a current or former member of your organization."
|
||||
answer: "Yes. Pass the actorId query parameter to filter logs by a specific user. Organization-wide actor filters require the actor to be a current or former member of your organization. Workspace-scoped logs may also include external workspace members who acted inside a workspace without joining the organization."
|
||||
}
|
||||
]} />
|
||||
|
||||
|
||||
@@ -13,6 +13,8 @@ Sim Enterprise provides advanced features for organizations with enhanced securi
|
||||
|
||||
Define permission groups on a workspace to control what features and integrations its members can use. Permission groups are scoped to a single workspace — a user can belong to different groups (or no group) in different workspaces.
|
||||
|
||||
External workspace members can be assigned to permission groups just like internal organization members, but they remain outside the organization roster and do not consume seats.
|
||||
|
||||
### Features
|
||||
|
||||
- **Allowed Model Providers** - Restrict which AI providers users can access (OpenAI, Anthropic, Google, etc.)
|
||||
@@ -81,4 +83,4 @@ Self-hosted deployments enable enterprise features via environment variables ins
|
||||
| `INBOX_ENABLED`, `NEXT_PUBLIC_INBOX_ENABLED` | Sim Mailer inbox |
|
||||
| `DISABLE_INVITATIONS`, `NEXT_PUBLIC_DISABLE_INVITATIONS` | Disable invitations; manage membership via Admin API |
|
||||
|
||||
Once enabled, each feature is configured through the same Settings UI as Sim Cloud. When invitations are disabled, use the Admin API (`x-admin-key` header) to manage organization and workspace membership.
|
||||
Once enabled, each feature is configured through the same Settings UI as Sim Cloud. When invitations are disabled, use the Admin API (`x-admin-key` header) to manage organization membership and workspace access. Internal members join the organization; external workspace members only receive access to a specific workspace.
|
||||
|
||||
@@ -221,6 +221,8 @@ Once SSO is configured, users with your domain (`company.com`) can sign in throu
|
||||
|
||||
Users who sign in via SSO for the first time are automatically provisioned and added to your organization — no manual invite required.
|
||||
|
||||
SSO provisioning creates internal organization members. External workspace members are different: they are invited to a specific workspace without joining your organization or consuming one of your seats.
|
||||
|
||||
<Callout type="info">
|
||||
Password-based login remains available. Forcing all organization members to use SSO exclusively is not yet supported.
|
||||
</Callout>
|
||||
@@ -242,7 +244,7 @@ Users who sign in via SSO for the first time are automatically provisioned and a
|
||||
},
|
||||
{
|
||||
question: "What happens when a user signs in with SSO for the first time?",
|
||||
answer: "Sim creates an account for them automatically and adds them to your organization. No manual invite is needed. They are assigned the member role by default."
|
||||
answer: "Sim creates an account for them automatically and adds them to your organization. No manual invite is needed. They are assigned the member role by default. External workspace members are not provisioned through SSO into your organization; they are invited directly to a workspace and remain outside your org roster."
|
||||
},
|
||||
{
|
||||
question: "Can I still use email/password login after enabling SSO?",
|
||||
|
||||
@@ -272,6 +272,8 @@ Sim has two paid plan tiers - **Pro** and **Max**. Either can be used individual
|
||||
|
||||
To use Pro or Max with a team, select **Get For Team** in subscription settings and choose the tier and number of seats. Credits are pooled across the organization at the per-seat rate (e.g. Max for Teams with 3 seats = 75,000 credits/mo pooled).
|
||||
|
||||
Internal organization members use seats and contribute to the team's pooled credit allocation. External workspace members do not join your organization, do not appear in the organization roster, and do not count toward your seat total.
|
||||
|
||||
### Daily Refresh Credits
|
||||
|
||||
Paid plans include a small daily credit allowance that does not count toward your plan limit. Each day, usage up to the daily refresh amount is excluded from billable usage. This allowance resets every 24 hours and does not carry over - use it or lose it.
|
||||
@@ -317,7 +319,7 @@ By default, your usage is capped at the credits included in your plan. To allow
|
||||
| **Max** | Up to 10 | — |
|
||||
| **Team / Enterprise** | Unlimited | Unlimited |
|
||||
|
||||
Team and Enterprise plans unlock shared workspaces that belong to your organization. Members invited to a shared workspace automatically join the organization and count toward your seat total. When a Team or Enterprise subscription is cancelled or downgraded, existing shared workspaces remain accessible to current members but new invites are disabled until the organization is upgraded again.
|
||||
Team and Enterprise plans unlock shared workspaces that belong to your organization. Internal members invited to a shared workspace join the organization and count toward your seat total. Existing Sim users who already belong to another organization can be added as external workspace members; they get workspace access without joining your organization or using one of your seats. When a Team or Enterprise subscription is cancelled or downgraded, existing shared workspaces remain accessible to current members but new invites are disabled until the organization is upgraded again.
|
||||
|
||||
### Rate Limits
|
||||
|
||||
@@ -368,7 +370,8 @@ Sim uses a **base subscription + overage** billing model:
|
||||
- Example: 7,000 credits used = $25 (subscription) + $5 (overage for 1,000 extra credits at $0.005/credit)
|
||||
|
||||
**Team Plans:**
|
||||
- Usage is pooled across all team members in the organization
|
||||
- Usage is pooled across internal team members in the organization
|
||||
- External workspace members keep their own organization or personal billing context for runs where they are the billing actor
|
||||
- Overage is calculated from total team usage against the pooled limit
|
||||
- Organization owner receives one bill
|
||||
|
||||
|
||||
@@ -42,6 +42,8 @@ Only authorized senders can create tasks. Emails from anyone else are automatica
|
||||
- **Workspace members** are allowed by default — no setup needed
|
||||
- **External senders** can be added manually with an optional label for easy identification
|
||||
|
||||
External senders are email addresses that can create inbox tasks. They are not the same as external workspace members, who have workspace access in Sim without joining your organization.
|
||||
|
||||
Manage your allowed senders list in **Settings** → **Inbox** → **Allowed Senders**.
|
||||
|
||||
## Tracking Tasks
|
||||
|
||||
@@ -12,7 +12,7 @@ When you invite team members to your organization or workspace, you'll need to c
|
||||
Sim has two kinds of workspaces:
|
||||
|
||||
- **Personal workspaces** live under your individual account. The number you can create depends on your plan.
|
||||
- **Shared (organization) workspaces** live under an organization and are available on Team and Enterprise plans. Any organization Owner or Admin can create them. Members invited to a shared workspace automatically join the organization and count toward your seat total.
|
||||
- **Shared (organization) workspaces** live under an organization and are available on Team and Enterprise plans. Any organization Owner or Admin can create them. Internal members invited to a shared workspace join the organization and count toward your seat total. Existing Sim users who already belong to another organization can be added as external workspace members instead, giving them access to the workspace without adding them to your organization roster or using one of your seats.
|
||||
|
||||
### Workspace Limits by Plan
|
||||
|
||||
@@ -43,6 +43,15 @@ When inviting someone to a workspace, you can assign one of three permission lev
|
||||
| **Write** | Create and edit workflows, run workflows, manage environment variables |
|
||||
| **Admin** | Everything Write can do, plus invite/remove users and manage workspace settings |
|
||||
|
||||
## Internal Members vs External Workspace Members
|
||||
|
||||
Workspace permissions are separate from organization membership:
|
||||
|
||||
- **Internal organization members** belong to your organization, appear in the organization roster, and count toward your seat total. Invite new teammates this way when they should be part of your company or team in Sim.
|
||||
- **External workspace members** have access only to the workspace they are invited to. They keep their own organization membership, do not appear in your organization roster, and do not count toward your organization's seats. Use external access for clients, partners, contractors, or collaborators who already use Sim in another organization.
|
||||
|
||||
External workspace members still receive a workspace permission level — Read, Write, or Admin — and that permission controls what they can do inside the workspace.
|
||||
|
||||
## What Each Permission Level Can Do
|
||||
|
||||
Here's a detailed breakdown of what users can do with each permission level:
|
||||
@@ -126,7 +135,7 @@ Every workspace has one **Owner** (the person who created it) plus any number of
|
||||
2. **Workspace level**: Give them **Admin** permission so they can manage the team and see everything
|
||||
|
||||
### Adding a Stakeholder or Client
|
||||
1. **Organization level**: Invite them as an **Organization Member**
|
||||
1. **Organization level**: If they should not join your organization, add them as an **External workspace member**
|
||||
2. **Workspace level**: Give them **Read** permission so they can see progress but not make changes
|
||||
|
||||
---
|
||||
@@ -199,12 +208,12 @@ An organization has three roles: **Owner**, **Admin**, and **Member**.
|
||||
import { FAQ } from '@/components/ui/faq'
|
||||
|
||||
<FAQ items={[
|
||||
{ question: "What is the difference between organization roles and workspace permissions?", answer: "Organization roles (Owner, Admin, or Member) control who can manage the organization itself, including inviting people, creating shared workspaces, and handling billing. Workspace permissions (Read, Write, Admin) control what a user can do within a specific workspace, such as viewing, editing, or managing workflows. A user needs both an organization role and a workspace permission to work within a shared workspace." },
|
||||
{ question: "What is the difference between organization roles and workspace permissions?", answer: "Organization roles (Owner, Admin, or Member) control who can manage the organization itself, including inviting people, creating shared workspaces, and handling billing. Workspace permissions (Read, Write, Admin) control what a user can do within a specific workspace, such as viewing, editing, or managing workflows. Internal members need both an organization role and a workspace permission to work within a shared workspace. External workspace members do not have an organization role in your org; they only have workspace-level access." },
|
||||
{ question: "How many workspaces can I create?", answer: "Free users get 1 personal workspace. Pro users get up to 3 personal workspaces. Max users get up to 10 personal workspaces. Team and Enterprise plans support unlimited shared workspaces under the organization — new invites are gated by your seat count." },
|
||||
{ question: "What happens to my shared workspaces if I cancel or downgrade my Team plan?", answer: "Existing shared workspaces remain accessible to current members, but new invitations are disabled until you upgrade back to a Team or Enterprise plan. No workspaces or members are deleted — the organization is simply dormant until billing is re-enabled." },
|
||||
{ question: "Can I restrict which integrations or model providers a team member can use?", answer: "Yes, on Enterprise-entitled workspaces. Any workspace admin can create permission groups with fine-grained controls, including restricting allowed integrations and allowed model providers to specific lists. You can also disable access to MCP tools, custom tools, skills, and various platform features like the knowledge base, API keys, or Copilot on a per-group basis. Permission groups are scoped per workspace — a user can belong to different groups in different workspaces." },
|
||||
{ question: "What happens when a personal environment variable has the same name as a workspace variable?", answer: "The personal environment variable takes priority. When a workflow runs, if both a personal and workspace variable share the same name, the personal value is used. This allows individual users to override shared workspace configuration when needed." },
|
||||
{ question: "Can an Admin remove the workspace owner?", answer: "No. The workspace owner cannot be removed from the workspace by anyone. Only the workspace owner can delete the workspace or transfer ownership to another user. Admins can do everything else, including inviting and removing other users and managing workspace settings." },
|
||||
{ question: "What are permission groups and how do they work?", answer: "Permission groups are an Enterprise access control feature that lets workspace admins define granular restrictions beyond the standard Read/Write/Admin roles. Groups are scoped to a single workspace: each user can be in at most one group per workspace, and a user can be in different groups across different workspaces. A permission group can hide UI sections (like trace spans, knowledge base, API keys, or deployment options), disable features (MCP tools, custom tools, skills, invitations), and restrict which integrations and model providers its members can access. Members can be assigned manually, and new members can be auto-added on join. Execution-time enforcement is based on the workflow's workspace, not the user's current UI context." },
|
||||
{ question: "How should I set up permissions for a new team member?", answer: "Start with the lowest permission level they need. Invite them to the organization as a Member, then add them to the relevant workspace with Read permission if they only need visibility, Write if they need to create and run workflows, or Admin if they need to manage the workspace and its users. You can always increase permissions later." },
|
||||
{ question: "How should I set up permissions for a new team member?", answer: "Start with the lowest permission level they need. Invite teammates to the organization as Members, then add them to the relevant workspace with Read permission if they only need visibility, Write if they need to create and run workflows, or Admin if they need to manage the workspace and its users. For clients, partners, or users who already belong to another Sim organization, use external workspace access so they can collaborate without joining your organization or consuming a seat." },
|
||||
]} />
|
||||
@@ -31,7 +31,7 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Invite team members</td>
|
||||
<td>Sidebar → **Invite**</td>
|
||||
<td>Sidebar → **Invite**. Internal invites join the organization; external workspace members get workspace access only.</td>
|
||||
<td><ActionVideo src="quick-reference/invite.mp4" alt="Invite team members" /></td>
|
||||
</tr>
|
||||
<tr>
|
||||
|
||||
@@ -42,9 +42,18 @@ Runs a browser automation task using BrowserUse
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `task` | string | Yes | What should the browser agent do |
|
||||
| `variables` | json | No | Optional variables to use as secrets \(format: \{key: value\}\) |
|
||||
| `save_browser_data` | boolean | No | Whether to save browser data |
|
||||
| `model` | string | No | LLM model to use \(default: gpt-4o\) |
|
||||
| `startUrl` | string | No | Initial page URL to start the agent on \(reduces navigation steps\) |
|
||||
| `variables` | json | No | Optional secrets injected into the task \(format: \{key: value\}\) |
|
||||
| `allowedDomains` | string | No | Comma-separated list of domains the agent is allowed to visit |
|
||||
| `maxSteps` | number | No | Maximum number of steps the agent may take \(default 100, max 10000\) |
|
||||
| `flashMode` | boolean | No | Enable flash mode \(faster, less careful navigation\) |
|
||||
| `thinking` | boolean | No | Enable extended reasoning mode |
|
||||
| `vision` | string | No | Vision capability: "true", "false", or "auto" |
|
||||
| `systemPromptExtension` | string | No | Optional text appended to the agent system prompt \(max 2000 chars\) |
|
||||
| `structuredOutput` | string | No | Stringified JSON schema for the structured output |
|
||||
| `highlightElements` | boolean | No | Highlight interactive elements on the page \(default true\) |
|
||||
| `metadata` | json | No | Custom key-value metadata \(up to 10 pairs\) for tracking |
|
||||
| `model` | string | No | LLM model identifier \(e.g. browser-use-2.0\) |
|
||||
| `apiKey` | string | Yes | API key for BrowserUse API |
|
||||
| `profile_id` | string | No | Browser profile ID for persistent sessions \(cookies, login state\) |
|
||||
|
||||
@@ -54,7 +63,18 @@ Runs a browser automation task using BrowserUse
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Task execution identifier |
|
||||
| `success` | boolean | Task completion status |
|
||||
| `output` | json | Task output data |
|
||||
| `steps` | json | Execution steps taken |
|
||||
| `output` | json | Final task output \(string or structured\) |
|
||||
| `steps` | array | Steps the agent executed \(number, memory, nextGoal, url, actions, duration\) |
|
||||
| ↳ `number` | number | Sequential step number |
|
||||
| ↳ `memory` | string | Agent memory at this step |
|
||||
| ↳ `evaluationPreviousGoal` | string | Evaluation of previous goal completion |
|
||||
| ↳ `nextGoal` | string | Goal for the next step |
|
||||
| ↳ `url` | string | Current URL of the browser |
|
||||
| ↳ `screenshotUrl` | string | Optional screenshot URL |
|
||||
| ↳ `actions` | array | Stringified JSON actions performed |
|
||||
| ↳ `duration` | number | Step duration in seconds |
|
||||
| `liveUrl` | string | Embeddable live browser session URL \(active during execution\) |
|
||||
| `shareUrl` | string | Public shareable URL for the recorded session \(post-run\) |
|
||||
| `sessionId` | string | Browser Use session identifier |
|
||||
|
||||
|
||||
|
||||
@@ -150,6 +150,7 @@
|
||||
"rootly",
|
||||
"s3",
|
||||
"salesforce",
|
||||
"sap_s4hana",
|
||||
"search",
|
||||
"secrets_manager",
|
||||
"sendgrid",
|
||||
|
||||
1212
apps/docs/content/docs/en/tools/sap_s4hana.mdx
Normal file
1212
apps/docs/content/docs/en/tools/sap_s4hana.mdx
Normal file
File diff suppressed because it is too large
Load Diff
@@ -925,6 +925,139 @@ Create a canvas pinned to a Slack channel as its resource hub
|
||||
| --------- | ---- | ----------- |
|
||||
| `canvas_id` | string | ID of the created channel canvas |
|
||||
|
||||
### `slack_get_canvas`
|
||||
|
||||
Get Slack canvas file metadata by canvas ID
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `canvasId` | string | Yes | Canvas file ID to retrieve \(e.g., F1234ABCD\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `canvas` | object | Canvas file information returned by Slack |
|
||||
| ↳ `id` | string | Unique canvas file identifier |
|
||||
| ↳ `created` | number | Unix timestamp when the canvas was created |
|
||||
| ↳ `timestamp` | number | Unix timestamp associated with the canvas |
|
||||
| ↳ `name` | string | Canvas file name |
|
||||
| ↳ `title` | string | Canvas title |
|
||||
| ↳ `mimetype` | string | MIME type of the canvas file |
|
||||
| ↳ `filetype` | string | Slack file type for the canvas |
|
||||
| ↳ `pretty_type` | string | Human-readable file type |
|
||||
| ↳ `user` | string | User ID of the canvas creator |
|
||||
| ↳ `editable` | boolean | Whether the canvas file is editable |
|
||||
| ↳ `size` | number | Canvas file size in bytes |
|
||||
| ↳ `mode` | string | File mode |
|
||||
| ↳ `is_external` | boolean | Whether the canvas is externally hosted |
|
||||
| ↳ `is_public` | boolean | Whether the canvas is public |
|
||||
| ↳ `url_private` | string | Private URL for the canvas file |
|
||||
| ↳ `url_private_download` | string | Private download URL for the canvas file |
|
||||
| ↳ `permalink` | string | Permanent URL for the canvas |
|
||||
| ↳ `channels` | array | Public channel IDs where the canvas appears |
|
||||
| ↳ `groups` | array | Private channel IDs where the canvas appears |
|
||||
| ↳ `ims` | array | Direct message IDs where the canvas appears |
|
||||
| ↳ `canvas_readtime` | number | Approximate read time for canvas content |
|
||||
| ↳ `is_channel_space` | boolean | Whether this canvas is linked to a channel |
|
||||
| ↳ `linked_channel_id` | string | Channel ID linked to this canvas |
|
||||
| ↳ `canvas_creator_id` | string | User ID of the canvas creator |
|
||||
|
||||
### `slack_list_canvases`
|
||||
|
||||
List Slack canvases available to the authenticated user or bot
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `channel` | string | No | Filter canvases appearing in a specific channel ID |
|
||||
| `count` | number | No | Number of canvases to return per page |
|
||||
| `page` | number | No | Page number to return |
|
||||
| `user` | string | No | Filter canvases created by a single user ID |
|
||||
| `tsFrom` | string | No | Filter canvases created after this Unix timestamp |
|
||||
| `tsTo` | string | No | Filter canvases created before this Unix timestamp |
|
||||
| `teamId` | string | No | Encoded team ID, required when using an org-level token |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `canvases` | array | Canvas file objects returned by Slack |
|
||||
| ↳ `id` | string | Unique canvas file identifier |
|
||||
| ↳ `created` | number | Unix timestamp when the canvas was created |
|
||||
| ↳ `timestamp` | number | Unix timestamp associated with the canvas |
|
||||
| ↳ `name` | string | Canvas file name |
|
||||
| ↳ `title` | string | Canvas title |
|
||||
| ↳ `mimetype` | string | MIME type of the canvas file |
|
||||
| ↳ `filetype` | string | Slack file type for the canvas |
|
||||
| ↳ `pretty_type` | string | Human-readable file type |
|
||||
| ↳ `user` | string | User ID of the canvas creator |
|
||||
| ↳ `editable` | boolean | Whether the canvas file is editable |
|
||||
| ↳ `size` | number | Canvas file size in bytes |
|
||||
| ↳ `mode` | string | File mode |
|
||||
| ↳ `is_external` | boolean | Whether the canvas is externally hosted |
|
||||
| ↳ `is_public` | boolean | Whether the canvas is public |
|
||||
| ↳ `url_private` | string | Private URL for the canvas file |
|
||||
| ↳ `url_private_download` | string | Private download URL for the canvas file |
|
||||
| ↳ `permalink` | string | Permanent URL for the canvas |
|
||||
| ↳ `channels` | array | Public channel IDs where the canvas appears |
|
||||
| ↳ `groups` | array | Private channel IDs where the canvas appears |
|
||||
| ↳ `ims` | array | Direct message IDs where the canvas appears |
|
||||
| ↳ `canvas_readtime` | number | Approximate read time for canvas content |
|
||||
| ↳ `is_channel_space` | boolean | Whether this canvas is linked to a channel |
|
||||
| ↳ `linked_channel_id` | string | Channel ID linked to this canvas |
|
||||
| ↳ `canvas_creator_id` | string | User ID of the canvas creator |
|
||||
| `paging` | object | Pagination information from Slack |
|
||||
| ↳ `count` | number | Number of items requested per page |
|
||||
| ↳ `total` | number | Total number of matching files |
|
||||
| ↳ `page` | number | Current page number |
|
||||
| ↳ `pages` | number | Total number of pages |
|
||||
|
||||
### `slack_lookup_canvas_sections`
|
||||
|
||||
Find Slack canvas section IDs matching criteria for later edits
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `canvasId` | string | Yes | Canvas ID to search \(e.g., F1234ABCD\) |
|
||||
| `criteria` | json | Yes | Section lookup criteria, such as \{"section_types":\["h1"\],"contains_text":"Roadmap"\} |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `sections` | array | Canvas sections matching the lookup criteria |
|
||||
| ↳ `id` | string | Canvas section identifier |
|
||||
|
||||
### `slack_delete_canvas`
|
||||
|
||||
Delete a Slack canvas by its canvas ID
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `canvasId` | string | Yes | Canvas ID to delete \(e.g., F1234ABCD\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `ok` | boolean | Whether Slack deleted the canvas successfully |
|
||||
|
||||
### `slack_create_conversation`
|
||||
|
||||
Create a new public or private channel in a Slack workspace.
|
||||
|
||||
@@ -72,6 +72,8 @@ Run an autonomous web agent to complete tasks and extract structured data
|
||||
| `provider` | string | No | AI provider to use: openai or anthropic |
|
||||
| `apiKey` | string | Yes | API key for the selected provider |
|
||||
| `outputSchema` | json | No | Optional JSON schema defining the structure of data the agent should return |
|
||||
| `mode` | string | No | Agent tool mode: dom \(default\), hybrid, or cua |
|
||||
| `maxSteps` | number | No | Maximum agent steps \(default 20, max 200\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -92,5 +94,7 @@ Run an autonomous web agent to complete tasks and extract structured data
|
||||
| ↳ `timestamp` | number | Unix timestamp when the action was performed |
|
||||
| ↳ `timeMs` | number | Time in milliseconds \(for wait actions\) |
|
||||
| `structuredOutput` | object | Extracted data matching the provided output schema |
|
||||
| `liveViewUrl` | string | Embeddable Browserbase live view URL \(active only while the session is running\) |
|
||||
| `sessionId` | string | Browserbase session identifier |
|
||||
|
||||
|
||||
|
||||
@@ -89,6 +89,8 @@ Polling Groups let you monitor multiple team members' Gmail or Outlook inboxes w
|
||||
|
||||
Invitees receive an email with a link to connect their account. Once connected, their inbox is automatically included in the polling group. Invitees don't need to be members of your Sim organization.
|
||||
|
||||
This is separate from external workspace membership: polling group invitees are granting access to an inbox for a trigger, while external workspace members are collaborators with Read, Write, or Admin access to a workspace.
|
||||
|
||||
**Using in a Workflow**
|
||||
|
||||
When configuring an email trigger, select your polling group from the credentials dropdown instead of an individual account. The system creates webhooks for each member and routes all emails through your workflow.
|
||||
|
||||
@@ -49,7 +49,7 @@ Environment variables store sensitive values like API keys, tokens, and configur
|
||||
|
||||
| Scope | Visibility | Use case |
|
||||
|-------|-----------|----------|
|
||||
| **Workspace** | All workspace members | Shared API keys, team configuration |
|
||||
| **Workspace** | All workspace members, including external workspace members | Shared API keys, team configuration |
|
||||
| **Personal** | Only you | Your personal tokens, dev credentials |
|
||||
|
||||
When both a workspace and personal variable share the same key, the personal value takes precedence.
|
||||
@@ -84,7 +84,7 @@ If a workflow variable and a block output share the same name, Sim resolves the
|
||||
<FAQ items={[
|
||||
{ question: "What's the difference between workflow variables and environment variables?", answer: "Workflow variables store runtime data (text, numbers, objects, arrays) that blocks can read and modify during execution. They use <variable.name> syntax. Environment variables store sensitive configuration like API keys using {{KEY}} syntax. They never appear in logs and are managed at the workspace or personal level." },
|
||||
{ question: "Can I use environment variables in the Function block?", answer: "Yes. Use the double curly brace syntax {{KEY}} directly in your code. The value is substituted before execution, so the actual secret never appears in logs or outputs." },
|
||||
{ question: "How do I share an API key with my team?", answer: "Create a workspace-scoped environment variable in Settings → Secrets. All workspace members will be able to use it in their workflows via {{KEY}} syntax." },
|
||||
{ question: "How do I share an API key with my team?", answer: "Create a workspace-scoped environment variable in Settings → Secrets. All workspace members, including external workspace members, will be able to use it in their workflows via {{KEY}} syntax." },
|
||||
{ question: "What happens if a variable name has spaces or mixed case?", answer: "Variable resolution is case-insensitive and ignores spaces. A variable named 'My Counter' can be referenced as <variable.mycounter> or <variable.My Counter>. However, using consistent naming (like camelCase) is recommended." },
|
||||
{ question: "Can I reference environment variables in the Agent system prompt?", answer: "Yes. You can use {{KEY}} syntax in any text field, including system prompts, to inject environment variable values." },
|
||||
]} />
|
||||
|
||||
@@ -154,6 +154,7 @@ import {
|
||||
RootlyIcon,
|
||||
S3Icon,
|
||||
SalesforceIcon,
|
||||
SapS4HanaIcon,
|
||||
SESIcon,
|
||||
SearchIcon,
|
||||
SecretsManagerIcon,
|
||||
@@ -351,6 +352,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
rootly: RootlyIcon,
|
||||
s3: S3Icon,
|
||||
salesforce: SalesforceIcon,
|
||||
sap_s4hana: SapS4HanaIcon,
|
||||
search: SearchIcon,
|
||||
secrets_manager: SecretsManagerIcon,
|
||||
sendgrid: SendgridIcon,
|
||||
|
||||
@@ -11379,6 +11379,177 @@
|
||||
"integrationTypes": ["crm", "customer-support", "sales"],
|
||||
"tags": ["sales-engagement", "customer-support"]
|
||||
},
|
||||
{
|
||||
"type": "sap_s4hana",
|
||||
"slug": "sap-s-4hana",
|
||||
"name": "SAP S/4HANA",
|
||||
"description": "Read and write SAP S/4HANA Cloud business data via OData",
|
||||
"longDescription": "Connect SAP S/4HANA Cloud Public Edition with per-tenant OAuth 2.0 client credentials configured in your Communication Arrangements. Read and create business partners, customers, suppliers, sales orders, deliveries (inbound/outbound), billing documents, products, stock and material documents, purchase requisitions, purchase orders, and supplier invoices, or run arbitrary OData v2 queries against any whitelisted Communication Scenario.",
|
||||
"bgColor": "#0A6ED1",
|
||||
"iconName": "SapS4HanaIcon",
|
||||
"docsUrl": "https://docs.sim.ai/tools/sap_s4hana",
|
||||
"operations": [
|
||||
{
|
||||
"name": "List Business Partners",
|
||||
"description": "List business partners from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_BusinessPartner) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Business Partner",
|
||||
"description": "Retrieve a single business partner by BusinessPartner key from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_BusinessPartner)."
|
||||
},
|
||||
{
|
||||
"name": "Create Business Partner",
|
||||
"description": "Create a business partner in SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_BusinessPartner). For Person category 1 provide FirstName and LastName. For Organization category 2 provide OrganizationBPName1."
|
||||
},
|
||||
{
|
||||
"name": "Update Business Partner",
|
||||
"description": "Update fields on an A_BusinessPartner entity in SAP S/4HANA Cloud (API_BUSINESS_PARTNER). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates."
|
||||
},
|
||||
{
|
||||
"name": "List Customers",
|
||||
"description": "List customers from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Customer) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Customer",
|
||||
"description": "Retrieve a single customer by Customer key from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Customer)."
|
||||
},
|
||||
{
|
||||
"name": "Update Customer",
|
||||
"description": "Update fields on an A_Customer entity in SAP S/4HANA Cloud (API_BUSINESS_PARTNER). PATCH only sends the fields you provide; existing values are preserved. A_Customer PATCH is limited to modifiable fields such as OrderIsBlockedForCustomer, DeliveryIsBlock, BillingIsBlockedForCustomer, PostingIsBlocked, and DeletionIndicator. If-Match defaults to a wildcard - for safe concurrent updates pass the ETag from a prior GET to avoid lost updates."
|
||||
},
|
||||
{
|
||||
"name": "List Suppliers",
|
||||
"description": "List suppliers from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Supplier) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Supplier",
|
||||
"description": "Retrieve a single supplier by Supplier key from SAP S/4HANA Cloud (API_BUSINESS_PARTNER, A_Supplier)."
|
||||
},
|
||||
{
|
||||
"name": "Update Supplier",
|
||||
"description": "Update fields on an A_Supplier entity in SAP S/4HANA Cloud (API_BUSINESS_PARTNER). PATCH only sends the fields you provide; existing values are preserved. A_Supplier PATCH is limited to modifiable fields such as PostingIsBlocked, PurchasingIsBlocked, PaymentIsBlockedForSupplier, DeletionIndicator, and SupplierAccountGroup. If-Match defaults to a wildcard - for safe concurrent updates pass the ETag from a prior GET to avoid lost updates."
|
||||
},
|
||||
{
|
||||
"name": "List Sales Orders",
|
||||
"description": "List sales orders from SAP S/4HANA Cloud (API_SALES_ORDER_SRV, A_SalesOrder) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Sales Order",
|
||||
"description": "Retrieve a single sales order by SalesOrder key from SAP S/4HANA Cloud (API_SALES_ORDER_SRV, A_SalesOrder)."
|
||||
},
|
||||
{
|
||||
"name": "Create Sales Order",
|
||||
"description": "Create a sales order in SAP S/4HANA Cloud (API_SALES_ORDER_SRV, A_SalesOrder) with deep insert of sales order items via to_Item."
|
||||
},
|
||||
{
|
||||
"name": "Update Sales Order",
|
||||
"description": "Update fields on an A_SalesOrder entity in SAP S/4HANA Cloud (API_SALES_ORDER_SRV). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates."
|
||||
},
|
||||
{
|
||||
"name": "Delete Sales Order",
|
||||
"description": "Delete an A_SalesOrder entity in SAP S/4HANA Cloud (API_SALES_ORDER_SRV). Only orders without subsequent documents (deliveries, invoices) can be deleted; otherwise reject items via update instead."
|
||||
},
|
||||
{
|
||||
"name": "List Outbound Deliveries",
|
||||
"description": "List outbound deliveries from SAP S/4HANA Cloud (API_OUTBOUND_DELIVERY_SRV;v=0002, A_OutbDeliveryHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Outbound Delivery",
|
||||
"description": "Retrieve a single outbound delivery by DeliveryDocument key from SAP S/4HANA Cloud (API_OUTBOUND_DELIVERY_SRV;v=0002, A_OutbDeliveryHeader)."
|
||||
},
|
||||
{
|
||||
"name": "List Inbound Deliveries",
|
||||
"description": "List inbound deliveries from SAP S/4HANA Cloud (API_INBOUND_DELIVERY_SRV;v=0002, A_InbDeliveryHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Inbound Delivery",
|
||||
"description": "Retrieve a single inbound delivery by DeliveryDocument key from SAP S/4HANA Cloud (API_INBOUND_DELIVERY_SRV;v=0002, A_InbDeliveryHeader)."
|
||||
},
|
||||
{
|
||||
"name": "List Billing Documents",
|
||||
"description": "List billing documents (customer invoices) from SAP S/4HANA Cloud (API_BILLING_DOCUMENT_SRV, A_BillingDocument) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Billing Document",
|
||||
"description": "Retrieve a single billing document (customer invoice) by BillingDocument key from SAP S/4HANA Cloud (API_BILLING_DOCUMENT_SRV, A_BillingDocument)."
|
||||
},
|
||||
{
|
||||
"name": "List Products",
|
||||
"description": "List products (materials) from SAP S/4HANA Cloud (API_PRODUCT_SRV, A_Product) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Product",
|
||||
"description": "Retrieve a single product (material) by Product key from SAP S/4HANA Cloud (API_PRODUCT_SRV, A_Product)."
|
||||
},
|
||||
{
|
||||
"name": "Update Product",
|
||||
"description": "Update fields on an A_Product entity in SAP S/4HANA Cloud (API_PRODUCT_SRV). PATCH only sends the fields you provide; existing values are preserved. Flat scalar header fields only — deep/multi-entity updates across navigation properties are not supported by API_PRODUCT_SRV PATCH/PUT (see SAP KBA 2833338); update child entities (plant, valuation, sales data, etc.) via their own endpoints. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET."
|
||||
},
|
||||
{
|
||||
"name": "List Material Stock",
|
||||
"description": "List material stock quantities from SAP S/4HANA Cloud (API_MATERIAL_STOCK_SRV, A_MatlStkInAcctMod). The entity uses an 11-field composite key (Material, Plant, StorageLocation, Batch, Supplier, Customer, WBSElementInternalID, SDDocument, SDDocumentItem, InventorySpecialStockType, InventoryStockType) — query with $filter on these fields instead of a direct key lookup."
|
||||
},
|
||||
{
|
||||
"name": "List Material Documents",
|
||||
"description": "List material document headers (goods movements) from SAP S/4HANA Cloud (API_MATERIAL_DOCUMENT_SRV, A_MaterialDocumentHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Material Document",
|
||||
"description": "Retrieve a single material document header by composite key (MaterialDocument + MaterialDocumentYear) from SAP S/4HANA Cloud (API_MATERIAL_DOCUMENT_SRV, A_MaterialDocumentHeader)."
|
||||
},
|
||||
{
|
||||
"name": "List Purchase Requisitions",
|
||||
"description": "List purchase requisitions from SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV, A_PurchaseRequisitionHeader) with optional OData $filter, $top, $skip, $orderby, $select, $expand. Note: API_PURCHASEREQ_PROCESS_SRV is deprecated since S/4HANA Cloud Public Edition 2402; the successor is API_PURCHASEREQUISITION_2 (OData v4). This tool still works against tenants where the legacy service is enabled."
|
||||
},
|
||||
{
|
||||
"name": "Get Purchase Requisition",
|
||||
"description": "Retrieve a single purchase requisition by PurchaseRequisition key from SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV, A_PurchaseRequisitionHeader). Note: API_PURCHASEREQ_PROCESS_SRV is deprecated since S/4HANA Cloud Public Edition 2402; the successor is API_PURCHASEREQUISITION_2 (OData v4). This tool still works against tenants where the legacy service is enabled."
|
||||
},
|
||||
{
|
||||
"name": "Create Purchase Requisition",
|
||||
"description": "Create a purchase requisition in SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV, A_PurchaseRequisitionHeader). PurchaseRequisition is auto-assigned by SAP from the document number range; provide line items via the to_PurchaseReqnItem deep-insert array. Note: API_PURCHASEREQ_PROCESS_SRV is deprecated since S/4HANA Cloud Public Edition 2402; the successor is API_PURCHASEREQUISITION_2 (OData v4). This tool still works against tenants where the legacy service is enabled."
|
||||
},
|
||||
{
|
||||
"name": "Update Purchase Requisition",
|
||||
"description": "Update fields on an A_PurchaseRequisitionHeader entity in SAP S/4HANA Cloud (API_PURCHASEREQ_PROCESS_SRV; deprecated since S/4HANA 2402, successor is API_PURCHASEREQUISITION_2 OData v4). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard - for safe concurrent updates pass the ETag from a prior GET to avoid lost updates."
|
||||
},
|
||||
{
|
||||
"name": "List Purchase Orders",
|
||||
"description": "List purchase orders from SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV, A_PurchaseOrder) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Purchase Order",
|
||||
"description": "Retrieve a single purchase order by PurchaseOrder key from SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV, A_PurchaseOrder)."
|
||||
},
|
||||
{
|
||||
"name": "Create Purchase Order",
|
||||
"description": "Create a purchase order in SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV, A_PurchaseOrder). PurchaseOrder is auto-assigned by SAP from the document number range; provide line items via the body parameter."
|
||||
},
|
||||
{
|
||||
"name": "Update Purchase Order",
|
||||
"description": "Update fields on an A_PurchaseOrder entity in SAP S/4HANA Cloud (API_PURCHASEORDER_PROCESS_SRV). PATCH only sends the fields you provide; existing values are preserved. If-Match defaults to a wildcard (unconditional) — for safe concurrent updates pass the ETag from a prior GET to avoid lost updates."
|
||||
},
|
||||
{
|
||||
"name": "List Supplier Invoices",
|
||||
"description": "List supplier invoices from SAP S/4HANA Cloud (API_SUPPLIERINVOICE_PROCESS_SRV, A_SupplierInvoice) with optional OData $filter, $top, $skip, $orderby, $select, $expand."
|
||||
},
|
||||
{
|
||||
"name": "Get Supplier Invoice",
|
||||
"description": "Retrieve a single supplier invoice by composite key (SupplierInvoice + FiscalYear) from SAP S/4HANA Cloud (API_SUPPLIERINVOICE_PROCESS_SRV, A_SupplierInvoice)."
|
||||
},
|
||||
{
|
||||
"name": "OData Query (advanced)",
|
||||
"description": "Make an arbitrary OData v2 call against any SAP S/4HANA Cloud whitelisted Communication Scenario. Use when no dedicated tool exists for the entity. The proxy handles auth, CSRF, and OData unwrapping."
|
||||
}
|
||||
],
|
||||
"operationCount": 38,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"authType": "none",
|
||||
"category": "tools",
|
||||
"integrationTypes": ["other", "developer-tools"],
|
||||
"tags": ["automation"]
|
||||
},
|
||||
{
|
||||
"type": "search",
|
||||
"slug": "search",
|
||||
@@ -11983,6 +12154,22 @@
|
||||
"name": "Create Channel Canvas",
|
||||
"description": "Create a canvas pinned to a Slack channel as its resource hub"
|
||||
},
|
||||
{
|
||||
"name": "Get Canvas Info",
|
||||
"description": "Get Slack canvas file metadata by canvas ID"
|
||||
},
|
||||
{
|
||||
"name": "List Canvases",
|
||||
"description": "List Slack canvases available to the authenticated user or bot"
|
||||
},
|
||||
{
|
||||
"name": "Lookup Canvas Sections",
|
||||
"description": "Find Slack canvas section IDs matching criteria for later edits"
|
||||
},
|
||||
{
|
||||
"name": "Delete Canvas",
|
||||
"description": "Delete a Slack canvas by its canvas ID"
|
||||
},
|
||||
{
|
||||
"name": "Create Conversation",
|
||||
"description": "Create a new public or private channel in a Slack workspace."
|
||||
@@ -12008,7 +12195,7 @@
|
||||
"description": "Publish a static view to a user"
|
||||
}
|
||||
],
|
||||
"operationCount": 25,
|
||||
"operationCount": 29,
|
||||
"triggers": [
|
||||
{
|
||||
"id": "slack_webhook",
|
||||
|
||||
@@ -16,6 +16,14 @@ function getMothershipUrl(environment: string): string | null {
|
||||
return ENV_URLS[environment] ?? null
|
||||
}
|
||||
|
||||
const ENDPOINT_PATTERN = /^[a-zA-Z0-9_-]+(?:\/[a-zA-Z0-9_-]+)*$/
|
||||
|
||||
function isValidEndpoint(endpoint: string): boolean {
|
||||
if (!endpoint) return false
|
||||
if (endpoint.includes('..')) return false
|
||||
return ENDPOINT_PATTERN.test(endpoint)
|
||||
}
|
||||
|
||||
async function isAdminRequestAuthorized() {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) return false
|
||||
@@ -57,6 +65,10 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
return NextResponse.json({ error: 'endpoint query param required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!isValidEndpoint(endpoint)) {
|
||||
return NextResponse.json({ error: 'invalid endpoint' }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseUrl = getMothershipUrl(environment)
|
||||
if (!baseUrl) {
|
||||
return NextResponse.json(
|
||||
@@ -108,6 +120,10 @@ export const GET = withRouteHandler(async (req: NextRequest) => {
|
||||
return NextResponse.json({ error: 'endpoint query param required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!isValidEndpoint(endpoint)) {
|
||||
return NextResponse.json({ error: 'invalid endpoint' }, { status: 400 })
|
||||
}
|
||||
|
||||
const baseUrl = getMothershipUrl(environment)
|
||||
if (!baseUrl) {
|
||||
return NextResponse.json(
|
||||
|
||||
@@ -32,7 +32,9 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
const returnUrl = request.nextUrl.searchParams.get('returnUrl')
|
||||
|
||||
if (!shopDomain) {
|
||||
const returnUrlParam = returnUrl ? encodeURIComponent(returnUrl) : ''
|
||||
const safeReturnUrl =
|
||||
returnUrl && isSameOrigin(returnUrl) ? encodeURIComponent(returnUrl) : ''
|
||||
const returnUrlJsLiteral = JSON.stringify(safeReturnUrl)
|
||||
return new NextResponse(
|
||||
`<!DOCTYPE html>
|
||||
<html>
|
||||
@@ -120,7 +122,7 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const returnUrl = '${returnUrlParam}';
|
||||
const returnUrl = ${returnUrlJsLiteral};
|
||||
function handleSubmit(e) {
|
||||
e.preventDefault();
|
||||
let shop = document.getElementById('shop').value.trim().toLowerCase();
|
||||
|
||||
@@ -112,6 +112,16 @@ vi.mock('@/lib/core/storage', () => ({
|
||||
getStorageMethod: mockGetStorageMethod,
|
||||
}))
|
||||
|
||||
const { mockCheckRateLimitDirect } = vi.hoisted(() => ({
|
||||
mockCheckRateLimitDirect: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/rate-limiter', () => ({
|
||||
RateLimiter: class {
|
||||
checkRateLimitDirect = mockCheckRateLimitDirect
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/messaging/email/mailer', () => ({
|
||||
sendEmail: mockSendEmail,
|
||||
}))
|
||||
@@ -234,6 +244,13 @@ describe('Chat OTP API Route', () => {
|
||||
}))
|
||||
|
||||
requestUtilsMockFns.mockGenerateRequestId.mockReturnValue('req-123')
|
||||
requestUtilsMockFns.mockGetClientIp.mockReturnValue('1.2.3.4')
|
||||
|
||||
mockCheckRateLimitDirect.mockResolvedValue({
|
||||
allowed: true,
|
||||
remaining: 10,
|
||||
resetAt: new Date(Date.now() + 60_000),
|
||||
})
|
||||
|
||||
mockZodParse.mockImplementation((data: unknown) => data)
|
||||
|
||||
@@ -283,6 +300,134 @@ describe('Chat OTP API Route', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('POST - Rate limiting', () => {
|
||||
const buildDeploymentSelect = () =>
|
||||
mockDbSelect.mockImplementationOnce(() => ({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue([
|
||||
{
|
||||
id: mockChatId,
|
||||
authType: 'email',
|
||||
allowedEmails: [mockEmail],
|
||||
title: 'Test Chat',
|
||||
},
|
||||
]),
|
||||
}),
|
||||
}),
|
||||
}))
|
||||
|
||||
it('returns 429 with Retry-After when IP rate limit is exceeded', async () => {
|
||||
mockCheckRateLimitDirect.mockResolvedValueOnce({
|
||||
allowed: false,
|
||||
remaining: 0,
|
||||
resetAt: new Date(Date.now() + 900_000),
|
||||
retryAfterMs: 900_000,
|
||||
})
|
||||
|
||||
const headerSet = vi.fn()
|
||||
mockCreateErrorResponse.mockImplementationOnce((message: string, status: number) => ({
|
||||
json: () => Promise.resolve({ error: message }),
|
||||
status,
|
||||
headers: { set: headerSet },
|
||||
}))
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/chat/test/otp', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ email: mockEmail }),
|
||||
})
|
||||
|
||||
const response = await POST(request, {
|
||||
params: Promise.resolve({ identifier: mockIdentifier }),
|
||||
})
|
||||
|
||||
expect(response.status).toBe(429)
|
||||
expect(headerSet).toHaveBeenCalledWith('Retry-After', '900')
|
||||
expect(mockSendEmail).not.toHaveBeenCalled()
|
||||
expect(mockDbSelect).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('returns 429 with Retry-After when email rate limit is exceeded', async () => {
|
||||
mockCheckRateLimitDirect
|
||||
.mockResolvedValueOnce({
|
||||
allowed: true,
|
||||
remaining: 9,
|
||||
resetAt: new Date(Date.now() + 60_000),
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
allowed: false,
|
||||
remaining: 0,
|
||||
resetAt: new Date(Date.now() + 900_000),
|
||||
retryAfterMs: 900_000,
|
||||
})
|
||||
|
||||
const headerSet = vi.fn()
|
||||
mockCreateErrorResponse.mockImplementationOnce((message: string, status: number) => ({
|
||||
json: () => Promise.resolve({ error: message }),
|
||||
status,
|
||||
headers: { set: headerSet },
|
||||
}))
|
||||
|
||||
buildDeploymentSelect()
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/chat/test/otp', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ email: mockEmail }),
|
||||
})
|
||||
|
||||
const response = await POST(request, {
|
||||
params: Promise.resolve({ identifier: mockIdentifier }),
|
||||
})
|
||||
|
||||
expect(response.status).toBe(429)
|
||||
expect(headerSet).toHaveBeenCalledWith('Retry-After', '900')
|
||||
expect(mockSendEmail).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('falls back to refill interval when retryAfterMs is missing', async () => {
|
||||
mockCheckRateLimitDirect.mockResolvedValueOnce({
|
||||
allowed: false,
|
||||
remaining: 0,
|
||||
resetAt: new Date(Date.now() + 900_000),
|
||||
})
|
||||
|
||||
const headerSet = vi.fn()
|
||||
mockCreateErrorResponse.mockImplementationOnce((message: string, status: number) => ({
|
||||
json: () => Promise.resolve({ error: message }),
|
||||
status,
|
||||
headers: { set: headerSet },
|
||||
}))
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/chat/test/otp', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ email: mockEmail }),
|
||||
})
|
||||
|
||||
await POST(request, { params: Promise.resolve({ identifier: mockIdentifier }) })
|
||||
|
||||
expect(headerSet).toHaveBeenCalledWith('Retry-After', '900')
|
||||
})
|
||||
|
||||
it('skips IP rate limit when client IP is unknown', async () => {
|
||||
requestUtilsMockFns.mockGetClientIp.mockReturnValueOnce('unknown')
|
||||
buildDeploymentSelect()
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/chat/test/otp', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ email: mockEmail }),
|
||||
})
|
||||
|
||||
await POST(request, { params: Promise.resolve({ identifier: mockIdentifier }) })
|
||||
|
||||
// Only the email-scoped check should run, not the IP-scoped one
|
||||
expect(mockCheckRateLimitDirect).toHaveBeenCalledTimes(1)
|
||||
expect(mockCheckRateLimitDirect).toHaveBeenCalledWith(
|
||||
expect.stringContaining('chat-otp:email:'),
|
||||
expect.any(Object)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('POST - Store OTP (Database path)', () => {
|
||||
beforeEach(() => {
|
||||
mockGetStorageMethod.mockReturnValue('database')
|
||||
|
||||
@@ -8,9 +8,11 @@ import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { renderOTPEmail } from '@/components/emails'
|
||||
import { getRedisClient } from '@/lib/core/config/redis'
|
||||
import type { TokenBucketConfig } from '@/lib/core/rate-limiter'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||
import { addCorsHeaders, isEmailAllowed } from '@/lib/core/security/deployment'
|
||||
import { getStorageMethod } from '@/lib/core/storage'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { generateRequestId, getClientIp } from '@/lib/core/utils/request'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { sendEmail } from '@/lib/messaging/email/mailer'
|
||||
import { setChatAuthCookie } from '@/app/api/chat/utils'
|
||||
@@ -18,6 +20,20 @@ import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/
|
||||
|
||||
const logger = createLogger('ChatOtpAPI')
|
||||
|
||||
const rateLimiter = new RateLimiter()
|
||||
|
||||
const OTP_IP_RATE_LIMIT: TokenBucketConfig = {
|
||||
maxTokens: 10,
|
||||
refillRate: 10,
|
||||
refillIntervalMs: 15 * 60_000,
|
||||
}
|
||||
|
||||
const OTP_EMAIL_RATE_LIMIT: TokenBucketConfig = {
|
||||
maxTokens: 3,
|
||||
refillRate: 3,
|
||||
refillIntervalMs: 15 * 60_000,
|
||||
}
|
||||
|
||||
function generateOTP(): string {
|
||||
return randomInt(100000, 1000000).toString()
|
||||
}
|
||||
@@ -214,6 +230,23 @@ export const POST = withRouteHandler(
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const ip = getClientIp(request)
|
||||
if (ip !== 'unknown') {
|
||||
const ipRateLimit = await rateLimiter.checkRateLimitDirect(
|
||||
`chat-otp:ip:${identifier}:${ip}`,
|
||||
OTP_IP_RATE_LIMIT
|
||||
)
|
||||
if (!ipRateLimit.allowed) {
|
||||
logger.warn(`[${requestId}] OTP IP rate limit exceeded for ${identifier} from ${ip}`)
|
||||
const retryAfter = Math.ceil(
|
||||
(ipRateLimit.retryAfterMs ?? OTP_IP_RATE_LIMIT.refillIntervalMs) / 1000
|
||||
)
|
||||
const response = createErrorResponse('Too many requests. Please try again later.', 429)
|
||||
response.headers.set('Retry-After', String(retryAfter))
|
||||
return addCorsHeaders(response, request)
|
||||
}
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { email } = otpRequestSchema.parse(body)
|
||||
|
||||
@@ -255,6 +288,25 @@ export const POST = withRouteHandler(
|
||||
)
|
||||
}
|
||||
|
||||
const emailRateLimit = await rateLimiter.checkRateLimitDirect(
|
||||
`chat-otp:email:${deployment.id}:${email.toLowerCase()}`,
|
||||
OTP_EMAIL_RATE_LIMIT
|
||||
)
|
||||
if (!emailRateLimit.allowed) {
|
||||
logger.warn(
|
||||
`[${requestId}] OTP email rate limit exceeded for ${email} on chat ${deployment.id}`
|
||||
)
|
||||
const retryAfter = Math.ceil(
|
||||
(emailRateLimit.retryAfterMs ?? OTP_EMAIL_RATE_LIMIT.refillIntervalMs) / 1000
|
||||
)
|
||||
const response = createErrorResponse(
|
||||
'Too many verification code requests. Please try again later.',
|
||||
429
|
||||
)
|
||||
response.headers.set('Retry-After', String(retryAfter))
|
||||
return addCorsHeaders(response, request)
|
||||
}
|
||||
|
||||
const otp = generateOTP()
|
||||
await storeOTP(email, deployment.id, otp)
|
||||
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/request/http'
|
||||
import { checkInternalApiKey, createUnauthorizedResponse } from '@/lib/copilot/request/http'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -21,8 +18,8 @@ const TrainingExampleSchema = z.object({
|
||||
})
|
||||
|
||||
export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||
if (!isAuthenticated || !userId) {
|
||||
const auth = checkInternalApiKey(request)
|
||||
if (!auth.success) {
|
||||
return createUnauthorizedResponse()
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/request/http'
|
||||
import { checkInternalApiKey, createUnauthorizedResponse } from '@/lib/copilot/request/http'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -27,8 +24,8 @@ const TrainingDataSchema = z.object({
|
||||
})
|
||||
|
||||
export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||
if (!isAuthenticated || !userId) {
|
||||
const auth = checkInternalApiKey(request)
|
||||
if (!auth.success) {
|
||||
return createUnauthorizedResponse()
|
||||
}
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { normalizeEmail } from '@/lib/invitations/core'
|
||||
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
|
||||
|
||||
const logger = createLogger('CredentialSetInviteToken')
|
||||
@@ -111,6 +112,21 @@ export const POST = withRouteHandler(
|
||||
return NextResponse.json({ error: 'Invitation has expired' }, { status: 410 })
|
||||
}
|
||||
|
||||
if (invitation.email) {
|
||||
const sessionEmail = session.user.email
|
||||
if (!sessionEmail || normalizeEmail(sessionEmail) !== normalizeEmail(invitation.email)) {
|
||||
logger.warn('Rejected credential set invitation accept due to email mismatch', {
|
||||
invitationId: invitation.id,
|
||||
credentialSetId: invitation.credentialSetId,
|
||||
userId: session.user.id,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ error: 'This invitation was sent to a different email address' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const existingMember = await db
|
||||
.select()
|
||||
.from(credentialSetMember)
|
||||
|
||||
@@ -8,21 +8,61 @@ import {
|
||||
isUsingCloudStorage,
|
||||
type StorageContext,
|
||||
} from '@/lib/uploads'
|
||||
import {
|
||||
signUploadToken,
|
||||
type UploadTokenPayload,
|
||||
verifyUploadToken,
|
||||
} from '@/lib/uploads/core/upload-token'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('MultipartUploadAPI')
|
||||
|
||||
const ALLOWED_UPLOAD_CONTEXTS = new Set<StorageContext>([
|
||||
'knowledge-base',
|
||||
'chat',
|
||||
'copilot',
|
||||
'mothership',
|
||||
'execution',
|
||||
'workspace',
|
||||
'profile-pictures',
|
||||
'og-images',
|
||||
'logs',
|
||||
'workspace-logos',
|
||||
])
|
||||
|
||||
interface InitiateMultipartRequest {
|
||||
fileName: string
|
||||
contentType: string
|
||||
fileSize: number
|
||||
workspaceId: string
|
||||
context?: StorageContext
|
||||
}
|
||||
|
||||
interface GetPartUrlsRequest {
|
||||
uploadId: string
|
||||
key: string
|
||||
interface TokenBoundRequest {
|
||||
uploadToken: string
|
||||
}
|
||||
|
||||
interface GetPartUrlsRequest extends TokenBoundRequest {
|
||||
partNumbers: number[]
|
||||
context?: StorageContext
|
||||
}
|
||||
|
||||
interface CompleteSingleRequest extends TokenBoundRequest {
|
||||
parts: unknown
|
||||
}
|
||||
|
||||
interface CompleteBatchRequest {
|
||||
uploads: Array<TokenBoundRequest & { parts: unknown }>
|
||||
}
|
||||
|
||||
const verifyTokenForUser = (token: string | undefined, userId: string) => {
|
||||
if (!token || typeof token !== 'string') {
|
||||
return null
|
||||
}
|
||||
const result = verifyUploadToken(token)
|
||||
if (!result.valid || result.payload.userId !== userId) {
|
||||
return null
|
||||
}
|
||||
return result.payload
|
||||
}
|
||||
|
||||
export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
@@ -31,6 +71,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
const userId = session.user.id
|
||||
|
||||
const action = request.nextUrl.searchParams.get('action')
|
||||
|
||||
@@ -45,32 +86,34 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
|
||||
switch (action) {
|
||||
case 'initiate': {
|
||||
const data: InitiateMultipartRequest = await request.json()
|
||||
const { fileName, contentType, fileSize, context = 'knowledge-base' } = data
|
||||
const data = (await request.json()) as InitiateMultipartRequest
|
||||
const { fileName, contentType, fileSize, workspaceId, context = 'knowledge-base' } = data
|
||||
|
||||
if (!workspaceId || typeof workspaceId !== 'string') {
|
||||
return NextResponse.json({ error: 'workspaceId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!ALLOWED_UPLOAD_CONTEXTS.has(context)) {
|
||||
return NextResponse.json({ error: 'Invalid storage context' }, { status: 400 })
|
||||
}
|
||||
|
||||
const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
|
||||
if (permission !== 'write' && permission !== 'admin') {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
let uploadId: string
|
||||
let key: string
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { initiateS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')
|
||||
|
||||
const result = await initiateS3MultipartUpload({
|
||||
fileName,
|
||||
contentType,
|
||||
fileSize,
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`Initiated S3 multipart upload for ${fileName} (context: ${context}): ${result.uploadId}`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
uploadId: result.uploadId,
|
||||
key: result.key,
|
||||
})
|
||||
}
|
||||
if (storageProvider === 'blob') {
|
||||
const result = await initiateS3MultipartUpload({ fileName, contentType, fileSize })
|
||||
uploadId = result.uploadId
|
||||
key = result.key
|
||||
} else if (storageProvider === 'blob') {
|
||||
const { initiateMultipartUpload } = await import('@/lib/uploads/providers/blob/client')
|
||||
|
||||
const result = await initiateMultipartUpload({
|
||||
fileName,
|
||||
contentType,
|
||||
@@ -82,46 +125,55 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
connectionString: config.connectionString,
|
||||
},
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`Initiated Azure multipart upload for ${fileName} (context: ${context}): ${result.uploadId}`
|
||||
uploadId = result.uploadId
|
||||
key = result.key
|
||||
} else {
|
||||
return NextResponse.json(
|
||||
{ error: `Unsupported storage provider: ${storageProvider}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
uploadId: result.uploadId,
|
||||
key: result.key,
|
||||
})
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `Unsupported storage provider: ${storageProvider}` },
|
||||
{ status: 400 }
|
||||
const uploadToken = signUploadToken({
|
||||
uploadId,
|
||||
key,
|
||||
userId,
|
||||
workspaceId,
|
||||
context,
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`Initiated ${storageProvider} multipart upload for ${fileName} (context: ${context}, workspace: ${workspaceId}): ${uploadId}`
|
||||
)
|
||||
|
||||
return NextResponse.json({ uploadId, key, uploadToken })
|
||||
}
|
||||
|
||||
case 'get-part-urls': {
|
||||
const data: GetPartUrlsRequest = await request.json()
|
||||
const { uploadId, key, partNumbers, context = 'knowledge-base' } = data
|
||||
const data = (await request.json()) as GetPartUrlsRequest
|
||||
const { partNumbers } = data
|
||||
|
||||
const tokenPayload = verifyTokenForUser(data.uploadToken, userId)
|
||||
if (!tokenPayload) {
|
||||
return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
|
||||
}
|
||||
|
||||
const { uploadId, key, context } = tokenPayload
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { getS3MultipartPartUrls } = await import('@/lib/uploads/providers/s3/client')
|
||||
|
||||
const presignedUrls = await getS3MultipartPartUrls(key, uploadId, partNumbers)
|
||||
|
||||
return NextResponse.json({ presignedUrls })
|
||||
}
|
||||
if (storageProvider === 'blob') {
|
||||
const { getMultipartPartUrls } = await import('@/lib/uploads/providers/blob/client')
|
||||
|
||||
const presignedUrls = await getMultipartPartUrls(key, partNumbers, {
|
||||
containerName: config.containerName!,
|
||||
accountName: config.accountName!,
|
||||
accountKey: config.accountKey,
|
||||
connectionString: config.connectionString,
|
||||
})
|
||||
|
||||
return NextResponse.json({ presignedUrls })
|
||||
}
|
||||
|
||||
@@ -132,24 +184,32 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
|
||||
case 'complete': {
|
||||
const data = await request.json()
|
||||
const context: StorageContext = data.context || 'knowledge-base'
|
||||
const data = (await request.json()) as CompleteSingleRequest | CompleteBatchRequest
|
||||
|
||||
const config = getStorageConfig(context)
|
||||
if ('uploads' in data && Array.isArray(data.uploads)) {
|
||||
const verified = data.uploads.map((upload) => {
|
||||
const payload = verifyTokenForUser(upload.uploadToken, userId)
|
||||
return payload ? { payload, parts: upload.parts } : null
|
||||
})
|
||||
|
||||
if (verified.some((entry) => entry === null)) {
|
||||
return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
|
||||
}
|
||||
|
||||
const verifiedEntries = verified.filter(
|
||||
(entry): entry is { payload: UploadTokenPayload; parts: unknown } => entry !== null
|
||||
)
|
||||
|
||||
if ('uploads' in data) {
|
||||
const results = await Promise.all(
|
||||
data.uploads.map(async (upload: any) => {
|
||||
const { uploadId, key } = upload
|
||||
verifiedEntries.map(async ({ payload, parts }) => {
|
||||
const { uploadId, key, context } = payload
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { completeS3MultipartUpload } = await import(
|
||||
'@/lib/uploads/providers/s3/client'
|
||||
)
|
||||
const parts = upload.parts // S3 format: { ETag, PartNumber }
|
||||
|
||||
const result = await completeS3MultipartUpload(key, uploadId, parts)
|
||||
|
||||
const result = await completeS3MultipartUpload(key, uploadId, parts as any)
|
||||
return {
|
||||
success: true,
|
||||
location: result.location,
|
||||
@@ -161,15 +221,12 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
const { completeMultipartUpload } = await import(
|
||||
'@/lib/uploads/providers/blob/client'
|
||||
)
|
||||
const parts = upload.parts // Azure format: { blockId, partNumber }
|
||||
|
||||
const result = await completeMultipartUpload(key, parts, {
|
||||
const result = await completeMultipartUpload(key, parts as any, {
|
||||
containerName: config.containerName!,
|
||||
accountName: config.accountName!,
|
||||
accountKey: config.accountKey,
|
||||
connectionString: config.connectionString,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
location: result.location,
|
||||
@@ -182,19 +239,23 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
})
|
||||
)
|
||||
|
||||
logger.info(`Completed ${data.uploads.length} multipart uploads (context: ${context})`)
|
||||
logger.info(`Completed ${verifiedEntries.length} multipart uploads`)
|
||||
return NextResponse.json({ results })
|
||||
}
|
||||
|
||||
const { uploadId, key, parts } = data
|
||||
const single = data as CompleteSingleRequest
|
||||
const tokenPayload = verifyTokenForUser(single.uploadToken, userId)
|
||||
if (!tokenPayload) {
|
||||
return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
|
||||
}
|
||||
|
||||
const { uploadId, key, context } = tokenPayload
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { completeS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')
|
||||
|
||||
const result = await completeS3MultipartUpload(key, uploadId, parts)
|
||||
|
||||
const result = await completeS3MultipartUpload(key, uploadId, single.parts as any)
|
||||
logger.info(`Completed S3 multipart upload for key ${key} (context: ${context})`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
location: result.location,
|
||||
@@ -204,16 +265,13 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
if (storageProvider === 'blob') {
|
||||
const { completeMultipartUpload } = await import('@/lib/uploads/providers/blob/client')
|
||||
|
||||
const result = await completeMultipartUpload(key, parts, {
|
||||
const result = await completeMultipartUpload(key, single.parts as any, {
|
||||
containerName: config.containerName!,
|
||||
accountName: config.accountName!,
|
||||
accountKey: config.accountKey,
|
||||
connectionString: config.connectionString,
|
||||
})
|
||||
|
||||
logger.info(`Completed Azure multipart upload for key ${key} (context: ${context})`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
location: result.location,
|
||||
@@ -229,27 +287,27 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
|
||||
case 'abort': {
|
||||
const data = await request.json()
|
||||
const { uploadId, key, context = 'knowledge-base' } = data
|
||||
const data = (await request.json()) as TokenBoundRequest
|
||||
const tokenPayload = verifyTokenForUser(data.uploadToken, userId)
|
||||
if (!tokenPayload) {
|
||||
return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
|
||||
}
|
||||
|
||||
const config = getStorageConfig(context as StorageContext)
|
||||
const { uploadId, key, context } = tokenPayload
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { abortS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')
|
||||
|
||||
await abortS3MultipartUpload(key, uploadId)
|
||||
|
||||
logger.info(`Aborted S3 multipart upload for key ${key} (context: ${context})`)
|
||||
} else if (storageProvider === 'blob') {
|
||||
const { abortMultipartUpload } = await import('@/lib/uploads/providers/blob/client')
|
||||
|
||||
await abortMultipartUpload(key, {
|
||||
containerName: config.containerName!,
|
||||
accountName: config.accountName!,
|
||||
accountKey: config.accountKey,
|
||||
connectionString: config.connectionString,
|
||||
})
|
||||
|
||||
logger.info(`Aborted Azure multipart upload for key ${key} (context: ${context})`)
|
||||
} else {
|
||||
return NextResponse.json(
|
||||
|
||||
@@ -191,7 +191,7 @@ describe('Function Execute API Route', () => {
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
if (response.status === 500) {
|
||||
if (response.status === 422 || response.status === 500) {
|
||||
expect(data.success).toBe(false)
|
||||
} else {
|
||||
const result = data.output?.result
|
||||
@@ -504,7 +504,7 @@ describe('Function Execute API Route', () => {
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(response.status).toBe(422)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toBeTruthy()
|
||||
})
|
||||
@@ -518,7 +518,7 @@ describe('Function Execute API Route', () => {
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(response.status).toBe(422)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toContain('Type Error')
|
||||
expect(data.error).toContain('Cannot read properties of null')
|
||||
@@ -533,7 +533,7 @@ describe('Function Execute API Route', () => {
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(response.status).toBe(422)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toContain('Reference Error')
|
||||
expect(data.error).toContain('undefinedVariable is not defined')
|
||||
@@ -548,7 +548,7 @@ describe('Function Execute API Route', () => {
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(response.status).toBe(422)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toContain('Custom error message')
|
||||
})
|
||||
@@ -562,7 +562,7 @@ describe('Function Execute API Route', () => {
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(500)
|
||||
expect(response.status).toBe(422)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.error).toBeTruthy()
|
||||
})
|
||||
|
||||
@@ -1088,9 +1088,12 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
const executionTime = Date.now() - startTime
|
||||
|
||||
if (isolatedResult.error) {
|
||||
logger.error(`[${requestId}] Function execution failed in isolated-vm`, {
|
||||
const isSystemError = isolatedResult.error.isSystemError === true
|
||||
const logFn = isSystemError ? logger.error.bind(logger) : logger.warn.bind(logger)
|
||||
logFn(`[${requestId}] Function execution failed in isolated-vm`, {
|
||||
error: isolatedResult.error,
|
||||
executionTime,
|
||||
isSystemError,
|
||||
})
|
||||
|
||||
const ivmError = isolatedResult.error
|
||||
@@ -1119,7 +1122,8 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
resolvedCode
|
||||
)
|
||||
|
||||
logger.error(`[${requestId}] Enhanced error details`, {
|
||||
const detailLogFn = isSystemError ? logger.error.bind(logger) : logger.warn.bind(logger)
|
||||
detailLogFn(`[${requestId}] Enhanced error details`, {
|
||||
originalMessage: ivmError.message,
|
||||
enhancedMessage: userFriendlyErrorMessage,
|
||||
line: enhancedError.line,
|
||||
@@ -1145,7 +1149,7 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
stack: enhancedError.stack,
|
||||
},
|
||||
},
|
||||
{ status: 500 }
|
||||
{ status: isSystemError ? 500 : 422 }
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -146,6 +146,7 @@ export const POST = withRouteHandler(
|
||||
targetEmail: inv.email,
|
||||
targetRole: inv.role,
|
||||
kind: inv.kind,
|
||||
membershipIntent: inv.membershipIntent,
|
||||
},
|
||||
request,
|
||||
})
|
||||
|
||||
@@ -54,6 +54,7 @@ export const GET = withRouteHandler(
|
||||
email: inv.email,
|
||||
organizationId: inv.organizationId,
|
||||
organizationName: inv.organizationName,
|
||||
membershipIntent: inv.membershipIntent,
|
||||
role: inv.role,
|
||||
status: inv.status,
|
||||
expiresAt: inv.expiresAt,
|
||||
@@ -121,6 +122,12 @@ export const PATCH = withRouteHandler(
|
||||
const { role, grants } = parsed.data
|
||||
|
||||
if (role !== undefined) {
|
||||
if (inv.membershipIntent === 'external') {
|
||||
return NextResponse.json(
|
||||
{ error: 'Role updates are not valid on external workspace invitations' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
if (!inv.organizationId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Role updates are only valid on organization-scoped invitations' },
|
||||
@@ -187,6 +194,7 @@ export const PATCH = withRouteHandler(
|
||||
invitationId: id,
|
||||
targetEmail: inv.email,
|
||||
kind: inv.kind,
|
||||
membershipIntent: inv.membershipIntent,
|
||||
roleUpdate: role ?? null,
|
||||
grantUpdates: grantsToApply,
|
||||
},
|
||||
|
||||
@@ -8,7 +8,11 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { setActiveOrganizationForCurrentSession } from '@/lib/auth/active-organization'
|
||||
import { getUserUsageData } from '@/lib/billing/core/usage'
|
||||
import { removeUserFromOrganization } from '@/lib/billing/organizations/membership'
|
||||
import {
|
||||
removeExternalUserFromOrganizationWorkspaces,
|
||||
removeUserFromOrganization,
|
||||
} from '@/lib/billing/organizations/membership'
|
||||
import { reduceOrganizationSeatsByOne } from '@/lib/billing/organizations/seats'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
const logger = createLogger('OrganizationMemberAPI')
|
||||
@@ -282,6 +286,7 @@ export const DELETE = withRouteHandler(
|
||||
}
|
||||
|
||||
const { id: organizationId, memberId: targetUserId } = await params
|
||||
const shouldReduceSeats = request.nextUrl.searchParams.get('shouldReduceSeats') === 'true'
|
||||
|
||||
const userMember = await db
|
||||
.select()
|
||||
@@ -311,7 +316,79 @@ export const DELETE = withRouteHandler(
|
||||
.limit(1)
|
||||
|
||||
if (targetMember.length === 0) {
|
||||
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
|
||||
const [targetUser] = await db
|
||||
.select({ id: user.id, email: user.email, name: user.name })
|
||||
.from(user)
|
||||
.where(eq(user.id, targetUserId))
|
||||
.limit(1)
|
||||
|
||||
if (!targetUser) {
|
||||
return NextResponse.json({ error: 'Member not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const externalResult = await removeExternalUserFromOrganizationWorkspaces({
|
||||
userId: targetUserId,
|
||||
organizationId,
|
||||
})
|
||||
|
||||
if (!externalResult.success) {
|
||||
const error = externalResult.error || 'External workspace member not found'
|
||||
const status =
|
||||
error === 'External workspace member not found'
|
||||
? 404
|
||||
: error === 'User is an organization member'
|
||||
? 409
|
||||
: 500
|
||||
|
||||
return NextResponse.json({ error }, { status })
|
||||
}
|
||||
|
||||
logger.info('External workspace member removed from organization workspaces', {
|
||||
organizationId,
|
||||
removedMemberId: targetUserId,
|
||||
removedBy: session.user.id,
|
||||
workspaceAccessRevoked: externalResult.workspaceAccessRevoked,
|
||||
permissionGroupsRevoked: externalResult.permissionGroupsRevoked,
|
||||
credentialMembershipsRevoked: externalResult.credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled: externalResult.pendingInvitationsCancelled,
|
||||
})
|
||||
|
||||
recordAudit({
|
||||
workspaceId: null,
|
||||
actorId: session.user.id,
|
||||
action: AuditAction.ORG_MEMBER_REMOVED,
|
||||
resourceType: AuditResourceType.ORGANIZATION,
|
||||
resourceId: organizationId,
|
||||
actorName: session.user.name ?? undefined,
|
||||
actorEmail: session.user.email ?? undefined,
|
||||
description: `Removed external workspace member ${targetUserId} from organization`,
|
||||
metadata: {
|
||||
targetUserId,
|
||||
targetEmail: targetUser.email ?? undefined,
|
||||
targetName: targetUser.name ?? undefined,
|
||||
membershipType: 'external',
|
||||
workspaceAccessRevoked: externalResult.workspaceAccessRevoked,
|
||||
permissionGroupsRevoked: externalResult.permissionGroupsRevoked,
|
||||
credentialMembershipsRevoked: externalResult.credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled: externalResult.pendingInvitationsCancelled,
|
||||
},
|
||||
request,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: 'External member removed successfully',
|
||||
data: {
|
||||
removedMemberId: targetUserId,
|
||||
removedBy: session.user.id,
|
||||
removedAt: new Date().toISOString(),
|
||||
membershipType: 'external',
|
||||
workspaceAccessRevoked: externalResult.workspaceAccessRevoked,
|
||||
permissionGroupsRevoked: externalResult.permissionGroupsRevoked,
|
||||
credentialMembershipsRevoked: externalResult.credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled: externalResult.pendingInvitationsCancelled,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const result = await removeUserFromOrganization({
|
||||
@@ -330,6 +407,28 @@ export const DELETE = withRouteHandler(
|
||||
return NextResponse.json({ error: result.error }, { status: 500 })
|
||||
}
|
||||
|
||||
let seatReduction: Awaited<ReturnType<typeof reduceOrganizationSeatsByOne>> | null = null
|
||||
if (shouldReduceSeats && session.user.id !== targetUserId) {
|
||||
try {
|
||||
seatReduction = await reduceOrganizationSeatsByOne({
|
||||
organizationId,
|
||||
actorUserId: session.user.id,
|
||||
removedUserId: targetUserId,
|
||||
})
|
||||
} catch (seatError) {
|
||||
logger.error('Failed to reduce seats after member removal', {
|
||||
organizationId,
|
||||
removedMemberId: targetUserId,
|
||||
removedBy: session.user.id,
|
||||
error: seatError,
|
||||
})
|
||||
seatReduction = {
|
||||
reduced: false,
|
||||
reason: 'Failed to reduce seats after member removal',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (session.user.id === targetUserId) {
|
||||
try {
|
||||
await setActiveOrganizationForCurrentSession(null)
|
||||
@@ -348,6 +447,7 @@ export const DELETE = withRouteHandler(
|
||||
removedBy: session.user.id,
|
||||
wasSelfRemoval: session.user.id === targetUserId,
|
||||
billingActions: result.billingActions,
|
||||
seatReduction,
|
||||
})
|
||||
|
||||
recordAudit({
|
||||
@@ -367,6 +467,7 @@ export const DELETE = withRouteHandler(
|
||||
targetEmail: targetMember[0].email ?? undefined,
|
||||
targetName: targetMember[0].name ?? undefined,
|
||||
wasSelfRemoval: session.user.id === targetUserId,
|
||||
seatReduction,
|
||||
},
|
||||
request,
|
||||
})
|
||||
@@ -381,6 +482,7 @@ export const DELETE = withRouteHandler(
|
||||
removedMemberId: targetUserId,
|
||||
removedBy: session.user.id,
|
||||
removedAt: new Date().toISOString(),
|
||||
seatReduction,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
workspace,
|
||||
} from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, inArray, sql } from 'drizzle-orm'
|
||||
import { and, eq, inArray, isNull, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
@@ -57,7 +57,7 @@ export const GET = withRouteHandler(
|
||||
const orgWorkspaces = await db
|
||||
.select({ id: workspace.id, name: workspace.name })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.organizationId, organizationId))
|
||||
.where(and(eq(workspace.organizationId, organizationId), isNull(workspace.archivedAt)))
|
||||
|
||||
const orgWorkspaceIds = orgWorkspaces.map((ws) => ws.id)
|
||||
const workspaceNameById = new Map(orgWorkspaces.map((ws) => [ws.id, ws.name]))
|
||||
@@ -118,12 +118,82 @@ export const GET = withRouteHandler(
|
||||
workspaces: permissionsByUser.get(row.userId) ?? [],
|
||||
}))
|
||||
|
||||
const externalPermissionRows =
|
||||
orgWorkspaceIds.length > 0
|
||||
? await db
|
||||
.select({
|
||||
userId: user.id,
|
||||
userName: user.name,
|
||||
userEmail: user.email,
|
||||
userImage: user.image,
|
||||
workspaceId: permissions.entityId,
|
||||
permission: permissions.permissionType,
|
||||
createdAt: permissions.createdAt,
|
||||
})
|
||||
.from(permissions)
|
||||
.innerJoin(user, eq(permissions.userId, user.id))
|
||||
.leftJoin(
|
||||
member,
|
||||
and(eq(member.userId, user.id), eq(member.organizationId, organizationId))
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
inArray(permissions.entityId, orgWorkspaceIds),
|
||||
isNull(member.id)
|
||||
)
|
||||
)
|
||||
: []
|
||||
|
||||
const externalMembersByUser = new Map<
|
||||
string,
|
||||
{
|
||||
memberId: string
|
||||
userId: string
|
||||
role: 'external'
|
||||
createdAt: Date
|
||||
name: string
|
||||
email: string
|
||||
image: string | null
|
||||
workspaces: RosterWorkspaceAccess[]
|
||||
}
|
||||
>()
|
||||
|
||||
for (const row of externalPermissionRows) {
|
||||
const existing = externalMembersByUser.get(row.userId)
|
||||
const workspaceAccess: RosterWorkspaceAccess = {
|
||||
workspaceId: row.workspaceId,
|
||||
workspaceName: workspaceNameById.get(row.workspaceId) ?? 'Workspace',
|
||||
permission: row.permission,
|
||||
}
|
||||
|
||||
if (existing) {
|
||||
existing.workspaces.push(workspaceAccess)
|
||||
if (row.createdAt < existing.createdAt) existing.createdAt = row.createdAt
|
||||
continue
|
||||
}
|
||||
|
||||
externalMembersByUser.set(row.userId, {
|
||||
memberId: `external-${row.userId}`,
|
||||
userId: row.userId,
|
||||
role: 'external',
|
||||
createdAt: row.createdAt,
|
||||
name: row.userName,
|
||||
email: row.userEmail,
|
||||
image: row.userImage,
|
||||
workspaces: [workspaceAccess],
|
||||
})
|
||||
}
|
||||
|
||||
const rosterMembers = [...members, ...externalMembersByUser.values()]
|
||||
|
||||
const pendingInvitationRows = await db
|
||||
.select({
|
||||
id: invitation.id,
|
||||
email: invitation.email,
|
||||
role: invitation.role,
|
||||
kind: invitation.kind,
|
||||
membershipIntent: invitation.membershipIntent,
|
||||
createdAt: invitation.createdAt,
|
||||
expiresAt: invitation.expiresAt,
|
||||
inviteeName: user.name,
|
||||
@@ -160,8 +230,9 @@ export const GET = withRouteHandler(
|
||||
const pendingInvitations = pendingInvitationRows.map((row) => ({
|
||||
id: row.id,
|
||||
email: row.email,
|
||||
role: row.role,
|
||||
role: row.membershipIntent === 'external' ? 'external' : row.role,
|
||||
kind: row.kind,
|
||||
membershipIntent: row.membershipIntent,
|
||||
createdAt: row.createdAt,
|
||||
expiresAt: row.expiresAt,
|
||||
inviteeName: row.inviteeName,
|
||||
@@ -172,7 +243,7 @@ export const GET = withRouteHandler(
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
members,
|
||||
members: rosterMembers,
|
||||
pendingInvitations,
|
||||
workspaces: orgWorkspaces,
|
||||
},
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { invitation, member, organization, subscription } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, count, eq, inArray } from 'drizzle-orm'
|
||||
import { and, count, eq, gt, inArray, ne } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
@@ -116,7 +116,14 @@ export const PUT = withRouteHandler(
|
||||
const [pendingCountRow] = await db
|
||||
.select({ count: count() })
|
||||
.from(invitation)
|
||||
.where(and(eq(invitation.organizationId, organizationId), eq(invitation.status, 'pending')))
|
||||
.where(
|
||||
and(
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.status, 'pending'),
|
||||
ne(invitation.membershipIntent, 'external'),
|
||||
gt(invitation.expiresAt, new Date())
|
||||
)
|
||||
)
|
||||
|
||||
const memberCount = memberCountRow?.count ?? 0
|
||||
const pendingCount = pendingCountRow?.count ?? 0
|
||||
|
||||
614
apps/sim/app/api/tools/sap_s4hana/proxy/route.ts
Normal file
614
apps/sim/app/api/tools/sap_s4hana/proxy/route.ts
Normal file
@@ -0,0 +1,614 @@
|
||||
import { createHash } from 'node:crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { toError } from '@sim/utils/errors'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SapS4HanaProxyAPI')
|
||||
|
||||
const HttpMethod = z.enum(['GET', 'POST', 'PATCH', 'PUT', 'DELETE', 'MERGE'])
|
||||
const DeploymentType = z.enum(['cloud_public', 'cloud_private', 'on_premise'])
|
||||
const AuthType = z.enum(['oauth_client_credentials', 'basic'])
|
||||
|
||||
const ServiceName = z
|
||||
.string()
|
||||
.min(1, 'service is required')
|
||||
.regex(
|
||||
/^[A-Z][A-Z0-9_]*(;v=\d+)?$/,
|
||||
'service must be an uppercase OData service name optionally suffixed with ";v=NNNN" (e.g., API_BUSINESS_PARTNER, API_OUTBOUND_DELIVERY_SRV;v=0002)'
|
||||
)
|
||||
|
||||
const ServicePath = z
|
||||
.string()
|
||||
.min(1, 'path is required')
|
||||
.refine(
|
||||
(p) =>
|
||||
!p.split(/[/\\]/).some((seg) => seg === '..' || seg === '.') &&
|
||||
!p.includes('?') &&
|
||||
!p.includes('#') &&
|
||||
!/%(?:2[eEfF]|5[cC]|3[fF]|23)/.test(p),
|
||||
{
|
||||
message:
|
||||
'path must not contain ".." or "." segments, "?", "#", or percent-encoded path/query/fragment characters',
|
||||
}
|
||||
)
|
||||
|
||||
const Subdomain = z
|
||||
.string()
|
||||
.regex(
|
||||
/^[a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?$/i,
|
||||
'subdomain must contain only letters, digits, and hyphens (1-63 chars)'
|
||||
)
|
||||
|
||||
const ProxyRequestSchema = z
|
||||
.object({
|
||||
deploymentType: DeploymentType.default('cloud_public'),
|
||||
authType: AuthType.default('oauth_client_credentials'),
|
||||
subdomain: Subdomain.optional(),
|
||||
region: z
|
||||
.string()
|
||||
.regex(/^[a-z]{2,4}\d{1,3}$/i, 'region must be an SAP BTP region code (e.g., eu10, us30)')
|
||||
.optional(),
|
||||
baseUrl: z.string().optional(),
|
||||
tokenUrl: z.string().optional(),
|
||||
clientId: z.string().optional(),
|
||||
clientSecret: z.string().optional(),
|
||||
username: z.string().optional(),
|
||||
password: z.string().optional(),
|
||||
service: ServiceName,
|
||||
path: ServicePath,
|
||||
method: HttpMethod.default('GET'),
|
||||
query: z.record(z.union([z.string(), z.number(), z.boolean()])).optional(),
|
||||
body: z.unknown().optional(),
|
||||
ifMatch: z.string().optional(),
|
||||
})
|
||||
.superRefine((req, ctx) => {
|
||||
if (req.deploymentType === 'cloud_public') {
|
||||
if (!req.subdomain) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['subdomain'],
|
||||
message: 'subdomain is required for cloud_public deployment',
|
||||
})
|
||||
}
|
||||
if (!req.region) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['region'],
|
||||
message: 'region is required for cloud_public deployment',
|
||||
})
|
||||
}
|
||||
if (req.authType !== 'oauth_client_credentials') {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['authType'],
|
||||
message: 'cloud_public deployment only supports oauth_client_credentials',
|
||||
})
|
||||
}
|
||||
if (!req.clientId) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['clientId'],
|
||||
message: 'clientId is required',
|
||||
})
|
||||
}
|
||||
if (!req.clientSecret) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['clientSecret'],
|
||||
message: 'clientSecret is required',
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if (!req.baseUrl) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['baseUrl'],
|
||||
message: 'baseUrl is required for cloud_private and on_premise deployments',
|
||||
})
|
||||
} else {
|
||||
const baseUrlCheck = checkExternalUrlSafety(req.baseUrl, 'baseUrl')
|
||||
if (!baseUrlCheck.ok) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['baseUrl'],
|
||||
message: baseUrlCheck.message,
|
||||
})
|
||||
}
|
||||
}
|
||||
if (req.authType === 'oauth_client_credentials') {
|
||||
if (!req.tokenUrl) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['tokenUrl'],
|
||||
message: 'tokenUrl is required for OAuth on cloud_private/on_premise',
|
||||
})
|
||||
} else {
|
||||
const tokenUrlCheck = checkExternalUrlSafety(req.tokenUrl, 'tokenUrl')
|
||||
if (!tokenUrlCheck.ok) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['tokenUrl'],
|
||||
message: tokenUrlCheck.message,
|
||||
})
|
||||
}
|
||||
}
|
||||
if (!req.clientId) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['clientId'],
|
||||
message: 'clientId is required for OAuth',
|
||||
})
|
||||
}
|
||||
if (!req.clientSecret) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['clientSecret'],
|
||||
message: 'clientSecret is required for OAuth',
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if (!req.username) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['username'],
|
||||
message: 'username is required for Basic auth',
|
||||
})
|
||||
}
|
||||
if (!req.password) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['password'],
|
||||
message: 'password is required for Basic auth',
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
type ProxyRequest = z.infer<typeof ProxyRequestSchema>
|
||||
|
||||
interface CachedToken {
|
||||
accessToken: string
|
||||
expiresAt: number
|
||||
}
|
||||
|
||||
const TOKEN_CACHE = new Map<string, CachedToken>()
|
||||
const TOKEN_CACHE_MAX_ENTRIES = 500
|
||||
const TOKEN_SAFETY_WINDOW_MS = 60_000
|
||||
const OUTBOUND_FETCH_TIMEOUT_MS = 30_000
|
||||
|
||||
const FORBIDDEN_HOSTS = new Set([
|
||||
'localhost',
|
||||
'0.0.0.0',
|
||||
'127.0.0.1',
|
||||
'169.254.169.254',
|
||||
'metadata.google.internal',
|
||||
'metadata',
|
||||
'[::1]',
|
||||
'[::]',
|
||||
'[::ffff:127.0.0.1]',
|
||||
'[fd00:ec2::254]',
|
||||
])
|
||||
|
||||
function isPrivateIPv4(host: string): boolean {
|
||||
const match = host.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/)
|
||||
if (!match) return false
|
||||
const octets = match.slice(1, 5).map(Number) as [number, number, number, number]
|
||||
if (octets.some((o) => o < 0 || o > 255)) return false
|
||||
const [a, b] = octets
|
||||
if (a === 10) return true
|
||||
if (a === 172 && b >= 16 && b <= 31) return true
|
||||
if (a === 192 && b === 168) return true
|
||||
if (a === 127) return true
|
||||
if (a === 169 && b === 254) return true
|
||||
if (a === 0) return true
|
||||
return false
|
||||
}
|
||||
|
||||
function extractIPv4MappedHost(host: string): string | null {
|
||||
const stripped = host.startsWith('[') && host.endsWith(']') ? host.slice(1, -1) : host
|
||||
const lower = stripped.toLowerCase()
|
||||
for (const prefix of ['::ffff:', '::']) {
|
||||
if (lower.startsWith(prefix)) {
|
||||
const candidate = lower.slice(prefix.length)
|
||||
if (/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/.test(candidate)) return candidate
|
||||
}
|
||||
}
|
||||
const hexMatch = lower.match(/^::ffff:([0-9a-f]{1,4}):([0-9a-f]{1,4})$/)
|
||||
if (hexMatch) {
|
||||
const high = Number.parseInt(hexMatch[1] as string, 16)
|
||||
const low = Number.parseInt(hexMatch[2] as string, 16)
|
||||
if (high >= 0 && high <= 0xffff && low >= 0 && low <= 0xffff) {
|
||||
const a = (high >> 8) & 0xff
|
||||
const b = high & 0xff
|
||||
const c = (low >> 8) & 0xff
|
||||
const d = low & 0xff
|
||||
return `${a}.${b}.${c}.${d}`
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
function isPrivateOrLoopbackIPv6(host: string): boolean {
|
||||
const stripped = host.startsWith('[') && host.endsWith(']') ? host.slice(1, -1) : host
|
||||
const lower = stripped.toLowerCase()
|
||||
if (lower === '::' || lower === '::1') return true
|
||||
if (/^fc[0-9a-f]{2}:/.test(lower) || /^fd[0-9a-f]{2}:/.test(lower)) return true
|
||||
if (lower.startsWith('fe80:')) return true
|
||||
return false
|
||||
}
|
||||
|
||||
function checkExternalUrlSafety(
|
||||
rawUrl: string,
|
||||
label: string
|
||||
): { ok: true; url: URL } | { ok: false; message: string } {
|
||||
let parsed: URL
|
||||
try {
|
||||
parsed = new URL(rawUrl)
|
||||
} catch {
|
||||
return { ok: false, message: `${label} must be a valid URL` }
|
||||
}
|
||||
if (parsed.protocol !== 'https:') {
|
||||
return { ok: false, message: `${label} must use https://` }
|
||||
}
|
||||
const host = parsed.hostname.toLowerCase()
|
||||
if (FORBIDDEN_HOSTS.has(host) || FORBIDDEN_HOSTS.has(`[${host}]`)) {
|
||||
return { ok: false, message: `${label} host is not allowed` }
|
||||
}
|
||||
if (isPrivateIPv4(host)) {
|
||||
return { ok: false, message: `${label} host is not allowed (private/loopback range)` }
|
||||
}
|
||||
const mapped = extractIPv4MappedHost(host)
|
||||
if (mapped && isPrivateIPv4(mapped)) {
|
||||
return { ok: false, message: `${label} host is not allowed (IPv4-mapped private range)` }
|
||||
}
|
||||
if (isPrivateOrLoopbackIPv6(host)) {
|
||||
return { ok: false, message: `${label} host is not allowed (IPv6 private/loopback)` }
|
||||
}
|
||||
return { ok: true, url: parsed }
|
||||
}
|
||||
|
||||
function assertSafeExternalUrl(rawUrl: string, label: string): URL {
|
||||
const result = checkExternalUrlSafety(rawUrl, label)
|
||||
if (!result.ok) throw new Error(result.message)
|
||||
return result.url
|
||||
}
|
||||
|
||||
function resolveTokenUrl(req: ProxyRequest): string {
|
||||
if (req.deploymentType === 'cloud_public') {
|
||||
return `https://${req.subdomain}.authentication.${req.region}.hana.ondemand.com/oauth/token`
|
||||
}
|
||||
if (!req.tokenUrl) {
|
||||
throw new Error('tokenUrl is required for OAuth on cloud_private/on_premise')
|
||||
}
|
||||
return req.tokenUrl
|
||||
}
|
||||
|
||||
function tokenCacheKey(req: ProxyRequest): string {
|
||||
const secretHash = req.clientSecret
|
||||
? createHash('sha256').update(req.clientSecret).digest('hex').slice(0, 16)
|
||||
: ''
|
||||
return `${resolveTokenUrl(req)}::${req.clientId ?? ''}::${secretHash}`
|
||||
}
|
||||
|
||||
function rememberToken(key: string, token: CachedToken): void {
|
||||
if (TOKEN_CACHE.has(key)) TOKEN_CACHE.delete(key)
|
||||
TOKEN_CACHE.set(key, token)
|
||||
while (TOKEN_CACHE.size > TOKEN_CACHE_MAX_ENTRIES) {
|
||||
const oldestKey = TOKEN_CACHE.keys().next().value
|
||||
if (oldestKey === undefined) break
|
||||
TOKEN_CACHE.delete(oldestKey)
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchAccessToken(req: ProxyRequest, requestId: string): Promise<string> {
|
||||
const cacheKey = tokenCacheKey(req)
|
||||
const cached = TOKEN_CACHE.get(cacheKey)
|
||||
if (cached && cached.expiresAt - TOKEN_SAFETY_WINDOW_MS > Date.now()) {
|
||||
return cached.accessToken
|
||||
}
|
||||
|
||||
const tokenUrl = assertSafeExternalUrl(resolveTokenUrl(req), 'tokenUrl').toString()
|
||||
const basic = Buffer.from(`${req.clientId}:${req.clientSecret}`).toString('base64')
|
||||
|
||||
const response = await fetch(tokenUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Basic ${basic}`,
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
Accept: 'application/json',
|
||||
},
|
||||
body: 'grant_type=client_credentials',
|
||||
signal: AbortSignal.timeout(OUTBOUND_FETCH_TIMEOUT_MS),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const text = await response.text().catch(() => '')
|
||||
logger.warn(`[${requestId}] Token fetch failed (${response.status}): ${text}`)
|
||||
throw new Error(`SAP token request failed: HTTP ${response.status}`)
|
||||
}
|
||||
|
||||
const data = (await response.json()) as {
|
||||
access_token?: string
|
||||
expires_in?: number
|
||||
}
|
||||
|
||||
if (!data.access_token) {
|
||||
throw new Error('SAP token response missing access_token')
|
||||
}
|
||||
|
||||
const expiresInMs = (data.expires_in ?? 3600) * 1000
|
||||
rememberToken(cacheKey, {
|
||||
accessToken: data.access_token,
|
||||
expiresAt: Date.now() + expiresInMs,
|
||||
})
|
||||
return data.access_token
|
||||
}
|
||||
|
||||
interface CsrfBundle {
|
||||
token: string
|
||||
cookie: string
|
||||
}
|
||||
|
||||
function joinSetCookies(headers: Headers): string {
|
||||
const cookies =
|
||||
typeof (headers as { getSetCookie?: () => string[] }).getSetCookie === 'function'
|
||||
? (headers as { getSetCookie: () => string[] }).getSetCookie()
|
||||
: (headers.get('set-cookie') ?? '').split(/,\s*(?=[^=,;\s]+=)/)
|
||||
return cookies
|
||||
.map((c) => c.split(';')[0]?.trim())
|
||||
.filter(Boolean)
|
||||
.join('; ')
|
||||
}
|
||||
|
||||
function buildAuthHeader(req: ProxyRequest, accessToken: string | null): string {
|
||||
if (req.authType === 'basic') {
|
||||
const basic = Buffer.from(`${req.username}:${req.password}`).toString('base64')
|
||||
return `Basic ${basic}`
|
||||
}
|
||||
return `Bearer ${accessToken}`
|
||||
}
|
||||
|
||||
async function fetchCsrf(
|
||||
req: ProxyRequest,
|
||||
accessToken: string | null,
|
||||
requestId: string
|
||||
): Promise<CsrfBundle | null> {
|
||||
const url = buildOdataUrl(req, '/$metadata')
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: buildAuthHeader(req, accessToken),
|
||||
Accept: 'application/xml',
|
||||
'X-CSRF-Token': 'Fetch',
|
||||
},
|
||||
signal: AbortSignal.timeout(OUTBOUND_FETCH_TIMEOUT_MS),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const text = await response.text().catch(() => '')
|
||||
logger.warn(`[${requestId}] CSRF fetch failed (${response.status}): ${text}`)
|
||||
return null
|
||||
}
|
||||
|
||||
const token = response.headers.get('x-csrf-token')
|
||||
const cookie = joinSetCookies(response.headers)
|
||||
if (!token) return null
|
||||
return { token, cookie }
|
||||
}
|
||||
|
||||
function resolveHost(req: ProxyRequest): string {
|
||||
if (req.deploymentType === 'cloud_public') {
|
||||
const constructed = `https://${req.subdomain}-api.s4hana.ondemand.com`
|
||||
return assertSafeExternalUrl(constructed, 'subdomain').toString().replace(/\/+$/, '')
|
||||
}
|
||||
if (!req.baseUrl) {
|
||||
throw new Error('baseUrl is required for cloud_private and on_premise deployments')
|
||||
}
|
||||
const trimmed = req.baseUrl.replace(/\/+$/, '')
|
||||
return assertSafeExternalUrl(trimmed, 'baseUrl').toString().replace(/\/+$/, '')
|
||||
}
|
||||
|
||||
function buildOdataUrl(req: ProxyRequest, pathOverride?: string): string {
|
||||
const host = resolveHost(req)
|
||||
const servicePath = `/sap/opu/odata/sap/${req.service}`
|
||||
const subPath = pathOverride ?? req.path
|
||||
const normalized = subPath.startsWith('/') ? subPath : `/${subPath}`
|
||||
const base = `${host}${servicePath}${normalized}`
|
||||
|
||||
if (pathOverride !== undefined) {
|
||||
return base
|
||||
}
|
||||
if (!req.query || Object.keys(req.query).length === 0) {
|
||||
return base
|
||||
}
|
||||
const encode = (s: string) => encodeURIComponent(s).replace(/%24/g, '$')
|
||||
const parts: string[] = []
|
||||
for (const [key, value] of Object.entries(req.query)) {
|
||||
if (value === undefined || value === null) continue
|
||||
parts.push(`${encode(key)}=${encode(String(value))}`)
|
||||
}
|
||||
const queryString = parts.join('&')
|
||||
if (!queryString) return base
|
||||
return base.includes('?') ? `${base}&${queryString}` : `${base}?${queryString}`
|
||||
}
|
||||
|
||||
const WRITE_METHODS = new Set(['POST', 'PUT', 'PATCH', 'DELETE', 'MERGE'])
|
||||
|
||||
interface OdataInvocation {
|
||||
status: number
|
||||
body: unknown
|
||||
raw: string
|
||||
csrfHeader: string
|
||||
}
|
||||
|
||||
async function callOdata(
|
||||
req: ProxyRequest,
|
||||
accessToken: string | null,
|
||||
csrf: CsrfBundle | null
|
||||
): Promise<OdataInvocation> {
|
||||
const url = buildOdataUrl(req)
|
||||
const headers: Record<string, string> = {
|
||||
Authorization: buildAuthHeader(req, accessToken),
|
||||
Accept: 'application/json',
|
||||
}
|
||||
|
||||
const isWrite = WRITE_METHODS.has(req.method)
|
||||
const hasBody = req.body !== undefined && req.body !== null
|
||||
if (hasBody) headers['Content-Type'] = 'application/json'
|
||||
if (req.ifMatch) headers['If-Match'] = req.ifMatch
|
||||
|
||||
if (isWrite && csrf) {
|
||||
headers['X-CSRF-Token'] = csrf.token
|
||||
if (csrf.cookie) headers.Cookie = csrf.cookie
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: req.method,
|
||||
headers,
|
||||
body: hasBody ? JSON.stringify(req.body) : undefined,
|
||||
signal: AbortSignal.timeout(OUTBOUND_FETCH_TIMEOUT_MS),
|
||||
})
|
||||
|
||||
const raw = await response.text()
|
||||
let parsed: unknown = null
|
||||
if (raw.length > 0) {
|
||||
try {
|
||||
parsed = JSON.parse(raw)
|
||||
} catch {
|
||||
parsed = raw
|
||||
}
|
||||
}
|
||||
|
||||
const csrfHeader = response.headers.get('x-csrf-token')?.toLowerCase() ?? ''
|
||||
return { status: response.status, body: parsed, raw, csrfHeader }
|
||||
}
|
||||
|
||||
function isCsrfRequired(invocation: OdataInvocation): boolean {
|
||||
if (invocation.status !== 403) return false
|
||||
if (invocation.csrfHeader === 'required') return true
|
||||
if (typeof invocation.body !== 'object' || invocation.body === null) return false
|
||||
const errorObj = (invocation.body as { error?: { message?: { value?: string } | string } }).error
|
||||
const messageField = errorObj?.message
|
||||
const message = typeof messageField === 'string' ? messageField : (messageField?.value ?? '')
|
||||
return message.toLowerCase().includes('csrf')
|
||||
}
|
||||
|
||||
function extractOdataError(body: unknown, status: number): string {
|
||||
if (body && typeof body === 'object') {
|
||||
const err = (
|
||||
body as {
|
||||
error?: {
|
||||
message?: { value?: string } | string
|
||||
code?: string
|
||||
innererror?: {
|
||||
errordetails?: Array<{ code?: string; message?: string; severity?: string }>
|
||||
}
|
||||
}
|
||||
}
|
||||
).error
|
||||
if (err) {
|
||||
const messageField = err.message
|
||||
const base =
|
||||
typeof messageField === 'string' ? messageField : (messageField?.value ?? err.code ?? '')
|
||||
const prefix = err.code ? `[${err.code}] ` : ''
|
||||
const details = err.innererror?.errordetails
|
||||
?.filter((d) => d.message && (!d.severity || d.severity.toLowerCase() !== 'info'))
|
||||
.map((d) => {
|
||||
const tag = d.code ? `[${d.code}] ` : ''
|
||||
return `${tag}${d.message}`
|
||||
})
|
||||
.filter((m): m is string => Boolean(m))
|
||||
if (details && details.length > 0) {
|
||||
const extras = details.filter((d) => !d.endsWith(base))
|
||||
return extras.length > 0 ? `${prefix}${base} (${extras.join('; ')})` : `${prefix}${base}`
|
||||
}
|
||||
if (base) return `${prefix}${base}`
|
||||
}
|
||||
}
|
||||
if (typeof body === 'string' && body.length > 0) return body
|
||||
return `SAP request failed with HTTP ${status}`
|
||||
}
|
||||
|
||||
function unwrapOdata(body: unknown): unknown {
|
||||
if (!body || typeof body !== 'object') return body
|
||||
const root = (body as { d?: unknown }).d
|
||||
if (root === undefined) return body
|
||||
if (root && typeof root === 'object' && 'results' in (root as Record<string, unknown>)) {
|
||||
const rootObj = root as { results: unknown; __count?: string; __next?: string }
|
||||
if (rootObj.__count !== undefined || rootObj.__next !== undefined) {
|
||||
return {
|
||||
results: rootObj.results,
|
||||
...(rootObj.__count !== undefined && { __count: rootObj.__count }),
|
||||
...(rootObj.__next !== undefined && { __next: rootObj.__next }),
|
||||
}
|
||||
}
|
||||
return rootObj.results
|
||||
}
|
||||
return root
|
||||
}
|
||||
|
||||
/**
 * Server-side proxy for SAP OData requests.
 *
 * Flow: authenticate the internal caller, validate the proxy payload, obtain an
 * OAuth token when configured, fetch a CSRF token for write methods, invoke the
 * OData service, and retry once with a fresh CSRF token if the first attempt
 * was rejected as CSRF-related. Success responses are unwrapped from the OData
 * `d` envelope; failures are translated into a readable error message.
 */
export const POST = withRouteHandler(async (request: NextRequest) => {
  const requestId = generateRequestId()

  try {
    // Only authenticated internal callers may use this proxy.
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized SAP proxy request: ${authResult.error}`)
      return NextResponse.json(
        { success: false, error: authResult.error || 'Authentication required' },
        { status: 401 }
      )
    }

    // Validate the request body; a ZodError here is handled in the catch below.
    const json = await request.json()
    const proxyReq = ProxyRequestSchema.parse(json)
    const isWrite = WRITE_METHODS.has(proxyReq.method)

    // OAuth client-credentials flow only when configured; otherwise no token.
    const accessToken =
      proxyReq.authType === 'oauth_client_credentials'
        ? await fetchAccessToken(proxyReq, requestId)
        : null
    // CSRF tokens are only required for write methods.
    const csrf = isWrite ? await fetchCsrf(proxyReq, accessToken, requestId) : null

    let invocation = await callOdata(proxyReq, accessToken, csrf)

    // A write rejected for CSRF reasons gets one retry with a fresh token.
    if (isWrite && isCsrfRequired(invocation)) {
      logger.info(`[${requestId}] CSRF token rejected, refetching and retrying`)
      const refreshed = await fetchCsrf(proxyReq, accessToken, requestId)
      if (refreshed) {
        invocation = await callOdata(proxyReq, accessToken, refreshed)
      }
    }

    if (invocation.status >= 200 && invocation.status < 300) {
      // 204 No Content carries no body; everything else is unwrapped from `d`.
      const data = invocation.status === 204 ? null : unwrapOdata(invocation.body)
      return NextResponse.json({ success: true, output: { status: invocation.status, data } })
    }

    // Non-2xx: surface a readable OData error message with the original status.
    const message = extractOdataError(invocation.body, invocation.status)
    logger.warn(
      `[${requestId}] SAP API error (${invocation.status}) ${proxyReq.service}${proxyReq.path}: ${message}`
    )
    return NextResponse.json(
      { success: false, error: message, status: invocation.status },
      { status: invocation.status }
    )
  } catch (error) {
    // Payload validation failures become 400s with the first issue's message.
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Validation error:`, error.errors)
      return NextResponse.json(
        { success: false, error: error.errors[0]?.message || 'Validation failed' },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Unexpected SAP proxy error:`, error)
    return NextResponse.json({ success: false, error: toError(error).message }, { status: 500 })
  }
})
|
||||
@@ -22,6 +22,8 @@ const requestSchema = z.object({
|
||||
variables: z.any(),
|
||||
provider: z.enum(['openai', 'anthropic']).optional().default('openai'),
|
||||
apiKey: z.string(),
|
||||
mode: z.enum(['dom', 'hybrid', 'cua']).optional().default('dom'),
|
||||
maxSteps: z.number().int().min(1).max(200).optional().default(20),
|
||||
})
|
||||
|
||||
/**
|
||||
@@ -121,7 +123,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
|
||||
const params = validationResult.data
|
||||
const { task, startUrl: rawStartUrl, outputSchema, provider, apiKey } = params
|
||||
const { task, startUrl: rawStartUrl, outputSchema, provider, apiKey, mode, maxSteps } = params
|
||||
const variablesObject = processVariables(params.variables)
|
||||
|
||||
const startUrl = normalizeUrl(rawStartUrl)
|
||||
@@ -165,8 +167,10 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
return NextResponse.json({ error: 'Invalid Anthropic API key format' }, { status: 400 })
|
||||
}
|
||||
|
||||
const modelName =
|
||||
provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5'
|
||||
const modelName = provider === 'anthropic' ? 'anthropic/claude-sonnet-4-6' : 'openai/gpt-5'
|
||||
|
||||
let sessionId: string | null = null
|
||||
let liveViewUrl: string | null = null
|
||||
|
||||
try {
|
||||
logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName })
|
||||
@@ -190,6 +194,35 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
await stagehand.init()
|
||||
logger.info('Stagehand initialized successfully')
|
||||
|
||||
sessionId = stagehand.browserbaseSessionID ?? null
|
||||
if (sessionId) {
|
||||
try {
|
||||
const debugResponse = await fetch(
|
||||
`https://api.browserbase.com/v1/sessions/${sessionId}/debug`,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'X-BB-API-Key': BROWSERBASE_API_KEY,
|
||||
},
|
||||
}
|
||||
)
|
||||
if (debugResponse.ok) {
|
||||
const debugData = (await debugResponse.json()) as {
|
||||
debuggerFullscreenUrl?: string
|
||||
debuggerUrl?: string
|
||||
}
|
||||
liveViewUrl = debugData.debuggerFullscreenUrl ?? debugData.debuggerUrl ?? null
|
||||
if (liveViewUrl) {
|
||||
logger.info(`Browserbase live view URL: ${liveViewUrl}`)
|
||||
}
|
||||
} else {
|
||||
logger.warn(`Failed to fetch Browserbase debug URL: ${debugResponse.statusText}`)
|
||||
}
|
||||
} catch (debugError) {
|
||||
logger.warn('Error fetching Browserbase debug URL', { error: debugError })
|
||||
}
|
||||
}
|
||||
|
||||
const page = stagehand.context.pages()[0]
|
||||
logger.info(`Navigating to ${startUrl}`)
|
||||
await page.goto(startUrl, { waitUntil: 'networkidle' })
|
||||
@@ -223,13 +256,14 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
apiKey: apiKey,
|
||||
},
|
||||
systemPrompt: agentInstructions,
|
||||
mode,
|
||||
})
|
||||
|
||||
logger.info('Executing agent task', { task: taskWithVariables })
|
||||
logger.info('Executing agent task', { task: taskWithVariables, mode, maxSteps })
|
||||
|
||||
const agentExecutionResult = await agent.execute({
|
||||
instruction: taskWithVariables,
|
||||
maxSteps: 20,
|
||||
maxSteps,
|
||||
})
|
||||
|
||||
const agentResult = {
|
||||
@@ -293,6 +327,8 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
return NextResponse.json({
|
||||
agentResult,
|
||||
structuredOutput,
|
||||
liveViewUrl,
|
||||
sessionId,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Stagehand agent execution error', {
|
||||
@@ -327,6 +363,8 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
{
|
||||
error: errorMessage,
|
||||
details: errorDetails,
|
||||
liveViewUrl,
|
||||
sessionId,
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
|
||||
@@ -17,8 +17,6 @@ const BROWSERBASE_PROJECT_ID = env.BROWSERBASE_PROJECT_ID
|
||||
const requestSchema = z.object({
|
||||
instruction: z.string(),
|
||||
schema: z.record(z.any()),
|
||||
useTextExtract: z.boolean().optional().default(false),
|
||||
selector: z.string().nullable().optional(),
|
||||
provider: z.enum(['openai', 'anthropic']).optional().default('openai'),
|
||||
apiKey: z.string(),
|
||||
url: z.string().url(),
|
||||
@@ -51,7 +49,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
|
||||
const params = validationResult.data
|
||||
const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params
|
||||
const { url: rawUrl, instruction, provider, apiKey, schema } = params
|
||||
const url = normalizeUrl(rawUrl)
|
||||
const urlValidation = await validateUrlWithDNS(url, 'url')
|
||||
if (!urlValidation.isValid) {
|
||||
@@ -101,8 +99,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
|
||||
try {
|
||||
const modelName =
|
||||
provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5'
|
||||
const modelName = provider === 'anthropic' ? 'anthropic/claude-sonnet-4-6' : 'openai/gpt-5'
|
||||
|
||||
logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName })
|
||||
|
||||
@@ -162,14 +159,11 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
logger.info('Calling stagehand.extract with options', {
|
||||
hasInstruction: !!instruction,
|
||||
hasSchema: !!zodSchema,
|
||||
hasSelector: !!selector,
|
||||
})
|
||||
|
||||
let extractedData
|
||||
if (zodSchema) {
|
||||
extractedData = await stagehand.extract(instruction, zodSchema, {
|
||||
selector: selector || undefined,
|
||||
})
|
||||
extractedData = await stagehand.extract(instruction, zodSchema)
|
||||
} else {
|
||||
extractedData = await stagehand.extract(instruction)
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ import { toError } from '@sim/utils/errors'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants'
|
||||
import { runSandboxTask } from '@/lib/execution/sandbox/run-task'
|
||||
import { runSandboxTask, SandboxUserCodeError } from '@/lib/execution/sandbox/run-task'
|
||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||
import type { SandboxTaskId } from '@/sandbox-tasks/registry'
|
||||
|
||||
@@ -83,6 +83,14 @@ export function createDocumentPreviewRoute(config: DocumentPreviewRouteConfig) {
|
||||
})
|
||||
} catch (err) {
|
||||
const message = toError(err).message
|
||||
if (err instanceof SandboxUserCodeError) {
|
||||
logger.warn(`${config.label} preview user code failed`, {
|
||||
error: message,
|
||||
errorName: err.name,
|
||||
workspaceId,
|
||||
})
|
||||
return NextResponse.json({ error: message, errorName: err.name }, { status: 422 })
|
||||
}
|
||||
logger.error(`${config.label} preview generation failed`, { error: message, workspaceId })
|
||||
return NextResponse.json({ error: message }, { status: 500 })
|
||||
}
|
||||
|
||||
@@ -6,9 +6,15 @@ import { NextRequest } from 'next/server'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants'
|
||||
|
||||
const { mockRunSandboxTask } = vi.hoisted(() => ({
|
||||
mockRunSandboxTask: vi.fn(),
|
||||
}))
|
||||
const { mockRunSandboxTask, SandboxUserCodeError } = vi.hoisted(() => {
|
||||
class SandboxUserCodeError extends Error {
|
||||
constructor(message: string, name: string) {
|
||||
super(message)
|
||||
this.name = name
|
||||
}
|
||||
}
|
||||
return { mockRunSandboxTask: vi.fn(), SandboxUserCodeError }
|
||||
})
|
||||
|
||||
const mockVerifyWorkspaceMembership = workflowsApiUtilsMockFns.mockVerifyWorkspaceMembership
|
||||
|
||||
@@ -16,6 +22,7 @@ vi.mock('@/app/api/workflows/utils', () => workflowsApiUtilsMock)
|
||||
|
||||
vi.mock('@/lib/execution/sandbox/run-task', () => ({
|
||||
runSandboxTask: mockRunSandboxTask,
|
||||
SandboxUserCodeError,
|
||||
}))
|
||||
|
||||
import { POST } from '@/app/api/workspaces/[id]/docx/preview/route'
|
||||
@@ -189,4 +196,31 @@ describe('DOCX preview API route', () => {
|
||||
expect(response.status).toBe(500)
|
||||
await expect(response.json()).resolves.toEqual({ error: 'boom: sandbox failed' })
|
||||
})
|
||||
|
||||
it('returns 422 when user code throws inside the sandbox', async () => {
|
||||
mockRunSandboxTask.mockRejectedValue(
|
||||
new SandboxUserCodeError('Invalid or unexpected token', 'SyntaxError')
|
||||
)
|
||||
|
||||
const request = new NextRequest(
|
||||
'http://localhost:3000/api/workspaces/workspace-1/docx/preview',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ code: 'const x = ' }),
|
||||
}
|
||||
)
|
||||
|
||||
const response = await POST(request, {
|
||||
params: Promise.resolve({ id: 'workspace-1' }),
|
||||
})
|
||||
|
||||
expect(response.status).toBe(422)
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: 'Invalid or unexpected token',
|
||||
errorName: 'SyntaxError',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -6,9 +6,15 @@ import { NextRequest } from 'next/server'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants'
|
||||
|
||||
const { mockRunSandboxTask } = vi.hoisted(() => ({
|
||||
mockRunSandboxTask: vi.fn(),
|
||||
}))
|
||||
const { mockRunSandboxTask, SandboxUserCodeError } = vi.hoisted(() => {
|
||||
class SandboxUserCodeError extends Error {
|
||||
constructor(message: string, name: string) {
|
||||
super(message)
|
||||
this.name = name
|
||||
}
|
||||
}
|
||||
return { mockRunSandboxTask: vi.fn(), SandboxUserCodeError }
|
||||
})
|
||||
|
||||
const mockVerifyWorkspaceMembership = workflowsApiUtilsMockFns.mockVerifyWorkspaceMembership
|
||||
|
||||
@@ -16,6 +22,7 @@ vi.mock('@/app/api/workflows/utils', () => workflowsApiUtilsMock)
|
||||
|
||||
vi.mock('@/lib/execution/sandbox/run-task', () => ({
|
||||
runSandboxTask: mockRunSandboxTask,
|
||||
SandboxUserCodeError,
|
||||
}))
|
||||
|
||||
import { POST } from '@/app/api/workspaces/[id]/pdf/preview/route'
|
||||
@@ -187,4 +194,31 @@ describe('PDF preview API route', () => {
|
||||
expect(response.status).toBe(500)
|
||||
await expect(response.json()).resolves.toEqual({ error: 'boom: sandbox failed' })
|
||||
})
|
||||
|
||||
it('returns 422 when user code throws inside the sandbox', async () => {
|
||||
mockRunSandboxTask.mockRejectedValue(
|
||||
new SandboxUserCodeError('Invalid or unexpected token', 'SyntaxError')
|
||||
)
|
||||
|
||||
const request = new NextRequest(
|
||||
'http://localhost:3000/api/workspaces/workspace-1/pdf/preview',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ code: 'const x = ' }),
|
||||
}
|
||||
)
|
||||
|
||||
const response = await POST(request, {
|
||||
params: Promise.resolve({ id: 'workspace-1' }),
|
||||
})
|
||||
|
||||
expect(response.status).toBe(422)
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: 'Invalid or unexpected token',
|
||||
errorName: 'SyntaxError',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -6,9 +6,15 @@ import { NextRequest } from 'next/server'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants'
|
||||
|
||||
const { mockRunSandboxTask } = vi.hoisted(() => ({
|
||||
mockRunSandboxTask: vi.fn(),
|
||||
}))
|
||||
const { mockRunSandboxTask, SandboxUserCodeError } = vi.hoisted(() => {
|
||||
class SandboxUserCodeError extends Error {
|
||||
constructor(message: string, name: string) {
|
||||
super(message)
|
||||
this.name = name
|
||||
}
|
||||
}
|
||||
return { mockRunSandboxTask: vi.fn(), SandboxUserCodeError }
|
||||
})
|
||||
|
||||
const mockVerifyWorkspaceMembership = workflowsApiUtilsMockFns.mockVerifyWorkspaceMembership
|
||||
|
||||
@@ -16,6 +22,7 @@ vi.mock('@/app/api/workflows/utils', () => workflowsApiUtilsMock)
|
||||
|
||||
vi.mock('@/lib/execution/sandbox/run-task', () => ({
|
||||
runSandboxTask: mockRunSandboxTask,
|
||||
SandboxUserCodeError,
|
||||
}))
|
||||
|
||||
import { POST } from '@/app/api/workspaces/[id]/pptx/preview/route'
|
||||
@@ -189,4 +196,31 @@ describe('PPTX preview API route', () => {
|
||||
expect(response.status).toBe(500)
|
||||
await expect(response.json()).resolves.toEqual({ error: 'boom: sandbox failed' })
|
||||
})
|
||||
|
||||
it('returns 422 when user code throws inside the sandbox', async () => {
|
||||
mockRunSandboxTask.mockRejectedValue(
|
||||
new SandboxUserCodeError('Invalid or unexpected token', 'SyntaxError')
|
||||
)
|
||||
|
||||
const request = new NextRequest(
|
||||
'http://localhost:3000/api/workspaces/workspace-1/pptx/preview',
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ code: 'const x = ' }),
|
||||
}
|
||||
)
|
||||
|
||||
const response = await POST(request, {
|
||||
params: Promise.resolve({ id: 'workspace-1' }),
|
||||
})
|
||||
|
||||
expect(response.status).toBe(422)
|
||||
await expect(response.json()).resolves.toEqual({
|
||||
error: 'Invalid or unexpected token',
|
||||
errorName: 'SyntaxError',
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
129
apps/sim/app/api/workspaces/invitations/batch/route.ts
Normal file
129
apps/sim/app/api/workspaces/invitations/batch/route.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { normalizeEmail } from '@/lib/invitations/core'
|
||||
import {
|
||||
createWorkspaceInvitation,
|
||||
prepareWorkspaceInvitationContext,
|
||||
WorkspaceInvitationError,
|
||||
type WorkspaceInvitationResult,
|
||||
} from '@/lib/invitations/workspace-invitations'
|
||||
import { InvitationsNotAllowedError } from '@/ee/access-control/utils/permission-check'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('WorkspaceInvitationBatchAPI')
|
||||
|
||||
// One failed entry in a batch invitation response: which email failed and why.
interface BatchInvitationFailure {
  email: string
  error: string
}

// Request payload: target workspace plus at least one invitation entry.
// Permission is validated downstream by the invitation helpers, not here.
const batchInvitationSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  invitations: z
    .array(
      z.object({
        email: z.string().trim().min(1, 'Invitation email is required'),
        permission: z.string().optional(),
      })
    )
    .min(1, 'At least one invitation is required'),
})

type BatchInvitationRequest = z.infer<typeof batchInvitationSchema>
|
||||
|
||||
function batchErrorResponse(error: unknown) {
|
||||
if (error instanceof WorkspaceInvitationError) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: error.message,
|
||||
...(error.email ? { email: error.email } : {}),
|
||||
...(error.upgradeRequired !== undefined ? { upgradeRequired: error.upgradeRequired } : {}),
|
||||
},
|
||||
{ status: error.status }
|
||||
)
|
||||
}
|
||||
|
||||
if (error instanceof InvitationsNotAllowedError) {
|
||||
return NextResponse.json({ error: error.message }, { status: 403 })
|
||||
}
|
||||
|
||||
logger.error('Error creating workspace invitation batch:', error)
|
||||
return NextResponse.json({ error: 'Failed to create invitation batch' }, { status: 500 })
|
||||
}
|
||||
|
||||
/**
 * POST /api/workspaces/invitations/batch — creates multiple workspace
 * invitations in a single request.
 *
 * Processes invitations sequentially with partial-failure semantics: each
 * entry either lands in `successful`/`invitations` or in `failed` with a
 * per-email reason. Duplicate emails within one batch are rejected without a
 * database call. Only unexpected (non-WorkspaceInvitationError) item failures
 * abort the whole batch, falling through to batchErrorResponse.
 */
export const POST = withRouteHandler(async (req: NextRequest) => {
  const session = await getSession()
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  try {
    // A malformed JSON body parses to null and fails schema validation below.
    const parsedBody = batchInvitationSchema.safeParse(await req.json().catch(() => null))
    if (!parsedBody.success) {
      return NextResponse.json(
        { error: parsedBody.error.errors[0]?.message ?? 'Invalid invitation batch payload' },
        { status: 400 }
      )
    }
    const body: BatchInvitationRequest = parsedBody.data

    // Shared context (workspace, inviter, policy checks) computed once for the
    // whole batch; failures here reject the entire request.
    const context = await prepareWorkspaceInvitationContext({
      workspaceId: body.workspaceId,
      inviterId: session.user.id,
      inviterName: session.user.name || session.user.email || 'A user',
      inviterEmail: session.user.email,
    })

    const successful: string[] = []
    const failed: BatchInvitationFailure[] = []
    const invitations: WorkspaceInvitationResult[] = []
    // Normalized emails already seen in this batch, to catch duplicates.
    const seenEmails = new Set<string>()

    for (const item of body.invitations) {
      const normalizedEmail = normalizeEmail(item.email)
      if (seenEmails.has(normalizedEmail)) {
        failed.push({
          email: normalizedEmail,
          error: `${normalizedEmail} appears more than once in this invitation batch`,
        })
        continue
      }
      seenEmails.add(normalizedEmail)

      try {
        const invitation = await createWorkspaceInvitation({
          context,
          email: item.email,
          permission: item.permission,
          request: req,
        })
        successful.push(invitation.email)
        invitations.push(invitation)
      } catch (error) {
        // Known invitation failures are recorded per-email and the batch
        // continues with the remaining entries.
        if (error instanceof WorkspaceInvitationError) {
          failed.push({ email: error.email ?? normalizedEmail, error: error.message })
          continue
        }

        // Anything else is unexpected: log context and abort the batch.
        logger.error('Unexpected workspace invitation batch item failure:', {
          email: normalizedEmail,
          error,
        })
        throw error
      }
    }

    // success reflects the whole batch: true only when nothing failed.
    return NextResponse.json({
      success: failed.length === 0,
      successful,
      failed,
      invitations,
    })
  } catch (error) {
    return batchErrorResponse(error)
  }
})
|
||||
@@ -108,9 +108,9 @@ const mockGetSession = authMockFns.mockGetSession
|
||||
const mockGetWorkspaceWithOwner = permissionsMockFns.mockGetWorkspaceWithOwner
|
||||
|
||||
import { UPGRADE_TO_INVITE_REASON } from '@/lib/workspaces/policy-constants'
|
||||
import { POST } from '@/app/api/workspaces/invitations/route'
|
||||
import { POST } from '@/app/api/workspaces/invitations/batch/route'
|
||||
|
||||
describe('POST /api/workspaces/invitations', () => {
|
||||
describe('POST /api/workspaces/invitations/batch', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockDbResults.value = []
|
||||
@@ -169,8 +169,7 @@ describe('POST /api/workspaces/invitations', () => {
|
||||
|
||||
const request = createMockRequest('POST', {
|
||||
workspaceId: 'workspace-1',
|
||||
email: 'new@example.com',
|
||||
permission: 'read',
|
||||
invitations: [{ email: 'new@example.com', permission: 'read' }],
|
||||
})
|
||||
|
||||
const response = await POST(request)
|
||||
@@ -201,8 +200,7 @@ describe('POST /api/workspaces/invitations', () => {
|
||||
|
||||
const request = createMockRequest('POST', {
|
||||
workspaceId: 'workspace-1',
|
||||
email: 'new@example.com',
|
||||
permission: 'read',
|
||||
invitations: [{ email: 'new@example.com', permission: 'read' }],
|
||||
})
|
||||
|
||||
const response = await POST(request)
|
||||
@@ -213,7 +211,7 @@ describe('POST /api/workspaces/invitations', () => {
|
||||
expect(mockCreatePendingInvitation).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('rejects org-owned invites when the organization has no available seats', async () => {
|
||||
it('reports org-owned invites as failed when the organization has no available seats', async () => {
|
||||
mockGetWorkspaceWithOwner.mockResolvedValueOnce({
|
||||
id: 'workspace-1',
|
||||
name: 'Org Workspace',
|
||||
@@ -240,20 +238,25 @@ describe('POST /api/workspaces/invitations', () => {
|
||||
|
||||
const request = createMockRequest('POST', {
|
||||
workspaceId: 'workspace-1',
|
||||
email: 'new@example.com',
|
||||
permission: 'read',
|
||||
invitations: [{ email: 'new@example.com', permission: 'read' }],
|
||||
})
|
||||
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(data.error).toContain('No available seats')
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.success).toBe(false)
|
||||
expect(data.failed).toEqual([
|
||||
{
|
||||
email: 'new@example.com',
|
||||
error: 'No available seats. Currently using 5 of 5 seats.',
|
||||
},
|
||||
])
|
||||
expect(mockValidateSeatAvailability).toHaveBeenCalledWith('org-1', 1)
|
||||
expect(mockCreatePendingInvitation).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('rejects org-owned invites for users already in another organization', async () => {
|
||||
it('creates an external workspace invitation for users already in another organization', async () => {
|
||||
mockGetWorkspaceWithOwner.mockResolvedValueOnce({
|
||||
id: 'workspace-1',
|
||||
name: 'Org Workspace',
|
||||
@@ -281,16 +284,25 @@ describe('POST /api/workspaces/invitations', () => {
|
||||
|
||||
const request = createMockRequest('POST', {
|
||||
workspaceId: 'workspace-1',
|
||||
email: 'new@example.com',
|
||||
permission: 'read',
|
||||
invitations: [{ email: 'new@example.com', permission: 'read' }],
|
||||
})
|
||||
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(409)
|
||||
expect(data.error).toContain('already a member of another organization')
|
||||
expect(mockCreatePendingInvitation).not.toHaveBeenCalled()
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.success).toBe(true)
|
||||
expect(data.invitations[0].membershipIntent).toBe('external')
|
||||
expect(mockValidateSeatAvailability).not.toHaveBeenCalled()
|
||||
expect(mockCreatePendingInvitation).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
kind: 'workspace',
|
||||
email: 'new@example.com',
|
||||
organizationId: 'org-1',
|
||||
membershipIntent: 'external',
|
||||
grants: [{ workspaceId: 'workspace-1', permission: 'read' }],
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('creates a unified workspace invitation for a grandfathered workspace', async () => {
|
||||
@@ -306,8 +318,7 @@ describe('POST /api/workspaces/invitations', () => {
|
||||
|
||||
const request = createMockRequest('POST', {
|
||||
workspaceId: 'workspace-1',
|
||||
email: 'new@example.com',
|
||||
permission: 'write',
|
||||
invitations: [{ email: 'new@example.com', permission: 'write' }],
|
||||
})
|
||||
|
||||
const response = await POST(request)
|
||||
@@ -327,6 +338,40 @@ describe('POST /api/workspaces/invitations', () => {
|
||||
expect(mockValidateSeatAvailability).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('creates multiple workspace invitations in one batch request', async () => {
|
||||
mockDbResults.value = [[{ permissionType: 'admin' }], [], []]
|
||||
mockCreatePendingInvitation
|
||||
.mockResolvedValueOnce({
|
||||
invitationId: 'inv-1',
|
||||
token: 'tok-1',
|
||||
expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000),
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
invitationId: 'inv-2',
|
||||
token: 'tok-2',
|
||||
expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000),
|
||||
})
|
||||
|
||||
const request = createMockRequest('POST', {
|
||||
workspaceId: 'workspace-1',
|
||||
invitations: [
|
||||
{ email: 'first@example.com', permission: 'read' },
|
||||
{ email: 'second@example.com', permission: 'write' },
|
||||
],
|
||||
})
|
||||
|
||||
const response = await POST(request)
|
||||
const data = await response.json()
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
expect(data.success).toBe(true)
|
||||
expect(data.successful).toEqual(['first@example.com', 'second@example.com'])
|
||||
expect(data.failed).toEqual([])
|
||||
expect(data.invitations).toHaveLength(2)
|
||||
expect(mockCreatePendingInvitation).toHaveBeenCalledTimes(2)
|
||||
expect(mockSendInvitationEmail).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
|
||||
it('rolls back the unified invitation when email delivery fails', async () => {
|
||||
mockGetWorkspaceWithOwner.mockResolvedValueOnce({
|
||||
id: 'workspace-1',
|
||||
@@ -344,13 +389,18 @@ describe('POST /api/workspaces/invitations', () => {
|
||||
|
||||
const request = createMockRequest('POST', {
|
||||
workspaceId: 'workspace-1',
|
||||
email: 'new@example.com',
|
||||
permission: 'read',
|
||||
invitations: [{ email: 'new@example.com', permission: 'read' }],
|
||||
})
|
||||
|
||||
const response = await POST(request)
|
||||
|
||||
expect(response.status).toBe(502)
|
||||
expect(response.status).toBe(200)
|
||||
await expect(response.json()).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
success: false,
|
||||
failed: [{ email: 'new@example.com', error: 'mailer unavailable' }],
|
||||
})
|
||||
)
|
||||
expect(mockCancelPendingInvitation).toHaveBeenCalledWith('inv-1')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,35 +1,16 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { permissions, type permissionTypeEnum, user, workspace } from '@sim/db/schema'
|
||||
import { permissions, workspace } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull, sql } from 'drizzle-orm'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getUserOrganization } from '@/lib/billing/organizations/membership'
|
||||
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
|
||||
import { PlatformEvents } from '@/lib/core/telemetry'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { listInvitationsForWorkspaces, normalizeEmail } from '@/lib/invitations/core'
|
||||
import {
|
||||
cancelPendingInvitation,
|
||||
createPendingInvitation,
|
||||
findPendingGrantForWorkspaceEmail,
|
||||
sendInvitationEmail,
|
||||
} from '@/lib/invitations/send'
|
||||
import { captureServerEvent } from '@/lib/posthog/server'
|
||||
import { getWorkspaceWithOwner } from '@/lib/workspaces/permissions/utils'
|
||||
import { getWorkspaceInvitePolicy } from '@/lib/workspaces/policy'
|
||||
import {
|
||||
InvitationsNotAllowedError,
|
||||
validateInvitationsAllowed,
|
||||
} from '@/ee/access-control/utils/permission-check'
|
||||
import { listInvitationsForWorkspaces } from '@/lib/invitations/core'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('WorkspaceInvitationsAPI')
|
||||
|
||||
type PermissionType = (typeof permissionTypeEnum.enumValues)[number]
|
||||
|
||||
export const GET = withRouteHandler(async (req: NextRequest) => {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
@@ -61,241 +42,3 @@ export const GET = withRouteHandler(async (req: NextRequest) => {
|
||||
return NextResponse.json({ error: 'Failed to fetch invitations' }, { status: 500 })
|
||||
}
|
||||
})
|
||||
|
||||
export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
try {
|
||||
const { workspaceId, email, permission = 'read' } = await req.json()
|
||||
|
||||
if (!workspaceId || !email) {
|
||||
return NextResponse.json({ error: 'Workspace ID and email are required' }, { status: 400 })
|
||||
}
|
||||
|
||||
await validateInvitationsAllowed(session.user.id, workspaceId)
|
||||
|
||||
const validPermissions: PermissionType[] = ['admin', 'write', 'read']
|
||||
if (!validPermissions.includes(permission)) {
|
||||
return NextResponse.json(
|
||||
{ error: `Invalid permission: must be one of ${validPermissions.join(', ')}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const normalizedEmail = normalizeEmail(email)
|
||||
|
||||
const userPermission = await db
|
||||
.select()
|
||||
.from(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.entityId, workspaceId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.userId, session.user.id),
|
||||
eq(permissions.permissionType, 'admin')
|
||||
)
|
||||
)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
if (!userPermission) {
|
||||
return NextResponse.json(
|
||||
{ error: 'You need admin permissions to invite users' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const workspaceDetails = await getWorkspaceWithOwner(workspaceId)
|
||||
if (!workspaceDetails) {
|
||||
return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
const invitePolicy = await getWorkspaceInvitePolicy(workspaceDetails)
|
||||
if (!invitePolicy.allowed) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: invitePolicy.reason ?? 'Invites are disabled for this workspace.',
|
||||
upgradeRequired: invitePolicy.upgradeRequired,
|
||||
},
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
const existingUser = await db
|
||||
.select()
|
||||
.from(user)
|
||||
.where(sql`lower(${user.email}) = ${normalizedEmail}`)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
if (existingUser) {
|
||||
const existingPermission = await db
|
||||
.select()
|
||||
.from(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.entityId, workspaceId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.userId, existingUser.id)
|
||||
)
|
||||
)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
if (existingPermission) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `${normalizedEmail} already has access to this workspace`,
|
||||
email: normalizedEmail,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (invitePolicy.requiresSeat && invitePolicy.organizationId) {
|
||||
const existingMembership = await getUserOrganization(existingUser.id)
|
||||
if (
|
||||
existingMembership &&
|
||||
existingMembership.organizationId !== invitePolicy.organizationId
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error:
|
||||
'This user is already a member of another organization. They must leave it before joining this workspace.',
|
||||
email: normalizedEmail,
|
||||
},
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
|
||||
if (!existingMembership) {
|
||||
const seatValidation = await validateSeatAvailability(invitePolicy.organizationId, 1)
|
||||
if (!seatValidation.canInvite) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: seatValidation.reason || 'No available seats for this organization.',
|
||||
email: normalizedEmail,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (invitePolicy.requiresSeat && invitePolicy.organizationId) {
|
||||
const seatValidation = await validateSeatAvailability(invitePolicy.organizationId, 1)
|
||||
if (!seatValidation.canInvite) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: seatValidation.reason || 'No available seats for this organization.',
|
||||
email: normalizedEmail,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const existingInvitation = await findPendingGrantForWorkspaceEmail({
|
||||
workspaceId,
|
||||
email: normalizedEmail,
|
||||
})
|
||||
if (existingInvitation) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: `${normalizedEmail} has already been invited to this workspace`,
|
||||
email: normalizedEmail,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { invitationId, token } = await createPendingInvitation({
|
||||
kind: 'workspace',
|
||||
email: normalizedEmail,
|
||||
inviterId: session.user.id,
|
||||
organizationId: workspaceDetails.organizationId,
|
||||
role: 'member',
|
||||
grants: [
|
||||
{
|
||||
workspaceId,
|
||||
permission,
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
try {
|
||||
PlatformEvents.workspaceMemberInvited({
|
||||
workspaceId,
|
||||
invitedBy: session.user.id,
|
||||
inviteeEmail: normalizedEmail,
|
||||
role: permission,
|
||||
})
|
||||
} catch {
|
||||
// telemetry must not fail the operation
|
||||
}
|
||||
|
||||
captureServerEvent(
|
||||
session.user.id,
|
||||
'workspace_member_invited',
|
||||
{ workspace_id: workspaceId, invitee_role: permission },
|
||||
{
|
||||
groups: { workspace: workspaceId },
|
||||
setOnce: { first_invitation_sent_at: new Date().toISOString() },
|
||||
}
|
||||
)
|
||||
|
||||
const emailResult = await sendInvitationEmail({
|
||||
invitationId,
|
||||
token,
|
||||
kind: 'workspace',
|
||||
email: normalizedEmail,
|
||||
inviterName: session.user.name || session.user.email || 'A user',
|
||||
organizationId: workspaceDetails.organizationId,
|
||||
organizationRole: 'member',
|
||||
grants: [{ workspaceId, permission }],
|
||||
})
|
||||
|
||||
if (!emailResult.success) {
|
||||
await cancelPendingInvitation(invitationId)
|
||||
return NextResponse.json(
|
||||
{ error: emailResult.error || 'Failed to send invitation email' },
|
||||
{ status: 502 }
|
||||
)
|
||||
}
|
||||
|
||||
recordAudit({
|
||||
workspaceId,
|
||||
actorId: session.user.id,
|
||||
actorName: session.user.name,
|
||||
actorEmail: session.user.email,
|
||||
action: AuditAction.MEMBER_INVITED,
|
||||
resourceType: AuditResourceType.WORKSPACE,
|
||||
resourceId: workspaceId,
|
||||
resourceName: normalizedEmail,
|
||||
description: `Invited ${normalizedEmail} as ${permission}`,
|
||||
metadata: {
|
||||
targetEmail: normalizedEmail,
|
||||
targetRole: permission,
|
||||
workspaceName: workspaceDetails.name,
|
||||
invitationId,
|
||||
},
|
||||
request: req,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
invitation: {
|
||||
id: invitationId,
|
||||
workspaceId,
|
||||
email: normalizedEmail,
|
||||
permission,
|
||||
expiresAt: undefined,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof InvitationsNotAllowedError) {
|
||||
return NextResponse.json({ error: error.message }, { status: 403 })
|
||||
}
|
||||
logger.error('Error creating workspace invitation:', error)
|
||||
return NextResponse.json({ error: 'Failed to create invitation' }, { status: 500 })
|
||||
}
|
||||
})
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { permissions, workspace } from '@sim/db/schema'
|
||||
import { permissionGroupMember, permissions, workspace } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { generateId } from '@sim/utils/id'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { revokeWorkspaceCredentialMemberships } from '@/lib/credentials/access'
|
||||
import { revokeWorkspaceCredentialMembershipsTx } from '@/lib/credentials/access'
|
||||
import { captureServerEvent } from '@/lib/posthog/server'
|
||||
import { hasWorkspaceAdminAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
@@ -32,7 +33,10 @@ export const DELETE = withRouteHandler(
|
||||
const { workspaceId } = body
|
||||
|
||||
const workspaceRow = await db
|
||||
.select({ billedAccountUserId: workspace.billedAccountUserId })
|
||||
.select({
|
||||
ownerId: workspace.ownerId,
|
||||
billedAccountUserId: workspace.billedAccountUserId,
|
||||
})
|
||||
.from(workspace)
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
.limit(1)
|
||||
@@ -61,7 +65,10 @@ export const DELETE = withRouteHandler(
|
||||
)
|
||||
.then((rows) => rows[0])
|
||||
|
||||
if (!userPermission) {
|
||||
const isRemovingWorkspaceOwner = workspaceRow[0].ownerId === userId
|
||||
const isOwnerOnlyRemoval = isRemovingWorkspaceOwner && !userPermission
|
||||
|
||||
if (!userPermission && !isOwnerOnlyRemoval) {
|
||||
return NextResponse.json({ error: 'User not found in workspace' }, { status: 404 })
|
||||
}
|
||||
|
||||
@@ -73,8 +80,19 @@ export const DELETE = withRouteHandler(
|
||||
return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 })
|
||||
}
|
||||
|
||||
if (
|
||||
isRemovingWorkspaceOwner &&
|
||||
!isSelf &&
|
||||
session.user.id !== workspaceRow[0].billedAccountUserId
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Only the workspace owner or billing account can remove the workspace owner' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
|
||||
// Prevent removing yourself if you're the last admin
|
||||
if (isSelf && userPermission.permissionType === 'admin') {
|
||||
if (isSelf && userPermission?.permissionType === 'admin' && !isRemovingWorkspaceOwner) {
|
||||
const otherAdmins = await db
|
||||
.select()
|
||||
.from(permissions)
|
||||
@@ -95,18 +113,78 @@ export const DELETE = withRouteHandler(
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the user's permissions for this workspace
|
||||
await db
|
||||
.delete(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, userId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, workspaceId)
|
||||
)
|
||||
)
|
||||
const ownershipTransferred = await db.transaction(async (tx) => {
|
||||
let didTransferOwnership = false
|
||||
|
||||
await revokeWorkspaceCredentialMemberships(workspaceId, userId)
|
||||
if (isRemovingWorkspaceOwner) {
|
||||
/**
|
||||
* Invariant: the billed account is the org owner for org workspaces,
|
||||
* the owner for personal workspaces, and a workspace admin for
|
||||
* grandfathered shared workspaces.
|
||||
*/
|
||||
const newOwnerId = workspaceRow[0].billedAccountUserId
|
||||
|
||||
await tx
|
||||
.update(workspace)
|
||||
.set({ ownerId: newOwnerId, updatedAt: new Date() })
|
||||
.where(eq(workspace.id, workspaceId))
|
||||
|
||||
const [existingNewOwnerPermission] = await tx
|
||||
.select({ id: permissions.id })
|
||||
.from(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, newOwnerId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, workspaceId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingNewOwnerPermission) {
|
||||
await tx
|
||||
.update(permissions)
|
||||
.set({ permissionType: 'admin', updatedAt: new Date() })
|
||||
.where(eq(permissions.id, existingNewOwnerPermission.id))
|
||||
} else {
|
||||
const now = new Date()
|
||||
await tx.insert(permissions).values({
|
||||
id: generateId(),
|
||||
userId: newOwnerId,
|
||||
entityType: 'workspace',
|
||||
entityId: workspaceId,
|
||||
permissionType: 'admin',
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
|
||||
didTransferOwnership = true
|
||||
}
|
||||
|
||||
await tx
|
||||
.delete(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, userId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, workspaceId)
|
||||
)
|
||||
)
|
||||
|
||||
await revokeWorkspaceCredentialMembershipsTx(tx, workspaceId, userId)
|
||||
|
||||
await tx
|
||||
.delete(permissionGroupMember)
|
||||
.where(
|
||||
and(
|
||||
eq(permissionGroupMember.userId, userId),
|
||||
eq(permissionGroupMember.workspaceId, workspaceId)
|
||||
)
|
||||
)
|
||||
|
||||
return didTransferOwnership
|
||||
})
|
||||
|
||||
captureServerEvent(
|
||||
session.user.id,
|
||||
@@ -126,8 +204,9 @@ export const DELETE = withRouteHandler(
|
||||
description: isSelf ? 'Left the workspace' : `Removed member ${userId} from the workspace`,
|
||||
metadata: {
|
||||
removedUserId: userId,
|
||||
removedUserRole: userPermission.permissionType,
|
||||
removedUserRole: userPermission?.permissionType ?? 'owner',
|
||||
selfRemoval: isSelf,
|
||||
ownershipTransferred,
|
||||
},
|
||||
request: req,
|
||||
})
|
||||
|
||||
@@ -156,32 +156,86 @@ function toToolData(tc: NonNullable<ContentBlock['toolCall']>): ToolCallData {
|
||||
*/
|
||||
function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
|
||||
const segments: MessageSegment[] = []
|
||||
let group: AgentGroupSegment | null = null
|
||||
const pushGroup = (nextGroup: AgentGroupSegment, isOpen = false) => {
|
||||
segments.push({ ...nextGroup, isOpen })
|
||||
const groupsByKey = new Map<string, AgentGroupSegment>()
|
||||
let activeGroupKey: string | null = null
|
||||
|
||||
const groupKey = (name: string, parentToolCallId: string | undefined) =>
|
||||
parentToolCallId ? `${name}:${parentToolCallId}` : `${name}:legacy`
|
||||
|
||||
const resolveGroupKey = (name: string, parentToolCallId: string | undefined) => {
|
||||
if (parentToolCallId) return groupKey(name, parentToolCallId)
|
||||
if (activeGroupKey && groupsByKey.get(activeGroupKey)?.agentName === name) {
|
||||
return activeGroupKey
|
||||
}
|
||||
for (const [key, g] of groupsByKey) {
|
||||
if (g.agentName === name && g.isOpen) return key
|
||||
}
|
||||
return groupKey(name, undefined)
|
||||
}
|
||||
|
||||
const ensureGroup = (
|
||||
name: string,
|
||||
parentToolCallId: string | undefined
|
||||
): { group: AgentGroupSegment; created: boolean } => {
|
||||
const key = resolveGroupKey(name, parentToolCallId)
|
||||
const existing = groupsByKey.get(key)
|
||||
if (existing) return { group: existing, created: false }
|
||||
const group: AgentGroupSegment = {
|
||||
type: 'agent_group',
|
||||
id: `agent-${key}-${segments.length}`,
|
||||
agentName: name,
|
||||
agentLabel: resolveAgentLabel(name),
|
||||
items: [],
|
||||
isDelegating: false,
|
||||
isOpen: false,
|
||||
}
|
||||
segments.push(group)
|
||||
groupsByKey.set(key, group)
|
||||
return { group, created: true }
|
||||
}
|
||||
|
||||
const findGroupForSubagentChunk = (
|
||||
parentToolCallId: string | undefined
|
||||
): AgentGroupSegment | undefined => {
|
||||
if (parentToolCallId) {
|
||||
for (const [key, g] of groupsByKey) {
|
||||
if (key.endsWith(`:${parentToolCallId}`)) return g
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
if (activeGroupKey) return groupsByKey.get(activeGroupKey)
|
||||
return undefined
|
||||
}
|
||||
|
||||
const flushLanes = () => {
|
||||
for (const g of groupsByKey.values()) {
|
||||
g.isOpen = false
|
||||
g.isDelegating = false
|
||||
}
|
||||
groupsByKey.clear()
|
||||
activeGroupKey = null
|
||||
}
|
||||
|
||||
for (let i = 0; i < blocks.length; i++) {
|
||||
const block = blocks[i]
|
||||
|
||||
if (block.type === 'subagent_text' || block.type === 'subagent_thinking') {
|
||||
if (!block.content || !group) continue
|
||||
group.isDelegating = false
|
||||
const lastItem = group.items[group.items.length - 1]
|
||||
if (!block.content) continue
|
||||
const g = findGroupForSubagentChunk(block.parentToolCallId)
|
||||
if (!g) continue
|
||||
g.isDelegating = false
|
||||
const lastItem = g.items[g.items.length - 1]
|
||||
if (lastItem?.type === 'text') {
|
||||
lastItem.content += block.content
|
||||
} else {
|
||||
group.items.push({ type: 'text', content: block.content })
|
||||
g.items.push({ type: 'text', content: block.content })
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (block.type === 'thinking') {
|
||||
if (!block.content?.trim()) continue
|
||||
if (group) {
|
||||
pushGroup(group)
|
||||
group = null
|
||||
}
|
||||
flushLanes()
|
||||
const last = segments[segments.length - 1]
|
||||
if (last?.type === 'thinking' && last.endedAt === undefined) {
|
||||
last.content += block.content
|
||||
@@ -201,21 +255,19 @@ function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
|
||||
if (block.type === 'text') {
|
||||
if (!block.content) continue
|
||||
if (block.subagent) {
|
||||
if (group && group.agentName === block.subagent) {
|
||||
group.isDelegating = false
|
||||
const lastItem = group.items[group.items.length - 1]
|
||||
const g = groupsByKey.get(resolveGroupKey(block.subagent, block.parentToolCallId))
|
||||
if (g) {
|
||||
g.isDelegating = false
|
||||
const lastItem = g.items[g.items.length - 1]
|
||||
if (lastItem?.type === 'text') {
|
||||
lastItem.content += block.content
|
||||
} else {
|
||||
group.items.push({ type: 'text', content: block.content })
|
||||
g.items.push({ type: 'text', content: block.content })
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
if (group) {
|
||||
pushGroup(group)
|
||||
group = null
|
||||
}
|
||||
flushLanes()
|
||||
const last = segments[segments.length - 1]
|
||||
if (last?.type === 'text') {
|
||||
last.content += block.content
|
||||
@@ -228,34 +280,23 @@ function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
|
||||
if (block.type === 'subagent') {
|
||||
if (!block.content) continue
|
||||
const key = block.content
|
||||
if (group && group.agentName === key) continue
|
||||
|
||||
const dispatchToolName = SUBAGENT_DISPATCH_TOOLS[key]
|
||||
let inheritedDelegation = false
|
||||
if (group && dispatchToolName) {
|
||||
const last: AgentGroupItem | undefined = group.items[group.items.length - 1]
|
||||
if (last?.type === 'tool' && last.data.toolName === dispatchToolName) {
|
||||
inheritedDelegation = !isToolDone(last.data.status) && Boolean(last.data.streamingArgs)
|
||||
group.items.pop()
|
||||
const dispatchToolName = SUBAGENT_DISPATCH_TOOLS[key]
|
||||
if (dispatchToolName) {
|
||||
const mship = groupsByKey.get(groupKey('mothership', undefined))
|
||||
if (mship) {
|
||||
const last = mship.items[mship.items.length - 1]
|
||||
if (last?.type === 'tool' && last.data.toolName === dispatchToolName) {
|
||||
inheritedDelegation = !isToolDone(last.data.status) && Boolean(last.data.streamingArgs)
|
||||
mship.items.pop()
|
||||
}
|
||||
}
|
||||
if (group.items.length > 0) {
|
||||
pushGroup(group)
|
||||
}
|
||||
group = null
|
||||
} else if (group) {
|
||||
pushGroup(group)
|
||||
group = null
|
||||
}
|
||||
|
||||
group = {
|
||||
type: 'agent_group',
|
||||
id: `agent-${key}-${i}`,
|
||||
agentName: key,
|
||||
agentLabel: resolveAgentLabel(key),
|
||||
items: [],
|
||||
isDelegating: inheritedDelegation,
|
||||
isOpen: false,
|
||||
}
|
||||
groupsByKey.delete(groupKey('mothership', undefined))
|
||||
const { group: g } = ensureGroup(key, block.parentToolCallId)
|
||||
if (inheritedDelegation) g.isDelegating = true
|
||||
g.isOpen = true
|
||||
activeGroupKey = resolveGroupKey(key, block.parentToolCallId)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -267,95 +308,75 @@ function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
|
||||
const isDispatch = SUBAGENT_KEYS.has(tc.name) && !tc.calledBy
|
||||
|
||||
if (isDispatch) {
|
||||
if (!group || group.agentName !== tc.name) {
|
||||
if (group) {
|
||||
pushGroup(group)
|
||||
group = null
|
||||
}
|
||||
group = {
|
||||
type: 'agent_group',
|
||||
id: `agent-${tc.name}-${i}`,
|
||||
agentName: tc.name,
|
||||
agentLabel: resolveAgentLabel(tc.name),
|
||||
items: [],
|
||||
isDelegating: false,
|
||||
isOpen: false,
|
||||
}
|
||||
}
|
||||
group.isDelegating = isDelegatingTool(tc)
|
||||
groupsByKey.delete(groupKey('mothership', undefined))
|
||||
const { group: g } = ensureGroup(tc.name, tc.id)
|
||||
g.isDelegating = isDelegatingTool(tc)
|
||||
g.isOpen = g.isDelegating
|
||||
continue
|
||||
}
|
||||
|
||||
const tool = toToolData(tc)
|
||||
|
||||
if (tc.calledBy && group && group.agentName === tc.calledBy) {
|
||||
group.isDelegating = false
|
||||
group.items.push({ type: 'tool', data: tool })
|
||||
} else if (tc.calledBy) {
|
||||
if (group) {
|
||||
pushGroup(group)
|
||||
group = null
|
||||
}
|
||||
group = {
|
||||
type: 'agent_group',
|
||||
id: `agent-${tc.calledBy}-${i}`,
|
||||
agentName: tc.calledBy,
|
||||
agentLabel: resolveAgentLabel(tc.calledBy),
|
||||
items: [{ type: 'tool', data: tool }],
|
||||
isDelegating: false,
|
||||
isOpen: false,
|
||||
}
|
||||
if (tc.calledBy) {
|
||||
const { group: g, created } = ensureGroup(tc.calledBy, block.parentToolCallId)
|
||||
g.isDelegating = false
|
||||
if (created && block.parentToolCallId) g.isOpen = true
|
||||
g.items.push({ type: 'tool', data: tool })
|
||||
activeGroupKey = resolveGroupKey(tc.calledBy, block.parentToolCallId)
|
||||
} else {
|
||||
if (group && group.agentName === 'mothership') {
|
||||
group.items.push({ type: 'tool', data: tool })
|
||||
} else {
|
||||
if (group) {
|
||||
pushGroup(group)
|
||||
group = null
|
||||
}
|
||||
group = {
|
||||
type: 'agent_group',
|
||||
id: `agent-mothership-${i}`,
|
||||
agentName: 'mothership',
|
||||
agentLabel: 'Mothership',
|
||||
items: [{ type: 'tool', data: tool }],
|
||||
isDelegating: false,
|
||||
isOpen: false,
|
||||
}
|
||||
}
|
||||
const { group: g } = ensureGroup('mothership', undefined)
|
||||
g.items.push({ type: 'tool', data: tool })
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (block.type === 'options') {
|
||||
if (!block.options?.length) continue
|
||||
if (group) {
|
||||
pushGroup(group)
|
||||
group = null
|
||||
}
|
||||
flushLanes()
|
||||
segments.push({ type: 'options', items: block.options })
|
||||
continue
|
||||
}
|
||||
|
||||
if (block.type === 'subagent_end') {
|
||||
if (group) {
|
||||
pushGroup(group)
|
||||
group = null
|
||||
if (block.parentToolCallId) {
|
||||
for (const [key, g] of groupsByKey) {
|
||||
if (key.endsWith(`:${block.parentToolCallId}`)) {
|
||||
g.isOpen = false
|
||||
g.isDelegating = false
|
||||
}
|
||||
}
|
||||
if (activeGroupKey?.endsWith(`:${block.parentToolCallId}`)) {
|
||||
activeGroupKey = null
|
||||
}
|
||||
} else {
|
||||
for (const [key, g] of groupsByKey) {
|
||||
if (key.endsWith(':legacy') && g.agentName !== 'mothership') {
|
||||
g.isOpen = false
|
||||
g.isDelegating = false
|
||||
}
|
||||
}
|
||||
if (activeGroupKey?.endsWith(':legacy')) {
|
||||
activeGroupKey = null
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (block.type === 'stopped') {
|
||||
if (group) {
|
||||
pushGroup(group)
|
||||
group = null
|
||||
}
|
||||
flushLanes()
|
||||
segments.push({ type: 'stopped' })
|
||||
}
|
||||
}
|
||||
|
||||
if (group) pushGroup(group, true)
|
||||
return segments
|
||||
const visibleSegments = segments.filter(
|
||||
(segment) =>
|
||||
segment.type !== 'agent_group' ||
|
||||
segment.items.length > 0 ||
|
||||
segment.isDelegating ||
|
||||
segment.isOpen
|
||||
)
|
||||
|
||||
return visibleSegments
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -428,12 +449,6 @@ export function MessageContent({
|
||||
isStreaming &&
|
||||
!hasTrailingContent &&
|
||||
(lastSegment.type === 'thinking' || hasSubagentEnded || allLastGroupToolsDone)
|
||||
const lastOpenSubagentGroupId = [...segments]
|
||||
.reverse()
|
||||
.find(
|
||||
(segment): segment is AgentGroupSegment =>
|
||||
segment.type === 'agent_group' && segment.agentName !== 'mothership' && segment.isOpen
|
||||
)?.id
|
||||
|
||||
return (
|
||||
<div className='space-y-[10px]'>
|
||||
@@ -488,8 +503,8 @@ export function MessageContent({
|
||||
items={segment.items}
|
||||
isDelegating={segment.isDelegating}
|
||||
isStreaming={isStreaming}
|
||||
autoCollapse={allToolsDone && hasFollowingText}
|
||||
defaultExpanded={segment.id === lastOpenSubagentGroupId}
|
||||
autoCollapse={!segment.isOpen && allToolsDone && hasFollowingText}
|
||||
defaultExpanded={segment.isOpen}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -131,6 +131,7 @@ import type {
|
||||
MothershipResource,
|
||||
MothershipResourceType,
|
||||
QueuedMessage,
|
||||
ToolCallInfo,
|
||||
} from '../types'
|
||||
import { ToolCallStatus } from '../types'
|
||||
|
||||
@@ -701,7 +702,9 @@ function parseStreamBatchResponse(value: unknown): StreamBatchResponse {
|
||||
|
||||
function toRawPersistedContentBlock(block: ContentBlock): Record<string, unknown> | null {
|
||||
const persisted = toRawPersistedContentBlockBody(block)
|
||||
return persisted ? withBlockTiming(persisted, block) : null
|
||||
if (!persisted) return null
|
||||
if (block.parentToolCallId) persisted.parentToolCallId = block.parentToolCallId
|
||||
return withBlockTiming(persisted, block)
|
||||
}
|
||||
|
||||
function toRawPersistedContentBlockBody(block: ContentBlock): Record<string, unknown> | null {
|
||||
@@ -1215,7 +1218,7 @@ export function useChat(
|
||||
reader: ReadableStreamDefaultReader<Uint8Array>,
|
||||
assistantId: string,
|
||||
expectedGen?: number,
|
||||
options?: { preserveExistingState?: boolean }
|
||||
options?: { preserveExistingState?: boolean; suppressWorkflowToolStarts?: boolean }
|
||||
) => Promise<{ sawStreamError: boolean; sawComplete: boolean }>
|
||||
>(async () => ({ sawStreamError: false, sawComplete: false }))
|
||||
const attachToExistingStreamRef = useRef<
|
||||
@@ -1457,6 +1460,9 @@ export function useChat(
|
||||
if (handledClientWorkflowToolIdsRef.current.has(toolCallId)) {
|
||||
return
|
||||
}
|
||||
if (recoveringClientWorkflowToolIdsRef.current.has(toolCallId)) {
|
||||
return
|
||||
}
|
||||
handledClientWorkflowToolIdsRef.current.add(toolCallId)
|
||||
|
||||
ensureWorkflowToolResource(toolArgs)
|
||||
@@ -1467,41 +1473,41 @@ export function useChat(
|
||||
|
||||
const recoverPendingClientWorkflowTools = useCallback(
|
||||
async (nextMessages: ChatMessage[]) => {
|
||||
const pending: ToolCallInfo[] = []
|
||||
|
||||
for (const message of nextMessages) {
|
||||
for (const block of message.contentBlocks ?? []) {
|
||||
const toolCall = block.toolCall
|
||||
if (!toolCall || !isWorkflowToolName(toolCall.name)) {
|
||||
continue
|
||||
}
|
||||
if (toolCall.status !== 'executing') {
|
||||
continue
|
||||
}
|
||||
|
||||
if (!toolCall || !isWorkflowToolName(toolCall.name)) continue
|
||||
if (toolCall.status !== 'executing') continue
|
||||
if (
|
||||
handledClientWorkflowToolIdsRef.current.has(toolCall.id) ||
|
||||
recoveringClientWorkflowToolIdsRef.current.has(toolCall.id)
|
||||
) {
|
||||
continue
|
||||
}
|
||||
|
||||
recoveringClientWorkflowToolIdsRef.current.add(toolCall.id)
|
||||
pending.push(toolCall)
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const toolArgs = toolCall.params ?? {}
|
||||
const targetWorkflowId = ensureWorkflowToolResource(toolArgs)
|
||||
for (const toolCall of pending) {
|
||||
try {
|
||||
const toolArgs = toolCall.params ?? {}
|
||||
const targetWorkflowId = ensureWorkflowToolResource(toolArgs)
|
||||
|
||||
if (targetWorkflowId) {
|
||||
const rebound = await bindRunToolToExecution(toolCall.id, targetWorkflowId)
|
||||
if (rebound) {
|
||||
handledClientWorkflowToolIdsRef.current.add(toolCall.id)
|
||||
continue
|
||||
}
|
||||
if (targetWorkflowId) {
|
||||
const rebound = await bindRunToolToExecution(toolCall.id, targetWorkflowId)
|
||||
if (rebound) {
|
||||
handledClientWorkflowToolIdsRef.current.add(toolCall.id)
|
||||
continue
|
||||
}
|
||||
|
||||
startClientWorkflowTool(toolCall.id, toolCall.name, toolArgs)
|
||||
} finally {
|
||||
recoveringClientWorkflowToolIdsRef.current.delete(toolCall.id)
|
||||
}
|
||||
|
||||
recoveringClientWorkflowToolIdsRef.current.delete(toolCall.id)
|
||||
startClientWorkflowTool(toolCall.id, toolCall.name, toolArgs)
|
||||
} finally {
|
||||
recoveringClientWorkflowToolIdsRef.current.delete(toolCall.id)
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -1701,7 +1707,7 @@ export function useChat(
|
||||
reader: ReadableStreamDefaultReader<Uint8Array>,
|
||||
assistantId: string,
|
||||
expectedGen?: number,
|
||||
options?: { preserveExistingState?: boolean }
|
||||
options?: { preserveExistingState?: boolean; suppressWorkflowToolStarts?: boolean }
|
||||
) => {
|
||||
const decoder = new TextDecoder()
|
||||
streamReaderRef.current = reader
|
||||
@@ -1731,6 +1737,7 @@ export function useChat(
|
||||
for (let i = blocks.length - 1; i >= 0; i--) {
|
||||
if (blocks[i].type === 'subagent' && blocks[i].content) {
|
||||
activeSubagent = blocks[i].content
|
||||
activeSubagentParentToolCallId = blocks[i].parentToolCallId
|
||||
break
|
||||
}
|
||||
if (blocks[i].type === 'subagent_end') {
|
||||
@@ -1760,23 +1767,45 @@ export function useChat(
|
||||
if (block && block.endedAt === undefined) block.endedAt = toEventMs(ts)
|
||||
}
|
||||
|
||||
const ensureTextBlock = (subagentName: string | undefined, ts?: string): ContentBlock => {
|
||||
const ensureTextBlock = (
|
||||
subagentName: string | undefined,
|
||||
parentToolCallId: string | undefined,
|
||||
ts?: string
|
||||
): ContentBlock => {
|
||||
const last = blocks[blocks.length - 1]
|
||||
if (last?.type === 'text' && last.subagent === subagentName) return last
|
||||
if (
|
||||
last?.type === 'text' &&
|
||||
last.subagent === subagentName &&
|
||||
last.parentToolCallId === parentToolCallId
|
||||
) {
|
||||
return last
|
||||
}
|
||||
stampBlockEnd(last, ts)
|
||||
const b: ContentBlock = { type: 'text', content: '', timestamp: toEventMs(ts) }
|
||||
if (subagentName) b.subagent = subagentName
|
||||
if (parentToolCallId) b.parentToolCallId = parentToolCallId
|
||||
blocks.push(b)
|
||||
return b
|
||||
}
|
||||
|
||||
const ensureThinkingBlock = (subagentName: string | undefined, ts?: string): ContentBlock => {
|
||||
const ensureThinkingBlock = (
|
||||
subagentName: string | undefined,
|
||||
parentToolCallId: string | undefined,
|
||||
ts?: string
|
||||
): ContentBlock => {
|
||||
const targetType = subagentName ? 'subagent_thinking' : 'thinking'
|
||||
const last = blocks[blocks.length - 1]
|
||||
if (last?.type === targetType && last.subagent === subagentName) return last
|
||||
if (
|
||||
last?.type === targetType &&
|
||||
last.subagent === subagentName &&
|
||||
last.parentToolCallId === parentToolCallId
|
||||
) {
|
||||
return last
|
||||
}
|
||||
stampBlockEnd(last, ts)
|
||||
const b: ContentBlock = { type: targetType, content: '', timestamp: toEventMs(ts) }
|
||||
if (subagentName) b.subagent = subagentName
|
||||
if (parentToolCallId) b.parentToolCallId = parentToolCallId
|
||||
blocks.push(b)
|
||||
return b
|
||||
}
|
||||
@@ -1793,9 +1822,27 @@ export function useChat(
|
||||
return activeSubagent
|
||||
}
|
||||
|
||||
const appendInlineErrorTag = (tag: string, subagentName?: string, ts?: string) => {
|
||||
const resolveParentForSubagentBlock = (
|
||||
subagent: string | undefined,
|
||||
scopedParent: string | undefined
|
||||
): string | undefined => {
|
||||
if (!subagent) return undefined
|
||||
if (scopedParent) return scopedParent
|
||||
if (activeSubagent === subagent) return activeSubagentParentToolCallId
|
||||
for (const [parent, name] of subagentByParentToolCallId) {
|
||||
if (name === subagent) return parent
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
const appendInlineErrorTag = (
|
||||
tag: string,
|
||||
subagentName?: string,
|
||||
parentToolCallId?: string,
|
||||
ts?: string
|
||||
) => {
|
||||
if (runningText.includes(tag)) return
|
||||
const tb = ensureTextBlock(subagentName, ts)
|
||||
const tb = ensureTextBlock(subagentName, parentToolCallId, ts)
|
||||
const prefix = runningText.length > 0 && !runningText.endsWith('\n') ? '\n' : ''
|
||||
tb.content = `${tb.content ?? ''}${prefix}${tag}`
|
||||
runningText += `${prefix}${tag}`
|
||||
@@ -2008,7 +2055,11 @@ export function useChat(
|
||||
if (chunk) {
|
||||
const eventTs = typeof parsed.ts === 'string' ? parsed.ts : undefined
|
||||
if (parsed.payload.channel === MothershipStreamV1TextChannel.thinking) {
|
||||
const tb = ensureThinkingBlock(scopedSubagent, eventTs)
|
||||
const scopedParentForBlock = resolveParentForSubagentBlock(
|
||||
scopedSubagent,
|
||||
scopedParentToolCallId
|
||||
)
|
||||
const tb = ensureThinkingBlock(scopedSubagent, scopedParentForBlock, eventTs)
|
||||
tb.content = (tb.content ?? '') + chunk
|
||||
flushText()
|
||||
break
|
||||
@@ -2019,7 +2070,11 @@ export function useChat(
|
||||
lastContentSource !== contentSource &&
|
||||
runningText.length > 0 &&
|
||||
!runningText.endsWith('\n')
|
||||
const tb = ensureTextBlock(scopedSubagent, eventTs)
|
||||
const scopedParentForBlock = resolveParentForSubagentBlock(
|
||||
scopedSubagent,
|
||||
scopedParentToolCallId
|
||||
)
|
||||
const tb = ensureTextBlock(scopedSubagent, scopedParentForBlock, eventTs)
|
||||
const normalizedChunk = needsBoundaryNewline ? `\n${chunk}` : chunk
|
||||
tb.content = (tb.content ?? '') + normalizedChunk
|
||||
runningText += normalizedChunk
|
||||
@@ -2355,9 +2410,17 @@ export function useChat(
|
||||
}
|
||||
}
|
||||
|
||||
if (!toolMap.has(id)) {
|
||||
const existingToolCall = toolMap.has(id)
|
||||
? blocks[toolMap.get(id)!]?.toolCall
|
||||
: undefined
|
||||
const isNewToolCall = !existingToolCall
|
||||
if (isNewToolCall) {
|
||||
stampBlockEnd(blocks[blocks.length - 1])
|
||||
toolMap.set(id, blocks.length)
|
||||
const parentToolCallIdForBlock = resolveParentForSubagentBlock(
|
||||
scopedSubagent,
|
||||
scopedParentToolCallId
|
||||
)
|
||||
blocks.push({
|
||||
type: 'tool_call',
|
||||
toolCall: {
|
||||
@@ -2368,6 +2431,9 @@ export function useChat(
|
||||
params: args,
|
||||
calledBy: scopedSubagent,
|
||||
},
|
||||
...(parentToolCallIdForBlock
|
||||
? { parentToolCallId: parentToolCallIdForBlock }
|
||||
: {}),
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
if (name === ReadTool.id || isResourceToolName(name)) {
|
||||
@@ -2385,7 +2451,14 @@ export function useChat(
|
||||
flush()
|
||||
|
||||
if (isWorkflowToolName(name) && !isPartial) {
|
||||
startClientWorkflowTool(id, name, args ?? {})
|
||||
const shouldStartWorkflowTool =
|
||||
!options?.suppressWorkflowToolStarts &&
|
||||
(isNewToolCall ||
|
||||
(existingToolCall?.status === ToolCallStatus.executing &&
|
||||
!existingToolCall.result))
|
||||
if (shouldStartWorkflowTool) {
|
||||
startClientWorkflowTool(id, name, args ?? {})
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
@@ -2488,9 +2561,13 @@ export function useChat(
|
||||
break
|
||||
}
|
||||
const spanData = asPayloadRecord(payload.data)
|
||||
const parentToolCallId =
|
||||
scopedParentToolCallId ??
|
||||
(typeof spanData?.tool_call_id === 'string' ? spanData.tool_call_id : undefined)
|
||||
const parentToolCallIdFromData =
|
||||
typeof spanData?.tool_call_id === 'string'
|
||||
? spanData.tool_call_id
|
||||
: typeof spanData?.toolCallId === 'string'
|
||||
? spanData.toolCallId
|
||||
: undefined
|
||||
const parentToolCallId = scopedParentToolCallId ?? parentToolCallIdFromData
|
||||
const isPendingPause = spanData?.pending === true
|
||||
const name = typeof payload.agent === 'string' ? payload.agent : scopedAgentId
|
||||
if (payload.event === MothershipStreamV1SpanLifecycleEvent.start && name) {
|
||||
@@ -2505,7 +2582,12 @@ export function useChat(
|
||||
activeSubagentParentToolCallId = parentToolCallId
|
||||
if (!isSameActiveSubagent) {
|
||||
stampBlockEnd(blocks[blocks.length - 1])
|
||||
blocks.push({ type: 'subagent', content: name, timestamp: Date.now() })
|
||||
blocks.push({
|
||||
type: 'subagent',
|
||||
content: name,
|
||||
...(parentToolCallId ? { parentToolCallId } : {}),
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
}
|
||||
if (name === FILE_SUBAGENT_ID && !isSameActiveSubagent) {
|
||||
applyPreviewSessionUpdate({
|
||||
@@ -2549,14 +2631,23 @@ export function useChat(
|
||||
if (name) {
|
||||
for (let i = blocks.length - 1; i >= 0; i--) {
|
||||
const b = blocks[i]
|
||||
if (b.type === 'subagent' && b.content === name && b.endedAt === undefined) {
|
||||
if (
|
||||
b.type === 'subagent' &&
|
||||
b.content === name &&
|
||||
b.endedAt === undefined &&
|
||||
(!parentToolCallId || b.parentToolCallId === parentToolCallId)
|
||||
) {
|
||||
b.endedAt = endNow
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
stampBlockEnd(blocks[blocks.length - 1])
|
||||
blocks.push({ type: 'subagent_end', timestamp: endNow })
|
||||
blocks.push({
|
||||
type: 'subagent_end',
|
||||
...(parentToolCallId ? { parentToolCallId } : {}),
|
||||
timestamp: endNow,
|
||||
})
|
||||
flush()
|
||||
}
|
||||
break
|
||||
@@ -2567,6 +2658,7 @@ export function useChat(
|
||||
appendInlineErrorTag(
|
||||
buildInlineErrorTag(parsed.payload),
|
||||
scopedSubagent,
|
||||
resolveParentForSubagentBlock(scopedSubagent, scopedParentToolCallId),
|
||||
typeof parsed.ts === 'string' ? parsed.ts : undefined
|
||||
)
|
||||
break
|
||||
@@ -2671,6 +2763,7 @@ export function useChat(
|
||||
let latestCursor = afterCursor
|
||||
let seedEvents = opts.initialBatch?.events ?? []
|
||||
let streamStatus = opts.initialBatch?.status ?? 'unknown'
|
||||
let suppressSeedWorkflowStarts = seedEvents.length > 0
|
||||
|
||||
const isStaleReconnect = () =>
|
||||
streamGenRef.current !== expectedGen || abortControllerRef.current?.signal.aborted === true
|
||||
@@ -2689,11 +2782,15 @@ export function useChat(
|
||||
buildReplayStream(seedEvents).getReader(),
|
||||
assistantId,
|
||||
expectedGen,
|
||||
{ preserveExistingState: true }
|
||||
{
|
||||
preserveExistingState: true,
|
||||
suppressWorkflowToolStarts: suppressSeedWorkflowStarts,
|
||||
}
|
||||
)
|
||||
latestCursor = String(seedEvents[seedEvents.length - 1]?.eventId ?? latestCursor)
|
||||
lastCursorRef.current = latestCursor
|
||||
seedEvents = []
|
||||
suppressSeedWorkflowStarts = false
|
||||
|
||||
if (replayResult.sawStreamError) {
|
||||
return { error: true, aborted: false }
|
||||
@@ -2998,6 +3095,7 @@ export function useChat(
|
||||
...(display ? { display } : {}),
|
||||
calledBy: block.toolCall.calledBy,
|
||||
},
|
||||
...(block.parentToolCallId ? { parentToolCallId: block.parentToolCallId } : {}),
|
||||
...timing,
|
||||
}
|
||||
}
|
||||
@@ -3005,6 +3103,7 @@ export function useChat(
|
||||
type: block.type,
|
||||
content: block.content,
|
||||
...(block.subagent ? { lane: 'subagent' } : {}),
|
||||
...(block.parentToolCallId ? { parentToolCallId: block.parentToolCallId } : {}),
|
||||
...timing,
|
||||
}
|
||||
})
|
||||
|
||||
@@ -133,6 +133,7 @@ export interface ContentBlock {
|
||||
options?: OptionItem[]
|
||||
timestamp?: number
|
||||
endedAt?: number
|
||||
parentToolCallId?: string
|
||||
}
|
||||
|
||||
export interface ChatMessageAttachment {
|
||||
|
||||
@@ -604,6 +604,10 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
const startTime = getHighResTime()
|
||||
|
||||
try {
|
||||
if (!options.workspaceId) {
|
||||
throw new Error('workspaceId is required for multipart upload')
|
||||
}
|
||||
|
||||
const initiateResponse = await fetch('/api/files/multipart?action=initiate', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
@@ -611,6 +615,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
fileName: file.name,
|
||||
contentType: getFileContentType(file),
|
||||
fileSize: file.size,
|
||||
workspaceId: options.workspaceId,
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -618,7 +623,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
throw new Error(`Failed to initiate multipart upload: ${initiateResponse.statusText}`)
|
||||
}
|
||||
|
||||
const { uploadId, key } = await initiateResponse.json()
|
||||
const { uploadId, key, uploadToken } = await initiateResponse.json()
|
||||
logger.info(`Initiated multipart upload with ID: ${uploadId}`)
|
||||
|
||||
const chunkSize = UPLOAD_CONFIG.CHUNK_SIZE
|
||||
@@ -629,8 +634,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
uploadId,
|
||||
key,
|
||||
uploadToken,
|
||||
partNumbers,
|
||||
}),
|
||||
})
|
||||
@@ -639,7 +643,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
await fetch('/api/files/multipart?action=abort', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ uploadId, key }),
|
||||
body: JSON.stringify({ uploadToken }),
|
||||
})
|
||||
throw new Error(`Failed to get part URLs: ${partUrlsResponse.statusText}`)
|
||||
}
|
||||
@@ -723,8 +727,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
uploadId,
|
||||
key,
|
||||
uploadToken,
|
||||
parts: uploadedParts,
|
||||
}),
|
||||
})
|
||||
@@ -791,7 +794,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
|
||||
}
|
||||
|
||||
throw new DirectUploadError(
|
||||
`Failed to upload ${file.name}: ${errorData?.error || 'Unknown error'}`,
|
||||
`Failed to upload ${file.name}: ${errorData?.message || errorData?.error || 'Unknown error'}`,
|
||||
errorData
|
||||
)
|
||||
}
|
||||
|
||||
@@ -75,11 +75,11 @@ function apportionCredits(
|
||||
return result
|
||||
}
|
||||
|
||||
function RoleBadge({ role }: { role: string }) {
|
||||
const variant = role === 'owner' ? 'blue-secondary' : 'gray-secondary'
|
||||
function RoleBadge({ memberRole }: { memberRole: string }) {
|
||||
const variant = memberRole === 'owner' ? 'blue-secondary' : 'gray-secondary'
|
||||
return (
|
||||
<Badge variant={variant} size='sm'>
|
||||
{role.charAt(0).toUpperCase() + role.slice(1)}
|
||||
{memberRole.charAt(0).toUpperCase() + memberRole.slice(1)}
|
||||
</Badge>
|
||||
)
|
||||
}
|
||||
@@ -521,6 +521,7 @@ export function OrganizationRoster({
|
||||
const rowKey = `member-${m.memberId}`
|
||||
const expanded = expandedRows.has(rowKey)
|
||||
const isSelf = m.email === currentUserEmail
|
||||
const isExternal = m.role === 'external'
|
||||
const credits = memberCredits[m.userId] ?? 0
|
||||
const canRemove = isAdminOrOwner && m.role !== 'owner' && !isSelf
|
||||
const canTransferAndLeave = isSelf && m.role === 'owner' && !!onTransferOwnership
|
||||
@@ -545,8 +546,11 @@ export function OrganizationRoster({
|
||||
</button>
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
{m.role === 'owner' || !canEditRoles || m.userId === currentUserId ? (
|
||||
<RoleBadge role={m.role} />
|
||||
{m.role === 'owner' ||
|
||||
isExternal ||
|
||||
!canEditRoles ||
|
||||
m.userId === currentUserId ? (
|
||||
<RoleBadge memberRole={m.role} />
|
||||
) : (
|
||||
<OrgRoleSelector
|
||||
value={(m.role === 'admin' ? 'admin' : 'member') as OrgRole}
|
||||
@@ -630,6 +634,7 @@ export function OrganizationRoster({
|
||||
{filteredInvitations.map((inv) => {
|
||||
const rowKey = `invite-${inv.id}`
|
||||
const expanded = expandedRows.has(rowKey)
|
||||
const isExternal = inv.membershipIntent === 'external'
|
||||
const isResending = resendingIds.has(inv.id)
|
||||
const isCancelling = cancellingIds.has(inv.id)
|
||||
const cooldown = resendCooldowns[inv.id] ?? 0
|
||||
@@ -660,7 +665,9 @@ export function OrganizationRoster({
|
||||
</button>
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
{isAdminOrOwner ? (
|
||||
{isExternal ? (
|
||||
<RoleBadge memberRole='external' />
|
||||
) : isAdminOrOwner ? (
|
||||
<OrgRoleSelector
|
||||
value={(inv.role === 'admin' ? 'admin' : 'member') as OrgRole}
|
||||
onChange={(next) =>
|
||||
@@ -677,7 +684,7 @@ export function OrganizationRoster({
|
||||
disabled={updateInvitation.isPending}
|
||||
/>
|
||||
) : (
|
||||
<RoleBadge role={inv.role} />
|
||||
<RoleBadge memberRole={inv.role} />
|
||||
)}
|
||||
</TableCell>
|
||||
<TableCell className='text-right'>
|
||||
|
||||
@@ -12,7 +12,10 @@ interface RemoveMemberDialogProps {
|
||||
open: boolean
|
||||
memberName: string
|
||||
shouldReduceSeats: boolean
|
||||
canReduceSeats?: boolean
|
||||
isSelfRemoval?: boolean
|
||||
isExternalRemoval?: boolean
|
||||
isSubmitting?: boolean
|
||||
error?: Error | null
|
||||
onOpenChange: (open: boolean) => void
|
||||
onShouldReduceSeatsChange: (shouldReduce: boolean) => void
|
||||
@@ -24,21 +27,37 @@ export function RemoveMemberDialog({
|
||||
open,
|
||||
memberName,
|
||||
shouldReduceSeats,
|
||||
canReduceSeats = true,
|
||||
error,
|
||||
onOpenChange,
|
||||
onShouldReduceSeatsChange,
|
||||
onConfirmRemove,
|
||||
onCancel,
|
||||
isSelfRemoval = false,
|
||||
isExternalRemoval = false,
|
||||
isSubmitting = false,
|
||||
}: RemoveMemberDialogProps) {
|
||||
const title = isSelfRemoval
|
||||
? 'Leave Organization'
|
||||
: isExternalRemoval
|
||||
? 'Remove External Member'
|
||||
: 'Remove Team Member'
|
||||
|
||||
return (
|
||||
<Modal open={open} onOpenChange={onOpenChange}>
|
||||
<ModalContent size='sm'>
|
||||
<ModalHeader>{isSelfRemoval ? 'Leave Organization' : 'Remove Team Member'}</ModalHeader>
|
||||
<ModalHeader>{title}</ModalHeader>
|
||||
<ModalBody>
|
||||
<p className='text-[var(--text-secondary)]'>
|
||||
{isSelfRemoval ? (
|
||||
'Are you sure you want to leave this organization? You will lose access to all team resources.'
|
||||
) : isExternalRemoval ? (
|
||||
<>
|
||||
Are you sure you want to remove{' '}
|
||||
<span className='font-medium text-[var(--text-primary)]'>{memberName}</span> from
|
||||
all organization workspaces? Their workspace access and workspace credential access
|
||||
will be revoked.
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
Are you sure you want to remove{' '}
|
||||
@@ -49,7 +68,7 @@ export function RemoveMemberDialog({
|
||||
This action cannot be undone.
|
||||
</p>
|
||||
|
||||
{!isSelfRemoval && (
|
||||
{!isSelfRemoval && !isExternalRemoval && canReduceSeats && (
|
||||
<div className='mt-4'>
|
||||
<div className='flex items-center gap-2'>
|
||||
<Checkbox
|
||||
@@ -77,10 +96,16 @@ export function RemoveMemberDialog({
|
||||
)}
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button variant='default' onClick={onCancel}>
|
||||
<Button variant='default' disabled={isSubmitting} onClick={onCancel}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button variant='destructive' onClick={() => onConfirmRemove(shouldReduceSeats)}>
|
||||
<Button
|
||||
variant='destructive'
|
||||
disabled={isSubmitting}
|
||||
onClick={() =>
|
||||
onConfirmRemove(isExternalRemoval || !canReduceSeats ? false : shouldReduceSeats)
|
||||
}
|
||||
>
|
||||
{isSelfRemoval ? 'Leave Organization' : 'Remove'}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
|
||||
@@ -53,7 +53,9 @@ export function TransferOwnershipDialog({
|
||||
const [selectedUserId, setSelectedUserId] = useState<string | null>(null)
|
||||
|
||||
const candidates = useMemo(() => {
|
||||
const others = members.filter((m) => m.userId !== currentUserId && m.role !== 'owner')
|
||||
const others = members.filter(
|
||||
(m) => m.userId !== currentUserId && m.role !== 'owner' && m.role !== 'external'
|
||||
)
|
||||
others.sort((a, b) => {
|
||||
if (a.role === 'admin' && b.role !== 'admin') return -1
|
||||
if (a.role !== 'admin' && b.role === 'admin') return 1
|
||||
@@ -66,7 +68,9 @@ export function TransferOwnershipDialog({
|
||||
)
|
||||
}, [members, currentUserId, search])
|
||||
|
||||
const hasCandidates = members.some((m) => m.userId !== currentUserId && m.role !== 'owner')
|
||||
const hasCandidates = members.some(
|
||||
(m) => m.userId !== currentUserId && m.role !== 'owner' && m.role !== 'external'
|
||||
)
|
||||
|
||||
const handleClose = (next: boolean) => {
|
||||
if (!next) {
|
||||
|
||||
@@ -6,14 +6,9 @@ import { Skeleton, type TagItem } from '@/components/emcn'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { getSubscriptionAccessState } from '@/lib/billing/client/utils'
|
||||
import { getPlanTierCredits, getPlanTierDollars } from '@/lib/billing/plan-helpers'
|
||||
import { checkEnterprisePlan } from '@/lib/billing/subscriptions/utils'
|
||||
import { checkEnterprisePlan, checkTeamPlan } from '@/lib/billing/subscriptions/utils'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import {
|
||||
generateSlug,
|
||||
getUsedSeats,
|
||||
isAdminOrOwner,
|
||||
type Member,
|
||||
} from '@/lib/workspaces/organization'
|
||||
import { generateSlug, isAdminOrOwner, type Member } from '@/lib/workspaces/organization'
|
||||
import {
|
||||
MemberInvitationCard,
|
||||
NoOrganizationView,
|
||||
@@ -93,6 +88,7 @@ export function TeamManagement() {
|
||||
memberName: string
|
||||
shouldReduceSeats: boolean
|
||||
isSelfRemoval?: boolean
|
||||
isExternalRemoval?: boolean
|
||||
}>({ open: false, memberId: '', memberName: '', shouldReduceSeats: false })
|
||||
const [transferDialogOpen, setTransferDialogOpen] = useState(false)
|
||||
const [transferPortalError, setTransferPortalError] = useState<string | null>(null)
|
||||
@@ -108,8 +104,9 @@ export function TeamManagement() {
|
||||
)
|
||||
|
||||
const adminOrOwner = isAdminOrOwner(organization, session?.user?.email)
|
||||
const usedSeats = getUsedSeats(organization)
|
||||
const totalSeats = organizationBillingData?.data?.totalSeats ?? 0
|
||||
const usedSeats = organizationBillingData?.data?.usedSeats ?? 0
|
||||
const canReduceSubscriptionSeats = Boolean(subscriptionData && checkTeamPlan(subscriptionData))
|
||||
|
||||
useEffect(() => {
|
||||
if ((hasTeamPlan || hasEnterprisePlan) && session?.user?.name && !orgName) {
|
||||
@@ -209,6 +206,7 @@ export function TeamManagement() {
|
||||
memberName: displayName,
|
||||
shouldReduceSeats: false,
|
||||
isSelfRemoval: isLeavingSelf,
|
||||
isExternalRemoval: member.role === 'external',
|
||||
})
|
||||
},
|
||||
[session?.user, activeOrganization?.id]
|
||||
@@ -225,11 +223,13 @@ export function TeamManagement() {
|
||||
orgId: activeOrganization?.id,
|
||||
shouldReduceSeats,
|
||||
})
|
||||
|
||||
setRemoveMemberDialog({
|
||||
open: false,
|
||||
memberId: '',
|
||||
memberName: '',
|
||||
shouldReduceSeats: false,
|
||||
isExternalRemoval: false,
|
||||
})
|
||||
|
||||
if (isSelfRemoval) {
|
||||
@@ -446,7 +446,7 @@ export function TeamManagement() {
|
||||
subscriptionData={subscriptionData || null}
|
||||
isLoadingSubscription={isLoadingSubscription}
|
||||
totalSeats={totalSeats}
|
||||
usedSeats={usedSeats.used}
|
||||
usedSeats={usedSeats}
|
||||
isLoading={isLoading}
|
||||
onAddSeatDialog={handleAddSeatDialog}
|
||||
/>
|
||||
@@ -466,7 +466,7 @@ export function TeamManagement() {
|
||||
onLoadUserWorkspaces={async () => {}}
|
||||
onWorkspaceToggle={handleWorkspaceToggle}
|
||||
inviteSuccess={inviteSuccess}
|
||||
availableSeats={Math.max(0, totalSeats - usedSeats.used)}
|
||||
availableSeats={Math.max(0, totalSeats - usedSeats)}
|
||||
maxSeats={totalSeats}
|
||||
invitationError={inviteMutation.error}
|
||||
isLoadingWorkspaces={isLoadingWorkspaces}
|
||||
@@ -504,7 +504,10 @@ export function TeamManagement() {
|
||||
open={removeMemberDialog.open}
|
||||
memberName={removeMemberDialog.memberName}
|
||||
shouldReduceSeats={removeMemberDialog.shouldReduceSeats}
|
||||
canReduceSeats={canReduceSubscriptionSeats}
|
||||
isSelfRemoval={removeMemberDialog.isSelfRemoval}
|
||||
isExternalRemoval={removeMemberDialog.isExternalRemoval}
|
||||
isSubmitting={removeMemberMutation.isPending}
|
||||
error={removeMemberMutation.error}
|
||||
onOpenChange={(open: boolean) => {
|
||||
if (!open) setRemoveMemberDialog({ ...removeMemberDialog, open: false })
|
||||
@@ -523,6 +526,7 @@ export function TeamManagement() {
|
||||
memberName: '',
|
||||
shouldReduceSeats: false,
|
||||
isSelfRemoval: false,
|
||||
isExternalRemoval: false,
|
||||
})
|
||||
}
|
||||
/>
|
||||
|
||||
@@ -78,8 +78,10 @@ export function useProfilePictureUpload({
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => ({ error: response.statusText }))
|
||||
throw new Error(errorData.error || `Failed to upload file: ${response.status}`)
|
||||
const errorData = await response.json().catch(() => ({ message: response.statusText }))
|
||||
throw new Error(
|
||||
errorData.message || errorData.error || `Failed to upload file: ${response.status}`
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
@@ -2,7 +2,9 @@
|
||||
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { toError } from '@sim/utils/errors'
|
||||
import { generateId } from '@sim/utils/id'
|
||||
import { toast } from '@/components/emcn'
|
||||
import { resolveFileType } from '@/lib/uploads/utils/file-utils'
|
||||
|
||||
const logger = createLogger('useFileAttachments')
|
||||
@@ -147,9 +149,13 @@ export function useFileAttachments(props: UseFileAttachmentsProps) {
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json().catch(() => ({
|
||||
error: `Upload failed: ${uploadResponse.status}`,
|
||||
message: `Upload failed: ${uploadResponse.status}`,
|
||||
}))
|
||||
throw new Error(errorData.error || `Failed to upload file: ${uploadResponse.status}`)
|
||||
throw new Error(
|
||||
errorData.message ||
|
||||
errorData.error ||
|
||||
`Failed to upload file: ${uploadResponse.status}`
|
||||
)
|
||||
}
|
||||
|
||||
const uploadData = await uploadResponse.json()
|
||||
@@ -172,6 +178,9 @@ export function useFileAttachments(props: UseFileAttachmentsProps) {
|
||||
)
|
||||
} catch (error) {
|
||||
logger.error(`File upload failed: ${error}`)
|
||||
toast.error(`Couldn't upload "${file.name}"`, {
|
||||
description: toError(error).message,
|
||||
})
|
||||
if (placeholder.previewUrl) URL.revokeObjectURL(placeholder.previewUrl)
|
||||
setAttachedFiles((prev) => prev.filter((f) => f.id !== placeholder.id))
|
||||
}
|
||||
|
||||
@@ -328,7 +328,8 @@ export function FileUpload({
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
const errorMessage = data.error || `Failed to upload file: ${response.status}`
|
||||
const errorMessage =
|
||||
data.message || data.error || `Failed to upload file: ${response.status}`
|
||||
uploadErrors.push(`${file.name}: ${errorMessage}`)
|
||||
|
||||
setUploadError(errorMessage)
|
||||
|
||||
@@ -34,7 +34,7 @@ import { hasExecutionResult } from '@/executor/utils/errors'
|
||||
import { coerceValue } from '@/executor/utils/start-block'
|
||||
import { subscriptionKeys } from '@/hooks/queries/subscription'
|
||||
import { getWorkflows } from '@/hooks/queries/utils/workflow-cache'
|
||||
import { useExecutionStream } from '@/hooks/use-execution-stream'
|
||||
import { isExecutionStreamHttpError, useExecutionStream } from '@/hooks/use-execution-stream'
|
||||
import { WorkflowValidationError } from '@/serializer'
|
||||
import { useCurrentWorkflowExecution, useExecutionStore } from '@/stores/execution'
|
||||
import { useNotificationStore } from '@/stores/notifications'
|
||||
@@ -60,6 +60,13 @@ const logger = createLogger('useWorkflowExecution')
|
||||
*/
|
||||
const activeReconnections = new Set<string>()
|
||||
|
||||
function isReconnectTerminal(error: unknown): boolean {
|
||||
return (
|
||||
isExecutionStreamHttpError(error) &&
|
||||
(error.httpStatus === 404 || error.httpStatus === 403 || error.httpStatus === 401)
|
||||
)
|
||||
}
|
||||
|
||||
interface DebugValidationResult {
|
||||
isValid: boolean
|
||||
error?: string
|
||||
@@ -487,8 +494,14 @@ export function useWorkflowExecution() {
|
||||
logger.error('Unexpected upload response format:', uploadResult)
|
||||
}
|
||||
} else {
|
||||
const errorText = await response.text()
|
||||
const message = `Failed to upload ${fileData.name}: ${response.status} ${errorText}`
|
||||
const cloned = response.clone()
|
||||
const errorData = await response.json().catch(() => null)
|
||||
const reason =
|
||||
errorData?.message ||
|
||||
errorData?.error ||
|
||||
(await cloned.text().catch(() => '')) ||
|
||||
`${response.status}`
|
||||
const message = `Failed to upload ${fileData.name}: ${reason}`
|
||||
logger.error(message)
|
||||
if (isUploadErrorCapable(workflowInput)) {
|
||||
try {
|
||||
@@ -1283,8 +1296,7 @@ export function useWorkflowExecution() {
|
||||
} else {
|
||||
if (!executor) {
|
||||
try {
|
||||
const httpStatus =
|
||||
isRecord(error) && typeof error.httpStatus === 'number' ? error.httpStatus : undefined
|
||||
const httpStatus = isExecutionStreamHttpError(error) ? error.httpStatus : undefined
|
||||
const storeAddConsole = useTerminalConsoleStore.getState().addConsole
|
||||
|
||||
if (httpStatus && activeWorkflowId) {
|
||||
@@ -1867,8 +1879,6 @@ export function useWorkflowExecution() {
|
||||
activeReconnections.add(reconnectWorkflowId)
|
||||
|
||||
executionStream.cancel(reconnectWorkflowId)
|
||||
setCurrentExecutionId(reconnectWorkflowId, executionId)
|
||||
setIsExecuting(reconnectWorkflowId, true)
|
||||
|
||||
const workflowEdges = useWorkflowStore.getState().edges
|
||||
const activeBlocksSet = new Set<string>()
|
||||
@@ -1891,13 +1901,47 @@ export function useWorkflowExecution() {
|
||||
includeStartConsoleEntry: true,
|
||||
})
|
||||
|
||||
clearExecutionEntries(executionId)
|
||||
|
||||
const capturedExecutionId = executionId
|
||||
const MAX_ATTEMPTS = 5
|
||||
const BASE_DELAY_MS = 1000
|
||||
const MAX_DELAY_MS = 15000
|
||||
|
||||
let activated = false
|
||||
const ensureActivated = () => {
|
||||
if (activated || cleanupRan) return
|
||||
activated = true
|
||||
setCurrentExecutionId(reconnectWorkflowId, capturedExecutionId)
|
||||
setIsExecuting(reconnectWorkflowId, true)
|
||||
clearExecutionEntries(capturedExecutionId)
|
||||
}
|
||||
|
||||
const wrapHandler =
|
||||
<T>(handler: (data: T) => void) =>
|
||||
(data: T) => {
|
||||
ensureActivated()
|
||||
handler(data)
|
||||
}
|
||||
|
||||
const cleanupFailedReconnect = () => {
|
||||
const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId && currentId !== capturedExecutionId) return
|
||||
|
||||
const hasRunningEntry = useTerminalConsoleStore
|
||||
.getState()
|
||||
.getWorkflowEntries(reconnectWorkflowId)
|
||||
.some((entry) => entry.isRunning && entry.executionId === capturedExecutionId)
|
||||
|
||||
if (activated || hasRunningEntry) {
|
||||
cancelRunningEntries(reconnectWorkflowId)
|
||||
}
|
||||
|
||||
if (currentId === capturedExecutionId) {
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
}
|
||||
}
|
||||
|
||||
const attemptReconnect = async (attempt: number): Promise<void> => {
|
||||
if (cleanupRan || reconnectionComplete) return
|
||||
|
||||
@@ -1914,38 +1958,39 @@ export function useWorkflowExecution() {
|
||||
fromEventId,
|
||||
callbacks: {
|
||||
onEventId: (eid) => {
|
||||
ensureActivated()
|
||||
fromEventId = eid
|
||||
},
|
||||
onBlockStarted: handlers.onBlockStarted,
|
||||
onBlockCompleted: handlers.onBlockCompleted,
|
||||
onBlockError: handlers.onBlockError,
|
||||
onBlockChildWorkflowStarted: handlers.onBlockChildWorkflowStarted,
|
||||
onBlockStarted: wrapHandler(handlers.onBlockStarted),
|
||||
onBlockCompleted: wrapHandler(handlers.onBlockCompleted),
|
||||
onBlockError: wrapHandler(handlers.onBlockError),
|
||||
onBlockChildWorkflowStarted: wrapHandler(handlers.onBlockChildWorkflowStarted),
|
||||
onExecutionCompleted: () => {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
if (!activated) {
|
||||
clearExecutionPointer(reconnectWorkflowId)
|
||||
return
|
||||
}
|
||||
const currentId = useExecutionStore
|
||||
.getState()
|
||||
.getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId !== capturedExecutionId) {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
return
|
||||
}
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
if (currentId !== capturedExecutionId) return
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
},
|
||||
onExecutionError: (data) => {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
if (!activated) {
|
||||
clearExecutionPointer(reconnectWorkflowId)
|
||||
return
|
||||
}
|
||||
const currentId = useExecutionStore
|
||||
.getState()
|
||||
.getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId !== capturedExecutionId) {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
return
|
||||
}
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
if (currentId !== capturedExecutionId) return
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
@@ -1957,16 +2002,16 @@ export function useWorkflowExecution() {
|
||||
})
|
||||
},
|
||||
onExecutionCancelled: () => {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
if (!activated) {
|
||||
clearExecutionPointer(reconnectWorkflowId)
|
||||
return
|
||||
}
|
||||
const currentId = useExecutionStore
|
||||
.getState()
|
||||
.getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId !== capturedExecutionId) {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
return
|
||||
}
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
if (currentId !== capturedExecutionId) return
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
@@ -1978,6 +2023,17 @@ export function useWorkflowExecution() {
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (isReconnectTerminal(error)) {
|
||||
logger.info('Reconnection skipped; run buffer no longer exists', {
|
||||
executionId: capturedExecutionId,
|
||||
})
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
clearExecutionPointer(reconnectWorkflowId)
|
||||
cleanupFailedReconnect()
|
||||
return
|
||||
}
|
||||
|
||||
logger.warn('Execution reconnection attempt failed', {
|
||||
executionId: capturedExecutionId,
|
||||
attempt,
|
||||
@@ -1986,17 +2042,27 @@ export function useWorkflowExecution() {
|
||||
if (!cleanupRan && !reconnectionComplete && attempt < MAX_ATTEMPTS) {
|
||||
return attemptReconnect(attempt + 1)
|
||||
}
|
||||
if (!cleanupRan && !reconnectionComplete) {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
cleanupFailedReconnect()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if (!reconnectionComplete && !cleanupRan) {
|
||||
reconnectionComplete = true
|
||||
activeReconnections.delete(reconnectWorkflowId)
|
||||
const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId === capturedExecutionId) {
|
||||
cancelRunningEntries(reconnectWorkflowId)
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
if (activated) {
|
||||
const currentId = useExecutionStore
|
||||
.getState()
|
||||
.getCurrentExecutionId(reconnectWorkflowId)
|
||||
if (currentId === capturedExecutionId) {
|
||||
cancelRunningEntries(reconnectWorkflowId)
|
||||
setCurrentExecutionId(reconnectWorkflowId, null)
|
||||
setIsExecuting(reconnectWorkflowId, false)
|
||||
setActiveBlocks(reconnectWorkflowId, new Set())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -89,6 +89,7 @@ export const PermissionsTable = ({
|
||||
permissionType:
|
||||
changes.permissionType !== undefined ? changes.permissionType : permissionType,
|
||||
isCurrentUser: user.email === session?.user?.email,
|
||||
isExternal: user.isExternal,
|
||||
}
|
||||
}) || [],
|
||||
[workspacePermissions?.users, existingUserPermissionChanges, session?.user?.email]
|
||||
@@ -212,6 +213,11 @@ export const PermissionsTable = ({
|
||||
)}
|
||||
</Badge>
|
||||
)}
|
||||
{user.isExternal && (
|
||||
<Badge variant='default' className='text-caption'>
|
||||
External
|
||||
</Badge>
|
||||
)}
|
||||
{hasChanges && (
|
||||
<Badge variant='default' className='text-caption'>
|
||||
Modified
|
||||
|
||||
@@ -8,5 +8,6 @@ export interface UserPermissions {
|
||||
permissionType: PermissionType
|
||||
isCurrentUser?: boolean
|
||||
isPendingInvitation?: boolean
|
||||
isExternal?: boolean
|
||||
invitationId?: string
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { useParams } from 'next/navigation'
|
||||
import {
|
||||
Button,
|
||||
type FileInputOptions,
|
||||
@@ -47,7 +47,6 @@ export function InviteModal({
|
||||
inviteDisabledReason = null,
|
||||
organizationId = null,
|
||||
}: InviteModalProps) {
|
||||
const router = useRouter()
|
||||
const formRef = useRef<HTMLFormElement>(null)
|
||||
const [emailItems, setEmailItems] = useState<TagItem[]>([])
|
||||
const [userPermissions, setUserPermissions] = useState<UserPermissions[]>([])
|
||||
@@ -103,9 +102,9 @@ export function InviteModal({
|
||||
const isOutOfSeats = exceedsSeatCapacity || isAtSeatCapacity
|
||||
const seatLimitReason = hasSeatData
|
||||
? availableSeats === 0
|
||||
? `No available seats. Using ${usedSeats} of ${totalSeats}.`
|
||||
? `Internal invites may fail: using ${usedSeats} of ${totalSeats} seats. External workspace invites do not require seats.`
|
||||
: exceedsSeatCapacity
|
||||
? `Only ${availableSeats} seat${availableSeats === 1 ? '' : 's'} available.`
|
||||
? `Only ${availableSeats} internal seat${availableSeats === 1 ? '' : 's'} available. External workspace invites do not require seats.`
|
||||
: null
|
||||
: null
|
||||
|
||||
@@ -235,7 +234,7 @@ export function InviteModal({
|
||||
}))
|
||||
|
||||
updatePermissionsMutation.mutate(
|
||||
{ workspaceId, updates },
|
||||
{ workspaceId, organizationId: organizationId ?? undefined, updates },
|
||||
{
|
||||
onSuccess: (data) => {
|
||||
if (data.users && data.total !== undefined) {
|
||||
@@ -253,6 +252,7 @@ export function InviteModal({
|
||||
userPerms.canAdmin,
|
||||
hasPendingChanges,
|
||||
workspaceId,
|
||||
organizationId,
|
||||
existingUserPermissionChanges,
|
||||
updatePermissions,
|
||||
updatePermissionsMutation,
|
||||
@@ -284,7 +284,7 @@ export function InviteModal({
|
||||
}
|
||||
|
||||
removeMember.mutate(
|
||||
{ userId: memberToRemove.userId, workspaceId },
|
||||
{ userId: memberToRemove.userId, workspaceId, organizationId },
|
||||
{
|
||||
onSuccess: () => {
|
||||
if (workspacePermissions) {
|
||||
@@ -318,6 +318,7 @@ export function InviteModal({
|
||||
workspacePermissions,
|
||||
updatePermissions,
|
||||
removeMember,
|
||||
organizationId,
|
||||
])
|
||||
|
||||
const handleRemoveMemberCancel = useCallback(() => {
|
||||
@@ -334,7 +335,7 @@ export function InviteModal({
|
||||
setErrorMessage(null)
|
||||
|
||||
cancelInvitation.mutate(
|
||||
{ invitationId: invitationToRemove.invitationId, workspaceId },
|
||||
{ invitationId: invitationToRemove.invitationId, workspaceId, organizationId },
|
||||
{
|
||||
onSuccess: () => {
|
||||
setInvitationToRemove(null)
|
||||
@@ -346,7 +347,7 @@ export function InviteModal({
|
||||
},
|
||||
}
|
||||
)
|
||||
}, [invitationToRemove, workspaceId, userPerms.canAdmin, cancelInvitation])
|
||||
}, [invitationToRemove, workspaceId, userPerms.canAdmin, cancelInvitation, organizationId])
|
||||
|
||||
const handleRemoveInvitationCancel = useCallback(() => {
|
||||
setInvitationToRemove(null)
|
||||
@@ -421,23 +422,12 @@ export function InviteModal({
|
||||
[workspaceId, userPerms.canAdmin, resendCooldowns, resendingInvitationIds, resendInvitation]
|
||||
)
|
||||
|
||||
const handleUpgradeRedirect = useCallback(() => {
|
||||
if (!workspaceId) return
|
||||
onOpenChange(false)
|
||||
router.push(`/workspace/${workspaceId}/settings/subscription`)
|
||||
}, [onOpenChange, router, workspaceId])
|
||||
|
||||
const handleSubmit = useCallback(
|
||||
(e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
|
||||
setErrorMessage(null)
|
||||
|
||||
if (isOutOfSeats) {
|
||||
handleUpgradeRedirect()
|
||||
return
|
||||
}
|
||||
|
||||
if (!canInviteMembers || validEmails.length === 0 || !workspaceId) {
|
||||
return
|
||||
}
|
||||
@@ -451,7 +441,7 @@ export function InviteModal({
|
||||
})
|
||||
|
||||
batchSendInvitations.mutate(
|
||||
{ workspaceId, invitations },
|
||||
{ workspaceId, organizationId, invitations },
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
if (result.failed.length > 0) {
|
||||
@@ -474,10 +464,9 @@ export function InviteModal({
|
||||
},
|
||||
[
|
||||
canInviteMembers,
|
||||
isOutOfSeats,
|
||||
handleUpgradeRedirect,
|
||||
validEmails,
|
||||
workspaceId,
|
||||
organizationId,
|
||||
userPermissions,
|
||||
batchSendInvitations,
|
||||
]
|
||||
@@ -504,6 +493,7 @@ export function InviteModal({
|
||||
email: inv.email,
|
||||
permissionType: inv.permissionType,
|
||||
isPendingInvitation: true,
|
||||
isExternal: inv.isExternal,
|
||||
invitationId: inv.invitationId,
|
||||
})),
|
||||
[pendingInvitations]
|
||||
@@ -641,10 +631,6 @@ export function InviteModal({
|
||||
type='button'
|
||||
variant='primary'
|
||||
onClick={() => {
|
||||
if (isOutOfSeats) {
|
||||
handleUpgradeRedirect()
|
||||
return
|
||||
}
|
||||
formRef.current?.requestSubmit()
|
||||
}}
|
||||
disabled={
|
||||
@@ -653,7 +639,7 @@ export function InviteModal({
|
||||
isSubmitting ||
|
||||
isSaving ||
|
||||
!workspaceId ||
|
||||
(!isOutOfSeats && !hasNewInvites)
|
||||
!hasNewInvites
|
||||
}
|
||||
className='ml-auto'
|
||||
>
|
||||
@@ -663,9 +649,7 @@ export function InviteModal({
|
||||
? 'Admin Access Required'
|
||||
: isSubmitting
|
||||
? 'Inviting...'
|
||||
: isOutOfSeats
|
||||
? 'Upgrade to invite'
|
||||
: 'Invite'}
|
||||
: 'Invite'}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</form>
|
||||
|
||||
@@ -73,8 +73,10 @@ export function useWorkspaceLogoUpload({
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => ({ error: response.statusText }))
|
||||
throw new Error(errorData.error || `Failed to upload file: ${response.status}`)
|
||||
const errorData = await response.json().catch(() => ({ message: response.statusText }))
|
||||
throw new Error(
|
||||
errorData.message || errorData.error || `Failed to upload file: ${response.status}`
|
||||
)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
@@ -206,10 +206,7 @@ const SidebarTaskItem = memo(function SidebarTaskItem({
|
||||
e.preventDefault()
|
||||
onMultiSelectClick(task.id, true)
|
||||
} else {
|
||||
useFolderStore.setState({
|
||||
selectedTasks: new Set<string>(),
|
||||
lastSelectedTaskId: task.id,
|
||||
})
|
||||
useFolderStore.getState().selectTaskOnly(task.id)
|
||||
}
|
||||
}}
|
||||
onContextMenu={task.id !== 'new' ? (e) => onContextMenu(e, task.id) : undefined}
|
||||
|
||||
@@ -4,169 +4,71 @@ import { createLogger } from '@sim/logger'
|
||||
import { task } from '@trigger.dev/sdk'
|
||||
import { and, inArray, lt } from 'drizzle-orm'
|
||||
import { type CleanupJobPayload, resolveCleanupScope } from '@/lib/billing/cleanup-dispatcher'
|
||||
import {
|
||||
batchDeleteByWorkspaceAndTimestamp,
|
||||
chunkedBatchDelete,
|
||||
type TableCleanupResult,
|
||||
} from '@/lib/cleanup/batch-delete'
|
||||
import { snapshotService } from '@/lib/logs/execution/snapshot/service'
|
||||
import { isUsingCloudStorage, StorageService } from '@/lib/uploads'
|
||||
import { deleteFileMetadata } from '@/lib/uploads/server/metadata'
|
||||
|
||||
const logger = createLogger('CleanupLogs')
|
||||
|
||||
const BATCH_SIZE = 2000
|
||||
const MAX_BATCHES_PER_TIER = 10
|
||||
|
||||
interface TierResults {
|
||||
total: number
|
||||
deleted: number
|
||||
deleteFailed: number
|
||||
interface FileDeleteStats {
|
||||
filesTotal: number
|
||||
filesDeleted: number
|
||||
filesDeleteFailed: number
|
||||
}
|
||||
|
||||
function emptyTierResults(): TierResults {
|
||||
return {
|
||||
total: 0,
|
||||
deleted: 0,
|
||||
deleteFailed: 0,
|
||||
filesTotal: 0,
|
||||
filesDeleted: 0,
|
||||
filesDeleteFailed: 0,
|
||||
}
|
||||
}
|
||||
|
||||
async function deleteExecutionFiles(files: unknown, results: TierResults): Promise<void> {
|
||||
async function deleteExecutionFiles(files: unknown, stats: FileDeleteStats): Promise<void> {
|
||||
if (!isUsingCloudStorage() || !files || !Array.isArray(files)) return
|
||||
|
||||
const keys = files.filter((f) => f && typeof f === 'object' && f.key).map((f) => f.key as string)
|
||||
results.filesTotal += keys.length
|
||||
stats.filesTotal += keys.length
|
||||
|
||||
await Promise.all(
|
||||
keys.map(async (key) => {
|
||||
try {
|
||||
await StorageService.deleteFile({ key, context: 'execution' })
|
||||
await deleteFileMetadata(key)
|
||||
results.filesDeleted++
|
||||
stats.filesDeleted++
|
||||
} catch (fileError) {
|
||||
results.filesDeleteFailed++
|
||||
stats.filesDeleteFailed++
|
||||
logger.error(`Failed to delete file ${key}:`, { fileError })
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
async function cleanupTier(
|
||||
async function cleanupWorkflowExecutionLogs(
|
||||
workspaceIds: string[],
|
||||
retentionDate: Date,
|
||||
label: string
|
||||
): Promise<TierResults> {
|
||||
const results = emptyTierResults()
|
||||
if (workspaceIds.length === 0) return results
|
||||
): Promise<TableCleanupResult & FileDeleteStats> {
|
||||
const fileStats: FileDeleteStats = { filesTotal: 0, filesDeleted: 0, filesDeleteFailed: 0 }
|
||||
|
||||
let batchesProcessed = 0
|
||||
let hasMore = true
|
||||
|
||||
while (hasMore && batchesProcessed < MAX_BATCHES_PER_TIER) {
|
||||
const batch = await db
|
||||
.select({
|
||||
id: workflowExecutionLogs.id,
|
||||
files: workflowExecutionLogs.files,
|
||||
})
|
||||
.from(workflowExecutionLogs)
|
||||
.where(
|
||||
and(
|
||||
inArray(workflowExecutionLogs.workspaceId, workspaceIds),
|
||||
lt(workflowExecutionLogs.startedAt, retentionDate)
|
||||
const dbStats = await chunkedBatchDelete({
|
||||
tableDef: workflowExecutionLogs,
|
||||
workspaceIds,
|
||||
tableName: `${label}/workflow_execution_logs`,
|
||||
selectChunk: (chunkIds, limit) =>
|
||||
db
|
||||
.select({ id: workflowExecutionLogs.id, files: workflowExecutionLogs.files })
|
||||
.from(workflowExecutionLogs)
|
||||
.where(
|
||||
and(
|
||||
inArray(workflowExecutionLogs.workspaceId, chunkIds),
|
||||
lt(workflowExecutionLogs.startedAt, retentionDate)
|
||||
)
|
||||
)
|
||||
)
|
||||
.limit(BATCH_SIZE)
|
||||
.limit(limit),
|
||||
onBatch: async (rows) => {
|
||||
for (const row of rows) await deleteExecutionFiles(row.files, fileStats)
|
||||
},
|
||||
})
|
||||
|
||||
results.total += batch.length
|
||||
|
||||
if (batch.length === 0) {
|
||||
hasMore = false
|
||||
break
|
||||
}
|
||||
|
||||
for (const log of batch) {
|
||||
await deleteExecutionFiles(log.files, results)
|
||||
}
|
||||
|
||||
const logIds = batch.map((log) => log.id)
|
||||
try {
|
||||
const deleted = await db
|
||||
.delete(workflowExecutionLogs)
|
||||
.where(inArray(workflowExecutionLogs.id, logIds))
|
||||
.returning({ id: workflowExecutionLogs.id })
|
||||
|
||||
results.deleted += deleted.length
|
||||
} catch (deleteError) {
|
||||
results.deleteFailed += logIds.length
|
||||
logger.error(`Batch delete failed for ${label}:`, { deleteError })
|
||||
}
|
||||
|
||||
batchesProcessed++
|
||||
hasMore = batch.length === BATCH_SIZE
|
||||
|
||||
logger.info(`[${label}] Batch ${batchesProcessed}: ${batch.length} logs processed`)
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
interface JobLogCleanupResults {
|
||||
deleted: number
|
||||
deleteFailed: number
|
||||
}
|
||||
|
||||
async function cleanupJobExecutionLogsTier(
|
||||
workspaceIds: string[],
|
||||
retentionDate: Date,
|
||||
label: string
|
||||
): Promise<JobLogCleanupResults> {
|
||||
const results: JobLogCleanupResults = { deleted: 0, deleteFailed: 0 }
|
||||
if (workspaceIds.length === 0) return results
|
||||
|
||||
let batchesProcessed = 0
|
||||
let hasMore = true
|
||||
|
||||
while (hasMore && batchesProcessed < MAX_BATCHES_PER_TIER) {
|
||||
const batch = await db
|
||||
.select({ id: jobExecutionLogs.id })
|
||||
.from(jobExecutionLogs)
|
||||
.where(
|
||||
and(
|
||||
inArray(jobExecutionLogs.workspaceId, workspaceIds),
|
||||
lt(jobExecutionLogs.startedAt, retentionDate)
|
||||
)
|
||||
)
|
||||
.limit(BATCH_SIZE)
|
||||
|
||||
if (batch.length === 0) {
|
||||
hasMore = false
|
||||
break
|
||||
}
|
||||
|
||||
const logIds = batch.map((log) => log.id)
|
||||
try {
|
||||
const deleted = await db
|
||||
.delete(jobExecutionLogs)
|
||||
.where(inArray(jobExecutionLogs.id, logIds))
|
||||
.returning({ id: jobExecutionLogs.id })
|
||||
|
||||
results.deleted += deleted.length
|
||||
} catch (deleteError) {
|
||||
results.deleteFailed += logIds.length
|
||||
logger.error(`Batch delete failed for ${label} (job_execution_logs):`, { deleteError })
|
||||
}
|
||||
|
||||
batchesProcessed++
|
||||
hasMore = batch.length === BATCH_SIZE
|
||||
|
||||
logger.info(
|
||||
`[${label}] job_execution_logs batch ${batchesProcessed}: ${batch.length} rows processed`
|
||||
)
|
||||
}
|
||||
|
||||
return results
|
||||
return { ...dbStats, ...fileStats }
|
||||
}
|
||||
|
||||
export async function runCleanupLogs(payload: CleanupJobPayload): Promise<void> {
|
||||
@@ -190,15 +92,19 @@ export async function runCleanupLogs(payload: CleanupJobPayload): Promise<void>
|
||||
`[${label}] Cleaning ${workspaceIds.length} workspaces, cutoff: ${retentionDate.toISOString()}`
|
||||
)
|
||||
|
||||
const results = await cleanupTier(workspaceIds, retentionDate, label)
|
||||
const workflowResults = await cleanupWorkflowExecutionLogs(workspaceIds, retentionDate, label)
|
||||
logger.info(
|
||||
`[${label}] workflow_execution_logs: ${results.deleted} deleted, ${results.deleteFailed} failed out of ${results.total} candidates`
|
||||
`[${label}] workflow_execution_logs files: ${workflowResults.filesDeleted}/${workflowResults.filesTotal} deleted, ${workflowResults.filesDeleteFailed} failed`
|
||||
)
|
||||
|
||||
const jobLogResults = await cleanupJobExecutionLogsTier(workspaceIds, retentionDate, label)
|
||||
logger.info(
|
||||
`[${label}] job_execution_logs: ${jobLogResults.deleted} deleted, ${jobLogResults.deleteFailed} failed`
|
||||
)
|
||||
await batchDeleteByWorkspaceAndTimestamp({
|
||||
tableDef: jobExecutionLogs,
|
||||
workspaceIdCol: jobExecutionLogs.workspaceId,
|
||||
timestampCol: jobExecutionLogs.startedAt,
|
||||
workspaceIds,
|
||||
retentionDate,
|
||||
tableName: `${label}/job_execution_logs`,
|
||||
})
|
||||
|
||||
// Snapshot cleanup runs only on the free job to avoid running it N times for N enterprise workspaces.
|
||||
if (payload.plan === 'free') {
|
||||
|
||||
@@ -18,9 +18,8 @@ import { and, inArray, isNotNull, lt } from 'drizzle-orm'
|
||||
import { type CleanupJobPayload, resolveCleanupScope } from '@/lib/billing/cleanup-dispatcher'
|
||||
import {
|
||||
batchDeleteByWorkspaceAndTimestamp,
|
||||
DEFAULT_BATCH_SIZE,
|
||||
DEFAULT_MAX_BATCHES_PER_TABLE,
|
||||
deleteRowsById,
|
||||
selectRowsByIdChunks,
|
||||
} from '@/lib/cleanup/batch-delete'
|
||||
import { prepareChatCleanup } from '@/lib/cleanup/chat-cleanup'
|
||||
import type { StorageContext } from '@/lib/uploads'
|
||||
@@ -44,35 +43,37 @@ async function selectExpiredWorkspaceFiles(
|
||||
workspaceIds: string[],
|
||||
retentionDate: Date
|
||||
): Promise<WorkspaceFileScope> {
|
||||
const limit = DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE
|
||||
|
||||
const [legacyRows, multiContextRows] = await Promise.all([
|
||||
db
|
||||
.select({ id: workspaceFile.id, key: workspaceFile.key })
|
||||
.from(workspaceFile)
|
||||
.where(
|
||||
and(
|
||||
inArray(workspaceFile.workspaceId, workspaceIds),
|
||||
isNotNull(workspaceFile.deletedAt),
|
||||
lt(workspaceFile.deletedAt, retentionDate)
|
||||
selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
|
||||
db
|
||||
.select({ id: workspaceFile.id, key: workspaceFile.key })
|
||||
.from(workspaceFile)
|
||||
.where(
|
||||
and(
|
||||
inArray(workspaceFile.workspaceId, chunkIds),
|
||||
isNotNull(workspaceFile.deletedAt),
|
||||
lt(workspaceFile.deletedAt, retentionDate)
|
||||
)
|
||||
)
|
||||
)
|
||||
.limit(limit),
|
||||
db
|
||||
.select({
|
||||
id: workspaceFiles.id,
|
||||
key: workspaceFiles.key,
|
||||
context: workspaceFiles.context,
|
||||
})
|
||||
.from(workspaceFiles)
|
||||
.where(
|
||||
and(
|
||||
inArray(workspaceFiles.workspaceId, workspaceIds),
|
||||
isNotNull(workspaceFiles.deletedAt),
|
||||
lt(workspaceFiles.deletedAt, retentionDate)
|
||||
.limit(chunkLimit)
|
||||
),
|
||||
selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
|
||||
db
|
||||
.select({
|
||||
id: workspaceFiles.id,
|
||||
key: workspaceFiles.key,
|
||||
context: workspaceFiles.context,
|
||||
})
|
||||
.from(workspaceFiles)
|
||||
.where(
|
||||
and(
|
||||
inArray(workspaceFiles.workspaceId, chunkIds),
|
||||
isNotNull(workspaceFiles.deletedAt),
|
||||
lt(workspaceFiles.deletedAt, retentionDate)
|
||||
)
|
||||
)
|
||||
)
|
||||
.limit(limit),
|
||||
.limit(chunkLimit)
|
||||
),
|
||||
])
|
||||
|
||||
return {
|
||||
@@ -182,17 +183,19 @@ export async function runCleanupSoftDeletes(payload: CleanupJobPayload): Promise
|
||||
// (chats + S3) AND the DB deletes below — selecting twice could return
|
||||
// different subsets above the LIMIT cap and orphan or prematurely purge data.
|
||||
const [doomedWorkflows, fileScope] = await Promise.all([
|
||||
db
|
||||
.select({ id: workflow.id })
|
||||
.from(workflow)
|
||||
.where(
|
||||
and(
|
||||
inArray(workflow.workspaceId, workspaceIds),
|
||||
isNotNull(workflow.archivedAt),
|
||||
lt(workflow.archivedAt, retentionDate)
|
||||
selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
|
||||
db
|
||||
.select({ id: workflow.id })
|
||||
.from(workflow)
|
||||
.where(
|
||||
and(
|
||||
inArray(workflow.workspaceId, chunkIds),
|
||||
isNotNull(workflow.archivedAt),
|
||||
lt(workflow.archivedAt, retentionDate)
|
||||
)
|
||||
)
|
||||
)
|
||||
.limit(DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE),
|
||||
.limit(chunkLimit)
|
||||
),
|
||||
selectExpiredWorkspaceFiles(workspaceIds, retentionDate),
|
||||
])
|
||||
|
||||
@@ -200,11 +203,13 @@ export async function runCleanupSoftDeletes(payload: CleanupJobPayload): Promise
|
||||
let chatCleanup: { execute: () => Promise<void> } | null = null
|
||||
|
||||
if (doomedWorkflowIds.length > 0) {
|
||||
const doomedChats = await db
|
||||
.select({ id: copilotChats.id })
|
||||
.from(copilotChats)
|
||||
.where(inArray(copilotChats.workflowId, doomedWorkflowIds))
|
||||
.limit(DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE)
|
||||
const doomedChats = await selectRowsByIdChunks(doomedWorkflowIds, (chunkIds, chunkLimit) =>
|
||||
db
|
||||
.select({ id: copilotChats.id })
|
||||
.from(copilotChats)
|
||||
.where(inArray(copilotChats.workflowId, chunkIds))
|
||||
.limit(chunkLimit)
|
||||
)
|
||||
|
||||
const doomedChatIds = doomedChats.map((c) => c.id)
|
||||
if (doomedChatIds.length > 0) {
|
||||
|
||||
@@ -13,9 +13,8 @@ import { and, inArray, lt, sql } from 'drizzle-orm'
|
||||
import { type CleanupJobPayload, resolveCleanupScope } from '@/lib/billing/cleanup-dispatcher'
|
||||
import {
|
||||
batchDeleteByWorkspaceAndTimestamp,
|
||||
DEFAULT_BATCH_SIZE,
|
||||
DEFAULT_MAX_BATCHES_PER_TABLE,
|
||||
deleteRowsById,
|
||||
selectRowsByIdChunks,
|
||||
type TableCleanupResult,
|
||||
} from '@/lib/cleanup/batch-delete'
|
||||
import { prepareChatCleanup } from '@/lib/cleanup/chat-cleanup'
|
||||
@@ -67,13 +66,15 @@ async function cleanupRunChildren(
|
||||
): Promise<TableCleanupResult[]> {
|
||||
if (workspaceIds.length === 0) return []
|
||||
|
||||
const runIds = await db
|
||||
.select({ id: copilotRuns.id })
|
||||
.from(copilotRuns)
|
||||
.where(
|
||||
and(inArray(copilotRuns.workspaceId, workspaceIds), lt(copilotRuns.updatedAt, retentionDate))
|
||||
)
|
||||
.limit(DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE)
|
||||
const runIds = await selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
|
||||
db
|
||||
.select({ id: copilotRuns.id })
|
||||
.from(copilotRuns)
|
||||
.where(
|
||||
and(inArray(copilotRuns.workspaceId, chunkIds), lt(copilotRuns.updatedAt, retentionDate))
|
||||
)
|
||||
.limit(chunkLimit)
|
||||
)
|
||||
|
||||
if (runIds.length === 0) {
|
||||
return RUN_CHILD_TABLES.map((t) => ({ table: `${label}/${t.name}`, deleted: 0, failed: 0 }))
|
||||
@@ -107,17 +108,15 @@ export async function runCleanupTasks(payload: CleanupJobPayload): Promise<void>
|
||||
`[${label}] Processing ${workspaceIds.length} workspaces, cutoff: ${retentionDate.toISOString()}`
|
||||
)
|
||||
|
||||
// Collect chat IDs before deleting so we can clean up the copilot backend after
|
||||
const doomedChats = await db
|
||||
.select({ id: copilotChats.id })
|
||||
.from(copilotChats)
|
||||
.where(
|
||||
and(
|
||||
inArray(copilotChats.workspaceId, workspaceIds),
|
||||
lt(copilotChats.updatedAt, retentionDate)
|
||||
const doomedChats = await selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
|
||||
db
|
||||
.select({ id: copilotChats.id })
|
||||
.from(copilotChats)
|
||||
.where(
|
||||
and(inArray(copilotChats.workspaceId, chunkIds), lt(copilotChats.updatedAt, retentionDate))
|
||||
)
|
||||
)
|
||||
.limit(DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE)
|
||||
.limit(chunkLimit)
|
||||
)
|
||||
|
||||
const doomedChatIds = doomedChats.map((c) => c.id)
|
||||
|
||||
|
||||
@@ -23,6 +23,12 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
|
||||
placeholder: 'Describe what the browser agent should do...',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'startUrl',
|
||||
title: 'Start URL',
|
||||
type: 'short-input',
|
||||
placeholder: 'https://example.com (optional starting URL)',
|
||||
},
|
||||
{
|
||||
id: 'variables',
|
||||
title: 'Variables (Secrets)',
|
||||
@@ -51,22 +57,85 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
|
||||
{ label: 'Claude 3.7 Sonnet', id: 'claude-3-7-sonnet-20250219' },
|
||||
{ label: 'Claude Sonnet 4', id: 'claude-sonnet-4-20250514' },
|
||||
{ label: 'Claude Sonnet 4.5', id: 'claude-sonnet-4-5-20250929' },
|
||||
{ label: 'Claude Sonnet 4.6', id: 'claude-sonnet-4-6' },
|
||||
{ label: 'Claude Opus 4.5', id: 'claude-opus-4-5-20251101' },
|
||||
{ label: 'Llama 4 Maverick', id: 'llama-4-maverick-17b-128e-instruct' },
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'save_browser_data',
|
||||
title: 'Save Browser Data',
|
||||
type: 'switch',
|
||||
placeholder: 'Save browser data',
|
||||
},
|
||||
{
|
||||
id: 'profile_id',
|
||||
title: 'Profile ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter browser profile ID (optional)',
|
||||
},
|
||||
{
|
||||
id: 'maxSteps',
|
||||
title: 'Max Steps',
|
||||
type: 'short-input',
|
||||
placeholder: '100',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'allowedDomains',
|
||||
title: 'Allowed Domains',
|
||||
type: 'short-input',
|
||||
placeholder: 'example.com, docs.example.com',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'vision',
|
||||
title: 'Vision',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Auto (default)', id: 'auto' },
|
||||
{ label: 'Enabled', id: 'true' },
|
||||
{ label: 'Disabled', id: 'false' },
|
||||
],
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'flashMode',
|
||||
title: 'Flash Mode',
|
||||
type: 'switch',
|
||||
placeholder: 'Faster but less careful navigation',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'thinking',
|
||||
title: 'Thinking',
|
||||
type: 'switch',
|
||||
placeholder: 'Enable extended reasoning',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'highlightElements',
|
||||
title: 'Highlight Elements',
|
||||
type: 'switch',
|
||||
placeholder: 'Visually mark interactive elements',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'systemPromptExtension',
|
||||
title: 'System Prompt Extension',
|
||||
type: 'long-input',
|
||||
placeholder: 'Append custom instructions to the agent system prompt (max 2000 chars)',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'structuredOutput',
|
||||
title: 'Structured Output Schema',
|
||||
type: 'code',
|
||||
language: 'json',
|
||||
placeholder: 'Stringified JSON schema for structured output',
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'metadata',
|
||||
title: 'Metadata',
|
||||
type: 'table',
|
||||
columns: ['Key', 'Value'],
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
@@ -78,19 +147,68 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
|
||||
],
|
||||
tools: {
|
||||
access: ['browser_use_run_task'],
|
||||
config: {
|
||||
tool: () => 'browser_use_run_task',
|
||||
params: (params) => {
|
||||
const next: Record<string, any> = { ...params }
|
||||
if (typeof next.maxSteps === 'string') {
|
||||
const trimmed = next.maxSteps.trim()
|
||||
if (trimmed === '') {
|
||||
next.maxSteps = undefined
|
||||
} else {
|
||||
const n = Number(trimmed)
|
||||
next.maxSteps = Number.isFinite(n) ? n : undefined
|
||||
}
|
||||
}
|
||||
if (next.vision === 'true') next.vision = true
|
||||
else if (next.vision === 'false') next.vision = false
|
||||
if (next.metadata && Array.isArray(next.metadata)) {
|
||||
const obj: Record<string, string> = {}
|
||||
for (const row of next.metadata as Array<Record<string, any>>) {
|
||||
const key = row?.cells?.Key ?? row?.Key
|
||||
const value = row?.cells?.Value ?? row?.Value
|
||||
if (key) obj[key] = String(value ?? '')
|
||||
}
|
||||
next.metadata = obj
|
||||
}
|
||||
return next
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
task: { type: 'string', description: 'Browser automation task' },
|
||||
startUrl: { type: 'string', description: 'Starting URL for the agent' },
|
||||
apiKey: { type: 'string', description: 'BrowserUse API key' },
|
||||
variables: { type: 'json', description: 'Task variables' },
|
||||
model: { type: 'string', description: 'AI model to use' },
|
||||
save_browser_data: { type: 'boolean', description: 'Save browser data' },
|
||||
variables: { type: 'json', description: 'Secrets to inject into the task' },
|
||||
model: { type: 'string', description: 'LLM model to use' },
|
||||
profile_id: { type: 'string', description: 'Browser profile ID for persistent sessions' },
|
||||
maxSteps: { type: 'number', description: 'Maximum agent steps' },
|
||||
allowedDomains: { type: 'string', description: 'Comma-separated allowed domains' },
|
||||
vision: { type: 'string', description: 'Vision capability (auto / true / false)' },
|
||||
flashMode: { type: 'boolean', description: 'Enable flash mode' },
|
||||
thinking: { type: 'boolean', description: 'Enable extended reasoning' },
|
||||
highlightElements: { type: 'boolean', description: 'Highlight interactive elements' },
|
||||
systemPromptExtension: { type: 'string', description: 'Custom system prompt extension' },
|
||||
structuredOutput: { type: 'string', description: 'Stringified JSON schema' },
|
||||
metadata: { type: 'json', description: 'Custom key-value metadata' },
|
||||
},
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Task execution identifier' },
|
||||
success: { type: 'boolean', description: 'Task completion status' },
|
||||
output: { type: 'json', description: 'Task output data' },
|
||||
steps: { type: 'json', description: 'Execution steps taken' },
|
||||
output: { type: 'json', description: 'Final task output (string or structured)' },
|
||||
steps: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Steps the agent executed (number, memory, evaluationPreviousGoal, nextGoal, url, screenshotUrl, actions, duration)',
|
||||
},
|
||||
liveUrl: {
|
||||
type: 'string',
|
||||
description: 'Embeddable live browser session URL (active during execution)',
|
||||
},
|
||||
shareUrl: {
|
||||
type: 'string',
|
||||
description: 'Public shareable URL for the session (post-run)',
|
||||
},
|
||||
sessionId: { type: 'string', description: 'Browser Use session identifier' },
|
||||
},
|
||||
}
|
||||
|
||||
1175
apps/sim/blocks/blocks/sap_s4hana.ts
Normal file
1175
apps/sim/blocks/blocks/sap_s4hana.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -46,6 +46,10 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
{ label: 'Get User Presence', id: 'get_user_presence' },
|
||||
{ label: 'Edit Canvas', id: 'edit_canvas' },
|
||||
{ label: 'Create Channel Canvas', id: 'create_channel_canvas' },
|
||||
{ label: 'Get Canvas Info', id: 'get_canvas' },
|
||||
{ label: 'List Canvases', id: 'list_canvases' },
|
||||
{ label: 'Lookup Canvas Sections', id: 'lookup_canvas_sections' },
|
||||
{ label: 'Delete Canvas', id: 'delete_canvas' },
|
||||
{ label: 'Create Conversation', id: 'create_conversation' },
|
||||
{ label: 'Invite to Conversation', id: 'invite_to_conversation' },
|
||||
{ label: 'Open View', id: 'open_view' },
|
||||
@@ -146,6 +150,9 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
'get_user',
|
||||
'get_user_presence',
|
||||
'edit_canvas',
|
||||
'get_canvas',
|
||||
'lookup_canvas_sections',
|
||||
'delete_canvas',
|
||||
'create_conversation',
|
||||
'open_view',
|
||||
'update_view',
|
||||
@@ -160,7 +167,11 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
},
|
||||
}
|
||||
},
|
||||
required: true,
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: 'list_canvases',
|
||||
not: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'manualChannel',
|
||||
@@ -182,6 +193,9 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
'get_user',
|
||||
'get_user_presence',
|
||||
'edit_canvas',
|
||||
'get_canvas',
|
||||
'lookup_canvas_sections',
|
||||
'delete_canvas',
|
||||
'create_conversation',
|
||||
'open_view',
|
||||
'update_view',
|
||||
@@ -196,7 +210,11 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
},
|
||||
}
|
||||
},
|
||||
required: true,
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: 'list_canvases',
|
||||
not: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'dmUserId',
|
||||
@@ -820,6 +838,121 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
value: 'create_channel_canvas',
|
||||
},
|
||||
},
|
||||
// Get Canvas specific fields
|
||||
{
|
||||
id: 'getCanvasId',
|
||||
title: 'Canvas ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter canvas ID (e.g., F1234ABCD)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'get_canvas',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// List Canvases specific fields
|
||||
{
|
||||
id: 'canvasListCount',
|
||||
title: 'Canvas Limit',
|
||||
type: 'short-input',
|
||||
placeholder: '100',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'list_canvases',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'canvasListPage',
|
||||
title: 'Page',
|
||||
type: 'short-input',
|
||||
placeholder: '1',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'list_canvases',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'canvasListUser',
|
||||
title: 'User ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Optional creator filter (e.g., U1234567890)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'list_canvases',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'canvasListTsFrom',
|
||||
title: 'Created After',
|
||||
type: 'short-input',
|
||||
placeholder: 'Unix timestamp (e.g., 123456789)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'list_canvases',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'canvasListTsTo',
|
||||
title: 'Created Before',
|
||||
type: 'short-input',
|
||||
placeholder: 'Unix timestamp (e.g., 123456789)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'list_canvases',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'canvasListTeamId',
|
||||
title: 'Team ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Encoded team ID (org tokens only)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'list_canvases',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Lookup Canvas Sections specific fields
|
||||
{
|
||||
id: 'lookupCanvasId',
|
||||
title: 'Canvas ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter canvas ID (e.g., F1234ABCD)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'lookup_canvas_sections',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'sectionCriteria',
|
||||
title: 'Section Criteria',
|
||||
type: 'code',
|
||||
language: 'json',
|
||||
placeholder: '{"section_types":["h1"],"contains_text":"Roadmap"}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'lookup_canvas_sections',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// Delete Canvas specific fields
|
||||
{
|
||||
id: 'deleteCanvasId',
|
||||
title: 'Canvas ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter canvas ID (e.g., F1234ABCD)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'delete_canvas',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// Create Conversation specific fields
|
||||
{
|
||||
id: 'conversationName',
|
||||
@@ -1058,6 +1191,10 @@ Do not include any explanations, markdown formatting, or other text outside the
|
||||
'slack_get_user_presence',
|
||||
'slack_edit_canvas',
|
||||
'slack_create_channel_canvas',
|
||||
'slack_get_canvas',
|
||||
'slack_list_canvases',
|
||||
'slack_lookup_canvas_sections',
|
||||
'slack_delete_canvas',
|
||||
'slack_create_conversation',
|
||||
'slack_invite_to_conversation',
|
||||
'slack_open_view',
|
||||
@@ -1106,6 +1243,14 @@ Do not include any explanations, markdown formatting, or other text outside the
|
||||
return 'slack_edit_canvas'
|
||||
case 'create_channel_canvas':
|
||||
return 'slack_create_channel_canvas'
|
||||
case 'get_canvas':
|
||||
return 'slack_get_canvas'
|
||||
case 'list_canvases':
|
||||
return 'slack_list_canvases'
|
||||
case 'lookup_canvas_sections':
|
||||
return 'slack_lookup_canvas_sections'
|
||||
case 'delete_canvas':
|
||||
return 'slack_delete_canvas'
|
||||
case 'create_conversation':
|
||||
return 'slack_create_conversation'
|
||||
case 'invite_to_conversation':
|
||||
@@ -1164,6 +1309,16 @@ Do not include any explanations, markdown formatting, or other text outside the
|
||||
canvasTitle,
|
||||
channelCanvasTitle,
|
||||
channelCanvasContent,
|
||||
getCanvasId,
|
||||
canvasListCount,
|
||||
canvasListPage,
|
||||
canvasListUser,
|
||||
canvasListTsFrom,
|
||||
canvasListTsTo,
|
||||
canvasListTeamId,
|
||||
lookupCanvasId,
|
||||
sectionCriteria,
|
||||
deleteCanvasId,
|
||||
conversationName,
|
||||
isPrivate,
|
||||
teamId,
|
||||
@@ -1343,6 +1498,46 @@ Do not include any explanations, markdown formatting, or other text outside the
|
||||
}
|
||||
break
|
||||
|
||||
case 'get_canvas':
|
||||
baseParams.canvasId = getCanvasId
|
||||
break
|
||||
|
||||
case 'list_canvases':
|
||||
if (canvasListCount) {
|
||||
const parsedCount = Number.parseInt(canvasListCount, 10)
|
||||
if (!Number.isNaN(parsedCount) && parsedCount > 0) {
|
||||
baseParams.count = parsedCount
|
||||
}
|
||||
}
|
||||
if (canvasListPage) {
|
||||
const parsedPage = Number.parseInt(canvasListPage, 10)
|
||||
if (!Number.isNaN(parsedPage) && parsedPage > 0) {
|
||||
baseParams.page = parsedPage
|
||||
}
|
||||
}
|
||||
if (canvasListUser) {
|
||||
baseParams.user = String(canvasListUser).trim()
|
||||
}
|
||||
if (canvasListTsFrom) {
|
||||
baseParams.tsFrom = String(canvasListTsFrom).trim()
|
||||
}
|
||||
if (canvasListTsTo) {
|
||||
baseParams.tsTo = String(canvasListTsTo).trim()
|
||||
}
|
||||
if (canvasListTeamId) {
|
||||
baseParams.teamId = String(canvasListTeamId).trim()
|
||||
}
|
||||
break
|
||||
|
||||
case 'lookup_canvas_sections':
|
||||
baseParams.canvasId = lookupCanvasId
|
||||
baseParams.criteria = sectionCriteria
|
||||
break
|
||||
|
||||
case 'delete_canvas':
|
||||
baseParams.canvasId = deleteCanvasId
|
||||
break
|
||||
|
||||
case 'create_conversation':
|
||||
baseParams.name = conversationName
|
||||
baseParams.isPrivate = isPrivate === 'true'
|
||||
@@ -1461,6 +1656,23 @@ Do not include any explanations, markdown formatting, or other text outside the
|
||||
// Create Channel Canvas inputs
|
||||
channelCanvasTitle: { type: 'string', description: 'Title for channel canvas' },
|
||||
channelCanvasContent: { type: 'string', description: 'Content for channel canvas' },
|
||||
// Canvas management inputs
|
||||
getCanvasId: { type: 'string', description: 'Canvas ID to retrieve' },
|
||||
canvasListCount: { type: 'string', description: 'Maximum number of canvases to return' },
|
||||
canvasListPage: { type: 'string', description: 'Canvas list page number' },
|
||||
canvasListUser: { type: 'string', description: 'Optional canvas creator user filter' },
|
||||
canvasListTsFrom: {
|
||||
type: 'string',
|
||||
description: 'Filter canvases created after this timestamp',
|
||||
},
|
||||
canvasListTsTo: {
|
||||
type: 'string',
|
||||
description: 'Filter canvases created before this timestamp',
|
||||
},
|
||||
canvasListTeamId: { type: 'string', description: 'Encoded team ID for org tokens' },
|
||||
lookupCanvasId: { type: 'string', description: 'Canvas ID to search for sections' },
|
||||
sectionCriteria: { type: 'json', description: 'Canvas section lookup criteria' },
|
||||
deleteCanvasId: { type: 'string', description: 'Canvas ID to delete' },
|
||||
// Create Conversation inputs
|
||||
conversationName: { type: 'string', description: 'Name for the new channel' },
|
||||
isPrivate: { type: 'string', description: 'Create as private channel (true/false)' },
|
||||
@@ -1511,6 +1723,26 @@ Do not include any explanations, markdown formatting, or other text outside the
|
||||
// slack_canvas outputs
|
||||
canvas_id: { type: 'string', description: 'Canvas identifier for created canvases' },
|
||||
title: { type: 'string', description: 'Canvas title' },
|
||||
canvas: {
|
||||
type: 'json',
|
||||
description: 'Canvas file metadata returned by Slack',
|
||||
},
|
||||
canvases: {
|
||||
type: 'json',
|
||||
description: 'Array of canvas file objects returned by Slack',
|
||||
},
|
||||
paging: {
|
||||
type: 'json',
|
||||
description: 'Pagination information for listed canvases',
|
||||
},
|
||||
sections: {
|
||||
type: 'json',
|
||||
description: 'Canvas section IDs returned by Slack section lookup',
|
||||
},
|
||||
ok: {
|
||||
type: 'boolean',
|
||||
description: 'Whether Slack completed the canvas operation successfully',
|
||||
},
|
||||
|
||||
// slack_message_reader outputs (read operation)
|
||||
messages: {
|
||||
|
||||
@@ -1,28 +1,6 @@
|
||||
import { StagehandIcon } from '@/components/icons'
|
||||
import { AuthMode, type BlockConfig, IntegrationType } from '@/blocks/types'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
export interface StagehandExtractResponse extends ToolResponse {
|
||||
output: {
|
||||
data: Record<string, any>
|
||||
}
|
||||
}
|
||||
|
||||
export interface StagehandAgentResponse extends ToolResponse {
|
||||
output: {
|
||||
agentResult: {
|
||||
success: boolean
|
||||
completed: boolean
|
||||
message: string
|
||||
actions?: Array<{
|
||||
type: string
|
||||
description: string
|
||||
result?: string
|
||||
}>
|
||||
}
|
||||
structuredOutput?: Record<string, any>
|
||||
}
|
||||
}
|
||||
import type { StagehandAgentResponse, StagehandExtractResponse } from '@/tools/stagehand/types'
|
||||
|
||||
export type StagehandResponse = StagehandExtractResponse | StagehandAgentResponse
|
||||
|
||||
@@ -345,6 +323,27 @@ Example 3 (Data Collection):
|
||||
generationType: 'json-schema',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'mode',
|
||||
title: 'Agent Mode',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'DOM (default)', id: 'dom' },
|
||||
{ label: 'Hybrid', id: 'hybrid' },
|
||||
{ label: 'CUA', id: 'cua' },
|
||||
],
|
||||
value: () => 'dom',
|
||||
condition: { field: 'operation', value: 'agent' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'maxSteps',
|
||||
title: 'Max Steps',
|
||||
type: 'short-input',
|
||||
placeholder: '20',
|
||||
condition: { field: 'operation', value: 'agent' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Shared API key field
|
||||
{
|
||||
id: 'apiKey',
|
||||
@@ -361,6 +360,19 @@ Example 3 (Data Collection):
|
||||
tool: (params) => {
|
||||
return params.operation === 'agent' ? 'stagehand_agent' : 'stagehand_extract'
|
||||
},
|
||||
params: (params) => {
|
||||
const next: Record<string, any> = { ...params }
|
||||
if (typeof next.maxSteps === 'string') {
|
||||
const trimmed = next.maxSteps.trim()
|
||||
if (trimmed === '') {
|
||||
next.maxSteps = undefined
|
||||
} else {
|
||||
const n = Number(trimmed)
|
||||
next.maxSteps = Number.isFinite(n) ? n : undefined
|
||||
}
|
||||
}
|
||||
return next
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
@@ -376,6 +388,8 @@ Example 3 (Data Collection):
|
||||
task: { type: 'string', description: 'Task description (agent operation)' },
|
||||
variables: { type: 'json', description: 'Task variables (agent operation)' },
|
||||
outputSchema: { type: 'json', description: 'Output schema (agent operation)' },
|
||||
mode: { type: 'string', description: 'Agent mode: dom, hybrid, or cua (agent operation)' },
|
||||
maxSteps: { type: 'number', description: 'Max agent steps (agent operation)' },
|
||||
},
|
||||
outputs: {
|
||||
// Extract outputs
|
||||
@@ -383,5 +397,10 @@ Example 3 (Data Collection):
|
||||
// Agent outputs
|
||||
agentResult: { type: 'json', description: 'Agent execution result (agent operation)' },
|
||||
structuredOutput: { type: 'json', description: 'Structured output data (agent operation)' },
|
||||
liveViewUrl: {
|
||||
type: 'string',
|
||||
description: 'Embeddable Browserbase live view URL (agent operation)',
|
||||
},
|
||||
sessionId: { type: 'string', description: 'Browserbase session identifier (agent operation)' },
|
||||
},
|
||||
}
|
||||
|
||||
@@ -169,6 +169,7 @@ import { RouterBlock, RouterV2Block } from '@/blocks/blocks/router'
|
||||
import { RssBlock } from '@/blocks/blocks/rss'
|
||||
import { S3Block } from '@/blocks/blocks/s3'
|
||||
import { SalesforceBlock } from '@/blocks/blocks/salesforce'
|
||||
import { SapS4HanaBlock } from '@/blocks/blocks/sap_s4hana'
|
||||
import { ScheduleBlock } from '@/blocks/blocks/schedule'
|
||||
import { SearchBlock } from '@/blocks/blocks/search'
|
||||
import { SecretsManagerBlock } from '@/blocks/blocks/secrets_manager'
|
||||
@@ -419,6 +420,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
rss: RssBlock,
|
||||
s3: S3Block,
|
||||
salesforce: SalesforceBlock,
|
||||
sap_s4hana: SapS4HanaBlock,
|
||||
schedule: ScheduleBlock,
|
||||
search: SearchBlock,
|
||||
sendgrid: SendGridBlock,
|
||||
|
||||
@@ -4045,6 +4045,7 @@ export function AsanaIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function PipedriveIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const pathId = useId()
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
@@ -4058,7 +4059,7 @@ export function PipedriveIcon(props: SVGProps<SVGSVGElement>) {
|
||||
<defs>
|
||||
<path
|
||||
d='M59.6807,81.1772 C59.6807,101.5343 70.0078,123.4949 92.7336,123.4949 C109.5872,123.4949 126.6277,110.3374 126.6277,80.8785 C126.6277,55.0508 113.232,37.7119 93.2944,37.7119 C77.0483,37.7119 59.6807,49.1244 59.6807,81.1772 Z M101.3006,0 C142.0482,0 169.4469,32.2728 169.4469,80.3126 C169.4469,127.5978 140.584,160.60942 99.3224,160.60942 C79.6495,160.60942 67.0483,152.1836 60.4595,146.0843 C60.5063,147.5305 60.5374,149.1497 60.5374,150.8788 L60.5374,215 L18.32565,215 L18.32565,44.157 C18.32565,41.6732 17.53126,40.8873 15.07021,40.8873 L0.5531,40.8873 L0.5531,3.4741 L35.9736,3.4741 C52.282,3.4741 56.4564,11.7741 57.2508,18.1721 C63.8708,10.7524 77.5935,0 101.3006,0 Z'
|
||||
id='path-1'
|
||||
id={pathId}
|
||||
/>
|
||||
</defs>
|
||||
<g
|
||||
@@ -4069,10 +4070,7 @@ export function PipedriveIcon(props: SVGProps<SVGSVGElement>) {
|
||||
fillRule='evenodd'
|
||||
>
|
||||
<g transform='translate(67.000000, 44.000000)'>
|
||||
<mask id='mask-2' fill='white'>
|
||||
<use href='#path-1' />
|
||||
</mask>
|
||||
<use id='Clip-5' fill='#FFFFFF' xlinkHref='#path-1' />
|
||||
<use fill='#FFFFFF' xlinkHref={`#${pathId}`} />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
@@ -4098,6 +4096,40 @@ export function SalesforceIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function SapS4HanaIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 412.38 204'>
|
||||
<defs>
|
||||
<linearGradient
|
||||
id={id}
|
||||
x1='206.19'
|
||||
y1='0'
|
||||
x2='206.19'
|
||||
y2='204'
|
||||
gradientUnits='userSpaceOnUse'
|
||||
>
|
||||
<stop offset='0' stopColor='#00b1eb' />
|
||||
<stop offset='.212' stopColor='#009ad9' />
|
||||
<stop offset='.519' stopColor='#007fc4' />
|
||||
<stop offset='.792' stopColor='#006eb8' />
|
||||
<stop offset='1' stopColor='#0069b4' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<polyline
|
||||
fill={`url(#${id})`}
|
||||
fillRule='evenodd'
|
||||
points='0 204 208.413 204 412.38 0 0 0 0 204'
|
||||
/>
|
||||
<path
|
||||
fill='#fff'
|
||||
fillRule='evenodd'
|
||||
d='m244.727,38.359l-40.593-.025v96.518l-35.46-96.518h-35.16l-30.277,80.716c-3.224-20.352-24.277-27.38-40.84-32.649-10.937-3.512-22.541-8.678-22.434-14.387.091-4.687,6.225-9.04,18.377-8.385,8.17.433,15.373,1.092,29.71,8.006l14.102-24.557c-13.088-6.658-31.169-10.867-45.985-10.883h-.086c-17.277,0-31.677,5.598-40.602,14.824-6.221,6.443-9.572,14.626-9.712,23.679-.227,12.454,4.341,21.292,13.938,28.338,8.104,5.944,18.468,9.794,27.603,12.626,11.27,3.492,20.467,6.526,20.36,13.002-.083,2.355-.977,4.552-2.671,6.337-2.807,2.897-7.124,3.986-13.084,4.098-11.497.243-20.026-1.559-33.61-9.585l-12.536,24.903c13.546,7.705,29.586,12.223,45.952,12.223l2.106-.024c14.247-.256,25.745-4.316,34.929-11.712.527-.416,1.001-.845,1.488-1.277l-4.073,10.874h36.875l6.189-18.822c6.477,2.214,13.847,3.437,21.676,3.437,7.618,0,14.795-1.17,21.156-3.252l5.965,18.637h60.137v-38.969h13.113c31.706,0,50.456-16.147,50.456-43.202,0-30.139-18.219-43.969-57.011-43.969Zm-93.816,82.587c-4.737,0-9.177-.828-13.006-2.275l12.866-40.593h.244l12.643,40.708c-3.801,1.349-8.138,2.16-12.746,2.16Zm96.199-23.324h-8.941v-32.711h8.941c11.927,0,21.437,3.961,21.437,16.139,0,12.602-9.51,16.572-21.437,16.572'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function ServiceNowIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 71.1 63.6'>
|
||||
@@ -4694,15 +4726,16 @@ export function DynamoDBIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function IAMIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='iamGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#iamGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M14,59 L66,59 L66,21 L14,21 L14,59 Z M68,20 L68,60 C68,60.552 67.553,61 67,61 L13,61 C12.447,61 12,60.552 12,60 L12,20 C12,19.448 12.447,19 13,19 L67,19 C67.553,19 68,19.448 68,20 L68,20 Z M44,48 L59,48 L59,46 L44,46 L44,48 Z M57,42 L62,42 L62,40 L57,40 L57,42 Z M44,42 L52,42 L52,40 L44,40 L44,42 Z M29,46 C29,45.449 28.552,45 28,45 C27.448,45 27,45.449 27,46 C27,46.551 27.448,47 28,47 C28.552,47 29,46.551 29,46 L29,46 Z M31,46 C31,47.302 30.161,48.401 29,48.816 L29,51 L27,51 L27,48.815 C25.839,48.401 25,47.302 25,46 C25,44.346 26.346,43 28,43 C29.654,43 31,44.346 31,46 L31,46 Z M19,53.993 L36.994,54 L36.996,50 L33,50 L33,48 L36.996,48 L36.998,45 L33,45 L33,43 L36.999,43 L37,40.007 L19.006,40 L19,53.993 Z M22,38.001 L34,38.006 L34,31 C34.001,28.697 31.197,26.677 28,26.675 L27.996,26.675 C24.804,26.675 22.004,28.696 22.002,31 L22,38.001 Z M17,54.992 L17.006,39 C17.006,38.734 17.111,38.48 17.299,38.292 C17.486,38.105 17.741,38 18.006,38 L20,38.001 L20.002,31 C20.004,27.512 23.59,24.675 27.996,24.675 L28,24.675 C32.412,24.677 36.001,27.515 36,31 L36,38.007 L38,38.008 C38.553,38.008 39,38.456 39,39.008 L38.994,55 C38.994,55.266 38.889,55.52 38.701,55.708 C38.514,55.895 38.259,56 37.994,56 L18,55.992 C17.447,55.992 17,55.544 17,54.992 L17,54.992 Z M60,36 L62,36 L62,34 L60,34 L60,36 Z M44,36 L55,36 L55,34 L44,34 L44,36 Z'
|
||||
fill='#FFFFFF'
|
||||
@@ -4712,15 +4745,16 @@ export function IAMIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function IdentityCenterIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='identityCenterGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#identityCenterGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M46.694,46.8194562 C47.376,46.1374562 47.376,45.0294562 46.694,44.3474562 C46.353,44.0074562 45.906,43.8374562 45.459,43.8374562 C45.01,43.8374562 44.563,44.0074562 44.222,44.3474562 C43.542,45.0284562 43.542,46.1384562 44.222,46.8194562 C44.905,47.5014562 46.013,47.4994562 46.694,46.8194562 M47.718,47.1374562 L51.703,51.1204562 L50.996,51.8274562 L49.868,50.6994562 L48.793,51.7754562 L48.086,51.0684562 L49.161,49.9924562 L47.011,47.8444562 C46.545,48.1654562 46.003,48.3294562 45.458,48.3294562 C44.755,48.3294562 44.051,48.0624562 43.515,47.5264562 C42.445,46.4554562 42.445,44.7124562 43.515,43.6404562 C44.586,42.5714562 46.329,42.5694562 47.401,43.6404562 C48.351,44.5904562 48.455,46.0674562 47.718,47.1374562 M53,44.1014562 C53,46.1684562 51.505,47.0934562 50.023,47.0934562 L50.023,46.0934562 C50.487,46.0934562 52,45.9494562 52,44.1014562 C52,43.0044562 51.353,42.3894562 49.905,42.1084562 C49.68,42.0654562 49.514,41.8754562 49.501,41.6484562 C49.446,40.7444562 48.987,40.1124562 48.384,40.1124562 C48.084,40.1124562 47.854,40.2424562 47.616,40.5464562 C47.506,40.6884562 47.324,40.7594562 47.147,40.7324562 C46.968,40.7054562 46.818,40.5844562 46.755,40.4144562 C46.577,39.9434562 46.211,39.4334562 45.723,38.9774562 C45.231,38.5094562 43.883,37.5074562 41.972,38.2734562 C40.885,38.7054562 40.034,39.9494562 40.034,41.1074562 C40.034,41.2354562 40.043,41.3624562 40.058,41.4884562 C40.061,41.5094562 40.062,41.5304562 40.062,41.5514562 C40.062,41.7994562 39.882,42.0064562 39.645,42.0464562 C38.886,42.2394562 38,42.7454562 38,44.0554562 L38.005,44.2104562 C38.069,45.3254562 39.252,45.9954562 40.358,45.9984562 L41,45.9984562 L41,46.9984562 L40.357,46.9984562 C38.536,46.9944562 37.095,45.8194562 37.006,44.2644562 C37.003,44.1944562 37,44.1244562 37,44.0554562 C37,42.6944562 37.752,41.6484562 39.035,41.1884562 C39.034,41.1614562 39.034,41.1344562 39.034,41.1074562 C39.034,39.5434562 40.138,37.9254562 41.602,37.3434562 C43.298,36.6654562 45.095,37.0034562 46.409,38.2494562 
C46.706,38.5274562 47.076,38.9264562 47.372,39.4134562 C47.673,39.2124562 48.008,39.1124562 48.384,39.1124562 C49.257,39.1124562 50.231,39.7714562 50.458,41.2074562 C52.145,41.6324562 53,42.6054562 53,44.1014562 M27,53 L27,27 L53,27 L53,34 L51,34 L51,29 L29,29 L29,51 L51,51 L51,46 L53,46 L53,53 Z'
|
||||
fill='#FFFFFF'
|
||||
@@ -4730,15 +4764,16 @@ export function IdentityCenterIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function STSIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='stsGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#stsGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M14,59 L66,59 L66,21 L14,21 L14,59 Z M68,20 L68,60 C68,60.552 67.553,61 67,61 L13,61 C12.447,61 12,60.552 12,60 L12,20 C12,19.448 12.447,19 13,19 L67,19 C67.553,19 68,19.448 68,20 L68,20 Z M44,48 L59,48 L59,46 L44,46 L44,48 Z M57,42 L62,42 L62,40 L57,40 L57,42 Z M44,42 L52,42 L52,40 L44,40 L44,42 Z M29,46 C29,45.449 28.552,45 28,45 C27.448,45 27,45.449 27,46 C27,46.551 27.448,47 28,47 C28.552,47 29,46.551 29,46 L29,46 Z M31,46 C31,47.302 30.161,48.401 29,48.816 L29,51 L27,51 L27,48.815 C25.839,48.401 25,47.302 25,46 C25,44.346 26.346,43 28,43 C29.654,43 31,44.346 31,46 L31,46 Z M19,53.993 L36.994,54 L36.996,50 L33,50 L33,48 L36.996,48 L36.998,45 L33,45 L33,43 L36.999,43 L37,40.007 L19.006,40 L19,53.993 Z M22,38.001 L34,38.006 L34,31 C34.001,28.697 31.197,26.677 28,26.675 L27.996,26.675 C24.804,26.675 22.004,28.696 22.002,31 L22,38.001 Z M17,54.992 L17.006,39 C17.006,38.734 17.111,38.48 17.299,38.292 C17.486,38.105 17.741,38 18.006,38 L20,38.001 L20.002,31 C20.004,27.512 23.59,24.675 27.996,24.675 L28,24.675 C32.412,24.677 36.001,27.515 36,31 L36,38.007 L38,38.008 C38.553,38.008 39,38.456 39,39.008 L38.994,55 C38.994,55.266 38.889,55.52 38.701,55.708 C38.514,55.895 38.259,56 37.994,56 L18,55.992 C17.447,55.992 17,55.544 17,54.992 L17,54.992 Z M60,36 L62,36 L62,34 L60,34 L60,36 Z M44,36 L55,36 L55,34 L44,34 L44,36 Z'
|
||||
fill='#FFFFFF'
|
||||
@@ -4748,15 +4783,16 @@ export function STSIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function SESIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='sesGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#sesGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M57,60.999875 C57,59.373846 55.626,57.9998214 54,57.9998214 C52.374,57.9998214 51,59.373846 51,60.999875 C51,62.625904 52.374,63.9999286 54,63.9999286 C55.626,63.9999286 57,62.625904 57,60.999875 L57,60.999875 Z M40,59.9998571 C38.374,59.9998571 37,61.3738817 37,62.9999107 C37,64.6259397 38.374,65.9999643 40,65.9999643 C41.626,65.9999643 43,64.6259397 43,62.9999107 C43,61.3738817 41.626,59.9998571 40,59.9998571 L40,59.9998571 Z M26,57.9998214 C24.374,57.9998214 23,59.373846 23,60.999875 C23,62.625904 24.374,63.9999286 26,63.9999286 C27.626,63.9999286 29,62.625904 29,60.999875 C29,59.373846 27.626,57.9998214 26,57.9998214 L26,57.9998214 Z M28.605,42.9995536 L51.395,42.9995536 L43.739,36.1104305 L40.649,38.7584778 C40.463,38.9194807 40.23,38.9994821 39.999,38.9994821 C39.768,38.9994821 39.535,38.9194807 39.349,38.7584778 L36.26,36.1104305 L28.605,42.9995536 Z M27,28.1732888 L27,41.7545313 L34.729,34.7984071 L27,28.1732888 Z M51.297,26.9992678 L28.703,26.9992678 L39.999,36.6824408 L51.297,26.9992678 Z M53,41.7545313 L53,28.1732888 L45.271,34.7974071 L53,41.7545313 Z M59,60.999875 C59,63.7099234 56.71,65.9999643 54,65.9999643 C51.29,65.9999643 49,63.7099234 49,60.999875 C49,58.6308327 50.75,56.5837961 53,56.1057876 L53,52.9997321 L41,52.9997321 L41,58.1058233 C43.25,58.5838319 45,60.6308684 45,62.9999107 C45,65.7099591 42.71,68 40,68 C37.29,68 35,65.7099591 35,62.9999107 C35,60.6308684 36.75,58.5838319 39,58.1058233 L39,52.9997321 L27,52.9997321 L27,56.1057876 C29.25,56.5837961 31,58.6308327 31,60.999875 C31,63.7099234 28.71,65.9999643 26,65.9999643 C23.29,65.9999643 21,63.7099234 21,60.999875 C21,58.6308327 22.75,56.5837961 25,56.1057876 L25,51.9997143 C25,51.4477044 25.447,50.9996964 26,50.9996964 L39,50.9996964 L39,44.9995893 L26,44.9995893 C25.447,44.9995893 25,44.5515813 25,43.9995714 L25,25.99925 C25,25.4472401 25.447,24.9992321 26,24.9992321 L54,24.9992321 C54.553,24.9992321 55,25.4472401 55,25.99925 L55,43.9995714 C55,44.5515813 54.553,44.9995893 
54,44.9995893 L41,44.9995893 L41,50.9996964 L54,50.9996964 C54.553,50.9996964 55,51.4477044 55,51.9997143 L55,56.1057876 C57.25,56.5837961 59,58.6308327 59,60.999875 L59,60.999875 Z M68,39.9995 C68,45.9066055 66.177,51.5597064 62.727,56.3447919 L61.104,55.174771 C64.307,50.7316916 66,45.4845979 66,39.9995 C66,25.664244 54.337,14.0000357 40.001,14.0000357 C25.664,14.0000357 14,25.664244 14,39.9995 C14,45.4845979 15.693,50.7316916 18.896,55.174771 L17.273,56.3447919 C13.823,51.5597064 12,45.9066055 12,39.9995 C12,24.5612243 24.561,12 39.999,12 C55.438,12 68,24.5612243 68,39.9995 L68,39.9995 Z'
|
||||
fill='#FFFFFF'
|
||||
@@ -4766,15 +4802,16 @@ export function SESIcon(props: SVGProps<SVGSVGElement>) {
|
||||
}
|
||||
|
||||
export function SecretsManagerIcon(props: SVGProps<SVGSVGElement>) {
|
||||
const id = useId()
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='secretsManagerGradient'>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id={id}>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#secretsManagerGradient)' width='80' height='80' />
|
||||
<rect fill={`url(#${id})`} width='80' height='80' />
|
||||
<path
|
||||
d='M38.76,43.36 C38.76,44.044 39.317,44.6 40,44.6 C40.684,44.6 41.24,44.044 41.24,43.36 C41.24,42.676 40.684,42.12 40,42.12 C39.317,42.12 38.76,42.676 38.76,43.36 L38.76,43.36 Z M36.76,43.36 C36.76,41.573 38.213,40.12 40,40.12 C41.787,40.12 43.24,41.573 43.24,43.36 C43.24,44.796 42.296,46.002 41,46.426 L41,49 L39,49 L39,46.426 C37.704,46.002 36.76,44.796 36.76,43.36 L36.76,43.36 Z M49,38 L31,38 L31,51 L49,51 L49,48 L46,48 L46,46 L49,46 L49,43 L46,43 L46,41 L49,41 L49,38 Z M34,36 L45.999,36 L46,31 C46.001,28.384 43.143,26.002 40.004,26 L40.001,26 C38.472,26 36.928,26.574 35.763,27.575 C34.643,28.537 34,29.786 34,31.001 L34,36 Z M48,31.001 L47.999,36 L50,36 C50.553,36 51,36.448 51,37 L51,52 C51,52.552 50.553,53 50,53 L30,53 C29.447,53 29,52.552 29,52 L29,37 C29,36.448 29.447,36 30,36 L32,36 L32,31 C32.001,29.202 32.897,27.401 34.459,26.058 C35.982,24.75 38.001,24 40.001,24 L40.004,24 C44.265,24.002 48.001,27.273 48,31.001 L48,31.001 Z M19.207,55.049 L20.828,53.877 C18.093,50.097 16.581,45.662 16.396,41 L19,41 L19,39 L16.399,39 C16.598,34.366 18.108,29.957 20.828,26.198 L19.207,25.025 C16.239,29.128 14.599,33.942 14.399,39 L12,39 L12,41 L14.396,41 C14.582,46.086 16.224,50.926 19.207,55.049 L19.207,55.049 Z M53.838,59.208 C50.069,61.936 45.648,63.446 41,63.639 L41,61 L39,61 L39,63.639 C34.352,63.447 29.93,61.937 26.159,59.208 L24.988,60.828 C29.1,63.805 33.928,65.445 39,65.639 L39,68 L41,68 L41,65.639 C46.072,65.445 50.898,63.805 55.01,60.828 L53.838,59.208 Z M26.159,20.866 C29.93,18.138 34.352,16.628 39,16.436 L39,19 L41,19 L41,16.436 C45.648,16.628 50.069,18.138 53.838,20.866 L55.01,19.246 C50.898,16.27 46.072,14.63 41,14.436 L41,12 L39,12 L39,14.436 C33.928,14.629 29.1,16.269 24.988,19.246 L26.159,20.866 Z M65.599,39 C65.399,33.942 63.759,29.128 60.79,25.025 L59.169,26.198 C61.89,29.957 63.4,34.366 63.599,39 L61,39 L61,41 L63.602,41 C63.416,45.662 61.905,50.097 59.169,53.877 L60.79,55.049 C63.774,50.926 65.415,46.086 65.602,41 L68,41 L68,39 L65.599,39 Z 
M56.386,25.064 L64.226,17.224 L62.812,15.81 L54.972,23.65 L56.386,25.064 Z M23.612,55.01 L15.772,62.85 L17.186,64.264 L25.026,56.424 L23.612,55.01 Z M28.666,27.253 L13.825,12.413 L12.411,13.827 L27.252,28.667 L28.666,27.253 Z M54.193,52.78 L67.586,66.173 L66.172,67.587 L52.779,54.194 L54.193,52.78 Z'
|
||||
fill='#FFFFFF'
|
||||
|
||||
@@ -34,6 +34,7 @@ import {
|
||||
type ExecutionContext,
|
||||
getNextExecutionOrder,
|
||||
type NormalizedBlockOutput,
|
||||
type StreamingExecution,
|
||||
} from '@/executor/types'
|
||||
import { streamingResponseFormatProcessor } from '@/executor/utils'
|
||||
import { buildBlockExecutionError, normalizeError } from '@/executor/utils/errors'
|
||||
@@ -140,7 +141,7 @@ export class BlockExecutor {
|
||||
|
||||
let normalizedOutput: NormalizedBlockOutput
|
||||
if (isStreamingExecution) {
|
||||
const streamingExec = output as { stream: ReadableStream; execution: any }
|
||||
const streamingExec = output as StreamingExecution
|
||||
|
||||
if (ctx.onStream) {
|
||||
await this.handleStreamingExecution(
|
||||
@@ -602,7 +603,7 @@ export class BlockExecutor {
|
||||
ctx: ExecutionContext,
|
||||
node: DAGNode,
|
||||
block: SerializedBlock,
|
||||
streamingExec: { stream: ReadableStream; execution: any },
|
||||
streamingExec: StreamingExecution,
|
||||
resolvedInputs: Record<string, any>,
|
||||
selectedOutputs: string[]
|
||||
): Promise<void> {
|
||||
@@ -613,56 +614,39 @@ export class BlockExecutor {
|
||||
(block.config?.params as Record<string, any> | undefined)?.responseFormat ??
|
||||
(block.config as Record<string, any> | undefined)?.responseFormat
|
||||
|
||||
const stream = streamingExec.stream
|
||||
if (typeof stream.tee !== 'function') {
|
||||
await this.forwardStream(ctx, blockId, streamingExec, stream, responseFormat, selectedOutputs)
|
||||
return
|
||||
}
|
||||
const sourceReader = streamingExec.stream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
const accumulated: string[] = []
|
||||
let drainError: unknown
|
||||
let sourceFullyDrained = false
|
||||
|
||||
const [clientStream, executorStream] = stream.tee()
|
||||
const clientSource = new ReadableStream<Uint8Array>({
|
||||
async pull(controller) {
|
||||
try {
|
||||
const { done, value } = await sourceReader.read()
|
||||
if (done) {
|
||||
const tail = decoder.decode()
|
||||
if (tail) accumulated.push(tail)
|
||||
sourceFullyDrained = true
|
||||
controller.close()
|
||||
return
|
||||
}
|
||||
accumulated.push(decoder.decode(value, { stream: true }))
|
||||
controller.enqueue(value)
|
||||
} catch (error) {
|
||||
drainError = error
|
||||
controller.error(error)
|
||||
}
|
||||
},
|
||||
async cancel(reason) {
|
||||
try {
|
||||
await sourceReader.cancel(reason)
|
||||
} catch {}
|
||||
},
|
||||
})
|
||||
|
||||
const processedClientStream = streamingResponseFormatProcessor.processStream(
|
||||
clientStream,
|
||||
blockId,
|
||||
selectedOutputs,
|
||||
responseFormat
|
||||
)
|
||||
|
||||
const clientStreamingExec = {
|
||||
...streamingExec,
|
||||
stream: processedClientStream,
|
||||
}
|
||||
|
||||
const executorConsumption = this.consumeExecutorStream(
|
||||
executorStream,
|
||||
streamingExec,
|
||||
blockId,
|
||||
responseFormat
|
||||
)
|
||||
|
||||
const clientConsumption = (async () => {
|
||||
try {
|
||||
await ctx.onStream?.(clientStreamingExec)
|
||||
} catch (error) {
|
||||
this.execLogger.error('Error in onStream callback', { blockId, error })
|
||||
// Cancel the client stream to release the tee'd buffer
|
||||
await processedClientStream.cancel().catch(() => {})
|
||||
}
|
||||
})()
|
||||
|
||||
await Promise.all([clientConsumption, executorConsumption])
|
||||
}
|
||||
|
||||
private async forwardStream(
|
||||
ctx: ExecutionContext,
|
||||
blockId: string,
|
||||
streamingExec: { stream: ReadableStream; execution: any },
|
||||
stream: ReadableStream,
|
||||
responseFormat: any,
|
||||
selectedOutputs: string[]
|
||||
): Promise<void> {
|
||||
const processedStream = streamingResponseFormatProcessor.processStream(
|
||||
stream,
|
||||
clientSource,
|
||||
blockId,
|
||||
selectedOutputs,
|
||||
responseFormat
|
||||
@@ -670,72 +654,75 @@ export class BlockExecutor {
|
||||
|
||||
try {
|
||||
await ctx.onStream?.({
|
||||
...streamingExec,
|
||||
stream: processedStream,
|
||||
stream: processedClientStream,
|
||||
execution: streamingExec.execution,
|
||||
})
|
||||
} catch (error) {
|
||||
this.execLogger.error('Error in onStream callback', { blockId, error })
|
||||
await processedStream.cancel().catch(() => {})
|
||||
}
|
||||
}
|
||||
|
||||
private async consumeExecutorStream(
|
||||
stream: ReadableStream,
|
||||
streamingExec: { execution: any },
|
||||
blockId: string,
|
||||
responseFormat: any
|
||||
): Promise<void> {
|
||||
const reader = stream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
const chunks: string[] = []
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
chunks.push(decoder.decode(value, { stream: true }))
|
||||
}
|
||||
const tail = decoder.decode()
|
||||
if (tail) chunks.push(tail)
|
||||
} catch (error) {
|
||||
this.execLogger.error('Error reading executor stream for block', { blockId, error })
|
||||
await processedClientStream.cancel().catch(() => {})
|
||||
} finally {
|
||||
try {
|
||||
await reader.cancel().catch(() => {})
|
||||
sourceReader.releaseLock()
|
||||
} catch {}
|
||||
}
|
||||
|
||||
const fullContent = chunks.join('')
|
||||
if (drainError) {
|
||||
this.execLogger.error('Error reading stream for block', { blockId, error: drainError })
|
||||
return
|
||||
}
|
||||
|
||||
// If the onStream consumer exited before the source drained (e.g. it caught
|
||||
// an internal error and returned normally), `accumulated` holds a truncated
|
||||
// response. Persisting that to memory or setting it as the block output
|
||||
// would corrupt downstream state — skip and log instead.
|
||||
if (!sourceFullyDrained) {
|
||||
this.execLogger.warn(
|
||||
'Stream consumer exited before source drained; skipping content persistence',
|
||||
{
|
||||
blockId,
|
||||
}
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const fullContent = accumulated.join('')
|
||||
if (!fullContent) {
|
||||
return
|
||||
}
|
||||
|
||||
const executionOutput = streamingExec.execution?.output
|
||||
if (!executionOutput || typeof executionOutput !== 'object') {
|
||||
return
|
||||
}
|
||||
|
||||
if (responseFormat) {
|
||||
try {
|
||||
const parsed = JSON.parse(fullContent.trim())
|
||||
|
||||
streamingExec.execution.output = {
|
||||
...parsed,
|
||||
tokens: executionOutput.tokens,
|
||||
toolCalls: executionOutput.toolCalls,
|
||||
providerTiming: executionOutput.providerTiming,
|
||||
cost: executionOutput.cost,
|
||||
model: executionOutput.model,
|
||||
if (executionOutput && typeof executionOutput === 'object') {
|
||||
let parsedForFormat = false
|
||||
if (responseFormat) {
|
||||
try {
|
||||
const parsed = JSON.parse(fullContent.trim())
|
||||
streamingExec.execution.output = {
|
||||
...parsed,
|
||||
tokens: executionOutput.tokens,
|
||||
toolCalls: executionOutput.toolCalls,
|
||||
providerTiming: executionOutput.providerTiming,
|
||||
cost: executionOutput.cost,
|
||||
model: executionOutput.model,
|
||||
}
|
||||
parsedForFormat = true
|
||||
} catch (error) {
|
||||
this.execLogger.warn('Failed to parse streamed content for response format', {
|
||||
blockId,
|
||||
error,
|
||||
})
|
||||
}
|
||||
return
|
||||
} catch (error) {
|
||||
this.execLogger.warn('Failed to parse streamed content for response format', {
|
||||
blockId,
|
||||
error,
|
||||
})
|
||||
}
|
||||
if (!parsedForFormat) {
|
||||
executionOutput.content = fullContent
|
||||
}
|
||||
}
|
||||
|
||||
executionOutput.content = fullContent
|
||||
if (streamingExec.onFullContent) {
|
||||
try {
|
||||
await streamingExec.onFullContent(fullContent)
|
||||
} catch (error) {
|
||||
this.execLogger.error('onFullContent callback failed', { blockId, error })
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -958,8 +958,16 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
streamingExec: StreamingExecution
|
||||
): StreamingExecution {
|
||||
return {
|
||||
stream: memoryService.wrapStreamForPersistence(streamingExec.stream, ctx, inputs),
|
||||
stream: streamingExec.stream,
|
||||
execution: streamingExec.execution,
|
||||
onFullContent: async (content: string) => {
|
||||
if (!content.trim()) return
|
||||
try {
|
||||
await memoryService.appendToMemory(ctx, inputs, { role: 'assistant', content })
|
||||
} catch (error) {
|
||||
logger.error('Failed to persist streaming response:', error)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -111,35 +111,6 @@ export class Memory {
|
||||
})
|
||||
}
|
||||
|
||||
wrapStreamForPersistence(
|
||||
stream: ReadableStream<Uint8Array>,
|
||||
ctx: ExecutionContext,
|
||||
inputs: AgentInputs
|
||||
): ReadableStream<Uint8Array> {
|
||||
const chunks: string[] = []
|
||||
const decoder = new TextDecoder()
|
||||
|
||||
const transformStream = new TransformStream<Uint8Array, Uint8Array>({
|
||||
transform: (chunk, controller) => {
|
||||
controller.enqueue(chunk)
|
||||
const decoded = decoder.decode(chunk, { stream: true })
|
||||
chunks.push(decoded)
|
||||
},
|
||||
|
||||
flush: () => {
|
||||
const content = chunks.join('')
|
||||
if (content.trim()) {
|
||||
this.appendToMemory(ctx, inputs, {
|
||||
role: 'assistant',
|
||||
content,
|
||||
}).catch((error) => logger.error('Failed to persist streaming response:', error))
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
return stream.pipeThrough(transformStream)
|
||||
}
|
||||
|
||||
private requireWorkspaceId(ctx: ExecutionContext): string {
|
||||
if (!ctx.workspaceId) {
|
||||
throw new Error('workspaceId is required for memory operations')
|
||||
|
||||
@@ -717,10 +717,13 @@ export class LoopOrchestrator {
|
||||
})
|
||||
|
||||
if (vmResult.error) {
|
||||
logger.error('Failed to evaluate loop condition', {
|
||||
const isSystemError = vmResult.error.isSystemError === true
|
||||
const logFn = isSystemError ? logger.error.bind(logger) : logger.warn.bind(logger)
|
||||
logFn('Failed to evaluate loop condition', {
|
||||
condition,
|
||||
evaluatedCondition,
|
||||
error: vmResult.error,
|
||||
isSystemError,
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -359,6 +359,12 @@ export interface ExecutionResult {
|
||||
export interface StreamingExecution {
|
||||
stream: ReadableStream
|
||||
execution: ExecutionResult & { isStreaming?: boolean }
|
||||
/**
|
||||
* Invoked with the assembled response text after the stream drains. Lets agent
|
||||
* blocks persist the full response without interposing a TransformStream on a
|
||||
* fetch-backed source — that pattern amplifies memory on Bun via #28035.
|
||||
*/
|
||||
onFullContent?: (content: string) => void | Promise<void>
|
||||
}
|
||||
|
||||
export interface BlockExecutor {
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { workspaceCredentialKeys } from '@/hooks/queries/credentials'
|
||||
import { organizationKeys } from '@/hooks/queries/organization'
|
||||
import { workspaceKeys } from './workspace'
|
||||
|
||||
/**
|
||||
* Query key factory for invitation-related queries.
|
||||
* Provides hierarchical cache keys for workspace invitations.
|
||||
*/
|
||||
export const invitationKeys = {
|
||||
all: ['invitations'] as const,
|
||||
lists: () => [...invitationKeys.all, 'list'] as const,
|
||||
@@ -17,6 +14,7 @@ export interface PendingInvitationRow {
|
||||
workspaceId: string
|
||||
email: string
|
||||
permission: 'admin' | 'write' | 'read'
|
||||
membershipIntent?: 'internal' | 'external'
|
||||
status: string
|
||||
createdAt: string
|
||||
}
|
||||
@@ -25,6 +23,7 @@ export interface WorkspaceInvitation {
|
||||
email: string
|
||||
permissionType: 'admin' | 'write' | 'read'
|
||||
isPendingInvitation: boolean
|
||||
isExternal: boolean
|
||||
invitationId?: string
|
||||
}
|
||||
|
||||
@@ -49,6 +48,7 @@ async function fetchPendingInvitations(
|
||||
email: inv.email,
|
||||
permissionType: inv.permission,
|
||||
isPendingInvitation: true,
|
||||
isExternal: inv.membershipIntent === 'external',
|
||||
invitationId: inv.id,
|
||||
})) || []
|
||||
)
|
||||
@@ -70,6 +70,7 @@ export function usePendingInvitations(workspaceId: string | undefined) {
|
||||
|
||||
interface BatchSendInvitationsParams {
|
||||
workspaceId: string
|
||||
organizationId?: string | null
|
||||
invitations: Array<{ email: string; permission: 'admin' | 'write' | 'read' }>
|
||||
}
|
||||
|
||||
@@ -79,7 +80,7 @@ interface BatchInvitationResult {
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends multiple workspace invitations in parallel.
|
||||
* Sends workspace invitations through the server-side batch endpoint.
|
||||
* Returns results for each invitation indicating success or failure.
|
||||
*/
|
||||
export function useBatchSendWorkspaceInvitations() {
|
||||
@@ -90,45 +91,38 @@ export function useBatchSendWorkspaceInvitations() {
|
||||
workspaceId,
|
||||
invitations,
|
||||
}: BatchSendInvitationsParams): Promise<BatchInvitationResult> => {
|
||||
const results = await Promise.allSettled(
|
||||
invitations.map(async ({ email, permission }) => {
|
||||
const response = await fetch('/api/workspaces/invitations', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
workspaceId,
|
||||
email,
|
||||
permission,
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json()
|
||||
throw new Error(error.error || 'Failed to send invitation')
|
||||
}
|
||||
|
||||
return { email, data: await response.json() }
|
||||
})
|
||||
)
|
||||
|
||||
const successful: string[] = []
|
||||
const failed: Array<{ email: string; error: string }> = []
|
||||
|
||||
results.forEach((result, index) => {
|
||||
const email = invitations[index].email
|
||||
if (result.status === 'fulfilled') {
|
||||
successful.push(email)
|
||||
} else {
|
||||
failed.push({ email, error: result.reason?.message || 'Unknown error' })
|
||||
}
|
||||
const response = await fetch('/api/workspaces/invitations/batch', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
workspaceId,
|
||||
invitations,
|
||||
}),
|
||||
})
|
||||
|
||||
return { successful, failed }
|
||||
const result = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(result.error || 'Failed to send invitations')
|
||||
}
|
||||
|
||||
return {
|
||||
successful: result.successful ?? [],
|
||||
failed: result.failed ?? [],
|
||||
}
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: invitationKeys.list(variables.workspaceId),
|
||||
})
|
||||
if (variables.organizationId) {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: organizationKeys.roster(variables.organizationId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: organizationKeys.billing(variables.organizationId),
|
||||
})
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -136,6 +130,7 @@ export function useBatchSendWorkspaceInvitations() {
|
||||
interface CancelInvitationParams {
|
||||
invitationId: string
|
||||
workspaceId: string
|
||||
organizationId?: string | null
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -159,10 +154,18 @@ export function useCancelWorkspaceInvitation() {
|
||||
|
||||
return response.json()
|
||||
},
|
||||
onSuccess: (_data, variables) => {
|
||||
onSettled: (_data, _error, variables) => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: invitationKeys.list(variables.workspaceId),
|
||||
})
|
||||
if (variables.organizationId) {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: organizationKeys.roster(variables.organizationId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: organizationKeys.billing(variables.organizationId),
|
||||
})
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -204,6 +207,7 @@ export function useResendWorkspaceInvitation() {
|
||||
interface RemoveMemberParams {
|
||||
userId: string
|
||||
workspaceId: string
|
||||
organizationId?: string | null
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -232,6 +236,17 @@ export function useRemoveWorkspaceMember() {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceKeys.permissions(variables.workspaceId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceKeys.members(variables.workspaceId),
|
||||
})
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: workspaceCredentialKeys.all,
|
||||
})
|
||||
if (variables.organizationId) {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: organizationKeys.roster(variables.organizationId),
|
||||
})
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -3,10 +3,12 @@ import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tansta
|
||||
import { client } from '@/lib/auth/auth-client'
|
||||
import { isEnterprise, isPaid, isTeam } from '@/lib/billing/plan-helpers'
|
||||
import { hasPaidSubscriptionStatus } from '@/lib/billing/subscriptions/utils'
|
||||
import { workspaceCredentialKeys } from '@/hooks/queries/credentials'
|
||||
import { subscriptionKeys } from '@/hooks/queries/subscription'
|
||||
import { workspaceKeys } from '@/hooks/queries/workspace'
|
||||
|
||||
const logger = createLogger('OrganizationQueries')
|
||||
const invitationListsKey = ['invitations', 'list'] as const
|
||||
|
||||
/**
|
||||
* Query key factories for organization-related queries
|
||||
@@ -33,7 +35,7 @@ export type RosterWorkspaceAccess = {
|
||||
export type RosterMember = {
|
||||
memberId: string
|
||||
userId: string
|
||||
role: string
|
||||
role: 'owner' | 'admin' | 'member' | 'external'
|
||||
createdAt: string
|
||||
name: string
|
||||
email: string
|
||||
@@ -46,6 +48,7 @@ export type RosterPendingInvitation = {
|
||||
email: string
|
||||
role: string
|
||||
kind: 'organization' | 'workspace'
|
||||
membershipIntent?: 'internal' | 'external'
|
||||
createdAt: string
|
||||
expiresAt: string
|
||||
inviteeName: string | null
|
||||
@@ -401,6 +404,9 @@ export function useRemoveMember() {
|
||||
queryClient.invalidateQueries({ queryKey: organizationKeys.roster(variables.orgId) })
|
||||
queryClient.invalidateQueries({ queryKey: organizationKeys.lists() })
|
||||
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
|
||||
queryClient.invalidateQueries({ queryKey: workspaceKeys.all })
|
||||
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.all })
|
||||
queryClient.invalidateQueries({ queryKey: invitationListsKey })
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -531,7 +537,9 @@ export function useCancelInvitation() {
|
||||
onSuccess: (_data, variables) => {
|
||||
queryClient.invalidateQueries({ queryKey: organizationKeys.detail(variables.orgId) })
|
||||
queryClient.invalidateQueries({ queryKey: organizationKeys.roster(variables.orgId) })
|
||||
queryClient.invalidateQueries({ queryKey: organizationKeys.billing(variables.orgId) })
|
||||
queryClient.invalidateQueries({ queryKey: organizationKeys.lists() })
|
||||
queryClient.invalidateQueries({ queryKey: invitationListsKey })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
|
||||
import type { PersistedMessage } from '@/lib/copilot/chat/persisted-message'
|
||||
import { normalizeMessage } from '@/lib/copilot/chat/persisted-message'
|
||||
import {
|
||||
@@ -254,7 +254,6 @@ export function useTasks(workspaceId?: string) {
|
||||
queryKey: taskKeys.list(workspaceId),
|
||||
queryFn: ({ signal }) => fetchTasks(workspaceId as string, signal),
|
||||
enabled: Boolean(workspaceId),
|
||||
placeholderData: keepPreviousData,
|
||||
staleTime: 60 * 1000,
|
||||
})
|
||||
}
|
||||
@@ -535,6 +534,10 @@ async function markTaskUnread(chatId: string): Promise<void> {
|
||||
|
||||
/**
|
||||
* Marks a task as read with optimistic update.
|
||||
*
|
||||
* The server only updates `lastSeenAt`, never `updatedAt`, so we deliberately
|
||||
* do not invalidate the list cache — that would trigger a refetch that can
|
||||
* reorder the sidebar if any unrelated server-side update landed in between.
|
||||
*/
|
||||
export function useMarkTaskRead(workspaceId?: string) {
|
||||
const queryClient = useQueryClient()
|
||||
@@ -556,14 +559,14 @@ export function useMarkTaskRead(workspaceId?: string) {
|
||||
queryClient.setQueryData(taskKeys.list(workspaceId), context.previousTasks)
|
||||
}
|
||||
},
|
||||
onSettled: () => {
|
||||
queryClient.invalidateQueries({ queryKey: taskKeys.list(workspaceId) })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks a task as unread with optimistic update.
|
||||
*
|
||||
* Same rationale as `useMarkTaskRead` — no list invalidation, since the server
|
||||
* only flips `lastSeenAt` and the optimistic update fully reflects the change.
|
||||
*/
|
||||
export function useMarkTaskUnread(workspaceId?: string) {
|
||||
const queryClient = useQueryClient()
|
||||
@@ -585,8 +588,5 @@ export function useMarkTaskUnread(workspaceId?: string) {
|
||||
queryClient.setQueryData(taskKeys.list(workspaceId), context.previousTasks)
|
||||
}
|
||||
},
|
||||
onSettled: () => {
|
||||
queryClient.invalidateQueries({ queryKey: taskKeys.list(workspaceId) })
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -251,6 +251,7 @@ export interface WorkspaceUser {
|
||||
name: string | null
|
||||
image: string | null
|
||||
permissionType: 'admin' | 'write' | 'read'
|
||||
isExternal: boolean
|
||||
}
|
||||
|
||||
/** Viewer context for a workspace permissions response. */
|
||||
|
||||
@@ -18,6 +18,20 @@ import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||
|
||||
const logger = createLogger('useExecutionStream')
|
||||
|
||||
export class ExecutionStreamHttpError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
public readonly httpStatus: number
|
||||
) {
|
||||
super(message)
|
||||
this.name = 'ExecutionStreamHttpError'
|
||||
}
|
||||
}
|
||||
|
||||
export function isExecutionStreamHttpError(error: unknown): error is ExecutionStreamHttpError {
|
||||
return error instanceof ExecutionStreamHttpError
|
||||
}
|
||||
|
||||
/**
|
||||
* Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
|
||||
* These should be treated as clean disconnects, not execution errors.
|
||||
@@ -205,11 +219,13 @@ export function useExecutionStream() {
|
||||
|
||||
if (!response.ok) {
|
||||
const errorResponse = await response.json()
|
||||
const error = new Error(errorResponse.error || 'Failed to start execution')
|
||||
const error = new ExecutionStreamHttpError(
|
||||
errorResponse.error || 'Failed to start execution',
|
||||
response.status
|
||||
)
|
||||
if (errorResponse && typeof errorResponse === 'object') {
|
||||
Object.assign(error, { executionResult: errorResponse })
|
||||
}
|
||||
Object.assign(error, { httpStatus: response.status })
|
||||
throw error
|
||||
}
|
||||
|
||||
@@ -279,15 +295,18 @@ export function useExecutionStream() {
|
||||
try {
|
||||
errorResponse = await response.json()
|
||||
} catch {
|
||||
const error = new Error(`Server error (${response.status}): ${response.statusText}`)
|
||||
Object.assign(error, { httpStatus: response.status })
|
||||
throw error
|
||||
throw new ExecutionStreamHttpError(
|
||||
`Server error (${response.status}): ${response.statusText}`,
|
||||
response.status
|
||||
)
|
||||
}
|
||||
const error = new Error(errorResponse.error || 'Failed to start execution')
|
||||
const error = new ExecutionStreamHttpError(
|
||||
errorResponse.error || 'Failed to start execution',
|
||||
response.status
|
||||
)
|
||||
if (errorResponse && typeof errorResponse === 'object') {
|
||||
Object.assign(error, { executionResult: errorResponse })
|
||||
}
|
||||
Object.assign(error, { httpStatus: response.status })
|
||||
throw error
|
||||
}
|
||||
|
||||
@@ -335,7 +354,9 @@ export function useExecutionStream() {
|
||||
`/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
|
||||
{ signal: abortController.signal }
|
||||
)
|
||||
if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
|
||||
if (!response.ok) {
|
||||
throw new ExecutionStreamHttpError(`Reconnect failed (${response.status})`, response.status)
|
||||
}
|
||||
if (!response.body) throw new Error('No response body')
|
||||
|
||||
await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
|
||||
|
||||
@@ -50,12 +50,9 @@ describe('handleTaskStatusEvent', () => {
|
||||
})
|
||||
})
|
||||
|
||||
it('preserves list invalidation when task event payload is invalid', () => {
|
||||
it('does not invalidate when task event payload is invalid', () => {
|
||||
handleTaskStatusEvent(queryClient, 'ws-1', '{')
|
||||
|
||||
expect(queryClient.invalidateQueries).toHaveBeenCalledTimes(1)
|
||||
expect(queryClient.invalidateQueries).toHaveBeenCalledWith({
|
||||
queryKey: taskKeys.list('ws-1'),
|
||||
})
|
||||
expect(queryClient.invalidateQueries).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -41,13 +41,13 @@ export function handleTaskStatusEvent(
|
||||
workspaceId: string,
|
||||
data: unknown
|
||||
): void {
|
||||
queryClient.invalidateQueries({ queryKey: taskKeys.list(workspaceId) })
|
||||
|
||||
const payload = parseTaskStatusEventPayload(data)
|
||||
if (!payload) {
|
||||
logger.warn('Received invalid task_status payload')
|
||||
return
|
||||
}
|
||||
|
||||
queryClient.invalidateQueries({ queryKey: taskKeys.list(workspaceId) })
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, organization, user, userStats } from '@sim/db/schema'
|
||||
import { invitation, member, organization, user, userStats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { and, count, eq, gt, ne } from 'drizzle-orm'
|
||||
import { isOrganizationBillingBlocked } from '@/lib/billing/core/access'
|
||||
import { getOrganizationSubscription, getPlanPricing } from '@/lib/billing/core/billing'
|
||||
import {
|
||||
@@ -172,6 +172,19 @@ export async function getOrganizationBillingData(
|
||||
|
||||
const averageUsagePerMember = members.length > 0 ? totalCurrentUsage / members.length : 0
|
||||
|
||||
const [pendingInvitationCount] = await db
|
||||
.select({ count: count() })
|
||||
.from(invitation)
|
||||
.where(
|
||||
and(
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.status, 'pending'),
|
||||
ne(invitation.membershipIntent, 'external'),
|
||||
gt(invitation.expiresAt, new Date())
|
||||
)
|
||||
)
|
||||
const usedSeats = members.length + (pendingInvitationCount?.count ?? 0)
|
||||
|
||||
const billingPeriodStart = subscription.periodStart || null
|
||||
const billingPeriodEnd = subscription.periodEnd || null
|
||||
|
||||
@@ -181,7 +194,7 @@ export async function getOrganizationBillingData(
|
||||
subscriptionPlan: subscription.plan,
|
||||
subscriptionStatus: subscription.status || 'inactive',
|
||||
totalSeats: effectiveSeats,
|
||||
usedSeats: members.length,
|
||||
usedSeats,
|
||||
seatsCount: licensedSeats,
|
||||
totalCurrentUsage: roundCurrency(totalCurrentUsage),
|
||||
totalUsageLimit: roundCurrency(totalUsageLimit),
|
||||
|
||||
@@ -7,8 +7,12 @@
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
credential,
|
||||
credentialMember,
|
||||
invitation,
|
||||
member,
|
||||
organization,
|
||||
permissionGroupMember,
|
||||
permissions,
|
||||
subscription as subscriptionTable,
|
||||
user,
|
||||
@@ -25,7 +29,7 @@ import { toDecimal, toNumber } from '@/lib/billing/utils/decimal'
|
||||
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
|
||||
import { OUTBOX_EVENT_TYPES } from '@/lib/billing/webhooks/outbox-handlers'
|
||||
import { enqueueOutboxEvent } from '@/lib/core/outbox/service'
|
||||
import { revokeWorkspaceCredentialMemberships } from '@/lib/credentials/access'
|
||||
import type { DbOrTx } from '@/lib/db/types'
|
||||
|
||||
const logger = createLogger('OrganizationMembership')
|
||||
|
||||
@@ -233,6 +237,7 @@ export interface AddMemberResult {
|
||||
success: boolean
|
||||
memberId?: string
|
||||
error?: string
|
||||
failureCode?: MembershipAdditionFailureCode
|
||||
billingActions: {
|
||||
proUsageSnapshotted: boolean
|
||||
/**
|
||||
@@ -265,12 +270,200 @@ export interface RemoveMemberResult {
|
||||
proRestored: boolean
|
||||
usageRestored: boolean
|
||||
workspaceAccessRevoked: number
|
||||
pendingInvitationsCancelled: number
|
||||
}
|
||||
}
|
||||
|
||||
export interface RemoveExternalWorkspaceAccessResult {
|
||||
success: boolean
|
||||
error?: string
|
||||
workspaceAccessRevoked: number
|
||||
permissionGroupsRevoked: number
|
||||
credentialMembershipsRevoked: number
|
||||
pendingInvitationsCancelled: number
|
||||
}
|
||||
|
||||
export type MembershipAdditionFailureCode =
|
||||
| 'user-not-found'
|
||||
| 'organization-not-found'
|
||||
| 'already-member'
|
||||
| 'already-in-other-organization'
|
||||
| 'no-seats-available'
|
||||
|
||||
async function reassignOwnedOrganizationWorkspacesTx({
|
||||
tx,
|
||||
userId,
|
||||
organizationId,
|
||||
workspaceIds,
|
||||
}: {
|
||||
tx: DbOrTx
|
||||
userId: string
|
||||
organizationId: string
|
||||
workspaceIds: string[]
|
||||
}) {
|
||||
const [ownerMembership] = await tx
|
||||
.select({ userId: member.userId })
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.role, 'owner')))
|
||||
.limit(1)
|
||||
|
||||
const ownerId = ownerMembership?.userId
|
||||
if (!ownerId || ownerId === userId || workspaceIds.length === 0) return 0
|
||||
|
||||
const reassignedWorkspaces = await tx
|
||||
.update(workspace)
|
||||
.set({ ownerId, updatedAt: new Date() })
|
||||
.where(
|
||||
and(
|
||||
eq(workspace.organizationId, organizationId),
|
||||
eq(workspace.ownerId, userId),
|
||||
inArray(workspace.id, workspaceIds)
|
||||
)
|
||||
)
|
||||
.returning({ id: workspace.id })
|
||||
|
||||
if (reassignedWorkspaces.length === 0) return 0
|
||||
|
||||
const now = new Date()
|
||||
await tx
|
||||
.update(permissions)
|
||||
.set({ permissionType: 'admin', updatedAt: now })
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, ownerId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
inArray(
|
||||
permissions.entityId,
|
||||
reassignedWorkspaces.map((row) => row.id)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
await tx
|
||||
.insert(permissions)
|
||||
.values(
|
||||
reassignedWorkspaces.map((row) => ({
|
||||
id: generateId(),
|
||||
userId: ownerId,
|
||||
entityType: 'workspace',
|
||||
entityId: row.id,
|
||||
permissionType: 'admin' as const,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
}))
|
||||
)
|
||||
.onConflictDoNothing()
|
||||
|
||||
return reassignedWorkspaces.length
|
||||
}
|
||||
|
||||
async function revokeWorkspaceCredentialMembershipsTx({
|
||||
tx,
|
||||
workspaceIds,
|
||||
userId,
|
||||
}: {
|
||||
tx: DbOrTx
|
||||
workspaceIds: string[]
|
||||
userId: string
|
||||
}) {
|
||||
if (workspaceIds.length === 0) return 0
|
||||
|
||||
const workspaceCredentialRows = await tx
|
||||
.select({
|
||||
credentialId: credential.id,
|
||||
workspaceId: credential.workspaceId,
|
||||
ownerId: workspace.ownerId,
|
||||
})
|
||||
.from(credential)
|
||||
.innerJoin(workspace, eq(credential.workspaceId, workspace.id))
|
||||
.where(inArray(credential.workspaceId, workspaceIds))
|
||||
|
||||
if (workspaceCredentialRows.length === 0) return 0
|
||||
|
||||
const credentialIds = workspaceCredentialRows.map((row) => row.credentialId)
|
||||
const ownerByCredentialId = new Map(
|
||||
workspaceCredentialRows.map((row) => [row.credentialId, row.ownerId])
|
||||
)
|
||||
|
||||
const userAdminMemberships = await tx
|
||||
.select({ credentialId: credentialMember.credentialId })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.userId, userId),
|
||||
eq(credentialMember.role, 'admin'),
|
||||
eq(credentialMember.status, 'active'),
|
||||
inArray(credentialMember.credentialId, credentialIds)
|
||||
)
|
||||
)
|
||||
|
||||
for (const { credentialId } of userAdminMemberships) {
|
||||
const ownerId = ownerByCredentialId.get(credentialId)
|
||||
if (!ownerId || ownerId === userId) continue
|
||||
|
||||
const otherAdmins = await tx
|
||||
.select({ id: credentialMember.id })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.credentialId, credentialId),
|
||||
eq(credentialMember.role, 'admin'),
|
||||
eq(credentialMember.status, 'active'),
|
||||
ne(credentialMember.userId, userId)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (otherAdmins.length > 0) continue
|
||||
|
||||
const now = new Date()
|
||||
const [existingOwnerMembership] = await tx
|
||||
.select({ id: credentialMember.id })
|
||||
.from(credentialMember)
|
||||
.where(
|
||||
and(eq(credentialMember.credentialId, credentialId), eq(credentialMember.userId, ownerId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (existingOwnerMembership) {
|
||||
await tx
|
||||
.update(credentialMember)
|
||||
.set({ role: 'admin', status: 'active', updatedAt: now })
|
||||
.where(eq(credentialMember.id, existingOwnerMembership.id))
|
||||
} else {
|
||||
await tx.insert(credentialMember).values({
|
||||
id: generateId(),
|
||||
credentialId,
|
||||
userId: ownerId,
|
||||
role: 'admin',
|
||||
status: 'active',
|
||||
joinedAt: now,
|
||||
invitedBy: ownerId,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const revokedMemberships = await tx
|
||||
.update(credentialMember)
|
||||
.set({ status: 'revoked', updatedAt: new Date() })
|
||||
.where(
|
||||
and(
|
||||
eq(credentialMember.userId, userId),
|
||||
eq(credentialMember.status, 'active'),
|
||||
inArray(credentialMember.credentialId, credentialIds)
|
||||
)
|
||||
)
|
||||
.returning({ credentialId: credentialMember.credentialId })
|
||||
|
||||
return revokedMemberships.length
|
||||
}
|
||||
|
||||
export interface MembershipValidationResult {
|
||||
canAdd: boolean
|
||||
reason?: string
|
||||
failureCode?: MembershipAdditionFailureCode
|
||||
existingOrgId?: string
|
||||
seatValidation?: {
|
||||
currentSeats: number
|
||||
@@ -301,6 +494,7 @@ export async function ensureUserInOrganization(
|
||||
success: false,
|
||||
alreadyMember: false,
|
||||
existingOrgId: existingMembership.organizationId,
|
||||
failureCode: 'already-in-other-organization',
|
||||
error:
|
||||
'User is already a member of another organization. Users can only belong to one organization at a time.',
|
||||
billingActions: {
|
||||
@@ -330,7 +524,7 @@ export async function validateMembershipAddition(
|
||||
const [userData] = await db.select({ id: user.id }).from(user).where(eq(user.id, userId)).limit(1)
|
||||
|
||||
if (!userData) {
|
||||
return { canAdd: false, reason: 'User not found' }
|
||||
return { canAdd: false, reason: 'User not found', failureCode: 'user-not-found' }
|
||||
}
|
||||
|
||||
const [orgData] = await db
|
||||
@@ -340,7 +534,11 @@ export async function validateMembershipAddition(
|
||||
.limit(1)
|
||||
|
||||
if (!orgData) {
|
||||
return { canAdd: false, reason: 'Organization not found' }
|
||||
return {
|
||||
canAdd: false,
|
||||
reason: 'Organization not found',
|
||||
failureCode: 'organization-not-found',
|
||||
}
|
||||
}
|
||||
|
||||
const existingMemberships = await db
|
||||
@@ -354,13 +552,18 @@ export async function validateMembershipAddition(
|
||||
)
|
||||
|
||||
if (isAlreadyMemberOfThisOrg) {
|
||||
return { canAdd: false, reason: 'User is already a member of this organization' }
|
||||
return {
|
||||
canAdd: false,
|
||||
reason: 'User is already a member of this organization',
|
||||
failureCode: 'already-member',
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
canAdd: false,
|
||||
reason:
|
||||
'User is already a member of another organization. Users can only belong to one organization at a time.',
|
||||
failureCode: 'already-in-other-organization',
|
||||
existingOrgId: existingMemberships[0].organizationId,
|
||||
}
|
||||
}
|
||||
@@ -372,6 +575,7 @@ export async function validateMembershipAddition(
|
||||
return {
|
||||
canAdd: false,
|
||||
reason: seatValidation.reason || 'No seats available',
|
||||
failureCode: 'no-seats-available',
|
||||
seatValidation: {
|
||||
currentSeats: seatValidation.currentSeats,
|
||||
maxSeats: seatValidation.maxSeats,
|
||||
@@ -573,7 +777,12 @@ export async function addUserToOrganization(params: AddMemberParams): Promise<Ad
|
||||
acceptingInvitationId,
|
||||
})
|
||||
if (!validation.canAdd) {
|
||||
return { success: false, error: validation.reason, billingActions }
|
||||
return {
|
||||
success: false,
|
||||
error: validation.reason,
|
||||
failureCode: validation.failureCode,
|
||||
billingActions,
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const existingMemberships = await db
|
||||
@@ -590,6 +799,7 @@ export async function addUserToOrganization(params: AddMemberParams): Promise<Ad
|
||||
return {
|
||||
success: false,
|
||||
error: 'User is already a member of this organization',
|
||||
failureCode: 'already-member',
|
||||
billingActions,
|
||||
}
|
||||
}
|
||||
@@ -598,6 +808,7 @@ export async function addUserToOrganization(params: AddMemberParams): Promise<Ad
|
||||
success: false,
|
||||
error:
|
||||
'User is already a member of another organization. Users can only belong to one organization at a time.',
|
||||
failureCode: 'already-in-other-organization',
|
||||
billingActions,
|
||||
}
|
||||
}
|
||||
@@ -676,6 +887,7 @@ export async function removeUserFromOrganization(
|
||||
proRestored: false,
|
||||
usageRestored: false,
|
||||
workspaceAccessRevoked: 0,
|
||||
pendingInvitationsCancelled: 0,
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -697,7 +909,12 @@ export async function removeUserFromOrganization(
|
||||
return { success: false, error: 'Cannot remove organization owner', billingActions }
|
||||
}
|
||||
|
||||
const { workspaceIdsToRevoke, usageCaptured } = await db.transaction(async (tx) => {
|
||||
const {
|
||||
workspaceIdsToRevoke,
|
||||
usageCaptured,
|
||||
credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled,
|
||||
} = await db.transaction(async (tx) => {
|
||||
const deletedMember = await tx
|
||||
.delete(member)
|
||||
.where(and(eq(member.id, memberId), ne(member.role, 'owner')))
|
||||
@@ -737,22 +954,49 @@ export async function removeUserFromOrganization(
|
||||
}
|
||||
}
|
||||
|
||||
const [targetUser] = await tx
|
||||
.select({ email: user.email })
|
||||
.from(user)
|
||||
.where(eq(user.id, userId))
|
||||
.limit(1)
|
||||
|
||||
const cancelledInvitations = targetUser?.email
|
||||
? await tx
|
||||
.update(invitation)
|
||||
.set({ status: 'cancelled', updatedAt: new Date() })
|
||||
.where(
|
||||
and(
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.status, 'pending'),
|
||||
sql`lower(${invitation.email}) = lower(${targetUser.email})`
|
||||
)
|
||||
)
|
||||
.returning({ id: invitation.id })
|
||||
: []
|
||||
|
||||
const orgWorkspaces = await tx
|
||||
.select({ id: workspace.id })
|
||||
.from(workspace)
|
||||
.where(
|
||||
and(
|
||||
eq(workspace.organizationId, organizationId),
|
||||
eq(workspace.workspaceMode, 'organization')
|
||||
)
|
||||
)
|
||||
.where(eq(workspace.organizationId, organizationId))
|
||||
|
||||
if (orgWorkspaces.length === 0) {
|
||||
return { workspaceIdsToRevoke: [] as string[], usageCaptured: capturedUsage }
|
||||
return {
|
||||
workspaceIdsToRevoke: [] as string[],
|
||||
usageCaptured: capturedUsage,
|
||||
credentialMembershipsRevoked: 0,
|
||||
pendingInvitationsCancelled: cancelledInvitations.length,
|
||||
}
|
||||
}
|
||||
|
||||
const workspaceIds = orgWorkspaces.map((w) => w.id)
|
||||
|
||||
await reassignOwnedOrganizationWorkspacesTx({
|
||||
tx,
|
||||
userId,
|
||||
organizationId,
|
||||
workspaceIds,
|
||||
})
|
||||
|
||||
const deletedPerms = await tx
|
||||
.delete(permissions)
|
||||
.where(
|
||||
@@ -764,14 +1008,32 @@ export async function removeUserFromOrganization(
|
||||
)
|
||||
.returning({ entityId: permissions.entityId })
|
||||
|
||||
await tx
|
||||
.delete(permissionGroupMember)
|
||||
.where(
|
||||
and(
|
||||
eq(permissionGroupMember.userId, userId),
|
||||
inArray(permissionGroupMember.workspaceId, workspaceIds)
|
||||
)
|
||||
)
|
||||
|
||||
const credentialMembershipsRevoked = await revokeWorkspaceCredentialMembershipsTx({
|
||||
tx,
|
||||
workspaceIds,
|
||||
userId,
|
||||
})
|
||||
|
||||
return {
|
||||
workspaceIdsToRevoke: deletedPerms.map((row) => row.entityId),
|
||||
usageCaptured: capturedUsage,
|
||||
credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled: cancelledInvitations.length,
|
||||
}
|
||||
})
|
||||
|
||||
billingActions.usageCaptured = usageCaptured
|
||||
billingActions.workspaceAccessRevoked = workspaceIdsToRevoke.length
|
||||
billingActions.pendingInvitationsCancelled = pendingInvitationsCancelled
|
||||
|
||||
if (usageCaptured > 0) {
|
||||
logger.info('Captured departed member usage', {
|
||||
@@ -786,21 +1048,10 @@ export async function removeUserFromOrganization(
|
||||
userId,
|
||||
memberId,
|
||||
workspaceAccessRevoked: workspaceIdsToRevoke.length,
|
||||
credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled,
|
||||
})
|
||||
|
||||
for (const workspaceId of workspaceIdsToRevoke) {
|
||||
try {
|
||||
await revokeWorkspaceCredentialMemberships(workspaceId, userId)
|
||||
} catch (credentialError) {
|
||||
logger.error('Failed to revoke workspace credential memberships on org leave', {
|
||||
organizationId,
|
||||
userId,
|
||||
workspaceId,
|
||||
error: credentialError,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (!skipBillingLogic) {
|
||||
try {
|
||||
const remainingPaidTeams = await db
|
||||
@@ -852,6 +1103,167 @@ export async function removeUserFromOrganization(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a non-member's access from every workspace owned by an organization.
|
||||
* External workspace members have workspace permissions but no organization member row.
|
||||
*/
|
||||
export async function removeExternalUserFromOrganizationWorkspaces(params: {
|
||||
userId: string
|
||||
organizationId: string
|
||||
}): Promise<RemoveExternalWorkspaceAccessResult> {
|
||||
const { userId, organizationId } = params
|
||||
|
||||
try {
|
||||
const [existingMember] = await db
|
||||
.select({ id: member.id })
|
||||
.from(member)
|
||||
.where(and(eq(member.organizationId, organizationId), eq(member.userId, userId)))
|
||||
.limit(1)
|
||||
|
||||
if (existingMember) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'User is an organization member',
|
||||
workspaceAccessRevoked: 0,
|
||||
permissionGroupsRevoked: 0,
|
||||
credentialMembershipsRevoked: 0,
|
||||
pendingInvitationsCancelled: 0,
|
||||
}
|
||||
}
|
||||
|
||||
const {
|
||||
workspaceAccessRevoked,
|
||||
permissionGroupsRevoked,
|
||||
credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled,
|
||||
} = await db.transaction(async (tx) => {
|
||||
const orgWorkspaces = await tx
|
||||
.select({ id: workspace.id })
|
||||
.from(workspace)
|
||||
.where(eq(workspace.organizationId, organizationId))
|
||||
|
||||
if (orgWorkspaces.length === 0) {
|
||||
return {
|
||||
workspaceAccessRevoked: 0,
|
||||
permissionGroupsRevoked: 0,
|
||||
credentialMembershipsRevoked: 0,
|
||||
pendingInvitationsCancelled: 0,
|
||||
}
|
||||
}
|
||||
|
||||
const workspaceIds = orgWorkspaces.map((w) => w.id)
|
||||
const [targetUser] = await tx
|
||||
.select({ email: user.email })
|
||||
.from(user)
|
||||
.where(eq(user.id, userId))
|
||||
.limit(1)
|
||||
|
||||
await reassignOwnedOrganizationWorkspacesTx({
|
||||
tx,
|
||||
userId,
|
||||
organizationId,
|
||||
workspaceIds,
|
||||
})
|
||||
|
||||
const deletedPermissions = await tx
|
||||
.delete(permissions)
|
||||
.where(
|
||||
and(
|
||||
eq(permissions.userId, userId),
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
inArray(permissions.entityId, workspaceIds)
|
||||
)
|
||||
)
|
||||
.returning({ entityId: permissions.entityId })
|
||||
|
||||
const deletedPermissionGroups = await tx
|
||||
.delete(permissionGroupMember)
|
||||
.where(
|
||||
and(
|
||||
eq(permissionGroupMember.userId, userId),
|
||||
inArray(permissionGroupMember.workspaceId, workspaceIds)
|
||||
)
|
||||
)
|
||||
.returning({ id: permissionGroupMember.id })
|
||||
|
||||
const credentialMembershipsRevoked = await revokeWorkspaceCredentialMembershipsTx({
|
||||
tx,
|
||||
workspaceIds,
|
||||
userId,
|
||||
})
|
||||
|
||||
const cancelledInvitations = targetUser?.email
|
||||
? await tx
|
||||
.update(invitation)
|
||||
.set({ status: 'cancelled', updatedAt: new Date() })
|
||||
.where(
|
||||
and(
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.status, 'pending'),
|
||||
eq(invitation.membershipIntent, 'external'),
|
||||
sql`lower(${invitation.email}) = lower(${targetUser.email})`
|
||||
)
|
||||
)
|
||||
.returning({ id: invitation.id })
|
||||
: []
|
||||
|
||||
return {
|
||||
workspaceAccessRevoked: deletedPermissions.length,
|
||||
permissionGroupsRevoked: deletedPermissionGroups.length,
|
||||
credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled: cancelledInvitations.length,
|
||||
}
|
||||
})
|
||||
|
||||
if (
|
||||
workspaceAccessRevoked === 0 &&
|
||||
permissionGroupsRevoked === 0 &&
|
||||
credentialMembershipsRevoked === 0 &&
|
||||
pendingInvitationsCancelled === 0
|
||||
) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'External workspace member not found',
|
||||
workspaceAccessRevoked,
|
||||
permissionGroupsRevoked,
|
||||
credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled,
|
||||
}
|
||||
}
|
||||
|
||||
logger.info('Removed external workspace member from organization workspaces', {
|
||||
organizationId,
|
||||
userId,
|
||||
workspaceAccessRevoked,
|
||||
permissionGroupsRevoked,
|
||||
credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
workspaceAccessRevoked,
|
||||
permissionGroupsRevoked,
|
||||
credentialMembershipsRevoked,
|
||||
pendingInvitationsCancelled,
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to remove external workspace member from organization workspaces', {
|
||||
organizationId,
|
||||
userId,
|
||||
error,
|
||||
})
|
||||
return {
|
||||
success: false,
|
||||
error: 'Failed to remove external workspace member',
|
||||
workspaceAccessRevoked: 0,
|
||||
permissionGroupsRevoked: 0,
|
||||
credentialMembershipsRevoked: 0,
|
||||
pendingInvitationsCancelled: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export interface TransferOwnershipParams {
|
||||
organizationId: string
|
||||
currentOwnerUserId: string
|
||||
@@ -1044,6 +1456,7 @@ export async function transferOrganizationOwnership(
|
||||
|
||||
const [orgSub] = await tx
|
||||
.select({
|
||||
id: subscriptionTable.id,
|
||||
stripeCustomerId: subscriptionTable.stripeCustomerId,
|
||||
})
|
||||
.from(subscriptionTable)
|
||||
@@ -1056,20 +1469,10 @@ export async function transferOrganizationOwnership(
|
||||
.limit(1)
|
||||
|
||||
if (orgSub?.stripeCustomerId) {
|
||||
const [newOwnerUser] = await tx
|
||||
.select({ email: user.email, name: user.name })
|
||||
.from(user)
|
||||
.where(eq(user.id, newOwnerUserId))
|
||||
.limit(1)
|
||||
|
||||
if (newOwnerUser?.email) {
|
||||
await enqueueOutboxEvent(tx, OUTBOX_EVENT_TYPES.STRIPE_SYNC_CUSTOMER_CONTACT, {
|
||||
stripeCustomerId: orgSub.stripeCustomerId,
|
||||
email: newOwnerUser.email,
|
||||
name: newOwnerUser.name ?? undefined,
|
||||
reason: 'ownership-transfer',
|
||||
})
|
||||
}
|
||||
await enqueueOutboxEvent(tx, OUTBOX_EVENT_TYPES.STRIPE_SYNC_CUSTOMER_CONTACT, {
|
||||
subscriptionId: orgSub.id,
|
||||
reason: 'ownership-transfer',
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
130
apps/sim/lib/billing/organizations/seats.ts
Normal file
130
apps/sim/lib/billing/organizations/seats.ts
Normal file
@@ -0,0 +1,130 @@
|
||||
import { db } from '@sim/db'
|
||||
import { invitation, member, subscription } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, count, eq, gt, inArray, ne } from 'drizzle-orm'
|
||||
import { isOrganizationBillingBlocked } from '@/lib/billing/core/access'
|
||||
import { isTeam } from '@/lib/billing/plan-helpers'
|
||||
import { USABLE_SUBSCRIPTION_STATUSES } from '@/lib/billing/subscriptions/utils'
|
||||
import { OUTBOX_EVENT_TYPES } from '@/lib/billing/webhooks/outbox-handlers'
|
||||
import { isBillingEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { enqueueOutboxEvent } from '@/lib/core/outbox/service'
|
||||
|
||||
const logger = createLogger('OrganizationSeats')
|
||||
|
||||
export interface ReduceOrganizationSeatsResult {
|
||||
reduced: boolean
|
||||
previousSeats?: number
|
||||
seats?: number
|
||||
reason?: string
|
||||
outboxEventId?: string
|
||||
}
|
||||
|
||||
interface ReduceOrganizationSeatsByOneParams {
|
||||
organizationId: string
|
||||
actorUserId: string
|
||||
removedUserId: string
|
||||
}
|
||||
|
||||
export async function reduceOrganizationSeatsByOne({
|
||||
organizationId,
|
||||
actorUserId,
|
||||
removedUserId,
|
||||
}: ReduceOrganizationSeatsByOneParams): Promise<ReduceOrganizationSeatsResult> {
|
||||
if (!isBillingEnabled) {
|
||||
return { reduced: false, reason: 'Billing is not enabled' }
|
||||
}
|
||||
|
||||
return db.transaction(async (tx) => {
|
||||
const [orgSubscription] = await tx
|
||||
.select()
|
||||
.from(subscription)
|
||||
.where(
|
||||
and(
|
||||
eq(subscription.referenceId, organizationId),
|
||||
inArray(subscription.status, USABLE_SUBSCRIPTION_STATUSES)
|
||||
)
|
||||
)
|
||||
.for('update')
|
||||
.limit(1)
|
||||
|
||||
if (!orgSubscription) {
|
||||
return { reduced: false, reason: 'No active subscription found' }
|
||||
}
|
||||
|
||||
if (await isOrganizationBillingBlocked(organizationId)) {
|
||||
return { reduced: false, reason: 'An active subscription is required' }
|
||||
}
|
||||
|
||||
if (!isTeam(orgSubscription.plan)) {
|
||||
return { reduced: false, reason: 'Seat changes are only available for Team plans' }
|
||||
}
|
||||
|
||||
if (!orgSubscription.stripeSubscriptionId) {
|
||||
return { reduced: false, reason: 'No Stripe subscription found for this organization' }
|
||||
}
|
||||
|
||||
const currentSeats = orgSubscription.seats || 1
|
||||
if (currentSeats <= 1) {
|
||||
return {
|
||||
reduced: false,
|
||||
previousSeats: currentSeats,
|
||||
seats: currentSeats,
|
||||
reason: 'Minimum 1 seat required',
|
||||
}
|
||||
}
|
||||
|
||||
const [memberCountRow] = await tx
|
||||
.select({ count: count() })
|
||||
.from(member)
|
||||
.where(eq(member.organizationId, organizationId))
|
||||
|
||||
const [pendingCountRow] = await tx
|
||||
.select({ count: count() })
|
||||
.from(invitation)
|
||||
.where(
|
||||
and(
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.status, 'pending'),
|
||||
ne(invitation.membershipIntent, 'external'),
|
||||
gt(invitation.expiresAt, new Date())
|
||||
)
|
||||
)
|
||||
|
||||
const occupiedSeats = (memberCountRow?.count ?? 0) + (pendingCountRow?.count ?? 0)
|
||||
const nextSeats = currentSeats - 1
|
||||
|
||||
if (nextSeats < occupiedSeats) {
|
||||
return {
|
||||
reduced: false,
|
||||
previousSeats: currentSeats,
|
||||
seats: currentSeats,
|
||||
reason: `Cannot reduce seats below current occupancy (${occupiedSeats}).`,
|
||||
}
|
||||
}
|
||||
|
||||
await tx
|
||||
.update(subscription)
|
||||
.set({ seats: nextSeats })
|
||||
.where(eq(subscription.id, orgSubscription.id))
|
||||
|
||||
const outboxEventId = await enqueueOutboxEvent(
|
||||
tx,
|
||||
OUTBOX_EVENT_TYPES.STRIPE_SYNC_SUBSCRIPTION_SEATS,
|
||||
{
|
||||
subscriptionId: orgSubscription.id,
|
||||
reason: 'member-removed-seat-reduction',
|
||||
}
|
||||
)
|
||||
|
||||
logger.info('Reduced organization seats after member removal', {
|
||||
organizationId,
|
||||
actorUserId,
|
||||
removedUserId,
|
||||
previousSeats: currentSeats,
|
||||
seats: nextSeats,
|
||||
outboxEventId,
|
||||
})
|
||||
|
||||
return { reduced: true, previousSeats: currentSeats, seats: nextSeats, outboxEventId }
|
||||
})
|
||||
}
|
||||
@@ -35,7 +35,10 @@ vi.mock('@/lib/messaging/email/validation', () => ({
|
||||
quickValidateEmail: vi.fn((email: string) => ({ isValid: email.includes('@') })),
|
||||
}))
|
||||
|
||||
import { getOrganizationSeatInfo } from '@/lib/billing/validation/seat-management'
|
||||
import {
|
||||
getOrganizationSeatInfo,
|
||||
validateSeatAvailability,
|
||||
} from '@/lib/billing/validation/seat-management'
|
||||
|
||||
/**
|
||||
* Queues the next N responses for `db.select().from(...).where(...)` calls,
|
||||
@@ -82,3 +85,30 @@ describe('getOrganizationSeatInfo', () => {
|
||||
expect(mockGetOrganizationSubscription).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('validateSeatAvailability', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
resetDbChainMock()
|
||||
mockFeatureFlags.isBillingEnabled = true
|
||||
mockGetOrganizationSubscription.mockResolvedValue({
|
||||
id: 'sub-1',
|
||||
plan: 'team',
|
||||
status: 'active',
|
||||
seats: 10,
|
||||
})
|
||||
})
|
||||
|
||||
it('uses the internal pending invitation count when checking seats', async () => {
|
||||
queueSelectResponses([[{ count: 2 }], [{ count: 1 }]])
|
||||
|
||||
const result = await validateSeatAvailability('org-1', 1)
|
||||
|
||||
expect(result).toMatchObject({
|
||||
canInvite: true,
|
||||
currentSeats: 3,
|
||||
maxSeats: 10,
|
||||
availableSeats: 7,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -82,6 +82,7 @@ export async function validateSeatAvailability(
|
||||
const pendingFilters = [
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.status, 'pending'),
|
||||
ne(invitation.membershipIntent, 'external'),
|
||||
gt(invitation.expiresAt, new Date()),
|
||||
]
|
||||
if (options.excludePendingInvitationId) {
|
||||
@@ -164,6 +165,7 @@ export async function getOrganizationSeatInfo(
|
||||
and(
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.status, 'pending'),
|
||||
ne(invitation.membershipIntent, 'external'),
|
||||
gt(invitation.expiresAt, new Date())
|
||||
)
|
||||
)
|
||||
@@ -247,6 +249,7 @@ export async function validateBulkInvitations(
|
||||
and(
|
||||
eq(invitation.organizationId, organizationId),
|
||||
eq(invitation.status, 'pending'),
|
||||
ne(invitation.membershipIntent, 'external'),
|
||||
gt(invitation.expiresAt, new Date())
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import { db } from '@sim/db'
|
||||
import { subscription as subscriptionTable } from '@sim/db/schema'
|
||||
import { member, subscription as subscriptionTable, user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { isTeam } from '@/lib/billing/plan-helpers'
|
||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||
import { resolveDefaultPaymentMethod } from '@/lib/billing/stripe-payment-method'
|
||||
import { hasUsableSubscriptionStatus } from '@/lib/billing/subscriptions/utils'
|
||||
import type { OutboxHandler } from '@/lib/core/outbox/service'
|
||||
|
||||
const logger = createLogger('BillingOutboxHandlers')
|
||||
@@ -17,6 +19,7 @@ export const OUTBOX_EVENT_TYPES = {
|
||||
* enqueue this event after every DB change to `cancelAtPeriodEnd`.
|
||||
*/
|
||||
STRIPE_SYNC_CANCEL_AT_PERIOD_END: 'stripe.sync-cancel-at-period-end',
|
||||
STRIPE_SYNC_SUBSCRIPTION_SEATS: 'stripe.sync-subscription-seats',
|
||||
STRIPE_THRESHOLD_OVERAGE_INVOICE: 'stripe.threshold-overage-invoice',
|
||||
STRIPE_SYNC_CUSTOMER_CONTACT: 'stripe.sync-customer-contact',
|
||||
} as const
|
||||
@@ -29,10 +32,15 @@ export interface StripeSyncCancelAtPeriodEndPayload {
|
||||
reason?: string
|
||||
}
|
||||
|
||||
export interface StripeSyncSubscriptionSeatsPayload {
|
||||
/** The DB subscription row id — the handler reads current seats from this row. */
|
||||
subscriptionId: string
|
||||
reason?: string
|
||||
}
|
||||
|
||||
export interface StripeSyncCustomerContactPayload {
|
||||
stripeCustomerId: string
|
||||
email: string
|
||||
name?: string
|
||||
/** The DB subscription row id — handler resolves current owner/contact at processing time. */
|
||||
subscriptionId: string
|
||||
reason?: string
|
||||
}
|
||||
|
||||
@@ -49,6 +57,21 @@ export interface StripeThresholdOverageInvoicePayload {
|
||||
metadata?: Record<string, string>
|
||||
}
|
||||
|
||||
async function getSubscriptionSeatSyncState(subscriptionId: string) {
|
||||
const [row] = await db
|
||||
.select({
|
||||
plan: subscriptionTable.plan,
|
||||
seats: subscriptionTable.seats,
|
||||
status: subscriptionTable.status,
|
||||
stripeSubscriptionId: subscriptionTable.stripeSubscriptionId,
|
||||
})
|
||||
.from(subscriptionTable)
|
||||
.where(eq(subscriptionTable.id, subscriptionId))
|
||||
.limit(1)
|
||||
|
||||
return row ?? null
|
||||
}
|
||||
|
||||
const stripeSyncCancelAtPeriodEnd: OutboxHandler<StripeSyncCancelAtPeriodEndPayload> = async (
|
||||
payload,
|
||||
ctx
|
||||
@@ -86,6 +109,113 @@ const stripeSyncCancelAtPeriodEnd: OutboxHandler<StripeSyncCancelAtPeriodEndPayl
|
||||
})
|
||||
}
|
||||
|
||||
const stripeSyncSubscriptionSeats: OutboxHandler<StripeSyncSubscriptionSeatsPayload> = async (
|
||||
payload,
|
||||
ctx
|
||||
) => {
|
||||
const stripe = requireStripeClient()
|
||||
const maxSyncAttempts = 2
|
||||
|
||||
for (let attempt = 1; attempt <= maxSyncAttempts; attempt++) {
|
||||
const row = await getSubscriptionSeatSyncState(payload.subscriptionId)
|
||||
if (!row) {
|
||||
logger.warn('Subscription not found when syncing seats', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (!isTeam(row.plan)) {
|
||||
logger.info('Skipping seat sync for non-Team subscription', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
plan: row.plan,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (!row.stripeSubscriptionId) {
|
||||
logger.warn('Subscription has no Stripe id when syncing seats', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (!hasUsableSubscriptionStatus(row.status)) {
|
||||
logger.warn('Skipping seat sync for unusable DB subscription status', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
status: row.status,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const desiredSeats = row.seats || 1
|
||||
const stripeSubscription = await stripe.subscriptions.retrieve(row.stripeSubscriptionId)
|
||||
|
||||
if (!hasUsableSubscriptionStatus(stripeSubscription.status)) {
|
||||
logger.warn('Skipping seat sync for unusable Stripe subscription', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
stripeSubscriptionId: row.stripeSubscriptionId,
|
||||
stripeStatus: stripeSubscription.status,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const subscriptionItem = stripeSubscription.items.data[0]
|
||||
if (!subscriptionItem) {
|
||||
throw new Error(
|
||||
`No subscription item found for Stripe subscription ${row.stripeSubscriptionId}`
|
||||
)
|
||||
}
|
||||
|
||||
if (subscriptionItem.quantity !== desiredSeats) {
|
||||
await stripe.subscriptions.update(
|
||||
row.stripeSubscriptionId,
|
||||
{
|
||||
items: [
|
||||
{
|
||||
id: subscriptionItem.id,
|
||||
quantity: desiredSeats,
|
||||
},
|
||||
],
|
||||
proration_behavior: 'always_invoice',
|
||||
},
|
||||
{ idempotencyKey: `outbox:${ctx.eventId}:seats:${desiredSeats}` }
|
||||
)
|
||||
}
|
||||
|
||||
const latest = await getSubscriptionSeatSyncState(payload.subscriptionId)
|
||||
const latestSeats = latest?.seats || 1
|
||||
if (latestSeats !== desiredSeats) {
|
||||
logger.info('Subscription seats changed during Stripe sync; retrying latest value', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
stripeSubscriptionId: row.stripeSubscriptionId,
|
||||
attemptedSeats: desiredSeats,
|
||||
latestSeats,
|
||||
attempt,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
logger.info('Synced subscription seats from DB to Stripe', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
stripeSubscriptionId: row.stripeSubscriptionId,
|
||||
seats: desiredSeats,
|
||||
alreadySynced: subscriptionItem.quantity === desiredSeats,
|
||||
reason: payload.reason,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
throw new Error(`Subscription seats changed while syncing ${payload.subscriptionId}`)
|
||||
}
|
||||
|
||||
const stripeThresholdOverageInvoice: OutboxHandler<StripeThresholdOverageInvoicePayload> = async (
|
||||
payload,
|
||||
ctx
|
||||
@@ -178,18 +308,63 @@ const stripeSyncCustomerContact: OutboxHandler<StripeSyncCustomerContactPayload>
|
||||
payload,
|
||||
ctx
|
||||
) => {
|
||||
const [subscriptionRow] = await db
|
||||
.select({
|
||||
referenceId: subscriptionTable.referenceId,
|
||||
stripeCustomerId: subscriptionTable.stripeCustomerId,
|
||||
})
|
||||
.from(subscriptionTable)
|
||||
.where(eq(subscriptionTable.id, payload.subscriptionId))
|
||||
.limit(1)
|
||||
|
||||
if (!subscriptionRow) {
|
||||
logger.warn('Subscription not found when syncing Stripe customer contact', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
if (!subscriptionRow.stripeCustomerId) {
|
||||
logger.warn('Subscription has no Stripe customer id when syncing contact', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const [owner] = await db
|
||||
.select({
|
||||
email: user.email,
|
||||
name: user.name,
|
||||
})
|
||||
.from(member)
|
||||
.innerJoin(user, eq(member.userId, user.id))
|
||||
.where(and(eq(member.organizationId, subscriptionRow.referenceId), eq(member.role, 'owner')))
|
||||
.limit(1)
|
||||
|
||||
if (!owner) {
|
||||
logger.warn('Organization owner not found when syncing Stripe customer contact', {
|
||||
eventId: ctx.eventId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
organizationId: subscriptionRow.referenceId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const stripe = requireStripeClient()
|
||||
await stripe.customers.update(
|
||||
payload.stripeCustomerId,
|
||||
subscriptionRow.stripeCustomerId,
|
||||
{
|
||||
email: payload.email,
|
||||
...(payload.name ? { name: payload.name } : {}),
|
||||
email: owner.email,
|
||||
...(owner.name ? { name: owner.name } : {}),
|
||||
},
|
||||
{ idempotencyKey: `outbox:${ctx.eventId}` }
|
||||
)
|
||||
logger.info('Synced Stripe customer contact', {
|
||||
eventId: ctx.eventId,
|
||||
stripeCustomerId: payload.stripeCustomerId,
|
||||
stripeCustomerId: subscriptionRow.stripeCustomerId,
|
||||
subscriptionId: payload.subscriptionId,
|
||||
reason: payload.reason,
|
||||
})
|
||||
}
|
||||
@@ -197,6 +372,8 @@ const stripeSyncCustomerContact: OutboxHandler<StripeSyncCustomerContactPayload>
|
||||
export const billingOutboxHandlers = {
|
||||
[OUTBOX_EVENT_TYPES.STRIPE_SYNC_CANCEL_AT_PERIOD_END]:
|
||||
stripeSyncCancelAtPeriodEnd as OutboxHandler<unknown>,
|
||||
[OUTBOX_EVENT_TYPES.STRIPE_SYNC_SUBSCRIPTION_SEATS]:
|
||||
stripeSyncSubscriptionSeats as OutboxHandler<unknown>,
|
||||
[OUTBOX_EVENT_TYPES.STRIPE_THRESHOLD_OVERAGE_INVOICE]:
|
||||
stripeThresholdOverageInvoice as OutboxHandler<unknown>,
|
||||
[OUTBOX_EVENT_TYPES.STRIPE_SYNC_CUSTOMER_CONTACT]:
|
||||
|
||||
@@ -7,6 +7,55 @@ const logger = createLogger('BatchDelete')
|
||||
|
||||
export const DEFAULT_BATCH_SIZE = 2000
|
||||
export const DEFAULT_MAX_BATCHES_PER_TABLE = 10
|
||||
/**
|
||||
* Split workspaceIds into this-sized groups before running SELECT/DELETE. Large
|
||||
* IN lists combined with `started_at < X` force Postgres to probe every
|
||||
* workspace range in the composite index, which blows the 90s statement timeout
|
||||
* at the scale of the full free tier.
|
||||
*/
|
||||
export const DEFAULT_WORKSPACE_CHUNK_SIZE = 50
|
||||
|
||||
export function chunkArray<T>(arr: T[], size: number): T[][] {
|
||||
const out: T[][] = []
|
||||
for (let i = 0; i < arr.length; i += size) out.push(arr.slice(i, i + size))
|
||||
return out
|
||||
}
|
||||
|
||||
export interface SelectByIdChunksOptions {
|
||||
/** Cap on rows returned across all chunks. Defaults to a full per-table cleanup budget. */
|
||||
overallLimit?: number
|
||||
chunkSize?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a SELECT query once per ID chunk and concatenate results up to
|
||||
* `overallLimit`. Each chunk's query is passed the remaining row budget so the
|
||||
* total never exceeds the cap. Use this when you need the selected row set
|
||||
* (e.g. to drive S3 or copilot-backend cleanup alongside the DB delete).
|
||||
*
|
||||
* Works for any large ID set — workspace IDs, workflow IDs, etc. Avoids
|
||||
* sending one massive `IN (...)` list that would blow Postgres's statement
|
||||
* timeout.
|
||||
*/
|
||||
export async function selectRowsByIdChunks<T>(
|
||||
ids: string[],
|
||||
query: (chunkIds: string[], chunkLimit: number) => Promise<T[]>,
|
||||
{
|
||||
overallLimit = DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE,
|
||||
chunkSize = DEFAULT_WORKSPACE_CHUNK_SIZE,
|
||||
}: SelectByIdChunksOptions = {}
|
||||
): Promise<T[]> {
|
||||
if (ids.length === 0) return []
|
||||
|
||||
const rows: T[] = []
|
||||
for (const chunkIds of chunkArray(ids, chunkSize)) {
|
||||
if (rows.length >= overallLimit) break
|
||||
const remaining = overallLimit - rows.length
|
||||
const chunkRows = await query(chunkIds, remaining)
|
||||
rows.push(...chunkRows)
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
export interface TableCleanupResult {
|
||||
table: string
|
||||
@@ -14,6 +63,111 @@ export interface TableCleanupResult {
|
||||
failed: number
|
||||
}
|
||||
|
||||
export interface ChunkedBatchDeleteOptions<TRow extends { id: string }> {
|
||||
tableDef: PgTable
|
||||
workspaceIds: string[]
|
||||
tableName: string
|
||||
/** SELECT eligible rows for one workspace chunk. The result must include `id`. */
|
||||
selectChunk: (chunkIds: string[], limit: number) => Promise<TRow[]>
|
||||
/** Runs between SELECT and DELETE; receives the just-selected rows. */
|
||||
onBatch?: (rows: TRow[]) => Promise<void>
|
||||
batchSize?: number
|
||||
/** Max batches per workspace chunk. */
|
||||
maxBatches?: number
|
||||
/**
|
||||
* Hard cap on rows processed (deleted + failed) across all chunks per call.
|
||||
* Defaults to `DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE`. Cron
|
||||
* runs frequently enough to catch up the backlog over multiple invocations.
|
||||
*/
|
||||
totalRowLimit?: number
|
||||
workspaceChunkSize?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Inner loop primitive for cleanup jobs.
|
||||
*
|
||||
* For each workspace chunk: SELECT a batch of eligible rows → run optional
|
||||
* `onBatch` hook (e.g. to delete S3 files) → DELETE those rows by ID. Repeats
|
||||
* until exhausted or `maxBatches` is hit, then moves to the next chunk. Stops
|
||||
* the whole call once `totalRowLimit` rows have been processed.
|
||||
*
|
||||
* Workspace IDs are chunked before the SELECT — see
|
||||
* `DEFAULT_WORKSPACE_CHUNK_SIZE` for why.
|
||||
*/
|
||||
export async function chunkedBatchDelete<TRow extends { id: string }>({
|
||||
tableDef,
|
||||
workspaceIds,
|
||||
tableName,
|
||||
selectChunk,
|
||||
onBatch,
|
||||
batchSize = DEFAULT_BATCH_SIZE,
|
||||
maxBatches = DEFAULT_MAX_BATCHES_PER_TABLE,
|
||||
totalRowLimit = DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE,
|
||||
workspaceChunkSize = DEFAULT_WORKSPACE_CHUNK_SIZE,
|
||||
}: ChunkedBatchDeleteOptions<TRow>): Promise<TableCleanupResult> {
|
||||
const result: TableCleanupResult = { table: tableName, deleted: 0, failed: 0 }
|
||||
|
||||
if (workspaceIds.length === 0) {
|
||||
logger.info(`[${tableName}] Skipped — no workspaces in scope`)
|
||||
return result
|
||||
}
|
||||
|
||||
const chunks = chunkArray(workspaceIds, workspaceChunkSize)
|
||||
let stoppedEarly = false
|
||||
|
||||
for (const [chunkIdx, chunkIds] of chunks.entries()) {
|
||||
if (result.deleted + result.failed >= totalRowLimit) {
|
||||
stoppedEarly = true
|
||||
break
|
||||
}
|
||||
|
||||
let batchesProcessed = 0
|
||||
let hasMore = true
|
||||
|
||||
while (
|
||||
hasMore &&
|
||||
batchesProcessed < maxBatches &&
|
||||
result.deleted + result.failed < totalRowLimit
|
||||
) {
|
||||
let rows: TRow[] = []
|
||||
try {
|
||||
rows = await selectChunk(chunkIds, batchSize)
|
||||
|
||||
if (rows.length === 0) {
|
||||
hasMore = false
|
||||
break
|
||||
}
|
||||
|
||||
if (onBatch) await onBatch(rows)
|
||||
|
||||
const ids = rows.map((r) => r.id)
|
||||
const deleted = await db
|
||||
.delete(tableDef)
|
||||
.where(inArray(sql`id`, ids))
|
||||
.returning({ id: sql`id` })
|
||||
|
||||
result.deleted += deleted.length
|
||||
hasMore = rows.length === batchSize
|
||||
batchesProcessed++
|
||||
} catch (error) {
|
||||
// Count rows we tried to delete; SELECT-stage errors leave rows=[].
|
||||
result.failed += rows.length
|
||||
logger.error(
|
||||
`[${tableName}] Batch failed (chunk ${chunkIdx + 1}/${chunks.length}, ${rows.length} rows):`,
|
||||
{ error }
|
||||
)
|
||||
hasMore = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${tableName}] Complete: ${result.deleted} deleted, ${result.failed} failed across ${chunks.length} chunks${stoppedEarly ? ' (row-limit reached, remaining chunks deferred to next run)' : ''}`
|
||||
)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
export interface BatchDeleteOptions {
|
||||
tableDef: PgTable
|
||||
workspaceIdCol: PgColumn
|
||||
@@ -25,13 +179,13 @@ export interface BatchDeleteOptions {
|
||||
requireTimestampNotNull?: boolean
|
||||
batchSize?: number
|
||||
maxBatches?: number
|
||||
workspaceChunkSize?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Iteratively delete rows in a table matching a workspace + time-based predicate.
|
||||
*
|
||||
* Uses a SELECT-with-LIMIT → DELETE-by-ID pattern to keep each round bounded in
|
||||
* memory and I/O (PostgreSQL DELETE does not support LIMIT directly).
|
||||
* Convenience wrapper around `chunkedBatchDelete` for the common case: delete
|
||||
* rows where `workspaceId IN (...) AND timestamp < retentionDate`. Use this
|
||||
* when there's no per-row side effect (e.g. no S3 files to clean up alongside).
|
||||
*/
|
||||
export async function batchDeleteByWorkspaceAndTimestamp({
|
||||
tableDef,
|
||||
@@ -41,56 +195,23 @@ export async function batchDeleteByWorkspaceAndTimestamp({
|
||||
retentionDate,
|
||||
tableName,
|
||||
requireTimestampNotNull = false,
|
||||
batchSize = DEFAULT_BATCH_SIZE,
|
||||
maxBatches = DEFAULT_MAX_BATCHES_PER_TABLE,
|
||||
...rest
|
||||
}: BatchDeleteOptions): Promise<TableCleanupResult> {
|
||||
const result: TableCleanupResult = { table: tableName, deleted: 0, failed: 0 }
|
||||
|
||||
if (workspaceIds.length === 0) {
|
||||
logger.info(`[${tableName}] Skipped — no workspaces in scope`)
|
||||
return result
|
||||
}
|
||||
|
||||
const predicates = [inArray(workspaceIdCol, workspaceIds), lt(timestampCol, retentionDate)]
|
||||
if (requireTimestampNotNull) predicates.push(isNotNull(timestampCol))
|
||||
const whereClause = and(...predicates)
|
||||
|
||||
let batchesProcessed = 0
|
||||
let hasMore = true
|
||||
|
||||
while (hasMore && batchesProcessed < maxBatches) {
|
||||
try {
|
||||
const batch = await db
|
||||
return chunkedBatchDelete({
|
||||
tableDef,
|
||||
workspaceIds,
|
||||
tableName,
|
||||
selectChunk: (chunkIds, limit) => {
|
||||
const predicates = [inArray(workspaceIdCol, chunkIds), lt(timestampCol, retentionDate)]
|
||||
if (requireTimestampNotNull) predicates.push(isNotNull(timestampCol))
|
||||
return db
|
||||
.select({ id: sql<string>`id` })
|
||||
.from(tableDef)
|
||||
.where(whereClause)
|
||||
.limit(batchSize)
|
||||
|
||||
if (batch.length === 0) {
|
||||
logger.info(`[${tableName}] No expired rows found`)
|
||||
hasMore = false
|
||||
break
|
||||
}
|
||||
|
||||
const ids = batch.map((r) => r.id)
|
||||
const deleted = await db
|
||||
.delete(tableDef)
|
||||
.where(inArray(sql`id`, ids))
|
||||
.returning({ id: sql`id` })
|
||||
|
||||
result.deleted += deleted.length
|
||||
hasMore = batch.length === batchSize
|
||||
batchesProcessed++
|
||||
|
||||
logger.info(`[${tableName}] Batch ${batchesProcessed}: deleted ${deleted.length} rows`)
|
||||
} catch (error) {
|
||||
result.failed++
|
||||
logger.error(`[${tableName}] Batch delete failed:`, { error })
|
||||
hasMore = false
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
.where(and(...predicates))
|
||||
.limit(limit)
|
||||
},
|
||||
...rest,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -46,7 +46,11 @@ function toToolCallInfo(block: PersistedContentBlock): ToolCallInfo | undefined
|
||||
|
||||
function toDisplayBlock(block: PersistedContentBlock): ContentBlock | undefined {
|
||||
const displayed = toDisplayBlockBody(block)
|
||||
return displayed ? withBlockTiming(displayed, block) : undefined
|
||||
if (!displayed) return undefined
|
||||
if (block.parentToolCallId && displayed.parentToolCallId === undefined) {
|
||||
displayed.parentToolCallId = block.parentToolCallId
|
||||
}
|
||||
return withBlockTiming(displayed, block)
|
||||
}
|
||||
|
||||
function toDisplayBlockBody(block: PersistedContentBlock): ContentBlock | undefined {
|
||||
|
||||
@@ -77,11 +77,16 @@ function appendTextBlock(
|
||||
content: string,
|
||||
options: {
|
||||
lane?: 'subagent'
|
||||
parentToolCallId?: string
|
||||
}
|
||||
): void {
|
||||
if (!content) return
|
||||
const last = blocks[blocks.length - 1]
|
||||
if (last?.type === MothershipStreamV1EventType.text && last.lane === options.lane) {
|
||||
if (
|
||||
last?.type === MothershipStreamV1EventType.text &&
|
||||
last.lane === options.lane &&
|
||||
last.parentToolCallId === options.parentToolCallId
|
||||
) {
|
||||
last.content = `${typeof last.content === 'string' ? last.content : ''}${content}`
|
||||
return
|
||||
}
|
||||
@@ -89,6 +94,7 @@ function appendTextBlock(
|
||||
blocks.push({
|
||||
type: MothershipStreamV1EventType.text,
|
||||
...(options.lane ? { lane: options.lane } : {}),
|
||||
...(options.parentToolCallId ? { parentToolCallId: options.parentToolCallId } : {}),
|
||||
content,
|
||||
})
|
||||
}
|
||||
@@ -122,10 +128,24 @@ function buildLiveAssistantMessage(params: {
|
||||
return activeSubagent
|
||||
}
|
||||
|
||||
const resolveParentForSubagentBlock = (
|
||||
subagent: string | undefined,
|
||||
scopedParent: string | undefined
|
||||
): string | undefined => {
|
||||
if (!subagent) return undefined
|
||||
if (scopedParent) return scopedParent
|
||||
if (activeSubagent === subagent) return activeSubagentParentToolCallId
|
||||
for (const [parent, name] of subagentByParentToolCallId) {
|
||||
if (name === subagent) return parent
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
const ensureToolBlock = (input: {
|
||||
toolCallId: string
|
||||
toolName: string
|
||||
calledBy?: string
|
||||
parentToolCallId?: string
|
||||
displayTitle?: string
|
||||
params?: Record<string, unknown>
|
||||
result?: { success: boolean; output?: unknown; error?: string }
|
||||
@@ -155,6 +175,7 @@ function buildLiveAssistantMessage(params: {
|
||||
? { display: existingToolCall.display }
|
||||
: {}),
|
||||
}
|
||||
if (input.parentToolCallId) existing.parentToolCallId = input.parentToolCallId
|
||||
return existing
|
||||
}
|
||||
|
||||
@@ -176,6 +197,7 @@ function buildLiveAssistantMessage(params: {
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
...(input.parentToolCallId ? { parentToolCallId: input.parentToolCallId } : {}),
|
||||
}
|
||||
toolIndexById.set(input.toolCallId, blocks.length)
|
||||
blocks.push(nextBlock)
|
||||
@@ -219,8 +241,10 @@ function buildLiveAssistantMessage(params: {
|
||||
runningText.length > 0 &&
|
||||
!runningText.endsWith('\n')
|
||||
const normalizedChunk = needsBoundaryNewline ? `\n${chunk}` : chunk
|
||||
const parentForBlock = resolveParentForSubagentBlock(scopedSubagent, scopedParentToolCallId)
|
||||
appendTextBlock(blocks, normalizedChunk, {
|
||||
...(scopedSubagent ? { lane: 'subagent' as const } : {}),
|
||||
...(parentForBlock ? { parentToolCallId: parentForBlock } : {}),
|
||||
})
|
||||
runningText += normalizedChunk
|
||||
lastContentSource = contentSource
|
||||
@@ -239,11 +263,14 @@ function buildLiveAssistantMessage(params: {
|
||||
continue
|
||||
}
|
||||
|
||||
const parentForBlock = resolveParentForSubagentBlock(scopedSubagent, scopedParentToolCallId)
|
||||
|
||||
if (payload.phase === MothershipStreamV1ToolPhase.result) {
|
||||
ensureToolBlock({
|
||||
toolCallId,
|
||||
toolName: payload.toolName,
|
||||
calledBy: scopedSubagent,
|
||||
...(parentForBlock ? { parentToolCallId: parentForBlock } : {}),
|
||||
state: resolveStreamToolOutcome(payload),
|
||||
result: {
|
||||
success: payload.success,
|
||||
@@ -258,6 +285,7 @@ function buildLiveAssistantMessage(params: {
|
||||
toolCallId,
|
||||
toolName: payload.toolName,
|
||||
calledBy: scopedSubagent,
|
||||
...(parentForBlock ? { parentToolCallId: parentForBlock } : {}),
|
||||
displayTitle,
|
||||
params: isRecord(payload.arguments) ? payload.arguments : undefined,
|
||||
state: typeof payload.status === 'string' ? payload.status : 'executing',
|
||||
@@ -270,9 +298,13 @@ function buildLiveAssistantMessage(params: {
|
||||
}
|
||||
|
||||
const spanData = asPayloadRecord(parsed.payload.data)
|
||||
const parentToolCallId =
|
||||
scopedParentToolCallId ??
|
||||
(typeof spanData?.tool_call_id === 'string' ? spanData.tool_call_id : undefined)
|
||||
const parentToolCallIdFromData =
|
||||
typeof spanData?.tool_call_id === 'string'
|
||||
? spanData.tool_call_id
|
||||
: typeof spanData?.toolCallId === 'string'
|
||||
? spanData.toolCallId
|
||||
: undefined
|
||||
const parentToolCallId = scopedParentToolCallId ?? parentToolCallIdFromData
|
||||
const name = typeof parsed.payload.agent === 'string' ? parsed.payload.agent : scopedAgentId
|
||||
if (parsed.payload.event === MothershipStreamV1SpanLifecycleEvent.start && name) {
|
||||
if (parentToolCallId) {
|
||||
@@ -285,6 +317,7 @@ function buildLiveAssistantMessage(params: {
|
||||
kind: MothershipStreamV1SpanPayloadKind.subagent,
|
||||
lifecycle: MothershipStreamV1SpanLifecycleEvent.start,
|
||||
content: name,
|
||||
...(parentToolCallId ? { parentToolCallId } : {}),
|
||||
})
|
||||
continue
|
||||
}
|
||||
@@ -308,6 +341,7 @@ function buildLiveAssistantMessage(params: {
|
||||
type: MothershipStreamV1EventType.span,
|
||||
kind: MothershipStreamV1SpanPayloadKind.subagent,
|
||||
lifecycle: MothershipStreamV1SpanLifecycleEvent.end,
|
||||
...(parentToolCallId ? { parentToolCallId } : {}),
|
||||
})
|
||||
}
|
||||
continue
|
||||
@@ -343,8 +377,10 @@ function buildLiveAssistantMessage(params: {
|
||||
}
|
||||
const prefix = runningText.length > 0 && !runningText.endsWith('\n') ? '\n' : ''
|
||||
const content = `${prefix}${tag}`
|
||||
const errorParent = resolveParentForSubagentBlock(scopedSubagent, scopedParentToolCallId)
|
||||
appendTextBlock(blocks, content, {
|
||||
...(scopedSubagent ? { lane: 'subagent' as const } : {}),
|
||||
...(errorParent ? { parentToolCallId: errorParent } : {}),
|
||||
})
|
||||
runningText += content
|
||||
continue
|
||||
|
||||
@@ -41,6 +41,7 @@ export interface PersistedContentBlock {
|
||||
toolCall?: PersistedToolCall
|
||||
timestamp?: number
|
||||
endedAt?: number
|
||||
parentToolCallId?: string
|
||||
}
|
||||
|
||||
export interface PersistedFileAttachment {
|
||||
@@ -101,9 +102,16 @@ export function withBlockTiming<T>(target: T, src: { timestamp?: number; endedAt
|
||||
return target
|
||||
}
|
||||
|
||||
function withBlockParent<T>(target: T, src: { parentToolCallId?: string }): T {
|
||||
if (src.parentToolCallId) {
|
||||
;(target as { parentToolCallId?: string }).parentToolCallId = src.parentToolCallId
|
||||
}
|
||||
return target
|
||||
}
|
||||
|
||||
function mapContentBlock(block: ContentBlock): PersistedContentBlock {
|
||||
const persisted = mapContentBlockBody(block)
|
||||
return withBlockTiming(persisted, block)
|
||||
return withBlockParent(withBlockTiming(persisted, block), block)
|
||||
}
|
||||
|
||||
function mapContentBlockBody(block: ContentBlock): PersistedContentBlock {
|
||||
@@ -265,6 +273,7 @@ interface RawBlock {
|
||||
status?: string
|
||||
timestamp?: number
|
||||
endedAt?: number
|
||||
parentToolCallId?: string
|
||||
toolCall?: {
|
||||
id?: string
|
||||
name?: string
|
||||
@@ -321,6 +330,7 @@ function normalizeCanonicalBlock(block: RawBlock): PersistedContentBlock {
|
||||
if (block.kind) result.kind = block.kind as MothershipStreamV1SpanPayloadKind
|
||||
if (block.lifecycle) result.lifecycle = block.lifecycle as MothershipStreamV1SpanLifecycleEvent
|
||||
if (block.status) result.status = block.status as MothershipStreamV1CompletionStatus
|
||||
if (block.parentToolCallId) result.parentToolCallId = block.parentToolCallId
|
||||
if (block.toolCall) {
|
||||
result.toolCall = {
|
||||
id: block.toolCall.id ?? '',
|
||||
@@ -438,6 +448,9 @@ function normalizeBlock(block: RawBlock): PersistedContentBlock {
|
||||
if (typeof block.endedAt === 'number' && result.endedAt === undefined) {
|
||||
result.endedAt = block.endedAt
|
||||
}
|
||||
if (block.parentToolCallId && result.parentToolCallId === undefined) {
|
||||
result.parentToolCallId = block.parentToolCallId
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
|
||||
@@ -361,28 +361,29 @@ export async function runStreamLoop(
|
||||
flushSubagentThinkingBlock(context)
|
||||
flushThinkingBlock(context)
|
||||
if (spanEvt === MothershipStreamV1SpanLifecycleEvent.start) {
|
||||
const lastParent = context.subAgentParentStack[context.subAgentParentStack.length - 1]
|
||||
const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
|
||||
if (toolCallId) {
|
||||
if (lastParent !== toolCallId) {
|
||||
if (!context.subAgentParentStack.includes(toolCallId)) {
|
||||
context.subAgentParentStack.push(toolCallId)
|
||||
}
|
||||
context.subAgentParentToolCallId = toolCallId
|
||||
context.subAgentContent[toolCallId] ??= ''
|
||||
context.subAgentToolCalls[toolCallId] ??= []
|
||||
}
|
||||
if (
|
||||
subagentName &&
|
||||
!(
|
||||
lastParent === toolCallId &&
|
||||
lastBlock?.type === 'subagent' &&
|
||||
lastBlock.content === subagentName
|
||||
)
|
||||
) {
|
||||
context.contentBlocks.push({
|
||||
type: 'subagent',
|
||||
content: subagentName,
|
||||
timestamp: Date.now(),
|
||||
if (toolCallId && subagentName) {
|
||||
const openParents = (context.openSubagentParents ??= new Set<string>())
|
||||
if (!openParents.has(toolCallId)) {
|
||||
openParents.add(toolCallId)
|
||||
context.contentBlocks.push({
|
||||
type: 'subagent',
|
||||
content: subagentName,
|
||||
parentToolCallId: toolCallId,
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
}
|
||||
} else {
|
||||
logger.warn('subagent start missing toolCallId or agent name', {
|
||||
hasToolCallId: Boolean(toolCallId),
|
||||
hasSubagentName: Boolean(subagentName),
|
||||
})
|
||||
}
|
||||
return
|
||||
@@ -391,27 +392,33 @@ export async function runStreamLoop(
|
||||
if (isPendingPause) {
|
||||
return
|
||||
}
|
||||
if (context.subAgentParentStack.length > 0) {
|
||||
context.subAgentParentStack.pop()
|
||||
if (toolCallId) {
|
||||
const idx = context.subAgentParentStack.lastIndexOf(toolCallId)
|
||||
if (idx >= 0) {
|
||||
context.subAgentParentStack.splice(idx, 1)
|
||||
} else {
|
||||
logger.warn('subagent end without matching start', { toolCallId })
|
||||
}
|
||||
} else {
|
||||
logger.warn('subagent end without matching start')
|
||||
logger.warn('subagent end missing toolCallId')
|
||||
}
|
||||
context.subAgentParentToolCallId =
|
||||
context.subAgentParentStack.length > 0
|
||||
? context.subAgentParentStack[context.subAgentParentStack.length - 1]
|
||||
: undefined
|
||||
if (subagentName) {
|
||||
if (toolCallId) {
|
||||
for (let i = context.contentBlocks.length - 1; i >= 0; i--) {
|
||||
const b = context.contentBlocks[i]
|
||||
if (
|
||||
b.type === 'subagent' &&
|
||||
b.content === subagentName &&
|
||||
b.endedAt === undefined
|
||||
b.endedAt === undefined &&
|
||||
b.parentToolCallId === toolCallId
|
||||
) {
|
||||
b.endedAt = Date.now()
|
||||
break
|
||||
}
|
||||
}
|
||||
context.openSubagentParents?.delete(toolCallId)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -22,10 +22,17 @@ export function handleTextEvent(scope: ToolScope): StreamHandler {
|
||||
const parentToolCallId = getScopedParentToolCallId(event, context)
|
||||
if (!parentToolCallId) return
|
||||
if (event.payload.channel === MothershipStreamV1TextChannel.thinking) {
|
||||
if (
|
||||
context.currentSubagentThinkingBlock &&
|
||||
context.currentSubagentThinkingBlock.parentToolCallId !== parentToolCallId
|
||||
) {
|
||||
flushSubagentThinkingBlock(context)
|
||||
}
|
||||
if (!context.currentSubagentThinkingBlock) {
|
||||
context.currentSubagentThinkingBlock = {
|
||||
type: 'subagent_thinking',
|
||||
content: '',
|
||||
parentToolCallId,
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
}
|
||||
@@ -40,7 +47,7 @@ export function handleTextEvent(scope: ToolScope): StreamHandler {
|
||||
}
|
||||
context.subAgentContent[parentToolCallId] =
|
||||
(context.subAgentContent[parentToolCallId] || '') + chunk
|
||||
addContentBlock(context, { type: 'subagent_text', content: chunk })
|
||||
addContentBlock(context, { type: 'subagent_text', content: chunk, parentToolCallId })
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
@@ -340,6 +340,7 @@ function registerSubagentToolCall(
|
||||
type: 'tool_call',
|
||||
toolCall,
|
||||
calledBy: parentToolCall?.name,
|
||||
parentToolCallId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,6 +56,7 @@ export interface ContentBlock {
|
||||
calledBy?: string
|
||||
timestamp: number
|
||||
endedAt?: number
|
||||
parentToolCallId?: string
|
||||
}
|
||||
|
||||
export interface StreamingContext {
|
||||
@@ -86,6 +87,7 @@ export interface StreamingContext {
|
||||
subAgentParentStack: string[]
|
||||
subAgentContent: Record<string, string>
|
||||
subAgentToolCalls: Record<string, ToolCallState[]>
|
||||
openSubagentParents?: Set<string>
|
||||
pendingContent: string
|
||||
streamComplete: boolean
|
||||
wasAborted: boolean
|
||||
@@ -136,31 +138,12 @@ export interface OrchestratorOptions {
|
||||
onComplete?: (result: OrchestratorResult) => void | Promise<void>
|
||||
onError?: (error: Error) => void | Promise<void>
|
||||
abortSignal?: AbortSignal
|
||||
/**
|
||||
* Invoked when the orchestrator infers that the run was aborted via
|
||||
* an out-of-band signal (currently: a Redis abort marker observed
|
||||
* at SSE body close). Callers wire this to fire their local
|
||||
* `AbortController` so `signal.reason` is set and `recordCancelled`
|
||||
* classifies as `explicit_stop` rather than `unknown`.
|
||||
*/
|
||||
onAbortObserved?: (reason: string) => void
|
||||
interactive?: boolean
|
||||
}
|
||||
|
||||
export interface OrchestratorResult {
|
||||
success: boolean
|
||||
/**
|
||||
* True iff the non-success outcome was a user-initiated cancel
|
||||
* (abort signal fired or client disconnected). Lets callers treat
|
||||
* cancels differently from actual errors — notably, `buildOnComplete`
|
||||
* must NOT finalize the chat row on cancel, because the browser's
|
||||
* `/api/copilot/chat/stop` POST owns writing the partial assistant
|
||||
* content and clearing `conversationId` in one UPDATE. Finalizing
|
||||
* here would race and clear `conversationId` first, making the stop
|
||||
* UPDATE match zero rows and the partial content vanish on refetch.
|
||||
*
|
||||
* Always false when `success=true`.
|
||||
*/
|
||||
cancelled?: boolean
|
||||
content: string
|
||||
contentBlocks: ContentBlock[]
|
||||
|
||||
@@ -3,10 +3,11 @@ import { credential } from '@sim/db/schema'
|
||||
import { toError } from '@sim/utils/errors'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/request/types'
|
||||
import { getCredentialActorContext } from '@/lib/credentials/access'
|
||||
|
||||
export function executeManageCredential(
|
||||
rawParams: Record<string, unknown>,
|
||||
_context: ExecutionContext
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const params = rawParams as {
|
||||
operation: string
|
||||
@@ -17,26 +18,30 @@ export function executeManageCredential(
|
||||
const { operation, displayName } = params
|
||||
return (async () => {
|
||||
try {
|
||||
if (!context?.userId) {
|
||||
return { success: false, error: 'Authentication required' }
|
||||
}
|
||||
|
||||
switch (operation) {
|
||||
case 'rename': {
|
||||
const credentialId = params.credentialId
|
||||
if (!credentialId) return { success: false, error: 'credentialId is required for rename' }
|
||||
if (!displayName) return { success: false, error: 'displayName is required for rename' }
|
||||
const [row] = await db
|
||||
.select({
|
||||
id: credential.id,
|
||||
type: credential.type,
|
||||
displayName: credential.displayName,
|
||||
})
|
||||
.from(credential)
|
||||
.where(eq(credential.id, credentialId))
|
||||
.limit(1)
|
||||
if (!row) return { success: false, error: 'Credential not found' }
|
||||
if (row.type !== 'oauth')
|
||||
|
||||
const actor = await getCredentialActorContext(credentialId, context.userId)
|
||||
if (!actor.credential || !actor.hasWorkspaceAccess) {
|
||||
return { success: false, error: 'Credential not found' }
|
||||
}
|
||||
if (actor.credential.type !== 'oauth') {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Only OAuth credentials can be managed with this tool.',
|
||||
}
|
||||
}
|
||||
if (!actor.canWriteWorkspace && !actor.isAdmin) {
|
||||
return { success: false, error: 'Write access required to rename this credential' }
|
||||
}
|
||||
|
||||
await db
|
||||
.update(credential)
|
||||
.set({ displayName, updatedAt: new Date() })
|
||||
@@ -53,12 +58,16 @@ export function executeManageCredential(
|
||||
const failed: string[] = []
|
||||
|
||||
for (const id of ids) {
|
||||
const [row] = await db
|
||||
.select({ id: credential.id, type: credential.type })
|
||||
.from(credential)
|
||||
.where(eq(credential.id, id))
|
||||
.limit(1)
|
||||
if (!row || row.type !== 'oauth') {
|
||||
const actor = await getCredentialActorContext(id, context.userId)
|
||||
if (
|
||||
!actor.credential ||
|
||||
!actor.hasWorkspaceAccess ||
|
||||
actor.credential.type !== 'oauth'
|
||||
) {
|
||||
failed.push(id)
|
||||
continue
|
||||
}
|
||||
if (!actor.canWriteWorkspace && !actor.isAdmin) {
|
||||
failed.push(id)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import { db } from '@sim/db'
|
||||
import { knowledgeBase } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { toError } from '@sim/utils/errors'
|
||||
import { generateId } from '@sim/utils/id'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/request/types'
|
||||
import { restoreKnowledgeBase } from '@/lib/knowledge/service'
|
||||
import { getTableById, restoreTable } from '@/lib/table/service'
|
||||
@@ -10,6 +13,8 @@ import {
|
||||
} from '@/lib/uploads/contexts/workspace/workspace-file-manager'
|
||||
import { restoreWorkflow } from '@/lib/workflows/lifecycle'
|
||||
import { performRestoreFolder } from '@/lib/workflows/orchestration/folder-lifecycle'
|
||||
import { getWorkflowById } from '@/lib/workflows/utils'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('RestoreResource')
|
||||
|
||||
@@ -33,10 +38,25 @@ export async function executeRestoreResource(
|
||||
}
|
||||
|
||||
const requestId = generateId().slice(0, 8)
|
||||
const callerWorkspaceId = context.workspaceId
|
||||
|
||||
const hasWriteAccess = async (resourceWorkspaceId: string | null | undefined) => {
|
||||
if (!resourceWorkspaceId || resourceWorkspaceId !== callerWorkspaceId) return false
|
||||
const permission = await getUserEntityPermissions(
|
||||
context.userId,
|
||||
'workspace',
|
||||
resourceWorkspaceId
|
||||
)
|
||||
return permission === 'write' || permission === 'admin'
|
||||
}
|
||||
|
||||
try {
|
||||
switch (type) {
|
||||
case 'workflow': {
|
||||
const existing = await getWorkflowById(id, { includeArchived: true })
|
||||
if (!existing || !(await hasWriteAccess(existing.workspaceId))) {
|
||||
return { success: false, error: 'Workflow not found' }
|
||||
}
|
||||
const result = await restoreWorkflow(id, { requestId })
|
||||
if (!result.restored) {
|
||||
return { success: false, error: 'Workflow not found or not archived' }
|
||||
@@ -50,9 +70,13 @@ export async function executeRestoreResource(
|
||||
}
|
||||
|
||||
case 'table': {
|
||||
const existing = await getTableById(id, { includeArchived: true })
|
||||
if (!existing || !(await hasWriteAccess(existing.workspaceId))) {
|
||||
return { success: false, error: 'Table not found' }
|
||||
}
|
||||
await restoreTable(id, requestId)
|
||||
const table = await getTableById(id)
|
||||
const tableName = table?.name || id
|
||||
const tableName = table?.name || existing.name
|
||||
logger.info('Table restored via copilot', { tableId: id, name: tableName })
|
||||
return {
|
||||
success: true,
|
||||
@@ -62,6 +86,9 @@ export async function executeRestoreResource(
|
||||
}
|
||||
|
||||
case 'file': {
|
||||
if (!(await hasWriteAccess(context.workspaceId))) {
|
||||
return { success: false, error: 'File not found' }
|
||||
}
|
||||
await restoreWorkspaceFile(context.workspaceId, id)
|
||||
const fileRecord = await getWorkspaceFile(context.workspaceId, id)
|
||||
const fileName = fileRecord?.name || id
|
||||
@@ -74,6 +101,14 @@ export async function executeRestoreResource(
|
||||
}
|
||||
|
||||
case 'knowledgebase': {
|
||||
const [existing] = await db
|
||||
.select({ workspaceId: knowledgeBase.workspaceId })
|
||||
.from(knowledgeBase)
|
||||
.where(eq(knowledgeBase.id, id))
|
||||
.limit(1)
|
||||
if (!existing || !(await hasWriteAccess(existing.workspaceId))) {
|
||||
return { success: false, error: 'Knowledge base not found' }
|
||||
}
|
||||
await restoreKnowledgeBase(id, requestId)
|
||||
logger.info('Knowledge base restored via copilot', { knowledgeBaseId: id })
|
||||
return {
|
||||
@@ -83,6 +118,9 @@ export async function executeRestoreResource(
|
||||
}
|
||||
|
||||
case 'folder': {
|
||||
if (!(await hasWriteAccess(context.workspaceId))) {
|
||||
return { success: false, error: 'Folder not found' }
|
||||
}
|
||||
const result = await performRestoreFolder({
|
||||
folderId: id,
|
||||
workspaceId: context.workspaceId,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user