Compare commits

..

8 Commits

Author SHA1 Message Date
waleed
cb58ccbffe fix(audit-log): resolve userName/userEmail for JWT and API key auth paths 2026-02-19 16:02:53 -08:00
waleed
aeeead0b44 improvement(audit-log): add resourceName to credential set invitation accept 2026-02-18 11:50:29 -08:00
waleed
767ba42625 fix(audit-log): add missing actorName/actorEmail to workflow duplicate 2026-02-18 11:45:47 -08:00
waleed
affcdfb126 improvement(audit-log): use better-auth callback for password reset audit, remove cast
- Move password reset audit to onPasswordReset callback in auth config
  instead of coupling to better-auth's verification table internals
- Remove ugly double-cast on workflowData.workspaceId in deployment activation
2026-02-18 11:38:49 -08:00
waleed
5b88698561 fix(audit-log): add workspaceId to deployment activation audit 2026-02-18 11:35:59 -08:00
waleed
b0aca7cd80 fix(audit-log): resolve user for password reset, add CREDENTIAL_SET_INVITATION_RESENT action 2026-02-18 11:12:51 -08:00
waleed
241e56e00c improvement(audit-log): add actorName/actorEmail to all recordAudit calls 2026-02-18 11:08:45 -08:00
waleed
acd5d9d7e1 feat(audit-log): add audit events for templates, billing, credentials, env, deployments, passwords 2026-02-18 10:55:59 -08:00
240 changed files with 480 additions and 41824 deletions

View File

@@ -4407,161 +4407,6 @@ export function DatadogIcon(props: SVGProps<SVGSVGElement>) {
)
}
/**
 * Microsoft Dataverse product icon rendered as an inline SVG.
 *
 * Every id declared in <defs> (three clip paths, seven radial gradients) is
 * suffixed with React's `useId` so that multiple instances of this icon on
 * the same page do not collide on their `url(#...)` references.
 *
 * @param props - Props spread onto the root <svg>. The spread comes first,
 *   so the width/height/viewBox declared here always take precedence.
 */
export function MicrosoftDataverseIcon(props: SVGProps<SVGSVGElement>) {
  // Per-instance unique suffix shared by all defs ids below.
  const id = useId()
  const clip0 = `dataverse_clip0_${id}`
  const clip1 = `dataverse_clip1_${id}`
  const clip2 = `dataverse_clip2_${id}`
  const paint0 = `dataverse_paint0_${id}`
  const paint1 = `dataverse_paint1_${id}`
  const paint2 = `dataverse_paint2_${id}`
  const paint3 = `dataverse_paint3_${id}`
  const paint4 = `dataverse_paint4_${id}`
  const paint5 = `dataverse_paint5_${id}`
  const paint6 = `dataverse_paint6_${id}`
  return (
    <svg
      {...props}
      width='96'
      height='96'
      viewBox='0 0 96 96'
      fill='none'
      xmlns='http://www.w3.org/2000/svg'
    >
      <g clipPath={`url(#${clip0})`}>
        <g clipPath={`url(#${clip1})`}>
          <g clipPath={`url(#${clip2})`}>
            {/* First shape: base gradient fill plus a 0.8-opacity overlay gradient on the identical path. */}
            <path
              d='M13.8776 21.8242C29.1033 8.13791 49.7501 8.1861 62.955 18.9134C74.9816 28.6836 77.4697 44.3159 70.851 55.7801C64.2321 67.2443 52.5277 70.1455 39.5011 62.6247L31.7286 76.087L31.7234 76.0862C27.4181 83.5324 17.8937 86.0828 10.4437 81.7817C7.45394 80.0556 5.25322 77.4879 3.96665 74.551L3.96096 74.5511C-4.07832 55.7804 0.200745 34.1184 13.8776 21.8242Z'
              fill={`url(#${paint0})`}
            />
            <path
              d='M13.8776 21.8242C29.1033 8.13791 49.7501 8.1861 62.955 18.9134C74.9816 28.6836 77.4697 44.3159 70.851 55.7801C64.2321 67.2443 52.5277 70.1455 39.5011 62.6247L31.7286 76.087L31.7234 76.0862C27.4181 83.5324 17.8937 86.0828 10.4437 81.7817C7.45394 80.0556 5.25322 77.4879 3.96665 74.551L3.96096 74.5511C-4.07832 55.7804 0.200745 34.1184 13.8776 21.8242Z'
              fill={`url(#${paint1})`}
              fillOpacity='0.8'
            />
            {/* Second shape: same base-plus-overlay layering with a 0.9-opacity highlight. */}
            <path
              d='M85.4327 14.2231C88.4528 15.9668 90.6686 18.569 91.9494 21.5433L91.9533 21.5444C99.9406 40.2943 95.6533 61.9068 81.9983 74.1814C66.7726 87.8677 46.1257 87.8196 32.9209 77.0923C20.8945 67.3221 18.4062 51.6897 25.0249 40.2256C31.6438 28.7614 43.3482 25.8601 56.3748 33.381L64.1434 19.9255L64.1482 19.9249C68.4516 12.4736 77.9805 9.92084 85.4327 14.2231Z'
              fill={`url(#${paint2})`}
            />
            <path
              d='M85.4327 14.2231C88.4528 15.9668 90.6686 18.569 91.9494 21.5433L91.9533 21.5444C99.9406 40.2943 95.6533 61.9068 81.9983 74.1814C66.7726 87.8677 46.1257 87.8196 32.9209 77.0923C20.8945 67.3221 18.4062 51.6897 25.0249 40.2256C31.6438 28.7614 43.3482 25.8601 56.3748 33.381L64.1434 19.9255L64.1482 19.9249C68.4516 12.4736 77.9805 9.92084 85.4327 14.2231Z'
              fill={`url(#${paint3})`}
              fillOpacity='0.9'
            />
            {/* Remaining single-layer shapes (no overlay). */}
            <path
              d='M39.5041 62.6261C52.5307 70.1469 64.2352 67.2456 70.8541 55.7814C77.2488 44.7055 75.1426 29.7389 64.147 19.9271L56.3791 33.3814L39.5041 62.6261Z'
              fill={`url(#${paint4})`}
            />
            <path
              d='M56.3794 33.3815C43.3528 25.8607 31.6482 28.762 25.0294 40.2262C18.6347 51.3021 20.7409 66.2687 31.7364 76.0806L39.5043 62.6262L56.3794 33.3815Z'
              fill={`url(#${paint5})`}
            />
            <path
              d='M33.3215 56.4453C37.9837 64.5204 48.3094 67.2872 56.3846 62.625C64.4598 57.9628 67.2266 47.6371 62.5643 39.5619C57.9021 31.4867 47.5764 28.72 39.5013 33.3822C31.4261 38.0444 28.6593 48.3701 33.3215 56.4453Z'
              fill={`url(#${paint6})`}
            />
          </g>
        </g>
      </g>
      {/* Gradient and clip-path definitions referenced above via url(#id). */}
      <defs>
        <radialGradient
          id={paint0}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(46.0001 49.4996) rotate(-148.717) scale(46.2195 47.5359)'
        >
          <stop offset='0.465088' stopColor='#09442A' />
          <stop offset='0.70088' stopColor='#136C6C' />
          <stop offset='1' stopColor='#22918B' />
        </radialGradient>
        <radialGradient
          id={paint1}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(50.0001 32.4996) rotate(123.57) scale(66.0095 46.5498)'
        >
          <stop offset='0.718705' stopColor='#1A7F7C' stopOpacity='0' />
          <stop offset='1' stopColor='#16BBDA' />
        </radialGradient>
        <radialGradient
          id={paint2}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(50.4999 44.5001) rotate(30.75) scale(45.9618 44.5095)'
        >
          <stop offset='0.358097' stopColor='#136C6C' />
          <stop offset='0.789474' stopColor='#42B870' />
          <stop offset='1' stopColor='#76D45E' />
        </radialGradient>
        {/* paint3 uses an explicit matrix transform rather than translate/rotate/scale. */}
        <radialGradient
          id={paint3}
          cx='0'
          cy='0'
          r='1'
          gradientTransform='matrix(42.5 -36.0002 31.1824 36.8127 49.4998 55.5001)'
          gradientUnits='userSpaceOnUse'
        >
          <stop offset='0.583166' stopColor='#76D45E' stopOpacity='0' />
          <stop offset='1' stopColor='#C8F5B7' />
        </radialGradient>
        <radialGradient
          id={paint4}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(47.5 48) rotate(-58.9042) scale(32.6898)'
        >
          <stop offset='0.486266' stopColor='#22918B' />
          <stop offset='0.729599' stopColor='#42B870' />
          <stop offset='1' stopColor='#43E5CA' />
        </radialGradient>
        <radialGradient
          id={paint5}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(47.3833 49.0077) rotate(119.859) scale(31.1328 29.4032)'
        >
          <stop offset='0.459553' stopColor='#08494E' />
          <stop offset='0.742242' stopColor='#1A7F7C' />
          <stop offset='1' stopColor='#309C61' />
        </radialGradient>
        <radialGradient
          id={paint6}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(52.5 40) rotate(120.784) scale(27.3542)'
        >
          <stop stopColor='#C8F5B7' />
          <stop offset='0.24583' stopColor='#98F0B0' />
          <stop offset='0.643961' stopColor='#52D17C' />
          <stop offset='1' stopColor='#119FC5' />
        </radialGradient>
        <clipPath id={clip0}>
          <rect width='96' height='96' fill='white' />
        </clipPath>
        <clipPath id={clip1}>
          <rect width='96' height='96' fill='white' />
        </clipPath>
        <clipPath id={clip2}>
          <rect width='95.9998' height='96' fill='white' />
        </clipPath>
      </defs>
    </svg>
  )
}
export function KalshiIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} viewBox='0 0 78 20' fill='currentColor' xmlns='http://www.w3.org/2000/svg'>
@@ -4964,26 +4809,6 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
)
}
/**
 * Generic table glyph: a rounded square split into a 3x3 grid by two
 * horizontal and two vertical divider lines. Stroke-based and inherits
 * `currentColor`; incoming props are spread last, so callers may override
 * any of the default attributes.
 */
export function TableIcon(props: SVGProps<SVGSVGElement>) {
  // Divider strokes: two horizontal (y=9, y=15) and two vertical (x=9, x=15).
  const dividers = ['M3 9h18', 'M3 15h18', 'M9 3v18', 'M15 3v18']
  return (
    <svg
      xmlns='http://www.w3.org/2000/svg'
      viewBox='0 0 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth={2}
      strokeLinecap='round'
      strokeLinejoin='round'
      {...props}
    >
      <rect width='18' height='18' x='3' y='3' rx='2' />
      {dividers.map((d) => (
        <path key={d} d={d} />
      ))}
    </svg>
  )
}
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
@@ -5707,33 +5532,3 @@ export function OnePasswordIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
/**
 * Vercel logo mark: a single light triangle, scaled down and centered inside
 * the 256x222 viewBox. Props are spread first, so the viewBox and
 * preserveAspectRatio declared here always apply.
 */
export function VercelIcon(props: SVGProps<SVGSVGElement>) {
  const trianglePoints = '128 0 256 221.705007 0 221.705007'
  return (
    <svg
      {...props}
      xmlns='http://www.w3.org/2000/svg'
      viewBox='0 0 256 222'
      preserveAspectRatio='xMidYMid'
    >
      <g transform='translate(19.2 16.63) scale(0.85)'>
        <polygon fill='#fafafa' points={trianglePoints} />
      </g>
    </svg>
  )
}
/**
 * Cloudflare logo: the two-tone cloud drawn as a pair of filled paths
 * (orange body, lighter-orange right lobe) in a 512x512 viewBox.
 */
export function CloudflareIcon(props: SVGProps<SVGSVGElement>) {
  // Each entry pairs a fill color with its path data.
  const segments = [
    {
      fill: '#f38020',
      d: 'M331 326c11-26-4-38-19-38l-148-2c-4 0-4-6 1-7l150-2c17-1 37-15 43-33 0 0 10-21 9-24a97 97 0 0 0-187-11c-38-25-78 9-69 46-48 3-65 46-60 72 0 1 1 2 3 2h274c1 0 3-1 3-3z',
    },
    {
      fill: '#faae40',
      d: 'M381 224c-4 0-6-1-7 1l-5 21c-5 16 3 30 20 31l32 2c4 0 4 6-1 7l-33 1c-36 4-46 39-46 39 0 2 0 3 2 3h113l3-2a81 81 0 0 0-78-103',
    },
  ]
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 512 512'>
      {segments.map((segment) => (
        <path key={segment.fill} fill={segment.fill} d={segment.d} />
      ))}
    </svg>
  )
}

View File

@@ -19,7 +19,6 @@ import {
CirclebackIcon,
ClayIcon,
ClerkIcon,
CloudflareIcon,
ConfluenceIcon,
CursorIcon,
DatadogIcon,
@@ -72,7 +71,6 @@ import {
MailgunIcon,
MailServerIcon,
Mem0Icon,
MicrosoftDataverseIcon,
MicrosoftExcelIcon,
MicrosoftOneDriveIcon,
MicrosoftPlannerIcon,
@@ -127,7 +125,6 @@ import {
TTSIcon,
TwilioIcon,
TypeformIcon,
VercelIcon,
VideoIcon,
WealthboxIcon,
WebflowIcon,
@@ -158,7 +155,6 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
circleback: CirclebackIcon,
clay: ClayIcon,
clerk: ClerkIcon,
cloudflare: CloudflareIcon,
confluence_v2: ConfluenceIcon,
cursor_v2: CursorIcon,
datadog: DatadogIcon,
@@ -212,7 +208,6 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
mailgun: MailgunIcon,
mem0: Mem0Icon,
memory: BrainIcon,
microsoft_dataverse: MicrosoftDataverseIcon,
microsoft_excel_v2: MicrosoftExcelIcon,
microsoft_planner: MicrosoftPlannerIcon,
microsoft_teams: MicrosoftTeamsIcon,
@@ -267,7 +262,6 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
twilio_sms: TwilioIcon,
twilio_voice: TwilioIcon,
typeform: TypeformIcon,
vercel: VercelIcon,
video_generator_v2: VideoIcon,
vision_v2: EyeIcon,
wealthbox: WealthboxIcon,

View File

@@ -1,569 +0,0 @@
---
title: Cloudflare
description: Manage DNS, domains, certificates, and cache
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="cloudflare"
color="#F5F6FA"
/>
{/* MANUAL-CONTENT-START:intro */}
[Cloudflare](https://cloudflare.com/) is a global cloud platform that provides content delivery, domain management, cybersecurity, and performance services for websites and applications.
In Sim, the Cloudflare integration empowers your agents to automate the management of DNS records, SSL/TLS certificates, domains (zones), cache, zone settings, and more through easy-to-use API tools. Agents can securely list and edit domains, update DNS records, monitor analytics, and manage security and performance—all as part of your automated workflows.
With Cloudflare, you can:
- **Manage DNS and Domains**: List all your domains (zones), view zone details, and fully control DNS records from your automated agent workflows.
- **Handle SSL/TLS Certificates and Settings**: Issue, renew, or list certificates and adjust security and performance settings for your sites.
- **Purge Cache and Analyze Traffic**: Instantly purge edge cache and review real-time DNS analytics directly within your Sim agent processes.
- **Automate Security and Operations**: Use agents to programmatically manage zones, update settings, and streamline repetitive Cloudflare tasks.
This integration enables streamlined, secure management of your site's infrastructure from within Sim. Your agents can integrate Cloudflare operations directly into processes—keeping DNS records up-to-date, responding to security events, improving site performance, and automating large-scale site and account administration.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Cloudflare into the workflow. Manage zones (domains), DNS records, SSL/TLS certificates, zone settings, DNS analytics, and cache purging via the Cloudflare API.
## Tools
### `cloudflare_list_zones`
Lists all zones (domains) in the Cloudflare account.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | No | Filter zones by domain name \(e.g., "example.com"\) |
| `status` | string | No | Filter by zone status: "initializing", "pending", "active", or "moved" |
| `page` | number | No | Page number for pagination \(default: 1\) |
| `per_page` | number | No | Number of zones per page \(default: 20, max: 50\) |
| `accountId` | string | No | Filter zones by account ID |
| `order` | string | No | Sort field \(name, status, account.id, account.name\) |
| `direction` | string | No | Sort direction \(asc, desc\) |
| `match` | string | No | Match logic for filters \(any, all\). Default: all |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `zones` | array | List of zones/domains |
| ↳ `id` | string | Zone ID |
| ↳ `name` | string | Domain name |
| ↳ `status` | string | Zone status \(initializing, pending, active, moved\) |
| ↳ `paused` | boolean | Whether the zone is paused |
| ↳ `type` | string | Zone type \(full, partial, or secondary\) |
| ↳ `name_servers` | array | Assigned Cloudflare name servers |
| ↳ `original_name_servers` | array | Original name servers before moving to Cloudflare |
| ↳ `created_on` | string | ISO 8601 date when the zone was created |
| ↳ `modified_on` | string | ISO 8601 date when the zone was last modified |
| ↳ `activated_on` | string | ISO 8601 date when the zone was activated |
| ↳ `development_mode` | number | Seconds remaining in development mode \(0 = off\) |
| ↳ `plan` | object | Zone plan information |
| ↳ `id` | string | Plan identifier |
| ↳ `name` | string | Plan name |
| ↳ `price` | number | Plan price |
| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan |
| ↳ `frequency` | string | Plan billing frequency |
| ↳ `currency` | string | Plan currency |
| ↳ `legacy_id` | string | Legacy plan identifier |
| ↳ `account` | object | Account the zone belongs to |
| ↳ `id` | string | Account identifier |
| ↳ `name` | string | Account name |
| ↳ `owner` | object | Zone owner information |
| ↳ `id` | string | Owner identifier |
| ↳ `name` | string | Owner name |
| ↳ `type` | string | Owner type |
| ↳ `meta` | object | Zone metadata |
| ↳ `cdn_only` | boolean | Whether the zone is CDN only |
| ↳ `custom_certificate_quota` | number | Custom certificate quota |
| ↳ `dns_only` | boolean | Whether the zone is DNS only |
| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled |
| ↳ `page_rule_quota` | number | Page rule quota |
| ↳ `phishing_detected` | boolean | Whether phishing was detected |
| ↳ `step` | number | Current setup step |
| ↳ `vanity_name_servers` | array | Custom vanity name servers |
| ↳ `permissions` | array | User permissions for the zone |
| `total_count` | number | Total number of zones matching the query |
### `cloudflare_get_zone`
Gets details for a specific zone (domain) by its ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to retrieve details for |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Zone ID |
| `name` | string | Domain name |
| `status` | string | Zone status \(initializing, pending, active, moved\) |
| `paused` | boolean | Whether the zone is paused |
| `type` | string | Zone type \(full, partial, or secondary\) |
| `name_servers` | array | Assigned Cloudflare name servers |
| `original_name_servers` | array | Original name servers before moving to Cloudflare |
| `created_on` | string | ISO 8601 date when the zone was created |
| `modified_on` | string | ISO 8601 date when the zone was last modified |
| `activated_on` | string | ISO 8601 date when the zone was activated |
| `development_mode` | number | Seconds remaining in development mode \(0 = off\) |
| `plan` | object | Zone plan information |
| ↳ `id` | string | Plan identifier |
| ↳ `name` | string | Plan name |
| ↳ `price` | number | Plan price |
| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan |
| ↳ `frequency` | string | Plan billing frequency |
| ↳ `currency` | string | Plan currency |
| ↳ `legacy_id` | string | Legacy plan identifier |
| `account` | object | Account the zone belongs to |
| ↳ `id` | string | Account identifier |
| ↳ `name` | string | Account name |
| `owner` | object | Zone owner information |
| ↳ `id` | string | Owner identifier |
| ↳ `name` | string | Owner name |
| ↳ `type` | string | Owner type |
| `meta` | object | Zone metadata |
| ↳ `cdn_only` | boolean | Whether the zone is CDN only |
| ↳ `custom_certificate_quota` | number | Custom certificate quota |
| ↳ `dns_only` | boolean | Whether the zone is DNS only |
| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled |
| ↳ `page_rule_quota` | number | Page rule quota |
| ↳ `phishing_detected` | boolean | Whether phishing was detected |
| ↳ `step` | number | Current setup step |
| `vanity_name_servers` | array | Custom vanity name servers |
| `permissions` | array | User permissions for the zone |
### `cloudflare_create_zone`
Adds a new zone (domain) to the Cloudflare account.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | Yes | The domain name to add \(e.g., "example.com"\) |
| `accountId` | string | Yes | The Cloudflare account ID |
| `type` | string | No | Zone type: "full" \(Cloudflare manages DNS\), "partial" \(CNAME setup\), or "secondary" \(secondary DNS\) |
| `jump_start` | boolean | No | Automatically attempt to fetch existing DNS records when creating the zone |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Created zone ID |
| `name` | string | Domain name |
| `status` | string | Zone status \(initializing, pending, active, moved\) |
| `paused` | boolean | Whether the zone is paused |
| `type` | string | Zone type \(full, partial, or secondary\) |
| `name_servers` | array | Assigned Cloudflare name servers |
| `original_name_servers` | array | Original name servers before moving to Cloudflare |
| `created_on` | string | ISO 8601 date when the zone was created |
| `modified_on` | string | ISO 8601 date when the zone was last modified |
| `activated_on` | string | ISO 8601 date when the zone was activated |
| `development_mode` | number | Seconds remaining in development mode \(0 = off\) |
| `plan` | object | Zone plan information |
| ↳ `id` | string | Plan identifier |
| ↳ `name` | string | Plan name |
| ↳ `price` | number | Plan price |
| ↳ `is_subscribed` | boolean | Whether the zone is subscribed to the plan |
| ↳ `frequency` | string | Plan billing frequency |
| ↳ `currency` | string | Plan currency |
| ↳ `legacy_id` | string | Legacy plan identifier |
| `account` | object | Account the zone belongs to |
| ↳ `id` | string | Account identifier |
| ↳ `name` | string | Account name |
| `owner` | object | Zone owner information |
| ↳ `id` | string | Owner identifier |
| ↳ `name` | string | Owner name |
| ↳ `type` | string | Owner type |
| `meta` | object | Zone metadata |
| ↳ `cdn_only` | boolean | Whether the zone is CDN only |
| ↳ `custom_certificate_quota` | number | Custom certificate quota |
| ↳ `dns_only` | boolean | Whether the zone is DNS only |
| ↳ `foundation_dns` | boolean | Whether foundation DNS is enabled |
| ↳ `page_rule_quota` | number | Page rule quota |
| ↳ `phishing_detected` | boolean | Whether phishing was detected |
| ↳ `step` | number | Current setup step |
| `vanity_name_servers` | array | Custom vanity name servers |
| `permissions` | array | User permissions for the zone |
### `cloudflare_delete_zone`
Deletes a zone (domain) from the Cloudflare account.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to delete |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Deleted zone ID |
### `cloudflare_list_dns_records`
Lists DNS records for a specific zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to list DNS records for |
| `type` | string | No | Filter by record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT"\) |
| `name` | string | No | Filter by record name \(exact match\) |
| `content` | string | No | Filter by record content \(exact match\) |
| `page` | number | No | Page number for pagination \(default: 1\) |
| `per_page` | number | No | Number of records per page \(default: 100, max: 5000000\) |
| `direction` | string | No | Sort direction \(asc or desc\) |
| `match` | string | No | Match logic for filters: any or all \(default: all\) |
| `order` | string | No | Sort field \(type, name, content, ttl, proxied\) |
| `proxied` | boolean | No | Filter by proxy status |
| `search` | string | No | Free-text search across record name, content, and value |
| `tag` | string | No | Filter by tags \(comma-separated\) |
| `tag_match` | string | No | Tag filter match logic: any or all |
| `commentFilter` | string | No | Filter records by comment content \(substring match\) |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | List of DNS records |
| ↳ `id` | string | Unique identifier for the DNS record |
| ↳ `zone_id` | string | The ID of the zone the record belongs to |
| ↳ `zone_name` | string | The name of the zone |
| ↳ `type` | string | Record type \(A, AAAA, CNAME, MX, TXT, etc.\) |
| ↳ `name` | string | Record name \(e.g., example.com\) |
| ↳ `content` | string | Record content \(e.g., IP address\) |
| ↳ `proxiable` | boolean | Whether the record can be proxied |
| ↳ `proxied` | boolean | Whether Cloudflare proxy is enabled |
| ↳ `ttl` | number | TTL in seconds \(1 = automatic\) |
| ↳ `locked` | boolean | Whether the record is locked |
| ↳ `priority` | number | MX/SRV record priority |
| ↳ `comment` | string | Comment associated with the record |
| ↳ `tags` | array | Tags associated with the record |
| ↳ `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified |
| ↳ `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified |
| ↳ `meta` | object | Record metadata |
| ↳ `source` | string | Source of the DNS record |
| ↳ `created_on` | string | ISO 8601 timestamp when the record was created |
| ↳ `modified_on` | string | ISO 8601 timestamp when the record was last modified |
| `total_count` | number | Total number of DNS records matching the query |
### `cloudflare_create_dns_record`
Creates a new DNS record for a zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to create the DNS record in |
| `type` | string | Yes | DNS record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT", "NS", "SRV"\) |
| `name` | string | Yes | DNS record name \(e.g., "example.com" or "subdomain.example.com"\) |
| `content` | string | Yes | DNS record content \(e.g., IP address for A records, target for CNAME\) |
| `ttl` | number | No | Time to live in seconds \(1 = automatic, default: 1\) |
| `proxied` | boolean | No | Whether to enable Cloudflare proxy \(default: false\) |
| `priority` | number | No | Priority for MX and SRV records |
| `comment` | string | No | Comment for the DNS record |
| `tags` | string | No | Comma-separated tags for the DNS record |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the created DNS record |
| `zone_id` | string | The ID of the zone the record belongs to |
| `zone_name` | string | The name of the zone |
| `type` | string | DNS record type \(A, AAAA, CNAME, MX, TXT, etc.\) |
| `name` | string | DNS record hostname |
| `content` | string | DNS record value \(e.g., IP address, target hostname\) |
| `proxiable` | boolean | Whether the record can be proxied through Cloudflare |
| `proxied` | boolean | Whether Cloudflare proxy is enabled |
| `ttl` | number | Time to live in seconds \(1 = automatic\) |
| `locked` | boolean | Whether the record is locked |
| `priority` | number | Priority for MX and SRV records |
| `comment` | string | Comment associated with the record |
| `tags` | array | Tags associated with the record |
| `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified |
| `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified |
| `meta` | object | Record metadata |
| ↳ `source` | string | Source of the DNS record |
| `created_on` | string | ISO 8601 timestamp when the record was created |
| `modified_on` | string | ISO 8601 timestamp when the record was last modified |
### `cloudflare_update_dns_record`
Updates an existing DNS record for a zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID containing the DNS record |
| `recordId` | string | Yes | The DNS record ID to update |
| `type` | string | No | DNS record type \(e.g., "A", "AAAA", "CNAME", "MX", "TXT"\) |
| `name` | string | No | DNS record name |
| `content` | string | No | DNS record content \(e.g., IP address\) |
| `ttl` | number | No | Time to live in seconds \(1 = automatic\) |
| `proxied` | boolean | No | Whether to enable Cloudflare proxy |
| `priority` | number | No | Priority for MX and SRV records |
| `comment` | string | No | Comment for the DNS record |
| `tags` | string | No | Comma-separated tags for the DNS record |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique identifier for the updated DNS record |
| `zone_id` | string | The ID of the zone the record belongs to |
| `zone_name` | string | The name of the zone |
| `type` | string | DNS record type \(A, AAAA, CNAME, MX, TXT, etc.\) |
| `name` | string | DNS record hostname |
| `content` | string | DNS record value \(e.g., IP address, target hostname\) |
| `proxiable` | boolean | Whether the record can be proxied through Cloudflare |
| `proxied` | boolean | Whether Cloudflare proxy is enabled |
| `ttl` | number | Time to live in seconds \(1 = automatic\) |
| `locked` | boolean | Whether the record is locked |
| `priority` | number | Priority for MX and SRV records |
| `comment` | string | Comment associated with the record |
| `tags` | array | Tags associated with the record |
| `comment_modified_on` | string | ISO 8601 timestamp when the comment was last modified |
| `tags_modified_on` | string | ISO 8601 timestamp when tags were last modified |
| `meta` | object | Record metadata |
| ↳ `source` | string | Source of the DNS record |
| `created_on` | string | ISO 8601 timestamp when the record was created |
| `modified_on` | string | ISO 8601 timestamp when the record was last modified |
### `cloudflare_delete_dns_record`
Deletes a DNS record from a zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID containing the DNS record |
| `recordId` | string | Yes | The DNS record ID to delete |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Deleted record ID |
### `cloudflare_list_certificates`
Lists SSL/TLS certificate packs for a zone.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to list certificates for |
| `status` | string | No | Filter certificate packs by status \(e.g., "all", "active", "pending"\) |
| `page` | number | No | Page number of paginated results \(default: 1\) |
| `per_page` | number | No | Number of certificate packs per page \(default: 20, min: 5, max: 50\) |
| `deploy` | string | No | Filter by deployment environment: "staging" or "production" |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `certificates` | array | List of SSL/TLS certificate packs |
| ↳ `id` | string | Certificate pack ID |
| ↳ `type` | string | Certificate type \(e.g., "universal", "advanced"\) |
| ↳ `hosts` | array | Hostnames covered by this certificate pack |
| ↳ `primary_certificate` | string | ID of the primary certificate in the pack |
| ↳ `status` | string | Certificate pack status \(e.g., "active", "pending"\) |
| ↳ `certificates` | array | Individual certificates within the pack |
| ↳ `id` | string | Certificate ID |
| ↳ `hosts` | array | Hostnames covered by this certificate |
| ↳ `issuer` | string | Certificate issuer |
| ↳ `signature` | string | Signature algorithm \(e.g., "ECDSAWithSHA256"\) |
| ↳ `status` | string | Certificate status |
| ↳ `bundle_method` | string | Bundle method \(e.g., "ubiquitous"\) |
| ↳ `zone_id` | string | Zone ID the certificate belongs to |
| ↳ `uploaded_on` | string | Upload date \(ISO 8601\) |
| ↳ `modified_on` | string | Last modified date \(ISO 8601\) |
| ↳ `expires_on` | string | Expiration date \(ISO 8601\) |
| ↳ `priority` | number | Certificate priority order |
| ↳ `geo_restrictions` | object | Geographic restrictions for the certificate |
| ↳ `label` | string | Geographic restriction label |
| ↳ `cloudflare_branding` | boolean | Whether Cloudflare branding is enabled on the certificate |
| ↳ `validation_method` | string | Validation method \(e.g., "txt", "http", "cname"\) |
| ↳ `validity_days` | number | Validity period in days |
| ↳ `certificate_authority` | string | Certificate authority \(e.g., "lets_encrypt", "google"\) |
| ↳ `validation_errors` | array | Validation issues for the certificate pack |
| ↳ `message` | string | Validation error message |
| ↳ `validation_records` | array | Validation records for the certificate pack |
| ↳ `cname` | string | CNAME record name |
| ↳ `cname_target` | string | CNAME record target |
| ↳ `emails` | array | Email addresses for validation |
| ↳ `http_body` | string | HTTP validation body content |
| ↳ `http_url` | string | HTTP validation URL |
| ↳ `status` | string | Validation record status |
| ↳ `txt_name` | string | TXT record name |
| ↳ `txt_value` | string | TXT record value |
| ↳ `dcv_delegation_records` | array | Domain control validation delegation records |
| ↳ `cname` | string | CNAME record name |
| ↳ `cname_target` | string | CNAME record target |
| ↳ `emails` | array | Email addresses for validation |
| ↳ `http_body` | string | HTTP validation body content |
| ↳ `http_url` | string | HTTP validation URL |
| ↳ `status` | string | Delegation record status |
| ↳ `txt_name` | string | TXT record name |
| ↳ `txt_value` | string | TXT record value |
| `total_count` | number | Total number of certificate packs |
### `cloudflare_get_zone_settings`
Gets all settings for a zone including SSL mode, minification, caching level, and security settings.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to get settings for |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `settings` | array | List of zone settings |
| ↳ `id` | string | Setting identifier \(e.g., ssl, minify, cache_level, security_level, always_use_https\) |
| ↳ `value` | string | Setting value as a string. Simple values returned as-is \(e.g., "full", "on"\). Complex values are JSON-stringified \(e.g., \'\{"css":"on","html":"on","js":"on"\}\' for minify\) |
| ↳ `editable` | boolean | Whether the setting can be modified for the current zone plan |
| ↳ `modified_on` | string | ISO 8601 timestamp when the setting was last modified |
| ↳ `time_remaining` | number | Seconds remaining until the setting can be modified again \(only present for rate-limited settings\) |
### `cloudflare_update_zone_setting`
Updates a specific zone setting such as SSL mode, security level, cache level, minification, or other configuration.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to update settings for |
| `settingId` | string | Yes | Setting to update \(e.g., "ssl", "security_level", "cache_level", "minify", "always_use_https", "browser_cache_ttl", "http3", "min_tls_version", "ciphers"\) |
| `value` | string | Yes | New value for the setting as a string or JSON string for complex values \(e.g., "full" for SSL, "medium" for security_level, "aggressive" for cache_level, \'\{"css":"on","html":"on","js":"on"\}\' for minify, \'\["ECDHE-RSA-AES128-GCM-SHA256"\]\' for ciphers\) |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Setting identifier \(e.g., ssl, minify, cache_level\) |
| `value` | string | Updated setting value as a string. Simple values returned as-is \(e.g., "full", "on"\). Complex values are JSON-stringified. |
| `editable` | boolean | Whether the setting can be modified for the current zone plan |
| `modified_on` | string | ISO 8601 timestamp when the setting was last modified |
| `time_remaining` | number | Seconds remaining until the setting can be modified again \(only present for rate-limited settings\) |
### `cloudflare_dns_analytics`
Gets DNS analytics report for a zone including query counts and trends.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to get DNS analytics for |
| `since` | string | No | Start date for analytics \(ISO 8601, e.g., "2024-01-01T00:00:00Z"\) or relative \(e.g., "-6h"\) |
| `until` | string | No | End date for analytics \(ISO 8601, e.g., "2024-01-31T23:59:59Z"\) or relative \(e.g., "now"\) |
| `metrics` | string | Yes | Comma-separated metrics to retrieve \(e.g., "queryCount,uncachedCount,staleCount,responseTimeAvg,responseTimeMedian,responseTime90th,responseTime99th"\) |
| `dimensions` | string | No | Comma-separated dimensions to group by \(e.g., "queryName,queryType,responseCode,responseCached,coloName,origin,dayOfWeek,tcp,ipVersion,querySizeBucket,responseSizeBucket"\) |
| `filters` | string | No | Filters to apply to the data \(e.g., "queryType==A"\) |
| `sort` | string | No | Sort order for the result set. Fields must be included in metrics or dimensions \(e.g., "+queryCount" or "-responseTimeAvg"\) |
| `limit` | number | No | Maximum number of results to return |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `totals` | object | Aggregate DNS analytics totals for the entire queried period |
| ↳ `queryCount` | number | Total number of DNS queries |
| ↳ `uncachedCount` | number | Number of uncached DNS queries |
| ↳ `staleCount` | number | Number of stale DNS queries |
| ↳ `responseTimeAvg` | number | Average response time in milliseconds |
| ↳ `responseTimeMedian` | number | Median response time in milliseconds |
| ↳ `responseTime90th` | number | 90th percentile response time in milliseconds |
| ↳ `responseTime99th` | number | 99th percentile response time in milliseconds |
| `min` | object | Minimum values across the analytics period |
| ↳ `queryCount` | number | Minimum number of DNS queries |
| ↳ `uncachedCount` | number | Minimum number of uncached DNS queries |
| ↳ `staleCount` | number | Minimum number of stale DNS queries |
| ↳ `responseTimeAvg` | number | Minimum average response time in milliseconds |
| ↳ `responseTimeMedian` | number | Minimum median response time in milliseconds |
| ↳ `responseTime90th` | number | Minimum 90th percentile response time in milliseconds |
| ↳ `responseTime99th` | number | Minimum 99th percentile response time in milliseconds |
| `max` | object | Maximum values across the analytics period |
| ↳ `queryCount` | number | Maximum number of DNS queries |
| ↳ `uncachedCount` | number | Maximum number of uncached DNS queries |
| ↳ `staleCount` | number | Maximum number of stale DNS queries |
| ↳ `responseTimeAvg` | number | Maximum average response time in milliseconds |
| ↳ `responseTimeMedian` | number | Maximum median response time in milliseconds |
| ↳ `responseTime90th` | number | Maximum 90th percentile response time in milliseconds |
| ↳ `responseTime99th` | number | Maximum 99th percentile response time in milliseconds |
| `data` | array | Raw analytics data rows returned by the Cloudflare DNS analytics report |
| ↳ `dimensions` | array | Dimension values for this data row, parallel to the requested dimensions list |
| ↳ `metrics` | array | Metric values for this data row, parallel to the requested metrics list |
| `data_lag` | number | Processing lag in seconds before analytics data becomes available |
| `rows` | number | Total number of rows in the result set |
| `query` | object | Echo of the query parameters sent to the API |
| ↳ `since` | string | Start date of the analytics query |
| ↳ `until` | string | End date of the analytics query |
| ↳ `metrics` | array | Metrics requested in the query |
| ↳ `dimensions` | array | Dimensions requested in the query |
| ↳ `filters` | string | Filters applied to the query |
| ↳ `sort` | array | Sort order applied to the query |
| ↳ `limit` | number | Maximum number of results requested |
### `cloudflare_purge_cache`
Purges cached content for a zone. Can purge everything or specific files/tags/hosts/prefixes.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `zoneId` | string | Yes | The zone ID to purge cache for |
| `purge_everything` | boolean | No | Set to true to purge all cached content. Mutually exclusive with files, tags, hosts, and prefixes |
| `files` | string | No | Comma-separated list of URLs to purge from cache |
| `tags` | string | No | Comma-separated list of cache tags to purge \(Enterprise only\) |
| `hosts` | string | No | Comma-separated list of hostnames to purge \(Enterprise only\) |
| `prefixes` | string | No | Comma-separated list of URL prefixes to purge \(Enterprise only\) |
| `apiKey` | string | Yes | Cloudflare API Token |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Purge request identifier returned by Cloudflare |

View File

@@ -15,7 +15,6 @@
"circleback",
"clay",
"clerk",
"cloudflare",
"confluence",
"cursor",
"datadog",
@@ -69,7 +68,6 @@
"mailgun",
"mem0",
"memory",
"microsoft_dataverse",
"microsoft_excel",
"microsoft_planner",
"microsoft_teams",
@@ -114,7 +112,6 @@
"stripe",
"stt",
"supabase",
"table",
"tavily",
"telegram",
"textract",
@@ -125,7 +122,6 @@
"twilio_sms",
"twilio_voice",
"typeform",
"vercel",
"video_generator",
"vision",
"wealthbox",

View File

@@ -1,426 +0,0 @@
---
title: Microsoft Dataverse
description: Manage records in Microsoft Dataverse tables
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="microsoft_dataverse"
color="#E0E0E0"
/>
{/* MANUAL-CONTENT-START:intro */}
[Microsoft Dataverse](https://learn.microsoft.com/en-us/power-apps/maker/data-platform/data-platform-intro) is a powerful cloud data platform for securely storing, managing, and interacting with structured business data. The Microsoft Dataverse integration enables you to programmatically create, read, update, delete, and link records in Dataverse tables as part of your workflow and automation needs.
With Microsoft Dataverse integration, you can:
- **List and query records:** Access lists of records or query with advanced filters to find the data you need from any Dataverse table.
- **Create and update records:** Add new records or update existing ones in any table for use across Power Platform, Dynamics 365, and custom apps.
- **Delete and manage records:** Remove records as part of data lifecycle management directly from your automation flows.
- **Associate and disassociate records:** Link related items together or remove associations using entity relationships and navigation properties—essential for reflecting complex business processes.
- **Work with any Dataverse environment:** Connect to your organization's environments, including production, sandbox, or Dynamics 365 tenants, for maximum flexibility.
- **Integrate with Power Platform and Dynamics 365:** Automate tasks ranging from sales and marketing data updates to custom app workflows—all powered by Dataverse's security and governance.
The Dataverse integration empowers solution builders and business users to automate business processes, maintain accurate and up-to-date information, create system integrations, trigger actions, and drive insights—all with robust security and governance.
Connect Microsoft Dataverse to your automations to unlock sophisticated data management, orchestration, and business logic across your apps, teams, and cloud services.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Microsoft Dataverse into your workflow. Create, read, update, delete, upsert, associate, query, search, and execute actions and functions against Dataverse tables using the Web API. Supports bulk operations, FetchXML, file uploads, and relevance search. Works with Dynamics 365, Power Platform, and custom Dataverse environments.
## Tools
### `microsoft_dataverse_associate`
Associate two records in Microsoft Dataverse via a navigation property. Creates a relationship between a source record and a target record. Supports both collection-valued (POST) and single-valued (PUT) navigation properties.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Source entity set name \(e.g., accounts\) |
| `recordId` | string | Yes | Source record GUID |
| `navigationProperty` | string | Yes | Navigation property name \(e.g., contact_customer_accounts for collection-valued, or parentcustomerid_account for single-valued\) |
| `targetEntitySetName` | string | Yes | Target entity set name \(e.g., contacts\) |
| `targetRecordId` | string | Yes | Target record GUID to associate |
| `navigationType` | string | No | Type of navigation property: "collection" \(default, uses POST\) or "single" \(uses PUT for lookup fields\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the association was created successfully |
| `entitySetName` | string | Source entity set name used in the association |
| `recordId` | string | Source record GUID that was associated |
| `navigationProperty` | string | Navigation property used for the association |
| `targetEntitySetName` | string | Target entity set name used in the association |
| `targetRecordId` | string | Target record GUID that was associated |
### `microsoft_dataverse_create_multiple`
Create multiple records of the same table type in a single request. Each record in the Targets array must include an @odata.type annotation. Recommended batch size: 100-1000 records for standard tables.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `entityLogicalName` | string | Yes | Table logical name for @odata.type annotation \(e.g., account, contact\). Used to set Microsoft.Dynamics.CRM.\{entityLogicalName\} on each record. |
| `records` | object | Yes | Array of record objects to create. Each record should contain column logical names as keys. The @odata.type annotation is added automatically. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ids` | array | Array of GUIDs for the created records |
| `count` | number | Number of records created |
| `success` | boolean | Whether all records were created successfully |
### `microsoft_dataverse_create_record`
Create a new record in a Microsoft Dataverse table. Requires the entity set name (plural table name) and record data as a JSON object.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `data` | object | Yes | Record data as a JSON object with column names as keys |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the created record |
| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. |
| `success` | boolean | Whether the record was created successfully |
### `microsoft_dataverse_delete_record`
Delete a record from a Microsoft Dataverse table by its ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the deleted record |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_disassociate`
Remove an association between two records in Microsoft Dataverse. For collection-valued navigation properties, provide the target record ID. For single-valued navigation properties, only the navigation property name is needed.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Source entity set name \(e.g., accounts\) |
| `recordId` | string | Yes | Source record GUID |
| `navigationProperty` | string | Yes | Navigation property name \(e.g., contact_customer_accounts for collection-valued, or parentcustomerid_account for single-valued\) |
| `targetRecordId` | string | No | Target record GUID \(required for collection-valued navigation properties, omit for single-valued\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the disassociation was completed successfully |
| `entitySetName` | string | Source entity set name used in the disassociation |
| `recordId` | string | Source record GUID that was disassociated |
| `navigationProperty` | string | Navigation property used for the disassociation |
| `targetRecordId` | string | Target record GUID that was disassociated |
### `microsoft_dataverse_download_file`
Download a file from a file or image column on a Dataverse record. Returns the file content as a base64-encoded string along with file metadata from response headers.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | Record GUID to download the file from |
| `fileColumn` | string | Yes | File or image column logical name \(e.g., entityimage, cr_document\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `fileContent` | string | Base64-encoded file content |
| `fileName` | string | Name of the downloaded file |
| `fileSize` | number | File size in bytes |
| `mimeType` | string | MIME type of the file |
| `success` | boolean | Whether the file was downloaded successfully |
### `microsoft_dataverse_execute_action`
Execute a bound or unbound Dataverse action. Actions perform operations with side effects (e.g., Merge, GrantAccess, SendEmail, QualifyLead). For bound actions, provide the entity set name and record ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `actionName` | string | Yes | Action name \(e.g., Merge, GrantAccess, SendEmail\). Do not include the Microsoft.Dynamics.CRM. namespace prefix for unbound actions. |
| `entitySetName` | string | No | Entity set name for bound actions \(e.g., accounts\). Leave empty for unbound actions. |
| `recordId` | string | No | Record GUID for bound actions. Leave empty for unbound or collection-bound actions. |
| `parameters` | object | No | Action parameters as a JSON object. For entity references, include @odata.type annotation \(e.g., \{"Target": \{"@odata.type": "Microsoft.Dynamics.CRM.account", "accountid": "..."\}\}\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `result` | object | Action response data. Structure varies by action. Null for actions that return 204 No Content. |
| `success` | boolean | Whether the action executed successfully |
### `microsoft_dataverse_execute_function`
Execute a bound or unbound Dataverse function. Functions are read-only operations (e.g., RetrievePrincipalAccess, RetrieveTotalRecordCount, InitializeFrom). For bound functions, provide the entity set name and record ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `functionName` | string | Yes | Function name \(e.g., RetrievePrincipalAccess, RetrieveTotalRecordCount\). Do not include the Microsoft.Dynamics.CRM. namespace prefix for unbound functions. |
| `entitySetName` | string | No | Entity set name for bound functions \(e.g., systemusers\). Leave empty for unbound functions. |
| `recordId` | string | No | Record GUID for bound functions. Leave empty for unbound functions. |
| `parameters` | string | No | Function parameters as a comma-separated list of name=value pairs for the URL \(e.g., "LocalizedStandardName='Pacific Standard Time'"\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `result` | object | Function response data. Structure varies by function. |
| `success` | boolean | Whether the function executed successfully |
### `microsoft_dataverse_fetchxml_query`
Execute a FetchXML query against a Microsoft Dataverse table. FetchXML supports aggregation, grouping, linked-entity joins, and complex filtering beyond OData capabilities.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `fetchXml` | string | Yes | FetchXML query string. Must include &lt;fetch&gt; root element and &lt;entity&gt; child element matching the table logical name. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | Array of Dataverse records. Each record has dynamic columns based on the table schema. |
| `count` | number | Number of records returned in the current page |
| `fetchXmlPagingCookie` | string | Paging cookie for retrieving the next page of results |
| `moreRecords` | boolean | Whether more records are available beyond the current page |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_get_record`
Retrieve a single record from a Microsoft Dataverse table by its ID. Supports $select and $expand OData query options.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to retrieve |
| `select` | string | No | Comma-separated list of columns to return \(OData $select\) |
| `expand` | string | No | Navigation properties to expand \(OData $expand\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. |
| `recordId` | string | The record primary key ID \(auto-detected from response\) |
| `success` | boolean | Whether the record was retrieved successfully |
### `microsoft_dataverse_list_records`
Query and list records from a Microsoft Dataverse table. Supports OData query options for filtering, selecting columns, ordering, and pagination.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `select` | string | No | Comma-separated list of columns to return \(OData $select\) |
| `filter` | string | No | OData $filter expression \(e.g., statecode eq 0\) |
| `orderBy` | string | No | OData $orderby expression \(e.g., name asc, createdon desc\) |
| `top` | number | No | Maximum number of records to return \(OData $top\) |
| `expand` | string | No | Navigation properties to expand \(OData $expand\) |
| `count` | string | No | Set to "true" to include total record count in response \(OData $count\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | Array of Dataverse records. Each record has dynamic columns based on the table schema. |
| `count` | number | Number of records returned in the current page |
| `totalCount` | number | Total number of matching records server-side \(requires $count=true\) |
| `nextLink` | string | URL for the next page of results |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_search`
Perform a full-text relevance search across Microsoft Dataverse tables. Requires Dataverse Search to be enabled on the environment. Supports simple and Lucene query syntax.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `searchTerm` | string | Yes | Search text \(1-100 chars\). Supports simple syntax: + \(AND\), \| \(OR\), - \(NOT\), * \(wildcard\), "exact phrase" |
| `entities` | string | No | JSON array of search entity configs. Each object: \{"Name":"account","SelectColumns":\["name"\],"SearchColumns":\["name"\],"Filter":"statecode eq 0"\} |
| `filter` | string | No | Global OData filter applied across all entities \(e.g., "createdon gt 2024-01-01"\) |
| `facets` | string | No | JSON array of facet specifications \(e.g., \["entityname,count:100","ownerid,count:100"\]\) |
| `top` | number | No | Maximum number of results \(default: 50, max: 100\) |
| `skip` | number | No | Number of results to skip for pagination |
| `orderBy` | string | No | JSON array of sort expressions \(e.g., \["createdon desc"\]\) |
| `searchMode` | string | No | Search mode: "any" \(default, match any term\) or "all" \(match all terms\) |
| `searchType` | string | No | Query type: "simple" \(default\) or "lucene" \(enables regex, fuzzy, proximity, boosting\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `results` | array | Array of search result objects |
| ↳ `Id` | string | Record GUID |
| ↳ `EntityName` | string | Table logical name \(e.g., account, contact\) |
| ↳ `ObjectTypeCode` | number | Entity type code |
| ↳ `Attributes` | object | Record attributes matching the search. Keys are column logical names. |
| ↳ `Highlights` | object | Highlighted search matches. Keys are column names, values are arrays of strings with \{crmhit\}/\{/crmhit\} markers. |
| ↳ `Score` | number | Relevance score for this result |
| `totalCount` | number | Total number of matching records across all tables |
| `count` | number | Number of results returned in this page |
| `facets` | object | Facet results when facets were requested. Keys are facet names, values are arrays of facet value objects with count and value properties. |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_update_multiple`
Update multiple records of the same table type in a single request. Each record must include its primary key. Only include columns that need to be changed. Recommended batch size: 100-1000 records.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `entityLogicalName` | string | Yes | Table logical name for @odata.type annotation \(e.g., account, contact\). Used to set Microsoft.Dynamics.CRM.\{entityLogicalName\} on each record. |
| `records` | object | Yes | Array of record objects to update. Each record must include its primary key \(e.g., accountid\) and only the columns being changed. The @odata.type annotation is added automatically. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether all records were updated successfully |
### `microsoft_dataverse_update_record`
Update an existing record in a Microsoft Dataverse table. Only send the columns you want to change.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to update |
| `data` | object | Yes | Record data to update as a JSON object with column names as keys |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the updated record |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_upload_file`
Upload a file to a file or image column on a Dataverse record. Supports single-request upload for files up to 128 MB. The file content must be provided as a base64-encoded string.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | Record GUID to upload the file to |
| `fileColumn` | string | Yes | File or image column logical name \(e.g., entityimage, cr_document\) |
| `fileName` | string | Yes | Name of the file being uploaded \(e.g., document.pdf\) |
| `file` | file | No | File to upload \(UserFile object\) |
| `fileContent` | string | No | Base64-encoded file content \(legacy\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | Record GUID the file was uploaded to |
| `fileColumn` | string | File column the file was uploaded to |
| `fileName` | string | Name of the uploaded file |
| `success` | boolean | Whether the file was uploaded successfully |
### `microsoft_dataverse_upsert_record`
Create or update a record in a Microsoft Dataverse table. If a record with the given ID exists, it is updated; otherwise, a new record is created.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
| `entitySetName` | string | Yes | Entity set name \(plural table name, e.g., accounts, contacts\) |
| `recordId` | string | Yes | The unique identifier \(GUID\) of the record to upsert |
| `data` | object | Yes | Record data as a JSON object with column names as keys |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `recordId` | string | The ID of the upserted record |
| `created` | boolean | True if the record was created, false if updated |
| `record` | object | Dataverse record object. Contains dynamic columns based on the queried table, plus OData metadata fields. |
| `success` | boolean | Operation success status |
### `microsoft_dataverse_whoami`
Retrieve the current authenticated user information from Microsoft Dataverse. Useful for testing connectivity and getting the user ID, business unit ID, and organization ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `environmentUrl` | string | Yes | Dataverse environment URL \(e.g., https://myorg.crm.dynamics.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `userId` | string | The authenticated user ID |
| `businessUnitId` | string | The business unit ID |
| `organizationId` | string | The organization ID |
| `success` | boolean | Operation success status |

View File

@@ -1,351 +0,0 @@
---
title: Table
description: User-defined data tables for storing and querying structured data
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="table"
color="#10B981"
/>
Tables allow you to create and manage custom data tables directly within Sim. Store, query, and manipulate structured data within your workflows without needing external database integrations.
**Why Use Tables?**
- **No external setup**: Create tables instantly without configuring external databases
- **Workflow-native**: Data persists across workflow executions and is accessible from any workflow in your workspace
- **Flexible schema**: Define columns with types (string, number, boolean, date, json) and constraints (required, unique)
- **Powerful querying**: Filter, sort, and paginate data using MongoDB-style operators
- **Agent-friendly**: Tables can be used as tools by AI agents for dynamic data storage and retrieval
**Key Features:**
- Create tables with custom schemas
- Insert, update, upsert, and delete rows
- Query with filters and sorting
- Batch operations for bulk inserts
- Bulk updates and deletes by filter
- Up to 10,000 rows per table, 100 tables per workspace
## Creating Tables
Tables are created from the **Tables** section in the sidebar. Each table requires:
- **Name**: Alphanumeric with underscores (e.g., `customer_leads`)
- **Description**: Optional description of the table's purpose
- **Schema**: Define columns with name, type, and optional constraints
### Column Types
| Type | Description | Example Values |
|------|-------------|----------------|
| `string` | Text data | `"John Doe"`, `"active"` |
| `number` | Numeric data | `42`, `99.99` |
| `boolean` | True/false values | `true`, `false` |
| `date` | Date/time values | `"2024-01-15T10:30:00Z"` |
| `json` | Complex nested data | `{"address": {"city": "NYC"}}` |
### Column Constraints
- **Required**: Column must have a value (cannot be null)
- **Unique**: Values must be unique across all rows (enables upsert matching)
## Usage Instructions
Create and manage custom data tables. Store, query, and manipulate structured data within workflows.
## Tools
### `table_query_rows`
Query rows from a table with filtering, sorting, and pagination
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | No | Filter conditions using MongoDB-style operators |
| `sort` | object | No | Sort order as \{column: "asc"\|"desc"\} |
| `limit` | number | No | Maximum rows to return \(default: 100, max: 1000\) |
| `offset` | number | No | Number of rows to skip \(default: 0\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether query succeeded |
| `rows` | array | Query result rows |
| `rowCount` | number | Number of rows returned |
| `totalCount` | number | Total rows matching filter |
| `limit` | number | Limit used in query |
| `offset` | number | Offset used in query |
### `table_insert_row`
Insert a new row into a table
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data as JSON object matching the table schema |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was inserted |
| `row` | object | Inserted row data including generated ID |
| `message` | string | Status message |
### `table_upsert_row`
Insert or update a row based on unique column constraints. If a row with matching unique field exists, update it; otherwise insert a new row.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data to insert or update |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was upserted |
| `row` | object | Upserted row data |
| `operation` | string | Operation performed: "insert" or "update" |
| `message` | string | Status message |
### `table_batch_insert_rows`
Insert multiple rows at once (up to 1000 rows per batch)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rows` | array | Yes | Array of row data objects to insert |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether batch insert succeeded |
| `rows` | array | Array of inserted rows with IDs |
| `insertedCount` | number | Number of rows inserted |
| `message` | string | Status message |
### `table_update_row`
Update a specific row by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to update |
| `data` | object | Yes | Data to update \(partial update supported\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was updated |
| `row` | object | Updated row data |
| `message` | string | Status message |
### `table_update_rows_by_filter`
Update multiple rows matching a filter condition
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for update |
| `data` | object | Yes | Data to apply to matching rows |
| `limit` | number | No | Maximum rows to update \(default: 1000\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether update succeeded |
| `updatedCount` | number | Number of rows updated |
| `updatedRowIds` | array | IDs of updated rows |
| `message` | string | Status message |
### `table_delete_row`
Delete a specific row by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was deleted |
| `deletedCount` | number | Number of rows deleted \(1 or 0\) |
| `message` | string | Status message |
### `table_delete_rows_by_filter`
Delete multiple rows matching a filter condition
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for deletion |
| `limit` | number | No | Maximum rows to delete \(default: 1000\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether delete succeeded |
| `deletedCount` | number | Number of rows deleted |
| `deletedRowIds` | array | IDs of deleted rows |
| `message` | string | Status message |
### `table_get_row`
Get a single row by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was found |
| `row` | object | Row data |
| `message` | string | Status message |
### `table_get_schema`
Get the schema definition for a table
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether schema was retrieved |
| `name` | string | Table name |
| `columns` | array | Array of column definitions |
| `message` | string | Status message |
## Filter Operators
Filters use MongoDB-style operators for flexible querying:
| Operator | Description | Example |
|----------|-------------|---------|
| `$eq` | Equals | `{"status": {"$eq": "active"}}` or `{"status": "active"}` |
| `$ne` | Not equals | `{"status": {"$ne": "deleted"}}` |
| `$gt` | Greater than | `{"age": {"$gt": 18}}` |
| `$gte` | Greater than or equal | `{"score": {"$gte": 80}}` |
| `$lt` | Less than | `{"price": {"$lt": 100}}` |
| `$lte` | Less than or equal | `{"quantity": {"$lte": 10}}` |
| `$in` | In array | `{"status": {"$in": ["active", "pending"]}}` |
| `$nin` | Not in array | `{"type": {"$nin": ["spam", "blocked"]}}` |
| `$contains` | String contains | `{"email": {"$contains": "@gmail.com"}}` |
### Combining Filters
Multiple field conditions are combined with AND logic:
```json
{
"status": "active",
"age": {"$gte": 18}
}
```
Use `$or` for OR logic:
```json
{
"$or": [
{"status": "active"},
{"status": "pending"}
]
}
```
## Sort Specification
Specify sort order with column names and direction:
```json
{
"createdAt": "desc"
}
```
Multi-column sorting:
```json
{
"priority": "desc",
"name": "asc"
}
```
## Built-in Columns
Every row automatically includes:
| Column | Type | Description |
|--------|------|-------------|
| `id` | string | Unique row identifier |
| `createdAt` | date | When the row was created |
| `updatedAt` | date | When the row was last modified |
These can be used in filters and sorting.
## Limits
| Resource | Limit |
|----------|-------|
| Tables per workspace | 100 |
| Rows per table | 10,000 |
| Columns per table | 50 |
| Max row size | 100KB |
| String value length | 10,000 characters |
| Query limit | 1,000 rows |
| Batch insert size | 1,000 rows |
| Bulk update/delete | 1,000 rows |
## Notes
- Category: `blocks`
- Type: `table`
- Tables are scoped to workspaces and accessible from any workflow within that workspace
- Data persists across workflow executions
- Use unique constraints to enable upsert functionality
- The visual filter/sort builder provides an easy way to construct queries without writing JSON

File diff suppressed because it is too large Load Diff

View File

@@ -21,7 +21,7 @@
"fumadocs-mdx": "14.1.0",
"fumadocs-ui": "16.2.3",
"lucide-react": "^0.511.0",
"next": "16.1.6",
"next": "16.1.0-canary.21",
"next-themes": "^0.4.6",
"postgres": "^3.4.5",
"react": "19.2.1",

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, min } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
@@ -37,6 +37,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
logger.info(`[${requestId}] Duplicating folder ${sourceFolderId} for user ${session.user.id}`)
// Verify the source folder exists
const sourceFolder = await db
.select()
.from(workflowFolder)
@@ -47,6 +48,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
throw new Error('Source folder not found')
}
// Check if user has permission to access the source folder
const userPermission = await getUserEntityPermissions(
session.user.id,
'workspace',
@@ -59,51 +61,26 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const targetWorkspaceId = workspaceId || sourceFolder.workspaceId
// Step 1: Duplicate folder structure
const { newFolderId, folderMapping } = await db.transaction(async (tx) => {
const newFolderId = crypto.randomUUID()
const now = new Date()
const targetParentId = parentId ?? sourceFolder.parentId
const folderParentCondition = targetParentId
? eq(workflowFolder.parentId, targetParentId)
: isNull(workflowFolder.parentId)
const workflowParentCondition = targetParentId
? eq(workflow.folderId, targetParentId)
: isNull(workflow.folderId)
const [[folderResult], [workflowResult]] = await Promise.all([
tx
.select({ minSortOrder: min(workflowFolder.sortOrder) })
.from(workflowFolder)
.where(and(eq(workflowFolder.workspaceId, targetWorkspaceId), folderParentCondition)),
tx
.select({ minSortOrder: min(workflow.sortOrder) })
.from(workflow)
.where(and(eq(workflow.workspaceId, targetWorkspaceId), workflowParentCondition)),
])
const minSortOrder = [folderResult?.minSortOrder, workflowResult?.minSortOrder].reduce<
number | null
>((currentMin, candidate) => {
if (candidate == null) return currentMin
if (currentMin == null) return candidate
return Math.min(currentMin, candidate)
}, null)
const sortOrder = minSortOrder != null ? minSortOrder - 1 : 0
// Create the new root folder
await tx.insert(workflowFolder).values({
id: newFolderId,
userId: session.user.id,
workspaceId: targetWorkspaceId,
name,
color: color || sourceFolder.color,
parentId: targetParentId,
sortOrder,
parentId: parentId || sourceFolder.parentId,
sortOrder: sourceFolder.sortOrder,
isExpanded: false,
createdAt: now,
updatedAt: now,
})
// Recursively duplicate child folders
const folderMapping = new Map<string, string>([[sourceFolderId, newFolderId]])
await duplicateFolderStructure(
tx,
@@ -119,6 +96,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
return { newFolderId, folderMapping }
})
// Step 2: Duplicate workflows
const workflowStats = await duplicateWorkflowsInFolderTree(
sourceFolder.workspaceId,
targetWorkspaceId,
@@ -195,6 +173,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}
}
// Helper to recursively duplicate folder structure
async function duplicateFolderStructure(
tx: any,
sourceFolderId: string,
@@ -205,6 +184,7 @@ async function duplicateFolderStructure(
timestamp: Date,
folderMapping: Map<string, string>
): Promise<void> {
// Get all child folders
const childFolders = await tx
.select()
.from(workflowFolder)
@@ -215,6 +195,7 @@ async function duplicateFolderStructure(
)
)
// Create each child folder and recurse
for (const childFolder of childFolders) {
const newChildFolderId = crypto.randomUUID()
folderMapping.set(childFolder.id, newChildFolderId)
@@ -232,6 +213,7 @@ async function duplicateFolderStructure(
updatedAt: timestamp,
})
// Recurse for this child's children
await duplicateFolderStructure(
tx,
childFolder.id,
@@ -245,6 +227,7 @@ async function duplicateFolderStructure(
}
}
// Helper to duplicate all workflows in a folder tree
async function duplicateWorkflowsInFolderTree(
sourceWorkspaceId: string,
targetWorkspaceId: string,
@@ -254,7 +237,9 @@ async function duplicateWorkflowsInFolderTree(
): Promise<{ total: number; succeeded: number; failed: number }> {
const stats = { total: 0, succeeded: 0, failed: 0 }
// Process each folder in the mapping
for (const [oldFolderId, newFolderId] of folderMapping.entries()) {
// Get workflows in this folder
const workflowsInFolder = await db
.select()
.from(workflow)
@@ -262,6 +247,7 @@ async function duplicateWorkflowsInFolderTree(
stats.total += workflowsInFolder.length
// Duplicate each workflow
for (const sourceWorkflow of workflowsInFolder) {
try {
await duplicateWorkflow({

View File

@@ -10,14 +10,9 @@ import {
mockConsoleLogger,
setupCommonApiMocks,
} from '@sim/testing'
import { drizzleOrmMock } from '@sim/testing/mocks'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@/lib/audit/log', () => auditMock)
vi.mock('drizzle-orm', () => ({
...drizzleOrmMock,
min: vi.fn((field) => ({ type: 'min', field })),
}))
interface CapturedFolderValues {
name?: string
@@ -29,35 +24,29 @@ interface CapturedFolderValues {
}
function createMockTransaction(mockData: {
selectResults?: Array<Array<{ [key: string]: unknown }>>
selectData?: Array<{ id: string; [key: string]: unknown }>
insertResult?: Array<{ id: string; [key: string]: unknown }>
onInsertValues?: (values: CapturedFolderValues) => void
}) {
const { selectResults = [[], []], insertResult = [], onInsertValues } = mockData
return async (callback: (tx: unknown) => Promise<unknown>) => {
const where = vi.fn()
for (const result of selectResults) {
where.mockReturnValueOnce(result)
}
where.mockReturnValue([])
const { selectData = [], insertResult = [] } = mockData
return vi.fn().mockImplementation(async (callback: (tx: unknown) => Promise<unknown>) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where,
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue(selectData),
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockImplementation((values: CapturedFolderValues) => {
onInsertValues?.(values)
return {
returning: vi.fn().mockReturnValue(insertResult),
}
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue(insertResult),
}),
}),
}
return await callback(tx)
}
})
}
describe('Folders API Route', () => {
@@ -268,12 +257,25 @@ describe('Folders API Route', () => {
it('should create a new folder successfully', async () => {
mockAuthenticatedUser()
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
})
)
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]), // No existing folders
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}),
}),
}
return await callback(tx)
})
const req = createMockRequest('POST', {
name: 'New Test Folder',
@@ -283,11 +285,12 @@ describe('Folders API Route', () => {
const { POST } = await import('@/app/api/folders/route')
const response = await POST(req)
const responseBody = await response.json()
expect(response.status).toBe(200)
expect(responseBody).toHaveProperty('folder')
expect(responseBody.folder).toMatchObject({
const data = await response.json()
expect(data).toHaveProperty('folder')
expect(data.folder).toMatchObject({
id: 'folder-1',
name: 'Test Folder 1',
workspaceId: 'workspace-123',
@@ -296,17 +299,26 @@ describe('Folders API Route', () => {
it('should create folder with correct sort order', async () => {
mockAuthenticatedUser()
let capturedValues: CapturedFolderValues | null = null
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[{ minSortOrder: 5 }], [{ minSortOrder: 2 }]],
insertResult: [{ ...mockFolders[0], sortOrder: 1 }],
onInsertValues: (values) => {
capturedValues = values
},
})
)
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([{ sortOrder: 5 }]), // Existing folder with sort order 5
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue([{ ...mockFolders[0], sortOrder: 6 }]),
}),
}),
}
return await callback(tx)
})
const req = createMockRequest('POST', {
name: 'New Test Folder',
@@ -320,10 +332,8 @@ describe('Folders API Route', () => {
const data = await response.json()
expect(data.folder).toMatchObject({
sortOrder: 1,
sortOrder: 6,
})
expect(capturedValues).not.toBeNull()
expect(capturedValues!.sortOrder).toBe(1)
})
it('should create subfolder with parent reference', async () => {
@@ -331,7 +341,7 @@ describe('Folders API Route', () => {
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
selectData: [], // No existing folders
insertResult: [{ ...mockFolders[1] }],
})
)
@@ -392,12 +402,25 @@ describe('Folders API Route', () => {
mockAuthenticatedUser()
mockGetUserEntityPermissions.mockResolvedValue('write') // Write permissions
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
})
)
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]), // No existing folders
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}),
}),
}
return await callback(tx)
})
const req = createMockRequest('POST', {
name: 'Test Folder',
@@ -417,12 +440,25 @@ describe('Folders API Route', () => {
mockAuthenticatedUser()
mockGetUserEntityPermissions.mockResolvedValue('admin') // Admin permissions
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
})
)
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]), // No existing folders
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockReturnValue({
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}),
}),
}
return await callback(tx)
})
const req = createMockRequest('POST', {
name: 'Test Folder',
@@ -491,15 +527,28 @@ describe('Folders API Route', () => {
let capturedValues: CapturedFolderValues | null = null
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
onInsertValues: (values) => {
capturedValues = values
},
})
)
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]),
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockImplementation((values) => {
capturedValues = values
return {
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}
}),
}),
}
return await callback(tx)
})
const req = createMockRequest('POST', {
name: ' Test Folder With Spaces ',
@@ -518,15 +567,28 @@ describe('Folders API Route', () => {
let capturedValues: CapturedFolderValues | null = null
mockTransaction.mockImplementationOnce(
createMockTransaction({
selectResults: [[], []],
insertResult: [mockFolders[0]],
onInsertValues: (values) => {
capturedValues = values
},
})
)
mockTransaction.mockImplementationOnce(async (callback: any) => {
const tx = {
select: vi.fn().mockReturnValue({
from: vi.fn().mockReturnValue({
where: vi.fn().mockReturnValue({
orderBy: vi.fn().mockReturnValue({
limit: vi.fn().mockReturnValue([]),
}),
}),
}),
}),
insert: vi.fn().mockReturnValue({
values: vi.fn().mockImplementation((values) => {
capturedValues = values
return {
returning: vi.fn().mockReturnValue([mockFolders[0]]),
}
}),
}),
}
return await callback(tx)
})
const req = createMockRequest('POST', {
name: 'Test Folder',

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow, workflowFolder } from '@sim/db/schema'
import { workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, isNull, min } from 'drizzle-orm'
import { and, asc, desc, eq, isNull } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
@@ -87,33 +87,19 @@ export async function POST(request: NextRequest) {
if (providedSortOrder !== undefined) {
sortOrder = providedSortOrder
} else {
const folderParentCondition = parentId
? eq(workflowFolder.parentId, parentId)
: isNull(workflowFolder.parentId)
const workflowParentCondition = parentId
? eq(workflow.folderId, parentId)
: isNull(workflow.folderId)
const existingFolders = await tx
.select({ sortOrder: workflowFolder.sortOrder })
.from(workflowFolder)
.where(
and(
eq(workflowFolder.workspaceId, workspaceId),
parentId ? eq(workflowFolder.parentId, parentId) : isNull(workflowFolder.parentId)
)
)
.orderBy(desc(workflowFolder.sortOrder))
.limit(1)
const [[folderResult], [workflowResult]] = await Promise.all([
tx
.select({ minSortOrder: min(workflowFolder.sortOrder) })
.from(workflowFolder)
.where(and(eq(workflowFolder.workspaceId, workspaceId), folderParentCondition)),
tx
.select({ minSortOrder: min(workflow.sortOrder) })
.from(workflow)
.where(and(eq(workflow.workspaceId, workspaceId), workflowParentCondition)),
])
const minSortOrder = [folderResult?.minSortOrder, workflowResult?.minSortOrder].reduce<
number | null
>((currentMin, candidate) => {
if (candidate == null) return currentMin
if (currentMin == null) return candidate
return Math.min(currentMin, candidate)
}, null)
sortOrder = minSortOrder != null ? minSortOrder - 1 : 0
sortOrder = existingFolders.length > 0 ? existingFolders[0].sortOrder + 1 : 0
}
const [folder] = await tx

View File

@@ -163,18 +163,17 @@ export async function checkKnowledgeBaseAccess(
const kbData = kb[0]
// Case 1: User owns the knowledge base directly
if (kbData.userId === userId) {
return { hasAccess: true, knowledgeBase: kbData }
}
// Case 2: Knowledge base belongs to a workspace the user has permissions for
if (kbData.workspaceId) {
// Workspace KB: use workspace permissions only
const userPermission = await getUserEntityPermissions(userId, 'workspace', kbData.workspaceId)
if (userPermission !== null) {
return { hasAccess: true, knowledgeBase: kbData }
}
return { hasAccess: false }
}
// Legacy non-workspace KB: allow owner access
if (kbData.userId === userId) {
return { hasAccess: true, knowledgeBase: kbData }
}
return { hasAccess: false }
@@ -183,8 +182,8 @@ export async function checkKnowledgeBaseAccess(
/**
* Check if a user has write access to a knowledge base
* Write access is granted if:
* 1. KB has a workspace: user has write or admin permissions on that workspace
* 2. KB has no workspace (legacy): user owns the KB directly
* 1. User owns the knowledge base directly, OR
* 2. User has write or admin permissions on the knowledge base's workspace
*/
export async function checkKnowledgeBaseWriteAccess(
knowledgeBaseId: string,
@@ -207,18 +206,17 @@ export async function checkKnowledgeBaseWriteAccess(
const kbData = kb[0]
// Case 1: User owns the knowledge base directly
if (kbData.userId === userId) {
return { hasAccess: true, knowledgeBase: kbData }
}
// Case 2: Knowledge base belongs to a workspace and user has write/admin permissions
if (kbData.workspaceId) {
// Workspace KB: use workspace permissions only
const userPermission = await getUserEntityPermissions(userId, 'workspace', kbData.workspaceId)
if (userPermission === 'write' || userPermission === 'admin') {
return { hasAccess: true, knowledgeBase: kbData }
}
return { hasAccess: false }
}
// Legacy non-workspace KB: allow owner access
if (kbData.userId === userId) {
return { hasAccess: true, knowledgeBase: kbData }
}
return { hasAccess: false }

View File

@@ -1,138 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { deleteTable, type TableSchema } from '@/lib/table'
import { accessError, checkAccess, normalizeColumn, verifyTableWorkspace } from '../utils'
const logger = createLogger('TableDetailAPI')

// Query-string schema shared by GET and DELETE on this route: both handlers
// require a workspaceId so they can verify the table belongs to the caller's
// workspace before acting on it.
const GetTableSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})

// Route context for /api/table/[tableId]; `params` is a Promise and must be
// awaited inside the handler before use.
interface TableRouteParams {
  params: Promise<{ tableId: string }>
}
/**
 * GET /api/table/[tableId] - Retrieves a single table's details.
 *
 * Requires a `workspaceId` query parameter, read access to the table, and a
 * matching table/workspace pairing. Responds 401 on missing auth, 400 on a
 * validation or workspace mismatch, 500 on unexpected failure.
 */
export async function GET(request: NextRequest, { params }: TableRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    // Accept either an interactive session or internal service auth.
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      logger.warn(`[${requestId}] Unauthorized table access attempt`)
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const query = GetTableSchema.parse({
      workspaceId: new URL(request.url).searchParams.get('workspaceId'),
    })

    // Read-level access check on the table itself.
    const access = await checkAccess(tableId, auth.userId, 'read')
    if (!access.ok) return accessError(access, requestId, tableId)
    const { table } = access

    // Defense in depth: the caller-supplied workspaceId must match the
    // workspace the table actually lives in.
    if (!(await verifyTableWorkspace(tableId, query.workspaceId))) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${query.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }

    logger.info(`[${requestId}] Retrieved table ${tableId} for user ${auth.userId}`)

    const tableSchema = table.schema as TableSchema
    // Timestamps may arrive as Date objects or already-serialized values;
    // normalize both to ISO strings for the response payload.
    const toIso = (value: unknown): string =>
      value instanceof Date ? value.toISOString() : String(value)

    return NextResponse.json({
      success: true,
      data: {
        table: {
          id: table.id,
          name: table.name,
          description: table.description,
          schema: {
            columns: tableSchema.columns.map(normalizeColumn),
          },
          rowCount: table.rowCount,
          maxRows: table.maxRows,
          createdAt: toIso(table.createdAt),
          updatedAt: toIso(table.updatedAt),
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error getting table:`, error)
    return NextResponse.json({ error: 'Failed to get table' }, { status: 500 })
  }
}
/**
 * DELETE /api/table/[tableId] - Deletes a table and all its rows.
 *
 * Requires a `workspaceId` query parameter, write access to the table, and a
 * matching table/workspace pairing. Responds 401 on missing auth, 400 on a
 * validation or workspace mismatch, 500 on unexpected failure.
 */
export async function DELETE(request: NextRequest, { params }: TableRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      logger.warn(`[${requestId}] Unauthorized table delete attempt`)
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }

    const query = GetTableSchema.parse({
      workspaceId: new URL(request.url).searchParams.get('workspaceId'),
    })

    // Destructive operation: require write-level access, not just read.
    const access = await checkAccess(tableId, auth.userId, 'write')
    if (!access.ok) return accessError(access, requestId, tableId)
    const { table } = access

    // The caller-supplied workspaceId must match the table's actual workspace.
    if (!(await verifyTableWorkspace(tableId, query.workspaceId))) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${query.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }

    await deleteTable(tableId, requestId)

    return NextResponse.json({
      success: true,
      data: {
        message: 'Table deleted successfully',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error deleting table:`, error)
    return NextResponse.json({ error: 'Failed to delete table' }, { status: 500 })
  }
}

View File

@@ -1,276 +0,0 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { RowData, TableSchema } from '@/lib/table'
import { validateRowData } from '@/lib/table'
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'
const logger = createLogger('TableRowAPI')

// Query-string schema for reading a single row: workspaceId is required so
// the handler can scope the lookup to the caller's workspace.
const GetRowSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})

// Body schema for row updates: workspaceId plus the (partial) row data to
// apply. Values are left as `unknown` here; shape validation against the
// table's column schema happens later via validateRowData.
const UpdateRowSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})

// Query-string schema for row deletion; mirrors GetRowSchema.
const DeleteRowSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})

// Route context for /api/table/[tableId]/rows/[rowId]; `params` is a Promise
// and must be awaited inside each handler.
interface RowRouteParams {
  params: Promise<{ tableId: string; rowId: string }>
}
/** GET /api/table/[tableId]/rows/[rowId] - Retrieves a single row. */
export async function GET(request: NextRequest, { params }: RowRouteParams) {
  const requestId = generateRequestId()
  const { tableId, rowId } = await params
  try {
    // Session cookie or internal service auth are both accepted.
    const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const searchParams = new URL(request.url).searchParams
    const { workspaceId } = GetRowSchema.parse({
      workspaceId: searchParams.get('workspaceId'),
    })
    const access = await checkAccess(tableId, auth.userId, 'read')
    if (!access.ok) return accessError(access, requestId, tableId)
    const { table } = access
    // Reject requests whose supplied workspace does not actually own this table.
    const workspaceMatches = await verifyTableWorkspace(tableId, workspaceId)
    if (!workspaceMatches) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    // Scope the lookup by id + table + workspace so rows can't leak across tables.
    const matches = await db
      .select({
        id: userTableRows.id,
        data: userTableRows.data,
        createdAt: userTableRows.createdAt,
        updatedAt: userTableRows.updatedAt,
      })
      .from(userTableRows)
      .where(
        and(
          eq(userTableRows.id, rowId),
          eq(userTableRows.tableId, tableId),
          eq(userTableRows.workspaceId, workspaceId)
        )
      )
      .limit(1)
    const row = matches[0]
    if (!row) {
      return NextResponse.json({ error: 'Row not found' }, { status: 404 })
    }
    logger.info(`[${requestId}] Retrieved row ${rowId} from table ${tableId}`)
    const payload = {
      id: row.id,
      data: row.data,
      createdAt: row.createdAt.toISOString(),
      updatedAt: row.updatedAt.toISOString(),
    }
    return NextResponse.json({
      success: true,
      data: { row: payload },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error getting row:`, error)
    return NextResponse.json({ error: 'Failed to get row' }, { status: 500 })
  }
}
/** PATCH /api/table/[tableId]/rows/[rowId] - Updates a single row (supports partial updates). */
export async function PATCH(request: NextRequest, { params }: RowRouteParams) {
  const requestId = generateRequestId()
  const { tableId, rowId } = await params
  try {
    // Session cookie or internal service auth are both accepted.
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    const validated = UpdateRowSchema.parse(body)
    // Updates require write permission on the table.
    const result = await checkAccess(tableId, authResult.userId, 'write')
    if (!result.ok) return accessError(result, requestId, tableId)
    const { table } = result
    // The caller-supplied workspaceId must match the table's actual workspace.
    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
    if (!isValidWorkspace) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    // Fetch existing row to support partial updates
    const [existingRow] = await db
      .select({ data: userTableRows.data })
      .from(userTableRows)
      .where(
        and(
          eq(userTableRows.id, rowId),
          eq(userTableRows.tableId, tableId),
          eq(userTableRows.workspaceId, validated.workspaceId)
        )
      )
      .limit(1)
    if (!existingRow) {
      return NextResponse.json({ error: 'Row not found' }, { status: 404 })
    }
    // Merge existing data with incoming partial data (incoming takes precedence)
    const mergedData = {
      ...(existingRow.data as RowData),
      ...(validated.data as RowData),
    }
    // Validate the merged result against the table schema. excludeRowId presumably
    // keeps the row's own current values from tripping unique checks — confirm in
    // validateRowData.
    const validation = await validateRowData({
      rowData: mergedData,
      schema: table.schema as TableSchema,
      tableId,
      excludeRowId: rowId,
    })
    if (!validation.valid) return validation.response
    const now = new Date()
    const [updatedRow] = await db
      .update(userTableRows)
      .set({
        data: mergedData,
        updatedAt: now,
      })
      .where(
        and(
          eq(userTableRows.id, rowId),
          eq(userTableRows.tableId, tableId),
          eq(userTableRows.workspaceId, validated.workspaceId)
        )
      )
      .returning()
    // The row may have been deleted between the read above and this write.
    if (!updatedRow) {
      return NextResponse.json({ error: 'Row not found' }, { status: 404 })
    }
    logger.info(`[${requestId}] Updated row ${rowId} in table ${tableId}`)
    return NextResponse.json({
      success: true,
      data: {
        row: {
          id: updatedRow.id,
          data: updatedRow.data,
          createdAt: updatedRow.createdAt.toISOString(),
          updatedAt: updatedRow.updatedAt.toISOString(),
        },
        message: 'Row updated successfully',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error updating row:`, error)
    return NextResponse.json({ error: 'Failed to update row' }, { status: 500 })
  }
}
/** DELETE /api/table/[tableId]/rows/[rowId] - Deletes a single row. */
export async function DELETE(request: NextRequest, { params }: RowRouteParams) {
  const requestId = generateRequestId()
  const { tableId, rowId } = await params
  try {
    // Session cookie or internal service auth are both accepted.
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    // DELETE requests are often sent without a body; a missing or malformed JSON
    // body is a client error (400), not an internal failure. Previously the
    // SyntaxError from request.json() fell through to the generic 500 handler.
    let body: unknown
    try {
      body = await request.json()
    } catch {
      return NextResponse.json(
        { error: 'Request body must be valid JSON containing workspaceId' },
        { status: 400 }
      )
    }
    const validated = DeleteRowSchema.parse(body)
    // Deletion requires write permission on the table.
    const result = await checkAccess(tableId, authResult.userId, 'write')
    if (!result.ok) return accessError(result, requestId, tableId)
    const { table } = result
    // The caller-supplied workspaceId must match the table's actual workspace.
    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
    if (!isValidWorkspace) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    // Scope by id + table + workspace so a row can never be deleted across tables.
    const [deletedRow] = await db
      .delete(userTableRows)
      .where(
        and(
          eq(userTableRows.id, rowId),
          eq(userTableRows.tableId, tableId),
          eq(userTableRows.workspaceId, validated.workspaceId)
        )
      )
      .returning()
    if (!deletedRow) {
      return NextResponse.json({ error: 'Row not found' }, { status: 404 })
    }
    logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`)
    return NextResponse.json({
      success: true,
      data: {
        message: 'Row deleted successfully',
        deletedCount: 1,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error deleting row:`, error)
    return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 })
  }
}

View File

@@ -1,725 +0,0 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { Filter, RowData, Sort, TableSchema } from '@/lib/table'
import {
checkUniqueConstraintsDb,
getUniqueColumns,
TABLE_LIMITS,
USER_TABLE_ROWS_SQL_NAME,
validateBatchRows,
validateRowAgainstSchema,
validateRowData,
validateRowSize,
} from '@/lib/table'
import { buildFilterClause, buildSortClause } from '@/lib/table/sql'
import { accessError, checkAccess } from '../../utils'
const logger = createLogger('TableRowsAPI')
/** Body for POST with a single row. */
const InsertRowSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})
/** Body for POST with a `rows` array (batch insert, capped at 1000 rows). */
const BatchInsertRowsSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  rows: z
    .array(z.record(z.unknown()), { required_error: 'Rows array is required' })
    .min(1, 'At least one row is required')
    .max(1000, 'Cannot insert more than 1000 rows per batch'),
})
/** Query params for GET; limit/offset are coerced from strings, defaulting to 100/0. */
const QueryRowsSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  filter: z.record(z.unknown()).optional(),
  sort: z.record(z.enum(['asc', 'desc'])).optional(),
  limit: z.coerce
    .number({ required_error: 'Limit must be a number' })
    .int('Limit must be an integer')
    .min(1, 'Limit must be at least 1')
    .max(TABLE_LIMITS.MAX_QUERY_LIMIT, `Limit cannot exceed ${TABLE_LIMITS.MAX_QUERY_LIMIT}`)
    .optional()
    .default(100),
  offset: z.coerce
    .number({ required_error: 'Offset must be a number' })
    .int('Offset must be an integer')
    .min(0, 'Offset must be 0 or greater')
    .optional()
    .default(0),
})
/** Body for PUT: merge `data` onto every row matching `filter` (optionally limited). */
const UpdateRowsByFilterSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }),
  data: z.record(z.unknown(), { required_error: 'Update data is required' }),
  limit: z.coerce
    .number({ required_error: 'Limit must be a number' })
    .int('Limit must be an integer')
    .min(1, 'Limit must be at least 1')
    .max(1000, 'Cannot update more than 1000 rows per operation')
    .optional(),
})
/** Body for DELETE by filter criteria. */
const DeleteRowsByFilterSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }),
  limit: z.coerce
    .number({ required_error: 'Limit must be a number' })
    .int('Limit must be an integer')
    .min(1, 'Limit must be at least 1')
    .max(1000, 'Cannot delete more than 1000 rows per operation')
    .optional(),
})
/** Body for DELETE by explicit row IDs. */
const DeleteRowsByIdsSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  rowIds: z
    .array(z.string().min(1), { required_error: 'Row IDs are required' })
    .min(1, 'At least one row ID is required')
    .max(1000, 'Cannot delete more than 1000 rows per operation'),
})
// DELETE accepts either shape; the handler distinguishes them via `'rowIds' in validated`.
const DeleteRowsRequestSchema = z.union([DeleteRowsByFilterSchema, DeleteRowsByIdsSchema])
/** Route params resolved by Next.js for /api/table/[tableId]/rows. */
interface TableRowsRouteParams {
  params: Promise<{ tableId: string }>
}
/**
 * Handles the batch-insert branch of POST /api/table/[tableId]/rows.
 * Re-validates the body, checks write access, workspace ownership, remaining
 * row capacity, and schema validity, then inserts all rows in one statement.
 */
async function handleBatchInsert(
  requestId: string,
  tableId: string,
  body: z.infer<typeof BatchInsertRowsSchema>,
  userId: string
): Promise<NextResponse> {
  const parsed = BatchInsertRowsSchema.parse(body)
  const access = await checkAccess(tableId, userId, 'write')
  if (!access.ok) return accessError(access, requestId, tableId)
  const { table } = access
  if (parsed.workspaceId !== table.workspaceId) {
    logger.warn(
      `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${parsed.workspaceId}, Actual: ${table.workspaceId}`
    )
    return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
  }
  const workspaceId = parsed.workspaceId
  // Refuse the entire batch if it would push the table past its row cap.
  const remainingCapacity = table.maxRows - table.rowCount
  if (remainingCapacity < parsed.rows.length) {
    return NextResponse.json(
      {
        error: `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`,
      },
      { status: 400 }
    )
  }
  // Validate every row against the table schema before writing anything.
  const validation = await validateBatchRows({
    rows: parsed.rows as RowData[],
    schema: table.schema as TableSchema,
    tableId,
  })
  if (!validation.valid) return validation.response
  const insertedAt = new Date()
  const values = parsed.rows.map((data) => ({
    id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
    tableId,
    workspaceId,
    data,
    createdAt: insertedAt,
    updatedAt: insertedAt,
    createdBy: userId,
  }))
  const inserted = await db.insert(userTableRows).values(values).returning()
  logger.info(`[${requestId}] Batch inserted ${inserted.length} rows into table ${tableId}`)
  const responseRows = inserted.map((r) => ({
    id: r.id,
    data: r.data,
    createdAt: r.createdAt.toISOString(),
    updatedAt: r.updatedAt.toISOString(),
  }))
  return NextResponse.json({
    success: true,
    data: {
      rows: responseRows,
      insertedCount: inserted.length,
      message: `Successfully inserted ${inserted.length} rows`,
    },
  })
}
/** POST /api/table/[tableId]/rows - Inserts row(s). Supports single or batch insert. */
export async function POST(request: NextRequest, { params }: TableRowsRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    // Session cookie or internal service auth are both accepted.
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    // A body carrying a `rows` array is routed to the batch path.
    if (
      typeof body === 'object' &&
      body !== null &&
      'rows' in body &&
      Array.isArray((body as Record<string, unknown>).rows)
    ) {
      // `return await` (not a bare `return`) so rejections from the batch path —
      // including the ZodError thrown by BatchInsertRowsSchema.parse inside
      // handleBatchInsert — are caught by this try/catch and mapped to 400/500
      // responses instead of escaping as an unhandled rejection.
      return await handleBatchInsert(
        requestId,
        tableId,
        body as z.infer<typeof BatchInsertRowsSchema>,
        authResult.userId
      )
    }
    const validated = InsertRowSchema.parse(body)
    const accessResult = await checkAccess(tableId, authResult.userId, 'write')
    if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
    const { table } = accessResult
    // The caller-supplied workspaceId must match the table's actual workspace.
    if (validated.workspaceId !== table.workspaceId) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    const workspaceId = validated.workspaceId
    const rowData = validated.data as RowData
    // Schema (and unique-constraint) validation before any write.
    const validation = await validateRowData({
      rowData,
      schema: table.schema as TableSchema,
      tableId,
    })
    if (!validation.valid) return validation.response
    // Enforce the per-table row cap.
    if (table.rowCount >= table.maxRows) {
      return NextResponse.json(
        { error: `Table row limit reached (${table.maxRows} rows max)` },
        { status: 400 }
      )
    }
    const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}`
    const now = new Date()
    const [row] = await db
      .insert(userTableRows)
      .values({
        id: rowId,
        tableId,
        workspaceId,
        data: validated.data,
        createdAt: now,
        updatedAt: now,
        createdBy: authResult.userId,
      })
      .returning()
    logger.info(`[${requestId}] Inserted row ${rowId} into table ${tableId}`)
    return NextResponse.json({
      success: true,
      data: {
        row: {
          id: row.id,
          data: row.data,
          createdAt: row.createdAt.toISOString(),
          updatedAt: row.updatedAt.toISOString(),
        },
        message: 'Row inserted successfully',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error inserting row:`, error)
    return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 })
  }
}
/** GET /api/table/[tableId]/rows - Queries rows with filtering, sorting, and pagination. */
export async function GET(request: NextRequest, { params }: TableRowsRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    // Session cookie or internal service auth are both accepted.
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const { searchParams } = new URL(request.url)
    const workspaceId = searchParams.get('workspaceId')
    const filterParam = searchParams.get('filter')
    const sortParam = searchParams.get('sort')
    const limit = searchParams.get('limit')
    const offset = searchParams.get('offset')
    // filter/sort arrive JSON-encoded in the query string; unparseable JSON is a
    // client error (400), caught here before zod validation.
    let filter: Record<string, unknown> | undefined
    let sort: Sort | undefined
    try {
      if (filterParam) {
        filter = JSON.parse(filterParam) as Record<string, unknown>
      }
      if (sortParam) {
        sort = JSON.parse(sortParam) as Sort
      }
    } catch {
      return NextResponse.json({ error: 'Invalid filter or sort JSON' }, { status: 400 })
    }
    const validated = QueryRowsSchema.parse({
      workspaceId,
      filter,
      sort,
      limit,
      offset,
    })
    const accessResult = await checkAccess(tableId, authResult.userId, 'read')
    if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
    const { table } = accessResult
    // The caller-supplied workspaceId must match the table's actual workspace.
    if (validated.workspaceId !== table.workspaceId) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    // Rows are always scoped to both the table and its workspace.
    const baseConditions = [
      eq(userTableRows.tableId, tableId),
      eq(userTableRows.workspaceId, validated.workspaceId),
    ]
    if (validated.filter) {
      const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
      if (filterClause) {
        baseConditions.push(filterClause)
      }
    }
    let query = db
      .select({
        id: userTableRows.id,
        data: userTableRows.data,
        createdAt: userTableRows.createdAt,
        updatedAt: userTableRows.updatedAt,
      })
      .from(userTableRows)
      .where(and(...baseConditions))
    if (validated.sort) {
      const schema = table.schema as TableSchema
      const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns)
      if (sortClause) {
        query = query.orderBy(sortClause) as typeof query
      }
    } else {
      // Default ordering keeps pagination stable across requests.
      query = query.orderBy(userTableRows.createdAt) as typeof query
    }
    const countQuery = db
      .select({ count: sql<number>`count(*)` })
      .from(userTableRows)
      .where(and(...baseConditions))
    // The total count and the requested page are independent queries; running
    // them in parallel avoids a needless sequential round-trip.
    const [[{ count: totalCount }], rows] = await Promise.all([
      countQuery,
      query.limit(validated.limit).offset(validated.offset),
    ])
    logger.info(
      `[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount})`
    )
    return NextResponse.json({
      success: true,
      data: {
        rows: rows.map((r) => ({
          id: r.id,
          data: r.data,
          createdAt: r.createdAt.toISOString(),
          updatedAt: r.updatedAt.toISOString(),
        })),
        rowCount: rows.length,
        // count(*) may come back as a string from the driver; normalize to number.
        totalCount: Number(totalCount),
        limit: validated.limit,
        offset: validated.offset,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error querying rows:`, error)
    return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 })
  }
}
/** PUT /api/table/[tableId]/rows - Updates rows matching filter criteria. */
export async function PUT(request: NextRequest, { params }: TableRowsRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    // Session cookie or internal service auth are both accepted.
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    const validated = UpdateRowsByFilterSchema.parse(body)
    const accessResult = await checkAccess(tableId, authResult.userId, 'write')
    if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
    const { table } = accessResult
    // The caller-supplied workspaceId must match the table's actual workspace.
    if (validated.workspaceId !== table.workspaceId) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    // Size-check the patch itself before touching any rows.
    const updateData = validated.data as RowData
    const sizeValidation = validateRowSize(updateData)
    if (!sizeValidation.valid) {
      return NextResponse.json(
        { error: 'Invalid row data', details: sizeValidation.errors },
        { status: 400 }
      )
    }
    const baseConditions = [
      eq(userTableRows.tableId, tableId),
      eq(userTableRows.workspaceId, validated.workspaceId),
    ]
    const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
    if (filterClause) {
      baseConditions.push(filterClause)
    }
    // Read the matching rows first: the patch is merged per-row, so each target's
    // current data is needed both for validation and for the final write.
    let matchingRowsQuery = db
      .select({
        id: userTableRows.id,
        data: userTableRows.data,
      })
      .from(userTableRows)
      .where(and(...baseConditions))
    if (validated.limit) {
      matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
    }
    const matchingRows = await matchingRowsQuery
    // An empty match is a successful no-op, not an error.
    if (matchingRows.length === 0) {
      return NextResponse.json(
        {
          success: true,
          data: {
            message: 'No rows matched the filter criteria',
            updatedCount: 0,
          },
        },
        { status: 200 }
      )
    }
    if (matchingRows.length > TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) {
      logger.warn(`[${requestId}] Updating ${matchingRows.length} rows. This may take some time.`)
    }
    // Validate every merged row against the schema BEFORE writing anything, so
    // the operation is all-or-nothing from the caller's perspective.
    for (const row of matchingRows) {
      const existingData = row.data as RowData
      const mergedData = { ...existingData, ...updateData }
      const rowValidation = validateRowAgainstSchema(mergedData, table.schema as TableSchema)
      if (!rowValidation.valid) {
        return NextResponse.json(
          {
            error: 'Updated data does not match schema',
            details: rowValidation.errors,
            affectedRowId: row.id,
          },
          { status: 400 }
        )
      }
    }
    const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
    if (uniqueColumns.length > 0) {
      // If updating multiple rows, check that updateData doesn't set any unique column
      // (would cause all rows to have the same value, violating uniqueness)
      if (matchingRows.length > 1) {
        const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in updateData)
        if (uniqueColumnsInUpdate.length > 0) {
          return NextResponse.json(
            {
              error: 'Cannot set unique column values when updating multiple rows',
              details: [
                `Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. ` +
                  `Updating ${matchingRows.length} rows with the same value would violate uniqueness.`,
              ],
            },
            { status: 400 }
          )
        }
      }
      // Check unique constraints against database for each row
      for (const row of matchingRows) {
        const existingData = row.data as RowData
        const mergedData = { ...existingData, ...updateData }
        // The row's own id is passed so its current values don't count as conflicts.
        const uniqueValidation = await checkUniqueConstraintsDb(
          tableId,
          mergedData,
          table.schema as TableSchema,
          row.id
        )
        if (!uniqueValidation.valid) {
          return NextResponse.json(
            {
              error: 'Unique constraint violation',
              details: uniqueValidation.errors,
              affectedRowId: row.id,
            },
            { status: 400 }
          )
        }
      }
    }
    const now = new Date()
    // Apply the writes in batches inside a single transaction; each batch's
    // updates are issued concurrently via Promise.all.
    await db.transaction(async (trx) => {
      let totalUpdated = 0
      for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
        const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
        const updatePromises = batch.map((row) => {
          const existingData = row.data as RowData
          return trx
            .update(userTableRows)
            .set({
              data: { ...existingData, ...updateData },
              updatedAt: now,
            })
            .where(eq(userTableRows.id, row.id))
        })
        await Promise.all(updatePromises)
        totalUpdated += batch.length
        logger.info(
          `[${requestId}] Updated batch ${Math.floor(i / TABLE_LIMITS.UPDATE_BATCH_SIZE) + 1} (${totalUpdated}/${matchingRows.length} rows)`
        )
      }
    })
    logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${tableId}`)
    return NextResponse.json({
      success: true,
      data: {
        message: 'Rows updated successfully',
        updatedCount: matchingRows.length,
        updatedRowIds: matchingRows.map((r) => r.id),
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error updating rows by filter:`, error)
    const errorMessage = error instanceof Error ? error.message : String(error)
    const detailedError = `Failed to update rows: ${errorMessage}`
    return NextResponse.json({ error: detailedError }, { status: 500 })
  }
}
/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria. */
export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    // Session cookie or internal service auth are both accepted.
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    // Union schema: body is either { filter, limit? } or { rowIds }.
    const validated = DeleteRowsRequestSchema.parse(body)
    const accessResult = await checkAccess(tableId, authResult.userId, 'write')
    if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
    const { table } = accessResult
    // The caller-supplied workspaceId must match the table's actual workspace.
    if (validated.workspaceId !== table.workspaceId) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    const baseConditions = [
      eq(userTableRows.tableId, tableId),
      eq(userTableRows.workspaceId, validated.workspaceId),
    ]
    let rowIds: string[] = []
    let missingRowIds: string[] | undefined
    let requestedCount: number | undefined
    if ('rowIds' in validated) {
      // ID-based mode: dedupe the requested IDs, then resolve which of them
      // actually exist in this table/workspace so missing ones can be reported.
      const uniqueRequestedRowIds = Array.from(new Set(validated.rowIds))
      requestedCount = uniqueRequestedRowIds.length
      const matchingRows = await db
        .select({ id: userTableRows.id })
        .from(userTableRows)
        .where(
          and(
            ...baseConditions,
            // IDs are bound as individual parameters inside ANY(ARRAY[...]),
            // never interpolated into the SQL text.
            sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
              uniqueRequestedRowIds.map((id) => sql`${id}`),
              sql`, `
            )}])`
          )
        )
      const matchedRowIds = matchingRows.map((r) => r.id)
      const matchedIdSet = new Set(matchedRowIds)
      missingRowIds = uniqueRequestedRowIds.filter((id) => !matchedIdSet.has(id))
      rowIds = matchedRowIds
    } else {
      // Filter-based mode: resolve matching row IDs first (optionally limited),
      // then delete by ID so the limit is honored deterministically.
      const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
      if (filterClause) {
        baseConditions.push(filterClause)
      }
      let matchingRowsQuery = db
        .select({ id: userTableRows.id })
        .from(userTableRows)
        .where(and(...baseConditions))
      if (validated.limit) {
        matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
      }
      const matchingRows = await matchingRowsQuery
      rowIds = matchingRows.map((r) => r.id)
    }
    // Nothing to delete is a successful no-op, not an error.
    if (rowIds.length === 0) {
      return NextResponse.json(
        {
          success: true,
          data: {
            message:
              'rowIds' in validated
                ? 'No matching rows found for the provided IDs'
                : 'No rows matched the filter criteria',
            deletedCount: 0,
            deletedRowIds: [],
            ...(requestedCount !== undefined ? { requestedCount } : {}),
            ...(missingRowIds ? { missingRowIds } : {}),
          },
        },
        { status: 200 }
      )
    }
    if (rowIds.length > TABLE_LIMITS.DELETE_BATCH_SIZE) {
      logger.warn(`[${requestId}] Deleting ${rowIds.length} rows. This may take some time.`)
    }
    // Delete in batches inside one transaction so the whole operation commits or
    // rolls back together.
    await db.transaction(async (trx) => {
      let totalDeleted = 0
      for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) {
        const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE)
        await trx.delete(userTableRows).where(
          and(
            eq(userTableRows.tableId, tableId),
            eq(userTableRows.workspaceId, validated.workspaceId),
            sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
              batch.map((id) => sql`${id}`),
              sql`, `
            )}])`
          )
        )
        totalDeleted += batch.length
        logger.info(
          `[${requestId}] Deleted batch ${Math.floor(i / TABLE_LIMITS.DELETE_BATCH_SIZE) + 1} (${totalDeleted}/${rowIds.length} rows)`
        )
      }
    })
    logger.info(`[${requestId}] Deleted ${rowIds.length} rows from table ${tableId}`)
    return NextResponse.json({
      success: true,
      data: {
        message: 'Rows deleted successfully',
        deletedCount: rowIds.length,
        deletedRowIds: rowIds,
        ...(requestedCount !== undefined ? { requestedCount } : {}),
        ...(missingRowIds ? { missingRowIds } : {}),
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error deleting rows by filter:`, error)
    const errorMessage = error instanceof Error ? error.message : String(error)
    const detailedError = `Failed to delete rows: ${errorMessage}`
    return NextResponse.json({ error: detailedError }, { status: 500 })
  }
}

View File

@@ -1,182 +0,0 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { RowData, TableSchema } from '@/lib/table'
import { getUniqueColumns, validateRowData } from '@/lib/table'
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'
const logger = createLogger('TableUpsertAPI')
/** Body for POST upsert: the full row data plus the owning workspace. */
const UpsertRowSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
  data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})
/** Route params resolved by Next.js for /api/table/[tableId]/rows/upsert. */
interface UpsertRouteParams {
  params: Promise<{ tableId: string }>
}
/** POST /api/table/[tableId]/rows/upsert - Inserts or updates based on unique columns. */
export async function POST(request: NextRequest, { params }: UpsertRouteParams) {
  const requestId = generateRequestId()
  const { tableId } = await params
  try {
    // Session cookie or internal service auth are both accepted.
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    const validated = UpsertRowSchema.parse(body)
    // Upsert is a write operation either way.
    const result = await checkAccess(tableId, authResult.userId, 'write')
    if (!result.ok) return accessError(result, requestId, tableId)
    const { table } = result
    // The caller-supplied workspaceId must match the table's actual workspace.
    const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
    if (!isValidWorkspace) {
      logger.warn(
        `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
      )
      return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
    }
    const schema = table.schema as TableSchema
    const rowData = validated.data as RowData
    // checkUnique is disabled here: a unique-value collision is exactly what
    // upsert resolves by updating the matched row instead of failing.
    const validation = await validateRowData({
      rowData,
      schema,
      tableId,
      checkUnique: false,
    })
    if (!validation.valid) return validation.response
    // Upsert matching is driven entirely by unique columns; without one there is
    // no key to match on.
    const uniqueColumns = getUniqueColumns(schema)
    if (uniqueColumns.length === 0) {
      return NextResponse.json(
        {
          error:
            'Upsert requires at least one unique column in the schema. Please add a unique constraint to a column or use insert instead.',
        },
        { status: 400 }
      )
    }
    // Build one JSONB text-equality predicate per unique column that has a
    // non-null value in the incoming data.
    const uniqueFilters = uniqueColumns.map((col) => {
      const value = rowData[col.name]
      if (value === undefined || value === null) {
        return null
      }
      return sql`${userTableRows.data}->>${col.name} = ${String(value)}`
    })
    const validUniqueFilters = uniqueFilters.filter((f): f is Exclude<typeof f, null> => f !== null)
    if (validUniqueFilters.length === 0) {
      return NextResponse.json(
        {
          error: `Upsert requires values for at least one unique field: ${uniqueColumns.map((c) => c.name).join(', ')}`,
        },
        { status: 400 }
      )
    }
    // A row matching ANY unique column counts as the existing row to update.
    const [existingRow] = await db
      .select()
      .from(userTableRows)
      .where(
        and(
          eq(userTableRows.tableId, tableId),
          eq(userTableRows.workspaceId, validated.workspaceId),
          or(...validUniqueFilters)
        )
      )
      .limit(1)
    const now = new Date()
    // The row cap only matters on the insert path; updates don't grow the table.
    if (!existingRow && table.rowCount >= table.maxRows) {
      return NextResponse.json(
        { error: `Table row limit reached (${table.maxRows} rows max)` },
        { status: 400 }
      )
    }
    const upsertResult = await db.transaction(async (trx) => {
      if (existingRow) {
        // Update path: the row's data is REPLACED with the incoming payload,
        // not merged (unlike PATCH on a single row).
        const [updatedRow] = await trx
          .update(userTableRows)
          .set({
            data: validated.data,
            updatedAt: now,
          })
          .where(eq(userTableRows.id, existingRow.id))
          .returning()
        return {
          row: updatedRow,
          operation: 'update' as const,
        }
      }
      // Insert path: create a fresh row owned by the authenticated user.
      const [insertedRow] = await trx
        .insert(userTableRows)
        .values({
          id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
          tableId,
          workspaceId: validated.workspaceId,
          data: validated.data,
          createdAt: now,
          updatedAt: now,
          createdBy: authResult.userId,
        })
        .returning()
      return {
        row: insertedRow,
        operation: 'insert' as const,
      }
    })
    logger.info(
      `[${requestId}] Upserted (${upsertResult.operation}) row ${upsertResult.row.id} in table ${tableId}`
    )
    return NextResponse.json({
      success: true,
      data: {
        row: {
          id: upsertResult.row.id,
          data: upsertResult.row.data,
          createdAt: upsertResult.row.createdAt.toISOString(),
          updatedAt: upsertResult.row.updatedAt.toISOString(),
        },
        operation: upsertResult.operation,
        message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error upserting row:`, error)
    const errorMessage = error instanceof Error ? error.message : String(error)
    const detailedError = `Failed to upsert row: ${errorMessage}`
    return NextResponse.json({ error: detailedError }, { status: 500 })
  }
}

View File

@@ -1,258 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import {
canCreateTable,
createTable,
getWorkspaceTableLimits,
listTables,
TABLE_LIMITS,
type TableSchema,
} from '@/lib/table'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { normalizeColumn } from './utils'
const logger = createLogger('TableAPI')
// Zod schema for one user-defined column. Names are constrained to
// identifier-like strings (letter/underscore start, alphanumerics after).
const ColumnSchema = z.object({
  name: z
    .string()
    .min(1, 'Column name is required')
    .max(
      TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH,
      `Column name must be ${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters or less`
    )
    .regex(
      /^[a-z_][a-z0-9_]*$/i,
      'Column name must start with a letter or underscore and contain only alphanumeric characters and underscores'
    ),
  type: z.enum(['string', 'number', 'boolean', 'date', 'json'], {
    errorMap: () => ({
      message: 'Column type must be one of: string, number, boolean, date, json',
    }),
  }),
  // Both flags default to false when omitted by the client.
  required: z.boolean().optional().default(false),
  unique: z.boolean().optional().default(false),
})
// Request body for POST /api/table. Table names follow the same
// identifier rule as column names.
const CreateTableSchema = z.object({
  name: z
    .string()
    .min(1, 'Table name is required')
    .max(
      TABLE_LIMITS.MAX_TABLE_NAME_LENGTH,
      `Table name must be ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters or less`
    )
    .regex(
      /^[a-z_][a-z0-9_]*$/i,
      'Table name must start with a letter or underscore and contain only alphanumeric characters and underscores'
    ),
  description: z
    .string()
    .max(
      TABLE_LIMITS.MAX_DESCRIPTION_LENGTH,
      `Description must be ${TABLE_LIMITS.MAX_DESCRIPTION_LENGTH} characters or less`
    )
    .optional(),
  schema: z.object({
    columns: z
      .array(ColumnSchema)
      .min(1, 'Table must have at least one column')
      .max(
        TABLE_LIMITS.MAX_COLUMNS_PER_TABLE,
        `Table cannot have more than ${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE} columns`
      ),
  }),
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})
// Query parameters for GET /api/table.
const ListTablesSchema = z.object({
  workspaceId: z.string().min(1, 'Workspace ID is required'),
})
// Result of a workspace permission lookup for the current user.
// hasAccess: any permission at all; canWrite: write or admin.
interface WorkspaceAccessResult {
  hasAccess: boolean
  canWrite: boolean
}
/**
 * Resolves the caller's workspace permission into a coarse access result.
 * No permission at all means no access; write or admin grants write.
 */
async function checkWorkspaceAccess(
  workspaceId: string,
  userId: string
): Promise<WorkspaceAccessResult> {
  const role = await getUserEntityPermissions(userId, 'workspace', workspaceId)
  if (role === null) {
    return { hasAccess: false, canWrite: false }
  }
  return {
    hasAccess: true,
    canWrite: role === 'admin' || role === 'write',
  }
}
/**
 * POST /api/table - Creates a new user-defined table.
 *
 * Auth: session or internal token (no workflowId required).
 * Requires write/admin permission on the target workspace, and enforces
 * plan-based limits on table count and rows-per-table before creating.
 */
export async function POST(request: NextRequest) {
  const requestId = generateRequestId()
  try {
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const body: unknown = await request.json()
    // Throws ZodError on invalid input; handled as a 400 in the catch below.
    const params = CreateTableSchema.parse(body)
    // Creating a table mutates the workspace, so read access alone is not enough.
    const { hasAccess, canWrite } = await checkWorkspaceAccess(
      params.workspaceId,
      authResult.userId
    )
    if (!hasAccess || !canWrite) {
      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
    }
    // Check billing plan limits
    const existingTables = await listTables(params.workspaceId)
    const { canCreate, maxTables } = await canCreateTable(params.workspaceId, existingTables.length)
    if (!canCreate) {
      return NextResponse.json(
        {
          error: `Workspace has reached the maximum table limit (${maxTables}) for your plan. Please upgrade to create more tables.`,
        },
        { status: 403 }
      )
    }
    // Get plan-based row limits
    const planLimits = await getWorkspaceTableLimits(params.workspaceId)
    const maxRowsPerTable = planLimits.maxRowsPerTable
    // Fill in column defaults (required/unique) so stored schemas are uniform.
    const normalizedSchema: TableSchema = {
      columns: params.schema.columns.map(normalizeColumn),
    }
    const table = await createTable(
      {
        name: params.name,
        description: params.description,
        schema: normalizedSchema,
        workspaceId: params.workspaceId,
        userId: authResult.userId,
        maxRows: maxRowsPerTable,
      },
      requestId
    )
    // Timestamps may come back as Date or string depending on the driver;
    // normalize both to strings for the response payload.
    return NextResponse.json({
      success: true,
      data: {
        table: {
          id: table.id,
          name: table.name,
          description: table.description,
          schema: table.schema,
          rowCount: table.rowCount,
          maxRows: table.maxRows,
          createdAt:
            table.createdAt instanceof Date
              ? table.createdAt.toISOString()
              : String(table.createdAt),
          updatedAt:
            table.updatedAt instanceof Date
              ? table.updatedAt.toISOString()
              : String(table.updatedAt),
        },
        message: 'Table created successfully',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    // createTable surfaces some validation failures as plain Errors; map the
    // known message patterns to 400 instead of a generic 500.
    if (error instanceof Error) {
      if (
        error.message.includes('Invalid table name') ||
        error.message.includes('Invalid schema') ||
        error.message.includes('already exists') ||
        error.message.includes('maximum table limit')
      ) {
        return NextResponse.json({ error: error.message }, { status: 400 })
      }
    }
    logger.error(`[${requestId}] Error creating table:`, error)
    return NextResponse.json({ error: 'Failed to create table' }, { status: 500 })
  }
}
/**
 * GET /api/table - Lists all tables in a workspace.
 *
 * Auth: session or internal token. Any workspace permission (read or
 * better) is sufficient. Returns tables with normalized column schemas
 * and stringified timestamps.
 */
export async function GET(request: NextRequest) {
  const requestId = generateRequestId()
  try {
    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success || !authResult.userId) {
      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
    }
    const { searchParams } = new URL(request.url)
    const workspaceId = searchParams.get('workspaceId')
    // safeParse so a missing workspaceId yields a 400 rather than a throw.
    const validation = ListTablesSchema.safeParse({ workspaceId })
    if (!validation.success) {
      return NextResponse.json(
        { error: 'Validation error', details: validation.error.errors },
        { status: 400 }
      )
    }
    const params = validation.data
    // Listing only requires read access; canWrite is ignored here.
    const { hasAccess } = await checkWorkspaceAccess(params.workspaceId, authResult.userId)
    if (!hasAccess) {
      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
    }
    const tables = await listTables(params.workspaceId)
    logger.info(`[${requestId}] Listed ${tables.length} tables in workspace ${params.workspaceId}`)
    return NextResponse.json({
      success: true,
      data: {
        tables: tables.map((t) => {
          const schemaData = t.schema as TableSchema
          return {
            ...t,
            // Re-apply column defaults so older rows serialize consistently.
            schema: {
              columns: schemaData.columns.map(normalizeColumn),
            },
            createdAt:
              t.createdAt instanceof Date ? t.createdAt.toISOString() : String(t.createdAt),
            updatedAt:
              t.updatedAt instanceof Date ? t.updatedAt.toISOString() : String(t.updatedAt),
          }
        }),
        totalCount: tables.length,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: 'Validation error', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error listing tables:`, error)
    return NextResponse.json({ error: 'Failed to list tables' }, { status: 500 })
  }
}

View File

@@ -1,164 +0,0 @@
import { createLogger } from '@sim/logger'
import { NextResponse } from 'next/server'
import type { ColumnDefinition, TableDefinition } from '@/lib/table'
import { getTableById } from '@/lib/table'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('TableUtils')
// Successful access check: caller may use the resolved table.
export interface TableAccessResult {
  hasAccess: true
  table: TableDefinition
}
// Failed access check: notFound distinguishes a 404 from a 403;
// reason (when present) is a human-readable denial message.
export interface TableAccessDenied {
  hasAccess: false
  notFound?: boolean
  reason?: string
}
// Discriminated union on `hasAccess` — narrow before touching `table`.
export type TableAccessCheck = TableAccessResult | TableAccessDenied
// Compact alternative shape used by checkAccess/accessError.
export type AccessResult = { ok: true; table: TableDefinition } | { ok: false; status: 404 | 403 }
// Standard JSON error body returned by the helpers below.
export interface ApiErrorResponse {
  error: string
  details?: unknown
}
/**
 * Check if a user has read access to a table.
 * Read access requires any workspace permission (read, write, or admin).
 */
export async function checkTableAccess(tableId: string, userId: string): Promise<TableAccessCheck> {
  const table = await getTableById(tableId)
  if (!table) {
    return { hasAccess: false, notFound: true }
  }
  const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
  return permission === null
    ? { hasAccess: false, reason: 'User does not have access to this table' }
    : { hasAccess: true, table }
}
/**
 * Check if a user has write access to a table.
 * Write access requires write or admin workspace permission.
 */
export async function checkTableWriteAccess(
  tableId: string,
  userId: string
): Promise<TableAccessCheck> {
  const table = await getTableById(tableId)
  if (!table) {
    return { hasAccess: false, notFound: true }
  }
  const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
  const canWrite = permission === 'write' || permission === 'admin'
  if (!canWrite) {
    return { hasAccess: false, reason: 'User does not have write access to this table' }
  }
  return { hasAccess: true, table }
}
/**
 * Access check returning `{ ok, table }` or `{ ok: false, status }`.
 * Uses workspace permissions only. Missing table maps to 404,
 * insufficient permission to 403.
 */
export async function checkAccess(
  tableId: string,
  userId: string,
  level: 'read' | 'write' | 'admin' = 'read'
): Promise<AccessResult> {
  const table = await getTableById(tableId)
  if (!table) {
    return { ok: false, status: 404 }
  }
  const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
  let allowed = false
  if (permission !== null) {
    switch (level) {
      case 'read':
        // Any permission at all satisfies read.
        allowed = true
        break
      case 'write':
        allowed = permission === 'write' || permission === 'admin'
        break
      case 'admin':
        allowed = permission === 'admin'
        break
    }
  }
  return allowed ? { ok: true, table } : { ok: false, status: 403 }
}
/**
 * Converts a failed checkAccess result into a logged JSON error response.
 * 404 → "Table not found", 403 → "Access denied".
 */
export function accessError(
  result: { ok: false; status: 404 | 403 },
  requestId: string,
  context?: string
): NextResponse {
  const notFound = result.status === 404
  const message = notFound ? 'Table not found' : 'Access denied'
  const suffix = context ? `: ${context}` : ''
  logger.warn(`[${requestId}] ${message}${suffix}`)
  return NextResponse.json({ error: message }, { status: result.status })
}
/**
 * Converts a TableAccessDenied result to an appropriate HTTP response.
 * Use with checkTableAccess or checkTableWriteAccess.
 */
export function tableAccessError(
  result: TableAccessDenied,
  requestId: string,
  context?: string
): NextResponse {
  let status: 404 | 403
  let message: string
  if (result.notFound) {
    status = 404
    message = 'Table not found'
  } else {
    status = 403
    message = result.reason ?? 'Access denied'
  }
  logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`)
  return NextResponse.json({ error: message }, { status })
}
/** True only when the table exists and belongs to the given workspace. */
export async function verifyTableWorkspace(tableId: string, workspaceId: string): Promise<boolean> {
  const found = await getTableById(tableId)
  if (!found) {
    return false
  }
  return found.workspaceId === workspaceId
}
/**
 * Builds a JSON error response; `details` is included only when provided,
 * so `undefined` never appears as an explicit key in the body.
 */
export function errorResponse(
  message: string,
  status: number,
  details?: unknown
): NextResponse<ApiErrorResponse> {
  const payload: ApiErrorResponse =
    details === undefined ? { error: message } : { error: message, details }
  return NextResponse.json(payload, { status })
}
// Convenience wrappers around errorResponse for common HTTP statuses.
// Only badRequestResponse forwards details; the rest take a message only.
export function badRequestResponse(message: string, details?: unknown) {
  return errorResponse(message, 400, details)
}
export function unauthorizedResponse(message = 'Authentication required') {
  return errorResponse(message, 401)
}
export function forbiddenResponse(message = 'Access denied') {
  return errorResponse(message, 403)
}
export function notFoundResponse(message = 'Resource not found') {
  return errorResponse(message, 404)
}
export function serverErrorResponse(message = 'Internal server error') {
  return errorResponse(message, 500)
}
/**
 * Returns a copy of the column with `required`/`unique` defaulted to false
 * when absent, so every stored column carries both flags explicitly.
 */
export function normalizeColumn(col: ColumnDefinition): ColumnDefinition {
  const required = col.required ?? false
  const unique = col.unique ?? false
  return { name: col.name, type: col.type, required, unique }
}

View File

@@ -1,145 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
// Disable route caching: every upload must execute the handler.
export const dynamic = 'force-dynamic'
const logger = createLogger('DataverseUploadFileAPI')
// Request body for the Dataverse file upload proxy. Exactly one of
// `file` (a stored UserFile reference) or `fileContent` (base64 string)
// must be supplied; the handler enforces that at runtime.
const DataverseUploadFileSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  environmentUrl: z.string().min(1, 'Environment URL is required'),
  entitySetName: z.string().min(1, 'Entity set name is required'),
  recordId: z.string().min(1, 'Record ID is required'),
  fileColumn: z.string().min(1, 'File column is required'),
  fileName: z.string().min(1, 'File name is required'),
  file: RawFileInputSchema.optional().nullable(),
  fileContent: z.string().optional().nullable(),
})
/**
 * POST handler: uploads a file into a Dataverse file column.
 *
 * Resolves the file bytes either from internal storage (UserFile input)
 * or from an inline base64 payload, then PATCHes them to the Dataverse
 * Web API (v9.2) endpoint for the given entity/record/column.
 * Requires internal auth; workflowId is not required.
 */
export async function POST(request: NextRequest) {
  const requestId = generateRequestId()
  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Dataverse upload attempt: ${authResult.error}`)
      return NextResponse.json(
        { success: false, error: authResult.error || 'Authentication required' },
        { status: 401 }
      )
    }
    logger.info(
      `[${requestId}] Authenticated Dataverse upload request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )
    const body = await request.json()
    // Throws ZodError on invalid input; mapped to 400 in the catch below.
    const validatedData = DataverseUploadFileSchema.parse(body)
    logger.info(`[${requestId}] Uploading file to Dataverse`, {
      entitySetName: validatedData.entitySetName,
      recordId: validatedData.recordId,
      fileColumn: validatedData.fileColumn,
      fileName: validatedData.fileName,
      hasFile: !!validatedData.file,
      hasFileContent: !!validatedData.fileContent,
    })
    let fileBuffer: Buffer
    if (validatedData.file) {
      // Path 1: file was previously uploaded to our storage; fetch its bytes.
      const rawFile = validatedData.file
      logger.info(`[${requestId}] Processing UserFile upload: ${rawFile.name}`)
      let userFile
      try {
        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          {
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process file',
          },
          { status: 400 }
        )
      }
      fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
    } else if (validatedData.fileContent) {
      // Path 2: inline base64 content.
      fileBuffer = Buffer.from(validatedData.fileContent, 'base64')
    } else {
      // Schema allows both to be absent, so enforce the either/or here.
      return NextResponse.json(
        { success: false, error: 'Either file or fileContent must be provided' },
        { status: 400 }
      )
    }
    // Strip a trailing slash so the path segments join cleanly.
    const baseUrl = validatedData.environmentUrl.replace(/\/$/, '')
    const uploadUrl = `${baseUrl}/api/data/v9.2/${validatedData.entitySetName}(${validatedData.recordId})/${validatedData.fileColumn}`
    // PATCH raw bytes; the target file name travels in the x-ms-file-name header.
    const response = await fetch(uploadUrl, {
      method: 'PATCH',
      headers: {
        Authorization: `Bearer ${validatedData.accessToken}`,
        'Content-Type': 'application/octet-stream',
        'OData-MaxVersion': '4.0',
        'OData-Version': '4.0',
        'x-ms-file-name': validatedData.fileName,
      },
      body: new Uint8Array(fileBuffer),
    })
    if (!response.ok) {
      // Error body may not be JSON; fall back to a status-based message.
      const errorData = await response.json().catch(() => ({}))
      const errorMessage =
        errorData?.error?.message ??
        `Dataverse API error: ${response.status} ${response.statusText}`
      logger.error(`[${requestId}] Dataverse upload file failed`, {
        errorData,
        status: response.status,
      })
      return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
    }
    logger.info(`[${requestId}] File uploaded to Dataverse successfully`, {
      entitySetName: validatedData.entitySetName,
      recordId: validatedData.recordId,
      fileColumn: validatedData.fileColumn,
    })
    return NextResponse.json({
      success: true,
      output: {
        recordId: validatedData.recordId,
        fileColumn: validatedData.fileColumn,
        fileName: validatedData.fileName,
        success: true,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { success: false, error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }
    logger.error(`[${requestId}] Error uploading file to Dataverse:`, error)
    return NextResponse.json(
      { success: false, error: error instanceof Error ? error.message : 'Internal server error' },
      { status: 500 }
    )
  }
}

View File

@@ -10,7 +10,6 @@ import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
import { env } from '@/lib/core/config/env'
import { getCostMultiplier, isBillingEnabled } from '@/lib/core/config/feature-flags'
import { generateRequestId } from '@/lib/core/utils/request'
import { enrichTableSchema } from '@/lib/table/llm/wand'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
import { extractResponseText, parseResponsesUsage } from '@/providers/openai/utils'
import { getModelPricing } from '@/providers/utils'
@@ -49,7 +48,6 @@ interface RequestBody {
history?: ChatMessage[]
workflowId?: string
generationType?: string
wandContext?: Record<string, unknown>
}
function safeStringify(value: unknown): string {
@@ -60,38 +58,6 @@ function safeStringify(value: unknown): string {
}
}
/**
* Wand enricher function type.
* Enrichers add context to the system prompt based on generationType.
*/
type WandEnricher = (
workspaceId: string | null,
context: Record<string, unknown>
) => Promise<string | null>
/**
* Registry of wand enrichers by generationType.
* Each enricher returns additional context to append to the system prompt.
*/
const wandEnrichers: Partial<Record<string, WandEnricher>> = {
timestamp: async () => {
const now = new Date()
return `Current date and time context for reference:
- Current UTC timestamp: ${now.toISOString()}
- Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)}
- Current Unix timestamp (milliseconds): ${now.getTime()}
- Current date (UTC): ${now.toISOString().split('T')[0]}
- Current year: ${now.getUTCFullYear()}
- Current month: ${now.getUTCMonth() + 1}
- Current day of month: ${now.getUTCDate()}
- Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]}
Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.`
},
'table-schema': enrichTableSchema,
}
async function updateUserStatsForWand(
userId: string,
usage: {
@@ -181,15 +147,7 @@ export async function POST(req: NextRequest) {
try {
const body = (await req.json()) as RequestBody
const {
prompt,
systemPrompt,
stream = false,
history = [],
workflowId,
generationType,
wandContext = {},
} = body
const { prompt, systemPrompt, stream = false, history = [], workflowId, generationType } = body
if (!prompt) {
logger.warn(`[${requestId}] Invalid request: Missing prompt.`)
@@ -264,15 +222,20 @@ export async function POST(req: NextRequest) {
systemPrompt ||
'You are a helpful AI assistant. Generate content exactly as requested by the user.'
// Apply enricher if one exists for this generationType
if (generationType) {
const enricher = wandEnrichers[generationType]
if (enricher) {
const enrichment = await enricher(workspaceId, wandContext)
if (enrichment) {
finalSystemPrompt += `\n\n${enrichment}`
}
}
if (generationType === 'timestamp') {
const now = new Date()
const currentTimeContext = `\n\nCurrent date and time context for reference:
- Current UTC timestamp: ${now.toISOString()}
- Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)}
- Current Unix timestamp (milliseconds): ${now.getTime()}
- Current date (UTC): ${now.toISOString().split('T')[0]}
- Current year: ${now.getUTCFullYear()}
- Current month: ${now.getUTCMonth() + 1}
- Current day of month: ${now.getUTCDate()}
- Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]}
Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.`
finalSystemPrompt += currentTimeContext
}
if (generationType === 'cron-expression') {

View File

@@ -1,137 +0,0 @@
/**
* @vitest-environment node
*/
import { auditMock, createMockRequest, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
import { drizzleOrmMock } from '@sim/testing/mocks'
import { beforeEach, describe, expect, it, vi } from 'vitest'
// Mock handles shared across tests; wired into module mocks in beforeEach.
const mockCheckSessionOrInternalAuth = vi.fn()
const mockGetUserEntityPermissions = vi.fn()
const mockDbSelect = vi.fn()
const mockDbInsert = vi.fn()
const mockWorkflowCreated = vi.fn()
// Extend the shared drizzle mock with `min`, which the route uses for
// computing the minimum sortOrder among siblings.
vi.mock('drizzle-orm', () => ({
  ...drizzleOrmMock,
  min: vi.fn((field) => ({ type: 'min', field })),
}))
vi.mock('@/lib/audit/log', () => auditMock)
describe('Workflows API Route - POST ordering', () => {
  beforeEach(() => {
    // resetModules so vi.doMock below takes effect on each dynamic import.
    vi.resetModules()
    vi.clearAllMocks()
    setupCommonApiMocks()
    mockConsoleLogger()
    // Deterministic workflow id for assertions.
    vi.stubGlobal('crypto', {
      randomUUID: vi.fn().mockReturnValue('workflow-new-id'),
    })
    // Default: authenticated user with write permission on the workspace.
    mockCheckSessionOrInternalAuth.mockResolvedValue({
      success: true,
      userId: 'user-123',
      userName: 'Test User',
      userEmail: 'test@example.com',
    })
    mockGetUserEntityPermissions.mockResolvedValue('write')
    vi.doMock('@sim/db', () => ({
      db: {
        select: (...args: unknown[]) => mockDbSelect(...args),
        insert: (...args: unknown[]) => mockDbInsert(...args),
      },
    }))
    vi.doMock('@/lib/auth/hybrid', () => ({
      checkSessionOrInternalAuth: (...args: unknown[]) => mockCheckSessionOrInternalAuth(...args),
    }))
    vi.doMock('@/lib/workspaces/permissions/utils', () => ({
      getUserEntityPermissions: (...args: unknown[]) => mockGetUserEntityPermissions(...args),
      workspaceExists: vi.fn(),
    }))
    vi.doMock('@/app/api/workflows/utils', () => ({
      verifyWorkspaceMembership: vi.fn(),
    }))
    vi.doMock('@/lib/core/telemetry', () => ({
      PlatformEvents: {
        workflowCreated: (...args: unknown[]) => mockWorkflowCreated(...args),
      },
    }))
  })
  it('uses top insertion against mixed siblings (folders + workflows)', async () => {
    // Two min-sortOrder queries (workflows then folders); the route should
    // take the overall minimum (2) and insert above it (sortOrder 1).
    const minResultsQueue: Array<Array<{ minOrder: number }>> = [
      [{ minOrder: 5 }],
      [{ minOrder: 2 }],
    ]
    mockDbSelect.mockImplementation(() => ({
      from: vi.fn().mockReturnValue({
        where: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])),
      }),
    }))
    // Capture the values passed to db.insert(...).values(...) for assertion.
    let insertedValues: Record<string, unknown> | null = null
    mockDbInsert.mockReturnValue({
      values: vi.fn().mockImplementation((values: Record<string, unknown>) => {
        insertedValues = values
        return Promise.resolve(undefined)
      }),
    })
    const req = createMockRequest('POST', {
      name: 'New Workflow',
      description: 'desc',
      color: '#3972F6',
      workspaceId: 'workspace-123',
      folderId: null,
    })
    // Dynamic import so the doMock factories above are honored.
    const { POST } = await import('@/app/api/workflows/route')
    const response = await POST(req)
    const data = await response.json()
    expect(response.status).toBe(200)
    expect(data.sortOrder).toBe(1)
    expect(insertedValues).not.toBeNull()
    expect(insertedValues?.sortOrder).toBe(1)
  })
  it('defaults to sortOrder 0 when there are no siblings', async () => {
    // Both min queries return empty — no existing workflows or folders.
    const minResultsQueue: Array<Array<{ minOrder: number }>> = [[], []]
    mockDbSelect.mockImplementation(() => ({
      from: vi.fn().mockReturnValue({
        where: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])),
      }),
    }))
    let insertedValues: Record<string, unknown> | null = null
    mockDbInsert.mockReturnValue({
      values: vi.fn().mockImplementation((values: Record<string, unknown>) => {
        insertedValues = values
        return Promise.resolve(undefined)
      }),
    })
    const req = createMockRequest('POST', {
      name: 'New Workflow',
      description: 'desc',
      color: '#3972F6',
      workspaceId: 'workspace-123',
      folderId: null,
    })
    const { POST } = await import('@/app/api/workflows/route')
    const response = await POST(req)
    const data = await response.json()
    expect(response.status).toBe(200)
    expect(data.sortOrder).toBe(0)
    expect(insertedValues?.sortOrder).toBe(0)
  })
})

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowFolder } from '@sim/db/schema'
import { permissions, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, inArray, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
@@ -162,33 +162,12 @@ export async function POST(req: NextRequest) {
if (providedSortOrder !== undefined) {
sortOrder = providedSortOrder
} else {
const workflowParentCondition = folderId
? eq(workflow.folderId, folderId)
: isNull(workflow.folderId)
const folderParentCondition = folderId
? eq(workflowFolder.parentId, folderId)
: isNull(workflowFolder.parentId)
const [[workflowMinResult], [folderMinResult]] = await Promise.all([
db
.select({ minOrder: min(workflow.sortOrder) })
.from(workflow)
.where(and(eq(workflow.workspaceId, workspaceId), workflowParentCondition)),
db
.select({ minOrder: min(workflowFolder.sortOrder) })
.from(workflowFolder)
.where(and(eq(workflowFolder.workspaceId, workspaceId), folderParentCondition)),
])
const minSortOrder = [workflowMinResult?.minOrder, folderMinResult?.minOrder].reduce<
number | null
>((currentMin, candidate) => {
if (candidate == null) return currentMin
if (currentMin == null) return candidate
return Math.min(currentMin, candidate)
}, null)
sortOrder = minSortOrder != null ? minSortOrder - 1 : 0
const folderCondition = folderId ? eq(workflow.folderId, folderId) : isNull(workflow.folderId)
const [minResult] = await db
.select({ minOrder: min(workflow.sortOrder) })
.from(workflow)
.where(and(eq(workflow.workspaceId, workspaceId), folderCondition))
sortOrder = (minResult?.minOrder ?? 1) - 1
}
await db.insert(workflow).values({

View File

@@ -1,31 +0,0 @@
'use client'
import { Trash2, X } from 'lucide-react'
import { Button } from '@/components/emcn'
interface ActionBarProps {
  selectedCount: number
  onDelete: () => void
  onClearSelection: () => void
}

/**
 * Bulk-action bar shown above the table when rows are selected.
 * Displays the selection count with clear and delete actions.
 */
export function ActionBar({ selectedCount, onDelete, onClearSelection }: ActionBarProps) {
  return (
    <div className='flex h-[36px] shrink-0 items-center justify-between border-[var(--border)] border-b bg-[var(--surface-4)] px-[16px]'>
      <div className='flex items-center gap-[12px]'>
        {/* Singular/plural label follows the count. */}
        <span className='font-medium text-[12px] text-[var(--text-secondary)]'>
          {selectedCount} {selectedCount === 1 ? 'row' : 'rows'} selected
        </span>
        <Button variant='ghost' size='sm' onClick={onClearSelection}>
          <X className='mr-[4px] h-[10px] w-[10px]' />
          Clear
        </Button>
      </div>
      <Button variant='destructive' size='sm' onClick={onDelete}>
        <Trash2 className='mr-[4px] h-[10px] w-[10px]' />
        Delete
      </Button>
    </div>
  )
}

View File

@@ -1,72 +0,0 @@
import { Plus } from 'lucide-react'
import { Button, TableCell, TableRow } from '@/components/emcn'
import { Skeleton } from '@/components/ui/skeleton'
import type { ColumnDefinition } from '@/lib/table'
interface LoadingRowsProps {
  columns: ColumnDefinition[]
}

/**
 * Skeleton placeholder rows rendered while table data is loading.
 * Widths vary deterministically by column type and position so the
 * skeleton does not look like a uniform grid.
 */
export function LoadingRows({ columns }: LoadingRowsProps) {
  // Base skeleton width per column type; anything unrecognized gets 120.
  const baseWidths: Record<string, number> = {
    json: 200,
    string: 160,
    number: 80,
    boolean: 50,
    date: 100,
  }
  return (
    <>
      {Array.from({ length: 25 }).map((_, rowIndex) => (
        <TableRow key={rowIndex}>
          <TableCell>
            <Skeleton className='h-[14px] w-[14px]' />
          </TableCell>
          {columns.map((col, colIndex) => {
            const base = baseWidths[col.type] ?? 120
            // Cycle through three width offsets (0/20/40) per cell position.
            const width = base + ((rowIndex + colIndex) % 3) * 20
            return (
              <TableCell key={col.name}>
                <Skeleton className='h-[16px]' style={{ width: `${width}px` }} />
              </TableCell>
            )
          })}
        </TableRow>
      ))}
    </>
  )
}
interface EmptyRowsProps {
  columnCount: number
  hasFilter: boolean
  onAddRow: () => void
}

/**
 * Placeholder row shown when the table has no data.
 * With an active filter, shows a "no matches" message; otherwise offers
 * an "Add first row" action.
 */
export function EmptyRows({ columnCount, hasFilter, onAddRow }: EmptyRowsProps) {
  return (
    <TableRow>
      {/* +1 accounts for the leading selection-checkbox column. */}
      <TableCell colSpan={columnCount + 1} className='h-[160px]'>
        <div className='-translate-x-1/2 fixed left-1/2'>
          <div className='flex flex-col items-center gap-[12px]'>
            <span className='text-[13px] text-[var(--text-tertiary)]'>
              {hasFilter ? 'No rows match your filter' : 'No data'}
            </span>
            {!hasFilter && (
              <Button variant='default' size='sm' onClick={onAddRow}>
                <Plus className='mr-[4px] h-[12px] w-[12px]' />
                Add first row
              </Button>
            )}
          </div>
        </div>
      </TableCell>
    </TableRow>
  )
}

View File

@@ -1,99 +0,0 @@
import type { ColumnDefinition } from '@/lib/table'
import { STRING_TRUNCATE_LENGTH } from '../lib/constants'
import type { CellViewerData } from '../lib/types'
interface CellRendererProps {
  value: unknown
  column: ColumnDefinition
  onCellClick: (columnName: string, value: unknown, type: CellViewerData['type']) => void
}

/**
 * Renders a single table cell according to its column type.
 * json/date/long-string values render as clickable buttons that open the
 * cell viewer via onCellClick; booleans and numbers render inline.
 */
export function CellRenderer({ value, column, onCellClick }: CellRendererProps) {
  const isNull = value === null || value === undefined
  if (isNull) {
    // NOTE(review): span body appears empty here — possibly a placeholder
    // glyph (e.g. an en-dash) lost in extraction; confirm against source.
    return <span className='text-[var(--text-muted)] italic'></span>
  }
  if (column.type === 'json') {
    const jsonStr = JSON.stringify(value)
    return (
      <button
        type='button'
        className='block max-w-[300px] cursor-pointer select-none truncate rounded-[4px] border border-[var(--border-1)] px-[6px] py-[2px] text-left font-mono text-[11px] text-[var(--text-secondary)] transition-colors hover:border-[var(--text-muted)] hover:text-[var(--text-primary)]'
        onClick={(e) => {
          e.preventDefault()
          e.stopPropagation()
          onCellClick(column.name, value, 'json')
        }}
        title='Click to view full JSON'
      >
        {jsonStr}
      </button>
    )
  }
  if (column.type === 'boolean') {
    // Boolean(value) coerces truthy stored values (e.g. 1) to true.
    const boolValue = Boolean(value)
    return (
      <span className={boolValue ? 'text-green-500' : 'text-[var(--text-tertiary)]'}>
        {boolValue ? 'true' : 'false'}
      </span>
    )
  }
  if (column.type === 'number') {
    return (
      <span className='font-mono text-[12px] text-[var(--text-secondary)]'>{String(value)}</span>
    )
  }
  if (column.type === 'date') {
    try {
      const date = new Date(String(value))
      const formatted = date.toLocaleDateString('en-US', {
        year: 'numeric',
        month: 'short',
        day: 'numeric',
        hour: '2-digit',
        minute: '2-digit',
      })
      return (
        <button
          type='button'
          className='cursor-pointer select-none text-left text-[12px] text-[var(--text-secondary)] underline decoration-[var(--border-1)] decoration-dotted underline-offset-2 transition-colors hover:text-[var(--text-primary)] hover:decoration-[var(--text-muted)]'
          onClick={(e) => {
            e.preventDefault()
            e.stopPropagation()
            onCellClick(column.name, value, 'date')
          }}
          title='Click to view ISO format'
        >
          {formatted}
        </button>
      )
    } catch {
      // Unparseable date: fall back to raw text.
      return <span className='text-[var(--text-primary)]'>{String(value)}</span>
    }
  }
  // Default (string-like) path: truncate long strings behind a click-to-view.
  const strValue = String(value)
  if (strValue.length > STRING_TRUNCATE_LENGTH) {
    return (
      <button
        type='button'
        className='block max-w-[300px] cursor-pointer select-none truncate text-left text-[var(--text-primary)] underline decoration-[var(--border-1)] decoration-dotted underline-offset-2 transition-colors hover:decoration-[var(--text-muted)]'
        onClick={(e) => {
          e.preventDefault()
          e.stopPropagation()
          onCellClick(column.name, value, 'text')
        }}
        title='Click to view full text'
      >
        {strValue}
      </button>
    )
  }
  return <span className='text-[var(--text-primary)]'>{strValue}</span>
}

View File

@@ -1,84 +0,0 @@
import { Copy, X } from 'lucide-react'
import { Badge, Button, Modal, ModalBody, ModalContent } from '@/components/emcn'
import type { CellViewerData } from '../lib/types'
interface CellViewerModalProps {
  cellViewer: CellViewerData | null
  onClose: () => void
  onCopy: () => void
  copied: boolean
}

/**
 * Modal that shows a cell's full value: pretty-printed JSON, a date in
 * both formatted and ISO forms, or plain text. Renders nothing when no
 * cell is selected.
 */
export function CellViewerModal({ cellViewer, onClose, onCopy, copied }: CellViewerModalProps) {
  if (!cellViewer) return null
  return (
    <Modal open={!!cellViewer} onOpenChange={(open) => !open && onClose()}>
      <ModalContent className='w-[640px] duration-100'>
        {/* Header: column name, type badge, copy/close actions. */}
        <div className='flex items-center justify-between gap-[8px] px-[16px] py-[10px]'>
          <div className='flex min-w-0 items-center gap-[8px]'>
            <span className='truncate font-medium text-[14px] text-[var(--text-primary)]'>
              {cellViewer.columnName}
            </span>
            <Badge
              variant={
                cellViewer.type === 'json' ? 'blue' : cellViewer.type === 'date' ? 'purple' : 'gray'
              }
              size='sm'
            >
              {cellViewer.type === 'json' ? 'JSON' : cellViewer.type === 'date' ? 'Date' : 'Text'}
            </Badge>
          </div>
          <div className='flex shrink-0 items-center gap-[8px]'>
            {/* Button variant flips briefly to signal a successful copy. */}
            <Button variant={copied ? 'tertiary' : 'default'} size='sm' onClick={onCopy}>
              <Copy className='mr-[4px] h-[12px] w-[12px]' />
              {copied ? 'Copied!' : 'Copy'}
            </Button>
            <Button variant='ghost' size='sm' onClick={onClose}>
              <X className='h-[14px] w-[14px]' />
            </Button>
          </div>
        </div>
        <ModalBody className='p-0'>
          {cellViewer.type === 'json' ? (
            <pre className='m-[16px] max-h-[450px] overflow-auto rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px] font-mono text-[12px] text-[var(--text-primary)] leading-[1.6]'>
              {JSON.stringify(cellViewer.value, null, 2)}
            </pre>
          ) : cellViewer.type === 'date' ? (
            <div className='m-[16px] space-y-[12px]'>
              <div className='rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px]'>
                <div className='mb-[6px] font-medium text-[11px] text-[var(--text-tertiary)] uppercase tracking-wide'>
                  Formatted
                </div>
                <div className='text-[14px] text-[var(--text-primary)]'>
                  {new Date(String(cellViewer.value)).toLocaleDateString('en-US', {
                    weekday: 'long',
                    year: 'numeric',
                    month: 'long',
                    day: 'numeric',
                    hour: '2-digit',
                    minute: '2-digit',
                    second: '2-digit',
                    timeZoneName: 'short',
                  })}
                </div>
              </div>
              <div className='rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px]'>
                <div className='mb-[6px] font-medium text-[11px] text-[var(--text-tertiary)] uppercase tracking-wide'>
                  ISO Format
                </div>
                <div className='font-mono text-[13px] text-[var(--text-secondary)]'>
                  {String(cellViewer.value)}
                </div>
              </div>
            </div>
          ) : (
            <div className='m-[16px] max-h-[450px] overflow-auto whitespace-pre-wrap break-words rounded-[6px] border border-[var(--border)] bg-[var(--surface-4)] p-[16px] text-[13px] text-[var(--text-primary)] leading-[1.7]'>
              {String(cellViewer.value)}
            </div>
          )}
        </ModalBody>
      </ModalContent>
    </Modal>
  )
}

View File

@@ -1,49 +0,0 @@
import { Edit, Trash2 } from 'lucide-react'
import {
Popover,
PopoverAnchor,
PopoverContent,
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
import type { ContextMenuState } from '../lib/types'
interface ContextMenuProps {
contextMenu: ContextMenuState
onClose: () => void
onEdit: () => void
onDelete: () => void
}
/**
 * Right-click menu for a table row, anchored at the recorded cursor position.
 * Offers "Edit row" / "Delete row"; dismissing the popover invokes onClose.
 */
export function ContextMenu({ contextMenu, onClose, onEdit, onDelete }: ContextMenuProps) {
  // Invisible 1x1 anchor pinned where the user right-clicked.
  const anchorStyle = {
    position: 'fixed' as const,
    left: `${contextMenu.position.x}px`,
    top: `${contextMenu.position.y}px`,
    width: '1px',
    height: '1px',
  }

  // Only closing transitions are interesting; opening is driven by state.
  const handleOpenChange = (open: boolean) => {
    if (!open) {
      onClose()
    }
  }

  return (
    <Popover
      open={contextMenu.isOpen}
      onOpenChange={handleOpenChange}
      variant='secondary'
      size='sm'
      colorScheme='inverted'
    >
      <PopoverAnchor style={anchorStyle} />
      <PopoverContent align='start' side='bottom' sideOffset={4}>
        <PopoverItem onClick={onEdit}>
          <Edit className='mr-[8px] h-[12px] w-[12px]' />
          Edit row
        </PopoverItem>
        <PopoverDivider />
        <PopoverItem onClick={onDelete} className='text-[var(--text-error)]'>
          <Trash2 className='mr-[8px] h-[12px] w-[12px]' />
          Delete row
        </PopoverItem>
      </PopoverContent>
    </Popover>
  )
}

View File

@@ -1,63 +0,0 @@
import { Info, RefreshCw } from 'lucide-react'
import { Badge, Button, Tooltip } from '@/components/emcn'
import { Skeleton } from '@/components/ui/skeleton'
interface HeaderBarProps {
tableName: string
totalCount: number
isLoading: boolean
onNavigateBack: () => void
onShowSchema: () => void
onRefresh: () => void
}
/**
 * Breadcrumb header for the table viewer: back link, table name, row count,
 * plus schema and refresh actions on the right.
 */
export function HeaderBar({
  tableName,
  totalCount,
  isLoading,
  onNavigateBack,
  onShowSchema,
  onRefresh,
}: HeaderBarProps) {
  // Pluralize the badge label once rather than inline in the JSX.
  const rowLabel = totalCount === 1 ? 'row' : 'rows'

  return (
    <div className='flex h-[48px] shrink-0 items-center justify-between border-[var(--border)] border-b px-[16px]'>
      {/* Left: breadcrumb + row-count badge (skeleton while counting). */}
      <div className='flex items-center gap-[8px]'>
        <button
          onClick={onNavigateBack}
          className='text-[13px] text-[var(--text-tertiary)] transition-colors hover:text-[var(--text-primary)]'
        >
          Tables
        </button>
        <span className='text-[var(--text-muted)]'>/</span>
        <span className='font-medium text-[13px] text-[var(--text-primary)]'>{tableName}</span>
        {isLoading ? (
          <Skeleton className='h-[18px] w-[60px] rounded-full' />
        ) : (
          <Badge variant='gray-secondary' size='sm'>
            {totalCount} {rowLabel}
          </Badge>
        )}
      </div>
      {/* Right: icon-only actions, each explained via tooltip. */}
      <div className='flex items-center gap-[8px]'>
        <Tooltip.Root>
          <Tooltip.Trigger asChild>
            <Button variant='ghost' size='sm' onClick={onShowSchema}>
              <Info className='h-[14px] w-[14px]' />
            </Button>
          </Tooltip.Trigger>
          <Tooltip.Content>View Schema</Tooltip.Content>
        </Tooltip.Root>
        <Tooltip.Root>
          <Tooltip.Trigger asChild>
            <Button variant='ghost' size='sm' onClick={onRefresh}>
              <RefreshCw className='h-[14px] w-[14px]' />
            </Button>
          </Tooltip.Trigger>
          <Tooltip.Content>Refresh</Tooltip.Content>
        </Tooltip.Root>
      </div>
    </div>
  )
}

View File

@@ -1,11 +0,0 @@
export * from './action-bar'
export * from './body-states'
export * from './cell-renderer'
export * from './cell-viewer-modal'
export * from './context-menu'
export * from './header-bar'
export * from './pagination'
export * from './query-builder'
export * from './row-modal'
export * from './schema-modal'
export * from './table-viewer'

View File

@@ -1,40 +0,0 @@
import { Button } from '@/components/emcn'
interface PaginationProps {
currentPage: number
totalPages: number
totalCount: number
onPreviousPage: () => void
onNextPage: () => void
}
/**
 * Previous/Next pager footer. Renders nothing when everything fits on one page.
 * currentPage is zero-based; the label shows it one-based.
 */
export function Pagination({
  currentPage,
  totalPages,
  totalCount,
  onPreviousPage,
  onNextPage,
}: PaginationProps) {
  if (totalPages <= 1) return null

  const isFirstPage = currentPage === 0
  const isLastPage = currentPage === totalPages - 1

  return (
    <div className='flex h-[40px] shrink-0 items-center justify-between border-[var(--border)] border-t px-[16px]'>
      <span className='text-[11px] text-[var(--text-tertiary)]'>
        Page {currentPage + 1} of {totalPages} ({totalCount} rows)
      </span>
      <div className='flex items-center gap-[4px]'>
        <Button variant='ghost' size='sm' onClick={onPreviousPage} disabled={isFirstPage}>
          Previous
        </Button>
        <Button variant='ghost' size='sm' onClick={onNextPage} disabled={isLastPage}>
          Next
        </Button>
      </div>
    </div>
  )
}

View File

@@ -1,89 +0,0 @@
'use client'
import { X } from 'lucide-react'
import { Button, Combobox, Input } from '@/components/emcn'
import type { FilterRule } from '@/lib/table/query-builder/constants'
interface FilterRowProps {
rule: FilterRule
index: number
columnOptions: Array<{ value: string; label: string }>
comparisonOptions: Array<{ value: string; label: string }>
logicalOptions: Array<{ value: string; label: string }>
onUpdate: (id: string, field: keyof FilterRule, value: string) => void
onRemove: (id: string) => void
onApply: () => void
}
export function FilterRow({
rule,
index,
columnOptions,
comparisonOptions,
logicalOptions,
onUpdate,
onRemove,
onApply,
}: FilterRowProps) {
return (
<div className='flex items-center gap-[8px]'>
<Button
variant='ghost'
size='sm'
onClick={() => onRemove(rule.id)}
className='h-[28px] w-[28px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
>
<X className='h-[12px] w-[12px]' />
</Button>
<div className='w-[80px] shrink-0'>
{index === 0 ? (
<Combobox
size='sm'
options={[{ value: 'where', label: 'where' }]}
value='where'
disabled
/>
) : (
<Combobox
size='sm'
options={logicalOptions}
value={rule.logicalOperator}
onChange={(value) => onUpdate(rule.id, 'logicalOperator', value as 'and' | 'or')}
/>
)}
</div>
<div className='w-[140px] shrink-0'>
<Combobox
size='sm'
options={columnOptions}
value={rule.column}
onChange={(value) => onUpdate(rule.id, 'column', value)}
placeholder='Column'
/>
</div>
<div className='w-[130px] shrink-0'>
<Combobox
size='sm'
options={comparisonOptions}
value={rule.operator}
onChange={(value) => onUpdate(rule.id, 'operator', value)}
/>
</div>
<Input
className='h-[28px] min-w-[200px] flex-1 text-[12px]'
value={rule.value}
onChange={(e) => onUpdate(rule.id, 'value', e.target.value)}
placeholder='Value'
onKeyDown={(e) => {
if (e.key === 'Enter') {
onApply()
}
}}
/>
</div>
)
}

View File

@@ -1,137 +0,0 @@
'use client'
import { useCallback, useMemo, useState } from 'react'
import { ArrowUpAZ, Loader2, Plus } from 'lucide-react'
import { nanoid } from 'nanoid'
import { Button } from '@/components/emcn'
import type { FilterRule, SortRule } from '@/lib/table/query-builder/constants'
import { filterRulesToFilter, sortRuleToSort } from '@/lib/table/query-builder/converters'
import { useFilterBuilder } from '@/lib/table/query-builder/use-query-builder'
import type { ColumnDefinition } from '@/lib/table/types'
import type { QueryOptions } from '../../lib/types'
import { FilterRow } from './filter-row'
import { SortRow } from './sort-row'
type Column = Pick<ColumnDefinition, 'name' | 'type'>
interface QueryBuilderProps {
columns: Column[]
onApply: (options: QueryOptions) => void
onAddRow: () => void
isLoading?: boolean
}
/**
 * Toolbar for composing row filters and (at most) one sort rule, plus the
 * "Add row" entry point. Rule state is local; nothing reaches the parent
 * until Apply converts it into QueryOptions via onApply, or Clear resets it.
 */
export function QueryBuilder({ columns, onApply, onAddRow, isLoading = false }: QueryBuilderProps) {
  const [rules, setRules] = useState<FilterRule[]>([])
  // Single optional sort; null means "no explicit ordering".
  const [sortRule, setSortRule] = useState<SortRule | null>(null)

  const columnOptions = useMemo(
    () => columns.map((col) => ({ value: col.name, label: col.name })),
    [columns]
  )

  // Shared rule-editing machinery (options + add/remove/update handlers).
  const {
    comparisonOptions,
    logicalOptions,
    sortDirectionOptions,
    addRule: handleAddRule,
    removeRule: handleRemoveRule,
    updateRule: handleUpdateRule,
  } = useFilterBuilder({
    columns: columnOptions,
    rules,
    setRules,
  })

  // Seed the sort on the first column, ascending by default.
  const handleAddSort = useCallback(() => {
    setSortRule({
      id: nanoid(),
      column: columns[0]?.name || '',
      direction: 'asc',
    })
  }, [columns])

  const handleRemoveSort = useCallback(() => {
    setSortRule(null)
  }, [])

  // Translate UI rule state into the query shape the data layer expects.
  const handleApply = useCallback(() => {
    const filter = filterRulesToFilter(rules)
    const sort = sortRuleToSort(sortRule)
    onApply({ filter, sort })
  }, [rules, sortRule, onApply])

  // Clear resets local state AND immediately re-queries without filter/sort.
  const handleClear = useCallback(() => {
    setRules([])
    setSortRule(null)
    onApply({
      filter: null,
      sort: null,
    })
  }, [onApply])

  // Apply/Clear controls only appear once there is something to apply.
  const hasChanges = rules.length > 0 || sortRule !== null

  return (
    <div className='flex flex-col gap-[8px]'>
      {rules.map((rule, index) => (
        <FilterRow
          key={rule.id}
          rule={rule}
          index={index}
          columnOptions={columnOptions}
          comparisonOptions={comparisonOptions}
          logicalOptions={logicalOptions}
          onUpdate={handleUpdateRule}
          onRemove={handleRemoveRule}
          onApply={handleApply}
        />
      ))}
      {sortRule && (
        <SortRow
          sortRule={sortRule}
          columnOptions={columnOptions}
          sortDirectionOptions={sortDirectionOptions}
          onChange={setSortRule}
          onRemove={handleRemoveSort}
        />
      )}
      <div className='flex items-center gap-[8px]'>
        <Button variant='default' size='sm' onClick={onAddRow}>
          <Plus className='mr-[4px] h-[12px] w-[12px]' />
          Add row
        </Button>
        <Button variant='default' size='sm' onClick={handleAddRule}>
          <Plus className='mr-[4px] h-[12px] w-[12px]' />
          Add filter
        </Button>
        {/* Only one sort is supported, so hide the button while one exists. */}
        {!sortRule && (
          <Button variant='default' size='sm' onClick={handleAddSort}>
            <ArrowUpAZ className='mr-[4px] h-[12px] w-[12px]' />
            Add sort
          </Button>
        )}
        {hasChanges && (
          <>
            <Button variant='default' size='sm' onClick={handleApply} disabled={isLoading}>
              {isLoading && <Loader2 className='mr-[4px] h-[12px] w-[12px] animate-spin' />}
              {isLoading ? 'Applying...' : 'Apply'}
            </Button>
            <button
              onClick={handleClear}
              className='text-[12px] text-[var(--text-tertiary)] transition-colors hover:text-[var(--text-primary)]'
            >
              Clear all
            </button>
          </>
        )}
      </div>
    </div>
  )
}

View File

@@ -1,65 +0,0 @@
'use client'
import { ArrowDownAZ, ArrowUpAZ, X } from 'lucide-react'
import { Button, Combobox } from '@/components/emcn'
import type { SortRule } from '@/lib/table/query-builder/constants'
interface SortRowProps {
sortRule: SortRule
columnOptions: Array<{ value: string; label: string }>
sortDirectionOptions: Array<{ value: string; label: string }>
onChange: (rule: SortRule | null) => void
onRemove: () => void
}
/**
 * The single sort rule row: remove button, fixed "order" label, column picker,
 * direction picker, and a trailing icon reflecting the chosen direction.
 */
export function SortRow({
  sortRule,
  columnOptions,
  sortDirectionOptions,
  onChange,
  onRemove,
}: SortRowProps) {
  // Pick the component reference once; JSX renders whichever is selected.
  const DirectionIcon = sortRule.direction === 'asc' ? ArrowUpAZ : ArrowDownAZ

  return (
    <div className='flex items-center gap-[8px]'>
      <Button
        variant='ghost'
        size='sm'
        onClick={onRemove}
        className='h-[28px] w-[28px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
      >
        <X className='h-[12px] w-[12px]' />
      </Button>
      <div className='w-[80px] shrink-0'>
        <Combobox size='sm' options={[{ value: 'order', label: 'order' }]} value='order' disabled />
      </div>
      <div className='w-[140px] shrink-0'>
        <Combobox
          size='sm'
          options={columnOptions}
          value={sortRule.column}
          onChange={(value) => onChange({ ...sortRule, column: value })}
          placeholder='Column'
        />
      </div>
      <div className='w-[130px] shrink-0'>
        <Combobox
          size='sm'
          options={sortDirectionOptions}
          value={sortRule.direction}
          onChange={(value) => onChange({ ...sortRule, direction: value as 'asc' | 'desc' })}
        />
      </div>
      <div className='flex items-center text-[12px] text-[var(--text-tertiary)]'>
        <DirectionIcon className='h-[14px] w-[14px]' />
      </div>
    </div>
  )
}

View File

@@ -1,363 +0,0 @@
'use client'
import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { AlertCircle } from 'lucide-react'
import { useParams } from 'next/navigation'
import {
Button,
Checkbox,
Input,
Label,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Textarea,
} from '@/components/emcn'
import type { ColumnDefinition, TableInfo, TableRow } from '@/lib/table'
import {
useCreateTableRow,
useDeleteTableRow,
useDeleteTableRows,
useUpdateTableRow,
} from '@/hooks/queries/tables'
const logger = createLogger('RowModal')
export interface RowModalProps {
mode: 'add' | 'edit' | 'delete'
isOpen: boolean
onClose: () => void
table: TableInfo
row?: TableRow
rowIds?: string[]
onSuccess: () => void
}
/**
 * Builds blank form state for a new row: boolean columns start as false,
 * every other column starts as an empty string.
 */
function createInitialRowData(columns: ColumnDefinition[]): Record<string, unknown> {
  return Object.fromEntries(
    columns.map((col) => [col.name, col.type === 'boolean' ? false : ''])
  )
}
/**
 * Normalizes raw form state (all strings/booleans) into API-ready row data,
 * keyed by column name:
 * - number: '' -> null, otherwise converted via Number()
 * - json:   '' -> null, string input parsed, non-string values passed through
 * - boolean: coerced with Boolean()
 * - anything else: falsy values collapse to null
 *
 * @throws Error when a json field holds invalid JSON or a number field is
 *         not numeric (previously a non-numeric string slipped through as NaN).
 */
function cleanRowData(
  columns: ColumnDefinition[],
  rowData: Record<string, unknown>
): Record<string, unknown> {
  const cleanData: Record<string, unknown> = {}
  columns.forEach((col) => {
    const value = rowData[col.name]
    if (col.type === 'number') {
      if (value === '') {
        cleanData[col.name] = null
      } else {
        const parsed = Number(value)
        // Reject non-numeric text instead of silently sending NaN to the API.
        if (Number.isNaN(parsed)) {
          throw new Error(`Invalid number for field: ${col.name}`)
        }
        cleanData[col.name] = parsed
      }
    } else if (col.type === 'json') {
      if (typeof value === 'string') {
        if (value === '') {
          cleanData[col.name] = null
        } else {
          try {
            cleanData[col.name] = JSON.parse(value)
          } catch {
            throw new Error(`Invalid JSON for field: ${col.name}`)
          }
        }
      } else {
        // Already-parsed values (e.g. from edit mode) pass through untouched.
        cleanData[col.name] = value
      }
    } else if (col.type === 'boolean') {
      cleanData[col.name] = Boolean(value)
    } else {
      cleanData[col.name] = value || null
    }
  })
  return cleanData
}
/**
 * Converts a stored cell value into the string an <input>/<textarea> can show.
 * json objects are pretty-printed, dates are reduced to yyyy-mm-dd (what a
 * native date input expects), everything else is stringified as-is.
 */
function formatValueForInput(value: unknown, type: string): string {
  if (value == null) return ''
  if (type === 'json') {
    // Strings are assumed to already contain JSON text.
    return typeof value === 'string' ? value : JSON.stringify(value, null, 2)
  }
  if (type === 'date' && value) {
    try {
      return new Date(String(value)).toISOString().split('T')[0]
    } catch {
      // Unparseable dates fall back to their raw string form.
      return String(value)
    }
  }
  return String(value)
}
/**
 * Picks the starting form state for the modal: blank defaults when adding,
 * the existing row's data when editing, and an empty object otherwise
 * (delete mode, or edit without a row).
 */
function getInitialRowData(
  mode: RowModalProps['mode'],
  columns: ColumnDefinition[],
  row?: TableRow
): Record<string, unknown> {
  switch (mode) {
    case 'add':
      return columns.length > 0 ? createInitialRowData(columns) : {}
    case 'edit':
      return row ? row.data : {}
    default:
      return {}
  }
}
/**
 * Shared modal for adding, editing, or bulk-deleting rows of a user table.
 * mode selects the UI: 'delete' renders a confirmation dialog, 'add'/'edit'
 * render a per-column form. onSuccess fires after the mutation resolves;
 * errors are caught and shown inline instead of propagating.
 */
export function RowModal({ mode, isOpen, onClose, table, row, rowIds, onSuccess }: RowModalProps) {
  const params = useParams()
  const workspaceId = params.workspaceId as string
  const tableId = table.id

  const schema = table?.schema
  const columns = schema?.columns || []

  // Lazy initializer: seeded once from mode/row on first mount.
  const [rowData, setRowData] = useState<Record<string, unknown>>(() =>
    getInitialRowData(mode, columns, row)
  )
  const [error, setError] = useState<string | null>(null)

  const createRowMutation = useCreateTableRow({ workspaceId, tableId })
  const updateRowMutation = useUpdateTableRow({ workspaceId, tableId })
  const deleteRowMutation = useDeleteTableRow({ workspaceId, tableId })
  const deleteRowsMutation = useDeleteTableRows({ workspaceId, tableId })

  // Any in-flight mutation disables the footer buttons.
  const isSubmitting =
    createRowMutation.isPending ||
    updateRowMutation.isPending ||
    deleteRowMutation.isPending ||
    deleteRowsMutation.isPending

  const handleFormSubmit = async (e: React.FormEvent) => {
    e.preventDefault()
    setError(null)
    try {
      // cleanRowData throws on invalid field input; surfaced via setError below.
      const cleanData = cleanRowData(columns, rowData)
      if (mode === 'add') {
        await createRowMutation.mutateAsync(cleanData)
      } else if (mode === 'edit' && row) {
        await updateRowMutation.mutateAsync({ rowId: row.id, data: cleanData })
      }
      onSuccess()
    } catch (err) {
      logger.error(`Failed to ${mode} row:`, err)
      setError(err instanceof Error ? err.message : `Failed to ${mode} row`)
    }
  }

  const handleDelete = async () => {
    setError(null)
    // Bulk selection (rowIds) wins; otherwise fall back to the single row prop.
    const idsToDelete = rowIds ?? (row ? [row.id] : [])
    try {
      // Single deletions use the lighter single-row endpoint.
      if (idsToDelete.length === 1) {
        await deleteRowMutation.mutateAsync(idsToDelete[0])
      } else {
        await deleteRowsMutation.mutateAsync(idsToDelete)
      }
      onSuccess()
    } catch (err) {
      logger.error('Failed to delete row(s):', err)
      setError(err instanceof Error ? err.message : 'Failed to delete row(s)')
    }
  }

  // Reset local state so a reopened modal starts clean.
  const handleClose = () => {
    setRowData({})
    setError(null)
    onClose()
  }

  // Delete mode UI
  if (mode === 'delete') {
    const deleteCount = rowIds?.length ?? (row ? 1 : 0)
    const isSingleRow = deleteCount === 1
    return (
      <Modal open={isOpen} onOpenChange={handleClose}>
        <ModalContent className='w-[480px]'>
          <ModalHeader>
            <div className='flex items-center gap-[10px]'>
              <div className='flex h-[36px] w-[36px] items-center justify-center rounded-[8px] bg-[var(--bg-error)] text-[var(--text-error)]'>
                <AlertCircle className='h-[18px] w-[18px]' />
              </div>
              <h2 className='font-semibold text-[16px]'>
                Delete {isSingleRow ? 'Row' : `${deleteCount} Rows`}
              </h2>
            </div>
          </ModalHeader>
          <ModalBody>
            <div className='flex flex-col gap-[16px]'>
              <ErrorMessage error={error} />
              <p className='text-[14px] text-[var(--text-secondary)]'>
                Are you sure you want to delete {isSingleRow ? 'this row' : 'these rows'}? This
                action cannot be undone.
              </p>
            </div>
          </ModalBody>
          <ModalFooter className='gap-[10px]'>
            <Button
              type='button'
              variant='default'
              onClick={handleClose}
              className='min-w-[90px]'
              disabled={isSubmitting}
            >
              Cancel
            </Button>
            <Button
              type='button'
              variant='destructive'
              onClick={handleDelete}
              disabled={isSubmitting}
              className='min-w-[120px]'
            >
              {isSubmitting ? 'Deleting...' : 'Delete'}
            </Button>
          </ModalFooter>
        </ModalContent>
      </Modal>
    )
  }

  // Add/edit mode UI: one ColumnField per schema column.
  const isAddMode = mode === 'add'
  return (
    <Modal open={isOpen} onOpenChange={handleClose}>
      <ModalContent className='w-[600px]'>
        <ModalHeader>
          <div className='flex flex-col gap-[4px]'>
            <h2 className='font-semibold text-[16px]'>{isAddMode ? 'Add New Row' : 'Edit Row'}</h2>
            <p className='font-normal text-[13px] text-[var(--text-tertiary)]'>
              {isAddMode ? 'Fill in the values for' : 'Update values for'} {table?.name ?? 'table'}
            </p>
          </div>
        </ModalHeader>
        <ModalBody className='max-h-[60vh] overflow-y-auto'>
          <form onSubmit={handleFormSubmit} className='flex flex-col gap-[16px]'>
            <ErrorMessage error={error} />
            {columns.map((column) => (
              <ColumnField
                key={column.name}
                column={column}
                value={rowData[column.name]}
                onChange={(value) => setRowData((prev) => ({ ...prev, [column.name]: value }))}
              />
            ))}
          </form>
        </ModalBody>
        <ModalFooter className='gap-[10px]'>
          <Button
            type='button'
            variant='default'
            onClick={handleClose}
            className='min-w-[90px]'
            disabled={isSubmitting}
          >
            Cancel
          </Button>
          <Button
            type='button'
            variant='tertiary'
            onClick={handleFormSubmit}
            disabled={isSubmitting}
            className='min-w-[120px]'
          >
            {isSubmitting
              ? isAddMode
                ? 'Adding...'
                : 'Updating...'
              : isAddMode
                ? 'Add Row'
                : 'Update Row'}
          </Button>
        </ModalFooter>
      </ModalContent>
    </Modal>
  )
}
function ErrorMessage({ error }: { error: string | null }) {
if (!error) return null
return (
<div className='rounded-[8px] border border-[var(--status-error-border)] bg-[var(--status-error-bg)] px-[14px] py-[12px] text-[13px] text-[var(--status-error-text)]'>
{error}
</div>
)
}
interface ColumnFieldProps {
column: ColumnDefinition
value: unknown
onChange: (value: unknown) => void
}
/**
 * Form field for one schema column. The widget follows the column type:
 * checkbox for boolean, textarea for json, typed <input> otherwise.
 */
function ColumnField({ column, value, onChange }: ColumnFieldProps) {
  // Pick the input control that matches the column's declared type.
  let control
  if (column.type === 'boolean') {
    control = (
      <div className='flex items-center gap-[8px]'>
        <Checkbox
          id={column.name}
          checked={Boolean(value)}
          onCheckedChange={(checked) => onChange(checked === true)}
        />
        <Label
          htmlFor={column.name}
          className='font-normal text-[13px] text-[var(--text-tertiary)]'
        >
          {value ? 'True' : 'False'}
        </Label>
      </div>
    )
  } else if (column.type === 'json') {
    control = (
      <Textarea
        id={column.name}
        value={formatValueForInput(value, column.type)}
        onChange={(e) => onChange(e.target.value)}
        placeholder='{"key": "value"}'
        rows={4}
        className='font-mono text-[12px]'
        required={column.required}
      />
    )
  } else {
    control = (
      <Input
        id={column.name}
        type={column.type === 'number' ? 'number' : column.type === 'date' ? 'date' : 'text'}
        value={formatValueForInput(value, column.type)}
        onChange={(e) => onChange(e.target.value)}
        placeholder={`Enter ${column.name}`}
        className='h-[38px]'
        required={column.required}
      />
    )
  }

  return (
    <div className='flex flex-col gap-[8px]'>
      <Label htmlFor={column.name} className='font-medium text-[13px]'>
        {column.name}
        {column.required && <span className='text-[var(--text-error)]'> *</span>}
        {column.unique && (
          <span className='ml-[6px] font-normal text-[11px] text-[var(--text-tertiary)]'>
            (unique)
          </span>
        )}
      </Label>
      {control}
      <div className='text-[12px] text-[var(--text-tertiary)]'>
        Type: {column.type}
        {!column.required && ' (optional)'}
      </div>
    </div>
  )
}

View File

@@ -1,94 +0,0 @@
import {
Badge,
Button,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from '@/components/emcn'
import type { ColumnDefinition } from '@/lib/table'
import { getTypeBadgeVariant } from '../lib/utils'
interface SchemaModalProps {
isOpen: boolean
onClose: () => void
columns: ColumnDefinition[]
tableName?: string
}
/**
 * Read-only modal listing every column of the table schema with its type
 * badge and constraint badges (required/unique).
 */
export function SchemaModal({ isOpen, onClose, columns, tableName }: SchemaModalProps) {
  const columnCount = columns.length
  const countLabel = columnCount === 1 ? 'column' : 'columns'

  // Constraint badges for one column; "None" when unconstrained.
  const renderConstraints = (column: ColumnDefinition) => (
    <div className='flex items-center gap-[6px]'>
      {column.required && (
        <Badge variant='red' size='sm'>
          required
        </Badge>
      )}
      {column.unique && (
        <Badge variant='purple' size='sm'>
          unique
        </Badge>
      )}
      {!column.required && !column.unique && (
        <span className='text-[var(--text-muted)]'>None</span>
      )}
    </div>
  )

  return (
    <Modal open={isOpen} onOpenChange={onClose}>
      <ModalContent size='md'>
        <ModalHeader>Table Schema</ModalHeader>
        <ModalBody className='max-h-[60vh] overflow-y-auto'>
          <div className='mb-[10px] flex items-center justify-between gap-[8px]'>
            {tableName ? (
              <span className='truncate font-medium text-[13px] text-[var(--text-primary)]'>
                {tableName}
              </span>
            ) : (
              <div />
            )}
            <Badge variant='gray' size='sm'>
              {columnCount} {countLabel}
            </Badge>
          </div>
          <Table>
            <TableHeader>
              <TableRow>
                <TableHead>Column</TableHead>
                <TableHead>Type</TableHead>
                <TableHead>Constraints</TableHead>
              </TableRow>
            </TableHeader>
            <TableBody>
              {columns.map((column) => (
                <TableRow key={column.name}>
                  <TableCell className='font-mono'>{column.name}</TableCell>
                  <TableCell>
                    <Badge variant={getTypeBadgeVariant(column.type)} size='sm'>
                      {column.type}
                    </Badge>
                  </TableCell>
                  <TableCell>{renderConstraints(column)}</TableCell>
                </TableRow>
              ))}
            </TableBody>
          </Table>
        </ModalBody>
        <ModalFooter>
          <Button variant='default' onClick={onClose}>
            Close
          </Button>
        </ModalFooter>
      </ModalContent>
    </Modal>
  )
}

View File

@@ -1,308 +0,0 @@
'use client'
import { useCallback, useState } from 'react'
import { useParams, useRouter } from 'next/navigation'
import {
Badge,
Checkbox,
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { TableRow as TableRowType } from '@/lib/table'
import { useContextMenu, useRowSelection, useTableData } from '../hooks'
import type { CellViewerData, QueryOptions } from '../lib/types'
import { ActionBar } from './action-bar'
import { EmptyRows, LoadingRows } from './body-states'
import { CellRenderer } from './cell-renderer'
import { CellViewerModal } from './cell-viewer-modal'
import { ContextMenu } from './context-menu'
import { HeaderBar } from './header-bar'
import { Pagination } from './pagination'
import { QueryBuilder } from './query-builder'
import { RowModal } from './row-modal'
import { SchemaModal } from './schema-modal'
/**
 * Top-level table data browser: header, query builder, selectable row grid,
 * pagination, plus modals for row CRUD, schema inspection, cell viewing, and
 * a right-click context menu. Route params supply workspaceId/tableId.
 */
export function TableViewer() {
  const params = useParams()
  const router = useRouter()
  const workspaceId = params.workspaceId as string
  const tableId = params.tableId as string

  // Active filter/sort; fed into the data hook and reset to page 0 on change.
  const [queryOptions, setQueryOptions] = useState<QueryOptions>({
    filter: null,
    sort: null,
  })
  const [currentPage, setCurrentPage] = useState(0)

  // Modal state: at most one of these drives a visible modal at a time.
  const [showAddModal, setShowAddModal] = useState(false)
  const [editingRow, setEditingRow] = useState<TableRowType | null>(null)
  const [deletingRows, setDeletingRows] = useState<string[]>([])
  const [showSchemaModal, setShowSchemaModal] = useState(false)
  const [cellViewer, setCellViewer] = useState<CellViewerData | null>(null)
  const [copied, setCopied] = useState(false)

  const { tableData, isLoadingTable, rows, totalCount, totalPages, isLoadingRows, refetchRows } =
    useTableData({
      workspaceId,
      tableId,
      queryOptions,
      currentPage,
    })

  const { selectedRows, handleSelectAll, handleSelectRow, clearSelection } = useRowSelection(rows)
  const { contextMenu, handleRowContextMenu, closeContextMenu } = useContextMenu()

  const columns = tableData?.schema?.columns || []
  const selectedCount = selectedRows.size
  const hasSelection = selectedCount > 0
  const isAllSelected = rows.length > 0 && selectedCount === rows.length

  const handleNavigateBack = useCallback(() => {
    router.push(`/workspace/${workspaceId}/tables`)
  }, [router, workspaceId])

  const handleShowSchema = useCallback(() => {
    setShowSchemaModal(true)
  }, [])

  const handleAddRow = useCallback(() => {
    setShowAddModal(true)
  }, [])

  // New filter/sort restarts pagination from the first page.
  const handleApplyQueryOptions = useCallback((options: QueryOptions) => {
    setQueryOptions(options)
    setCurrentPage(0)
  }, [])

  // Snapshot the current selection into the delete-confirmation modal.
  const handleDeleteSelected = useCallback(() => {
    setDeletingRows(Array.from(selectedRows))
  }, [selectedRows])

  const handleContextMenuEdit = useCallback(() => {
    if (contextMenu.row) {
      setEditingRow(contextMenu.row)
    }
    closeContextMenu()
  }, [contextMenu.row, closeContextMenu])

  const handleContextMenuDelete = useCallback(() => {
    if (contextMenu.row) {
      setDeletingRows([contextMenu.row.id])
    }
    closeContextMenu()
  }, [contextMenu.row, closeContextMenu])

  // Copies the viewed cell to the clipboard; "Copied!" feedback lasts 2s.
  const handleCopyCellValue = useCallback(async () => {
    if (cellViewer) {
      let text: string
      if (cellViewer.type === 'json') {
        text = JSON.stringify(cellViewer.value, null, 2)
      } else if (cellViewer.type === 'date') {
        text = String(cellViewer.value)
      } else {
        text = String(cellViewer.value)
      }
      await navigator.clipboard.writeText(text)
      setCopied(true)
      setTimeout(() => setCopied(false), 2000)
    }
  }, [cellViewer])

  const handleCellClick = useCallback(
    (columnName: string, value: unknown, type: CellViewerData['type']) => {
      setCellViewer({ columnName, value, type })
    },
    []
  )

  // Early exits: table metadata still loading, or the table doesn't exist.
  if (isLoadingTable) {
    return (
      <div className='flex h-full items-center justify-center'>
        <span className='text-[13px] text-[var(--text-tertiary)]'>Loading table...</span>
      </div>
    )
  }

  if (!tableData) {
    return (
      <div className='flex h-full items-center justify-center'>
        <span className='text-[13px] text-[var(--text-error)]'>Table not found</span>
      </div>
    )
  }

  return (
    <div className='flex h-full flex-col'>
      <HeaderBar
        tableName={tableData.name}
        totalCount={totalCount}
        isLoading={isLoadingRows}
        onNavigateBack={handleNavigateBack}
        onShowSchema={handleShowSchema}
        onRefresh={refetchRows}
      />
      <div className='flex shrink-0 flex-col gap-[8px] border-[var(--border)] border-b px-[16px] py-[10px]'>
        <QueryBuilder
          columns={columns}
          onApply={handleApplyQueryOptions}
          onAddRow={handleAddRow}
          isLoading={isLoadingRows}
        />
        {hasSelection && (
          <span className='text-[11px] text-[var(--text-tertiary)]'>{selectedCount} selected</span>
        )}
      </div>
      {hasSelection && (
        <ActionBar
          selectedCount={selectedCount}
          onDelete={handleDeleteSelected}
          onClearSelection={clearSelection}
        />
      )}
      {/* Main grid: sticky header, per-row checkbox, context menu on rows. */}
      <div className='flex-1 overflow-auto'>
        <Table>
          <TableHeader className='sticky top-0 z-10 bg-[var(--surface-3)]'>
            <TableRow>
              <TableHead className='w-[40px]'>
                <Checkbox size='sm' checked={isAllSelected} onCheckedChange={handleSelectAll} />
              </TableHead>
              {columns.map((column) => (
                <TableHead key={column.name}>
                  <div className='flex items-center gap-[6px]'>
                    <span className='text-[12px]'>{column.name}</span>
                    <Badge variant='outline' size='sm'>
                      {column.type}
                    </Badge>
                    {column.required && (
                      <span className='text-[10px] text-[var(--text-error)]'>*</span>
                    )}
                  </div>
                </TableHead>
              ))}
            </TableRow>
          </TableHeader>
          <TableBody>
            {isLoadingRows ? (
              <LoadingRows columns={columns} />
            ) : rows.length === 0 ? (
              <EmptyRows
                columnCount={columns.length}
                hasFilter={!!queryOptions.filter}
                onAddRow={handleAddRow}
              />
            ) : (
              rows.map((row) => (
                <TableRow
                  key={row.id}
                  className={cn(
                    'group hover:bg-[var(--surface-4)]',
                    selectedRows.has(row.id) && 'bg-[var(--surface-5)]'
                  )}
                  onContextMenu={(e) => handleRowContextMenu(e, row)}
                >
                  <TableCell>
                    <Checkbox
                      size='sm'
                      checked={selectedRows.has(row.id)}
                      onCheckedChange={() => handleSelectRow(row.id)}
                    />
                  </TableCell>
                  {columns.map((column) => (
                    <TableCell key={column.name}>
                      <div className='max-w-[300px] truncate text-[13px]'>
                        <CellRenderer
                          value={row.data[column.name]}
                          column={column}
                          onCellClick={handleCellClick}
                        />
                      </div>
                    </TableCell>
                  ))}
                </TableRow>
              ))
            )}
          </TableBody>
        </Table>
      </div>
      <Pagination
        currentPage={currentPage}
        totalPages={totalPages}
        totalCount={totalCount}
        onPreviousPage={() => setCurrentPage((p) => Math.max(0, p - 1))}
        onNextPage={() => setCurrentPage((p) => Math.min(totalPages - 1, p + 1))}
      />
      {/* Row CRUD modals; each clears its own trigger state on close/success. */}
      {showAddModal && (
        <RowModal
          mode='add'
          isOpen={true}
          onClose={() => setShowAddModal(false)}
          table={tableData}
          onSuccess={() => {
            setShowAddModal(false)
          }}
        />
      )}
      {editingRow && (
        <RowModal
          mode='edit'
          isOpen={true}
          onClose={() => setEditingRow(null)}
          table={tableData}
          row={editingRow}
          onSuccess={() => {
            setEditingRow(null)
          }}
        />
      )}
      {deletingRows.length > 0 && (
        <RowModal
          mode='delete'
          isOpen={true}
          onClose={() => setDeletingRows([])}
          table={tableData}
          rowIds={deletingRows}
          onSuccess={() => {
            setDeletingRows([])
            clearSelection()
          }}
        />
      )}
      <SchemaModal
        isOpen={showSchemaModal}
        onClose={() => setShowSchemaModal(false)}
        columns={columns}
        tableName={tableData.name}
      />
      <CellViewerModal
        cellViewer={cellViewer}
        onClose={() => setCellViewer(null)}
        onCopy={handleCopyCellValue}
        copied={copied}
      />
      <ContextMenu
        contextMenu={contextMenu}
        onClose={closeContextMenu}
        onEdit={handleContextMenuEdit}
        onDelete={handleContextMenuDelete}
      />
    </div>
  )
}

View File

@@ -1,71 +0,0 @@
'use client'
import { useEffect } from 'react'
import { createLogger } from '@sim/logger'
import { AlertTriangle, ArrowLeft, RefreshCw } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import { Button } from '@/components/emcn'
const logger = createLogger('TableViewerError')
interface TableViewerErrorProps {
error: Error & { digest?: string }
reset: () => void
}
/**
 * Next.js route error boundary for the table viewer. Logs the failure once
 * per error instance and offers "go back" and "try again" (reset) recovery.
 */
export default function TableViewerError({ error, reset }: TableViewerErrorProps) {
  const router = useRouter()
  const params = useParams()
  const workspaceId = params.workspaceId as string

  // Record the failure (message + Next.js digest) whenever the error changes.
  useEffect(() => {
    logger.error('Table viewer error:', { error: error.message, digest: error.digest })
  }, [error])

  // Shared by the breadcrumb button and the "Go back" action below.
  const navigateToTables = () => router.push(`/workspace/${workspaceId}/tables`)

  return (
    <div className='flex h-full flex-1 flex-col'>
      {/* Header */}
      <div className='flex h-[48px] shrink-0 items-center border-[var(--border)] border-b px-[16px]'>
        <button
          onClick={navigateToTables}
          className='flex items-center gap-[6px] text-[13px] text-[var(--text-tertiary)] transition-colors hover:text-[var(--text-primary)]'
        >
          <ArrowLeft className='h-[14px] w-[14px]' />
          Back to Tables
        </button>
      </div>
      {/* Error Content */}
      <div className='flex flex-1 items-center justify-center'>
        <div className='flex flex-col items-center gap-[16px] text-center'>
          <div className='flex h-[48px] w-[48px] items-center justify-center rounded-full bg-[var(--surface-4)]'>
            <AlertTriangle className='h-[24px] w-[24px] text-[var(--text-error)]' />
          </div>
          <div className='flex flex-col gap-[8px]'>
            <h2 className='font-semibold text-[16px] text-[var(--text-primary)]'>
              Failed to load table
            </h2>
            <p className='max-w-[300px] text-[13px] text-[var(--text-tertiary)]'>
              Something went wrong while loading this table. The table may have been deleted or you
              may not have permission to view it.
            </p>
          </div>
          <div className='flex items-center gap-[8px]'>
            <Button variant='ghost' size='sm' onClick={navigateToTables}>
              <ArrowLeft className='mr-[6px] h-[14px] w-[14px]' />
              Go back
            </Button>
            <Button variant='default' size='sm' onClick={reset}>
              <RefreshCw className='mr-[6px] h-[14px] w-[14px]' />
              Try again
            </Button>
          </div>
        </div>
      </div>
    </div>
  )
}

View File

@@ -1,3 +0,0 @@
export * from './use-context-menu'
export * from './use-row-selection'
export * from './use-table-data'

View File

@@ -1,37 +0,0 @@
import { useCallback, useState } from 'react'
import type { TableRow } from '@/lib/table'
import type { ContextMenuState } from '../lib/types'
interface UseContextMenuReturn {
contextMenu: ContextMenuState
handleRowContextMenu: (e: React.MouseEvent, row: TableRow) => void
closeContextMenu: () => void
}
/**
 * Tracks open/closed state and cursor coordinates for the row context menu.
 * Opening records the clicked row; closing keeps position/row so the popover
 * can dismiss in place.
 */
export function useContextMenu(): UseContextMenuReturn {
  const [contextMenu, setContextMenu] = useState<ContextMenuState>({
    isOpen: false,
    position: { x: 0, y: 0 },
    row: null,
  })

  // Open at the cursor, remembering which row was right-clicked. Suppresses
  // the browser's native context menu and stops bubbling to parent handlers.
  const handleRowContextMenu = useCallback((e: React.MouseEvent, row: TableRow) => {
    e.preventDefault()
    e.stopPropagation()
    const position = { x: e.clientX, y: e.clientY }
    setContextMenu({ isOpen: true, position, row })
  }, [])

  // Close without clearing position/row.
  const closeContextMenu = useCallback(() => {
    setContextMenu((prev) => ({ ...prev, isOpen: false }))
  }, [])

  return { contextMenu, handleRowContextMenu, closeContextMenu }
}

View File

@@ -1,65 +0,0 @@
import { useCallback, useMemo, useState } from 'react'
import type { TableRow } from '@/lib/table'
interface UseRowSelectionReturn {
selectedRows: Set<string>
handleSelectAll: () => void
handleSelectRow: (rowId: string) => void
clearSelection: () => void
}
/**
 * Tracks which row ids are checked, pruning the selection whenever the
 * visible row set changes (pagination, filtering, refetch) so stale ids
 * never leak into bulk actions.
 */
export function useRowSelection(rows: TableRow[]): UseRowSelectionReturn {
  const [selectedRows, setSelectedRows] = useState<Set<string>>(new Set())
  const [prevRowsSignature, setPrevRowsSignature] = useState('')

  const currentRowIds = useMemo(() => new Set(rows.map((r) => r.id)), [rows])
  // Cheap identity for the row set; order-sensitive by design.
  const rowsSignature = useMemo(() => rows.map((r) => r.id).join('|'), [rows])

  // Render-phase state adjustment (React's "adjusting state when props
  // change" pattern — intentionally NOT an effect): when the row set
  // changes, drop selections for rows that are no longer visible.
  if (rowsSignature !== prevRowsSignature) {
    setPrevRowsSignature(rowsSignature)
    setSelectedRows((prev) => {
      if (prev.size === 0) return prev
      const filtered = new Set([...prev].filter((id) => currentRowIds.has(id)))
      // Preserve object identity when nothing was pruned.
      return filtered.size !== prev.size ? filtered : prev
    })
  }

  // What callers see: only selections that exist in the current row set.
  const visibleSelectedRows = useMemo(
    () => new Set([...selectedRows].filter((id) => currentRowIds.has(id))),
    [selectedRows, currentRowIds]
  )

  // Toggle between "everything visible selected" and "nothing selected".
  const handleSelectAll = useCallback(() => {
    if (visibleSelectedRows.size === rows.length) {
      setSelectedRows(new Set())
    } else {
      setSelectedRows(new Set(rows.map((r) => r.id)))
    }
  }, [rows, visibleSelectedRows.size])

  const handleSelectRow = useCallback(
    (rowId: string) => {
      setSelectedRows((prev) => {
        // Re-filter against current ids before toggling, for safety.
        const newSet = new Set([...prev].filter((id) => currentRowIds.has(id)))
        if (newSet.has(rowId)) {
          newSet.delete(rowId)
        } else {
          newSet.add(rowId)
        }
        return newSet
      })
    },
    [currentRowIds]
  )

  const clearSelection = useCallback(() => {
    setSelectedRows(new Set())
  }, [])

  return {
    selectedRows: visibleSelectedRows,
    handleSelectAll,
    handleSelectRow,
    clearSelection,
  }
}

View File

@@ -1,58 +0,0 @@
import type { TableDefinition, TableRow } from '@/lib/table'
import { useTable, useTableRows } from '@/hooks/queries/tables'
import { ROWS_PER_PAGE } from '../lib/constants'
import type { QueryOptions } from '../lib/types'
interface UseTableDataParams {
  workspaceId: string
  tableId: string
  queryOptions: QueryOptions
  currentPage: number
}

interface UseTableDataReturn {
  tableData: TableDefinition | undefined
  isLoadingTable: boolean
  rows: TableRow[]
  totalCount: number
  totalPages: number
  isLoadingRows: boolean
  refetchRows: () => void
}

/**
 * Loads a table's definition together with one page of its rows, applying the
 * caller's filter/sort options and offset-based pagination.
 */
export function useTableData({
  workspaceId,
  tableId,
  queryOptions,
  currentPage,
}: UseTableDataParams): UseTableDataReturn {
  const tableQuery = useTable(workspaceId, tableId)

  const rowsQuery = useTableRows({
    workspaceId,
    tableId,
    limit: ROWS_PER_PAGE,
    offset: currentPage * ROWS_PER_PAGE,
    filter: queryOptions.filter,
    sort: queryOptions.sort,
    // Skip fetching until both ids are known.
    enabled: Boolean(workspaceId && tableId),
  })

  const rows = (rowsQuery.data?.rows || []) as TableRow[]
  const totalCount = rowsQuery.data?.totalCount || 0

  return {
    tableData: tableQuery.data,
    isLoadingTable: tableQuery.isLoading,
    rows,
    totalCount,
    totalPages: Math.ceil(totalCount / ROWS_PER_PAGE),
    isLoadingRows: rowsQuery.isLoading,
    refetchRows: rowsQuery.refetch,
  }
}

View File

@@ -1,2 +0,0 @@
/** Page size used for table row pagination. */
export const ROWS_PER_PAGE = 100
/** Maximum characters of a string cell shown inline before truncation. */
export const STRING_TRUNCATE_LENGTH = 50

View File

@@ -1,3 +0,0 @@
/** Barrel exports for the table viewer's lib helpers. */
export * from './constants'
export * from './types'
export * from './utils'

View File

@@ -1,27 +0,0 @@
import type { Filter, Sort, TableRow } from '@/lib/table'
/**
 * Query options for filtering and sorting table data
 */
export interface QueryOptions {
  filter: Filter | null // null = no filter applied
  sort: Sort | null // null = server default ordering
}
/**
 * Data for viewing a cell's full content in a modal
 */
export interface CellViewerData {
  columnName: string
  value: unknown // raw cell value; rendering depends on `type`
  type: 'json' | 'text' | 'date' | 'boolean' | 'number'
}
/**
 * State for the row context menu (right-click)
 */
export interface ContextMenuState {
  isOpen: boolean
  position: { x: number; y: number } // viewport coordinates of the click
  row: TableRow | null // row the menu was opened on; null when closed/unset
}

View File

@@ -1,21 +0,0 @@
type BadgeVariant = 'green' | 'blue' | 'purple' | 'orange' | 'teal' | 'gray'

/**
 * Returns the badge color variant for a column type.
 * Unrecognized types fall back to the neutral 'gray' variant.
 */
export function getTypeBadgeVariant(type: string): BadgeVariant {
  const variantByType: Record<string, BadgeVariant> = {
    string: 'green',
    number: 'blue',
    boolean: 'purple',
    json: 'orange',
    date: 'teal',
  }
  return variantByType[type] ?? 'gray'
}

View File

@@ -1,5 +0,0 @@
import { TableViewer } from './components'
/** Route entry for the table detail page; delegates to the client-side viewer. */
export default function TablePage() {
  return <TableViewer />
}

View File

@@ -1,330 +0,0 @@
'use client'
import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { Plus, Trash2 } from 'lucide-react'
import { nanoid } from 'nanoid'
import { useParams } from 'next/navigation'
import {
Button,
Checkbox,
Combobox,
Input,
Label,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Textarea,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { ColumnDefinition } from '@/lib/table'
import { useCreateTable } from '@/hooks/queries/tables'
const logger = createLogger('CreateModal')
interface CreateModalProps {
  isOpen: boolean
  onClose: () => void
}

/** Options for the column-type dropdown; values mirror ColumnDefinition['type']. */
const COLUMN_TYPE_OPTIONS: Array<{ value: ColumnDefinition['type']; label: string }> = [
  { value: 'string', label: 'String' },
  { value: 'number', label: 'Number' },
  { value: 'boolean', label: 'Boolean' },
  { value: 'date', label: 'Date' },
  { value: 'json', label: 'JSON' },
]

/** ColumnDefinition plus a client-only id used as a stable React list key. */
interface ColumnWithId extends ColumnDefinition {
  id: string
}

/** Builds a blank column row (defaults: string type, required, not unique). */
function createEmptyColumn(): ColumnWithId {
  return { id: nanoid(), name: '', type: 'string', required: true, unique: false }
}
/**
 * Modal form for creating a new table in the current workspace.
 *
 * Collects a table name, optional description, and a list of column
 * definitions, validates them client-side (non-empty name, at least one
 * column, no duplicate column names), then creates the table via the
 * useCreateTable mutation. The form resets on success or on close.
 */
export function CreateModal({ isOpen, onClose }: CreateModalProps) {
  const params = useParams()
  const workspaceId = params.workspaceId as string
  const [tableName, setTableName] = useState('')
  const [description, setDescription] = useState('')
  const [columns, setColumns] = useState<ColumnWithId[]>([createEmptyColumn()])
  const [error, setError] = useState<string | null>(null)
  const createTable = useCreateTable(workspaceId)

  const handleAddColumn = () => {
    setColumns([...columns, createEmptyColumn()])
  }

  // At least one column row must always remain in the form.
  const handleRemoveColumn = (columnId: string) => {
    if (columns.length > 1) {
      setColumns(columns.filter((col) => col.id !== columnId))
    }
  }

  const handleColumnChange = (
    columnId: string,
    field: keyof ColumnDefinition,
    value: string | boolean
  ) => {
    setColumns(columns.map((col) => (col.id === columnId ? { ...col, [field]: value } : col)))
  }

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault()
    setError(null)
    if (!tableName.trim()) {
      setError('Table name is required')
      return
    }
    // Validate column names
    const validColumns = columns.filter((col) => col.name.trim())
    if (validColumns.length === 0) {
      setError('At least one column is required')
      return
    }
    // Check for duplicate column names (case-insensitive)
    const columnNames = validColumns.map((col) => col.name.toLowerCase())
    const uniqueNames = new Set(columnNames)
    if (uniqueNames.size !== columnNames.length) {
      setError('Duplicate column names found')
      return
    }
    // Strip internal IDs before sending to API
    const columnsForApi = validColumns.map(({ id: _id, ...col }) => col)
    try {
      await createTable.mutateAsync({
        name: tableName,
        description: description || undefined,
        schema: {
          columns: columnsForApi,
        },
      })
      // Reset form
      resetForm()
      onClose()
    } catch (err) {
      logger.error('Failed to create table:', err)
      setError(err instanceof Error ? err.message : 'Failed to create table')
    }
  }

  const resetForm = () => {
    setTableName('')
    setDescription('')
    setColumns([createEmptyColumn()])
    setError(null)
  }

  // Closing discards any in-progress input.
  const handleClose = () => {
    resetForm()
    onClose()
  }

  return (
    <Modal open={isOpen} onOpenChange={handleClose}>
      <ModalContent size='lg'>
        <ModalHeader>Create Table</ModalHeader>
        <form onSubmit={handleSubmit} className='flex min-h-0 flex-1 flex-col'>
          <ModalBody>
            <div className='min-h-0 flex-1 overflow-y-auto'>
              <div className='space-y-[12px]'>
                <p className='text-[12px] text-[var(--text-tertiary)]'>
                  Define your table schema with columns and constraints.
                </p>
                {error && (
                  <p className='text-[12px] text-[var(--text-error)] leading-tight'>{error}</p>
                )}
                <div className='flex flex-col gap-[8px]'>
                  <Label htmlFor='tableName'>Name</Label>
                  <Input
                    id='tableName'
                    value={tableName}
                    onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
                      setTableName(e.target.value)
                    }
                    placeholder='customers, orders, products'
                    className={cn(
                      error === 'Table name is required' && 'border-[var(--text-error)]'
                    )}
                    required
                  />
                  <p className='text-[11px] text-[var(--text-muted)]'>
                    Use lowercase with underscores (e.g., customer_orders)
                  </p>
                </div>
                <div className='flex flex-col gap-[8px]'>
                  <Label htmlFor='description'>Description</Label>
                  <Textarea
                    id='description'
                    value={description}
                    onChange={(e: React.ChangeEvent<HTMLTextAreaElement>) =>
                      setDescription(e.target.value)
                    }
                    placeholder='Optional description for this table'
                    rows={3}
                    className='resize-none'
                  />
                </div>
                <div className='space-y-[8px]'>
                  <div className='flex items-center justify-between'>
                    <Label>Columns*</Label>
                    <Button
                      type='button'
                      size='sm'
                      variant='default'
                      onClick={handleAddColumn}
                      className='h-[30px] rounded-[6px] px-[12px] text-[12px]'
                    >
                      <Plus className='mr-[4px] h-[14px] w-[14px]' />
                      Add Column
                    </Button>
                  </div>
                  <div className='space-y-[8px]'>
                    {columns.map((column, index) => (
                      <ColumnRow
                        key={column.id}
                        index={index}
                        column={column}
                        isRemovable={columns.length > 1}
                        onChange={handleColumnChange}
                        onRemove={handleRemoveColumn}
                      />
                    ))}
                  </div>
                  <p className='text-[11px] text-[var(--text-muted)]'>
                    Mark columns as <span className='font-medium'>unique</span> to prevent duplicate
                    values (e.g., id, email)
                  </p>
                </div>
              </div>
            </div>
          </ModalBody>
          <ModalFooter>
            <div className='flex w-full items-center justify-end gap-[8px]'>
              <Button
                type='button'
                variant='default'
                onClick={handleClose}
                disabled={createTable.isPending}
              >
                Cancel
              </Button>
              <Button
                type='submit'
                variant='tertiary'
                disabled={createTable.isPending}
                className='min-w-[120px]'
              >
                {createTable.isPending ? 'Creating...' : 'Create Table'}
              </Button>
            </div>
          </ModalFooter>
        </form>
      </ModalContent>
    </Modal>
  )
}
interface ColumnRowProps {
  index: number
  column: ColumnWithId
  isRemovable: boolean
  onChange: (columnId: string, field: keyof ColumnDefinition, value: string | boolean) => void
  onRemove: (columnId: string) => void
}

/**
 * One editable column definition row in the create-table form:
 * name input, type dropdown, and required/unique checkboxes.
 */
function ColumnRow({ index, column, isRemovable, onChange, onRemove }: ColumnRowProps) {
  return (
    <div className='rounded-[6px] border border-[var(--border-1)] bg-[var(--surface-1)] p-[10px]'>
      <div className='mb-[8px] flex items-center justify-between'>
        <span className='font-medium text-[11px] text-[var(--text-tertiary)]'>
          Column {index + 1}
        </span>
        <Button
          type='button'
          size='sm'
          variant='ghost'
          onClick={() => onRemove(column.id)}
          disabled={!isRemovable}
          className='h-[28px] w-[28px] p-0 text-[var(--text-tertiary)] transition-colors hover:bg-[var(--bg-error)] hover:text-[var(--text-error)]'
        >
          <Trash2 className='h-[15px] w-[15px]' />
        </Button>
      </div>
      <div className='grid grid-cols-[minmax(0,1fr)_120px_76px_76px] items-end gap-[10px]'>
        <div className='flex flex-col gap-[6px]'>
          <Label
            htmlFor={`column-name-${column.id}`}
            className='text-[11px] text-[var(--text-muted)]'
          >
            Name
          </Label>
          <Input
            id={`column-name-${column.id}`}
            value={column.name}
            onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
              onChange(column.id, 'name', e.target.value)
            }
            placeholder='column_name'
            className='h-[36px]'
          />
        </div>
        <div className='flex flex-col gap-[6px]'>
          <Label
            htmlFor={`column-type-${column.id}`}
            className='text-[11px] text-[var(--text-muted)]'
          >
            Type
          </Label>
          <Combobox
            options={COLUMN_TYPE_OPTIONS}
            value={column.type}
            selectedValue={column.type}
            onChange={(value) => onChange(column.id, 'type', value as ColumnDefinition['type'])}
            placeholder='Type'
            editable={false}
            filterOptions={false}
            className='h-[36px]'
          />
        </div>
        <div className='flex flex-col items-center gap-[8px]'>
          <span className='text-[11px] text-[var(--text-tertiary)]'>Required</span>
          <Checkbox
            checked={column.required}
            onCheckedChange={(checked) => onChange(column.id, 'required', checked === true)}
          />
        </div>
        <div className='flex flex-col items-center gap-[8px]'>
          <span className='text-[11px] text-[var(--text-tertiary)]'>Unique</span>
          <Checkbox
            checked={column.unique}
            onCheckedChange={(checked) => onChange(column.id, 'unique', checked === true)}
          />
        </div>
      </div>
    </div>
  )
}

View File

@@ -1,20 +0,0 @@
interface EmptyStateProps {
  hasSearchQuery: boolean
}

/**
 * Placeholder shown when no tables match: either the workspace has no tables
 * yet, or the current search found nothing.
 */
export function EmptyState({ hasSearchQuery }: EmptyStateProps) {
  const title = hasSearchQuery ? 'No tables found' : 'No tables yet'
  const subtitle = hasSearchQuery
    ? 'Try a different search term'
    : 'Create your first table to store structured data for your workflows'

  return (
    <div className='col-span-full flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
      <div className='text-center'>
        <p className='font-medium text-[var(--text-secondary)] text-sm'>{title}</p>
        <p className='mt-1 text-[var(--text-muted)] text-xs'>{subtitle}</p>
      </div>
    </div>
  )
}

View File

@@ -1,16 +0,0 @@
interface ErrorStateProps {
  error: unknown
}

/** Error placeholder for the tables grid; shows the error message when available. */
export function ErrorState({ error }: ErrorStateProps) {
  // Narrow the unknown error; fall back to a generic message.
  const message = error instanceof Error ? error.message : 'An error occurred'

  return (
    <div className='col-span-full flex h-64 items-center justify-center rounded-[4px] bg-[var(--surface-3)] dark:bg-[var(--surface-4)]'>
      <div className='text-center'>
        <p className='font-medium text-[var(--text-secondary)] text-sm'>Error loading tables</p>
        <p className='mt-1 text-[var(--text-muted)] text-xs'>{message}</p>
      </div>
    </div>
  )
}

View File

@@ -1,6 +0,0 @@
/** Barrel exports for the tables list page components. */
export * from './create-modal'
export * from './empty-state'
export * from './error-state'
export * from './loading-state'
export * from './table-card'
export * from './tables-view'

View File

@@ -1,31 +0,0 @@
export function LoadingState() {
return (
<>
{Array.from({ length: 8 }).map((_, i) => (
<div
key={i}
className='flex h-full flex-col gap-[12px] rounded-[4px] bg-[var(--surface-3)] px-[8px] py-[6px] dark:bg-[var(--surface-4)]'
>
<div className='flex items-center justify-between gap-[8px]'>
<div className='h-[17px] w-[120px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
<div className='h-[22px] w-[90px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
</div>
<div className='flex flex-1 flex-col gap-[8px]'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-[12px]'>
<div className='h-[15px] w-[50px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
<div className='h-[15px] w-[50px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
</div>
<div className='h-[15px] w-[60px] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
</div>
<div className='h-0 w-full border-[var(--divider)] border-t' />
<div className='flex h-[36px] flex-col gap-[6px]'>
<div className='h-[15px] w-full animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
<div className='h-[15px] w-[75%] animate-pulse rounded-[4px] bg-[var(--surface-4)] dark:bg-[var(--surface-5)]' />
</div>
</div>
</div>
))}
</>
)
}

View File

@@ -1,171 +0,0 @@
'use client'
import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Columns, Rows3 } from 'lucide-react'
import { useRouter } from 'next/navigation'
import {
Badge,
Button,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Tooltip,
} from '@/components/emcn'
import type { TableDefinition } from '@/lib/table'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { TableContextMenu } from '@/app/workspace/[workspaceId]/tables/components/table-context-menu'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useDeleteTable } from '@/hooks/queries/tables'
import { SchemaModal } from '../[tableId]/components/schema-modal'
import { formatAbsoluteDate, formatRelativeTime } from '../lib/utils'
const logger = createLogger('TableCard')
interface TableCardProps {
  table: TableDefinition
  workspaceId: string
}

/**
 * Grid card for a single table: shows name, short id, column/row counts, and
 * description. Clicking (or Enter/Space) navigates to the table; right-click
 * opens a context menu with schema view, copy-id, and delete (permission-gated).
 */
export function TableCard({ table, workspaceId }: TableCardProps) {
  const router = useRouter()
  const userPermissions = useUserPermissionsContext()
  const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false)
  const [isSchemaModalOpen, setIsSchemaModalOpen] = useState(false)
  const deleteTable = useDeleteTable(workspaceId)
  const {
    isOpen: isContextMenuOpen,
    position: contextMenuPosition,
    menuRef,
    handleContextMenu,
    closeMenu: closeContextMenu,
  } = useContextMenu()

  const handleDelete = async () => {
    try {
      await deleteTable.mutateAsync(table.id)
      setIsDeleteDialogOpen(false)
    } catch (error) {
      // Keep the dialog open on failure so the user can retry or cancel.
      logger.error('Failed to delete table:', error)
    }
  }

  const navigateToTable = useCallback(() => {
    router.push(`/workspace/${workspaceId}/tables/${table.id}`)
  }, [router, workspaceId, table.id])

  const columnCount = table.schema.columns.length
  // Short display id derived from the table id, e.g. "tb-1a2b3c4d".
  const shortId = `tb-${table.id.slice(0, 8)}`

  return (
    <>
      <div
        role='button'
        tabIndex={0}
        data-table-card
        className='h-full cursor-pointer'
        onClick={(e) => {
          // Suppress navigation while the context menu is open so a click that
          // dismisses the menu doesn't also open the table.
          if (isContextMenuOpen) {
            e.preventDefault()
            return
          }
          navigateToTable()
        }}
        onKeyDown={(e) => {
          if (e.key === 'Enter' || e.key === ' ') {
            e.preventDefault()
            navigateToTable()
          }
        }}
        onContextMenu={handleContextMenu}
      >
        <div className='group flex h-full flex-col gap-[12px] rounded-[4px] bg-[var(--surface-3)] px-[8px] py-[6px] transition-colors hover:bg-[var(--surface-4)] dark:bg-[var(--surface-4)] dark:hover:bg-[var(--surface-5)]'>
          <div className='flex items-center justify-between gap-[8px]'>
            <h3 className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
              {table.name}
            </h3>
            <Badge className='flex-shrink-0 rounded-[4px] text-[12px]'>{shortId}</Badge>
          </div>
          <div className='flex flex-1 flex-col gap-[8px]'>
            <div className='flex items-center justify-between'>
              <div className='flex items-center gap-[12px] text-[12px] text-[var(--text-tertiary)]'>
                <span className='flex items-center gap-[4px]'>
                  <Columns className='h-[12px] w-[12px]' />
                  {columnCount} {columnCount === 1 ? 'col' : 'cols'}
                </span>
                <span className='flex items-center gap-[4px]'>
                  <Rows3 className='h-[12px] w-[12px]' />
                  {table.rowCount} {table.rowCount === 1 ? 'row' : 'rows'}
                </span>
              </div>
              {/* Relative timestamp with the absolute date in a tooltip. */}
              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <span className='text-[12px] text-[var(--text-tertiary)]'>
                    {formatRelativeTime(table.updatedAt)}
                  </span>
                </Tooltip.Trigger>
                <Tooltip.Content>{formatAbsoluteDate(table.updatedAt)}</Tooltip.Content>
              </Tooltip.Root>
            </div>
            <div className='h-0 w-full border-[var(--divider)] border-t' />
            <p className='line-clamp-2 h-[36px] text-[12px] text-[var(--text-tertiary)] leading-[18px]'>
              {table.description || 'No description'}
            </p>
          </div>
        </div>
      </div>
      <TableContextMenu
        isOpen={isContextMenuOpen}
        position={contextMenuPosition}
        menuRef={menuRef}
        onClose={closeContextMenu}
        onViewSchema={() => setIsSchemaModalOpen(true)}
        onCopyId={() => navigator.clipboard.writeText(table.id)}
        onDelete={() => setIsDeleteDialogOpen(true)}
        disableDelete={userPermissions.canEdit !== true}
      />
      {/* Delete Confirmation Modal */}
      <Modal open={isDeleteDialogOpen} onOpenChange={setIsDeleteDialogOpen}>
        <ModalContent className='w-[400px]'>
          <ModalHeader>Delete Table</ModalHeader>
          <ModalBody>
            <p className='text-[12px] text-[var(--text-secondary)]'>
              Are you sure you want to delete{' '}
              <span className='font-medium text-[var(--text-primary)]'>{table.name}</span>? This
              will permanently delete all {table.rowCount} rows.{' '}
              <span className='text-[var(--text-error)]'>This action cannot be undone.</span>
            </p>
          </ModalBody>
          <ModalFooter>
            <Button
              variant='default'
              onClick={() => setIsDeleteDialogOpen(false)}
              disabled={deleteTable.isPending}
            >
              Cancel
            </Button>
            <Button variant='default' onClick={handleDelete} disabled={deleteTable.isPending}>
              {deleteTable.isPending ? 'Deleting...' : 'Delete'}
            </Button>
          </ModalFooter>
        </ModalContent>
      </Modal>
      {/* Schema Viewer Modal */}
      <SchemaModal
        isOpen={isSchemaModalOpen}
        onClose={() => setIsSchemaModalOpen(false)}
        columns={table.schema.columns}
        tableName={table.name}
      />
    </>
  )
}

View File

@@ -1,85 +0,0 @@
'use client'
import {
Popover,
PopoverAnchor,
PopoverContent,
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
interface TableContextMenuProps {
  isOpen: boolean
  position: { x: number; y: number } // viewport coordinates to anchor the menu at
  menuRef: React.RefObject<HTMLDivElement | null>
  onClose: () => void
  onViewSchema?: () => void
  onCopyId?: () => void
  onDelete?: () => void
  disableDelete?: boolean
}

/**
 * Right-click context menu for a table card. Items (View Schema, Copy ID,
 * Delete) render only when their handler is provided; dividers appear only
 * between rendered items. Anchored at a fixed viewport position.
 */
export function TableContextMenu({
  isOpen,
  position,
  menuRef,
  onClose,
  onViewSchema,
  onCopyId,
  onDelete,
  disableDelete = false,
}: TableContextMenuProps) {
  return (
    <Popover
      open={isOpen}
      onOpenChange={(open) => !open && onClose()}
      variant='secondary'
      size='sm'
    >
      {/* 1x1 invisible anchor placed at the click position. */}
      <PopoverAnchor
        style={{
          position: 'fixed',
          left: `${position.x}px`,
          top: `${position.y}px`,
          width: '1px',
          height: '1px',
        }}
      />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {onViewSchema && (
          <PopoverItem
            onClick={() => {
              onViewSchema()
              onClose()
            }}
          >
            View Schema
          </PopoverItem>
        )}
        {onViewSchema && (onCopyId || onDelete) && <PopoverDivider />}
        {onCopyId && (
          <PopoverItem
            onClick={() => {
              onCopyId()
              onClose()
            }}
          >
            Copy ID
          </PopoverItem>
        )}
        {onCopyId && onDelete && <PopoverDivider />}
        {onDelete && (
          <PopoverItem
            disabled={disableDelete}
            onClick={() => {
              onDelete()
              onClose()
            }}
          >
            Delete
          </PopoverItem>
        )}
      </PopoverContent>
    </Popover>
  )
}

View File

@@ -1,53 +0,0 @@
'use client'
import { Popover, PopoverAnchor, PopoverContent, PopoverItem } from '@/components/emcn'
interface TablesListContextMenuProps {
  isOpen: boolean
  position: { x: number; y: number } // viewport coordinates to anchor the menu at
  menuRef: React.RefObject<HTMLDivElement | null>
  onClose: () => void
  onCreateTable?: () => void
  disableCreate?: boolean
}

/**
 * Context menu for right-clicking the empty area of the tables list.
 * Offers a single "Create table" action, gated by edit permission.
 */
export function TablesListContextMenu({
  isOpen,
  position,
  menuRef,
  onClose,
  onCreateTable,
  disableCreate = false,
}: TablesListContextMenuProps) {
  return (
    <Popover
      open={isOpen}
      onOpenChange={(open) => !open && onClose()}
      variant='secondary'
      size='sm'
    >
      {/* 1x1 invisible anchor placed at the click position. */}
      <PopoverAnchor
        style={{
          position: 'fixed',
          left: `${position.x}px`,
          top: `${position.y}px`,
          width: '1px',
          height: '1px',
        }}
      />
      <PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
        {onCreateTable && (
          <PopoverItem
            disabled={disableCreate}
            onClick={() => {
              onCreateTable()
              onClose()
            }}
          >
            Create table
          </PopoverItem>
        )}
      </PopoverContent>
    </Popover>
  )
}

View File

@@ -1,141 +0,0 @@
'use client'
import { useCallback, useState } from 'react'
import { Database, Plus, Search } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button, Input, Tooltip } from '@/components/emcn'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useTablesList } from '@/hooks/queries/tables'
import { useDebounce } from '@/hooks/use-debounce'
import { CreateModal } from './create-modal'
import { EmptyState } from './empty-state'
import { ErrorState } from './error-state'
import { LoadingState } from './loading-state'
import { TableCard } from './table-card'
import { TablesListContextMenu } from './tables-list-context-menu'
/**
 * Workspace tables landing view: a searchable grid of table cards with a
 * create-table modal and a right-click context menu on empty areas.
 *
 * Search input is debounced (300ms) before it drives filtering, and the
 * empty-state messaging is keyed off the same debounced value so the message
 * always matches the filter actually applied to the grid.
 */
export function TablesView() {
  const params = useParams()
  const workspaceId = params.workspaceId as string
  const userPermissions = useUserPermissionsContext()
  const { data: tables = [], isLoading, error } = useTablesList(workspaceId)
  const [searchQuery, setSearchQuery] = useState('')
  // Debounce so filtering doesn't run on every keystroke.
  const debouncedSearchQuery = useDebounce(searchQuery, 300)
  const [isCreateModalOpen, setIsCreateModalOpen] = useState(false)
  const {
    isOpen: isListContextMenuOpen,
    position: listContextMenuPosition,
    menuRef: listMenuRef,
    handleContextMenu: handleListContextMenu,
    closeMenu: closeListContextMenu,
  } = useContextMenu()

  // Only open the list-level menu when right-clicking empty space: not on a
  // table card (which has its own menu) and not on interactive controls.
  const handleContentContextMenu = useCallback(
    (e: React.MouseEvent) => {
      const target = e.target as HTMLElement
      const isOnCard = target.closest('[data-table-card]')
      const isOnInteractive = target.closest('button, input, a, [role="button"]')
      if (!isOnCard && !isOnInteractive) {
        handleListContextMenu(e)
      }
    },
    [handleListContextMenu]
  )

  // Filter tables by the debounced search query (name or description match).
  const filteredTables = tables.filter((table) => {
    if (!debouncedSearchQuery) return true
    const query = debouncedSearchQuery.toLowerCase()
    return (
      table.name.toLowerCase().includes(query) || table.description?.toLowerCase().includes(query)
    )
  })

  return (
    <>
      <div className='flex h-full flex-1 flex-col'>
        <div className='flex flex-1 overflow-hidden'>
          <div
            className='flex flex-1 flex-col overflow-auto bg-white px-[24px] pt-[28px] pb-[24px] dark:bg-[var(--bg)]'
            onContextMenu={handleContentContextMenu}
          >
            {/* Header */}
            <div>
              <div className='flex items-start gap-[12px]'>
                <div className='flex h-[26px] w-[26px] items-center justify-center rounded-[6px] border border-[#3B82F6] bg-[#EFF6FF] dark:border-[#1E40AF] dark:bg-[#1E3A5F]'>
                  <Database className='h-[14px] w-[14px] text-[#3B82F6] dark:text-[#60A5FA]' />
                </div>
                <h1 className='font-medium text-[18px]'>Tables</h1>
              </div>
              <p className='mt-[10px] text-[14px] text-[var(--text-tertiary)]'>
                Create and manage data tables for your workflows.
              </p>
            </div>
            {/* Search and Actions */}
            <div className='mt-[14px] flex items-center justify-between'>
              <div className='flex h-[32px] w-[400px] items-center gap-[6px] rounded-[8px] bg-[var(--surface-4)] px-[8px]'>
                <Search className='h-[14px] w-[14px] text-[var(--text-subtle)]' />
                <Input
                  placeholder='Search'
                  value={searchQuery}
                  onChange={(e) => setSearchQuery(e.target.value)}
                  className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
                />
              </div>
              <div className='flex items-center gap-[8px]'>
                <Tooltip.Root>
                  <Tooltip.Trigger asChild>
                    <Button
                      onClick={() => setIsCreateModalOpen(true)}
                      disabled={userPermissions.canEdit !== true}
                      variant='tertiary'
                      className='h-[32px] rounded-[6px]'
                    >
                      <Plus className='mr-[6px] h-[14px] w-[14px]' />
                      Create Table
                    </Button>
                  </Tooltip.Trigger>
                  {userPermissions.canEdit !== true && (
                    <Tooltip.Content>Write permission required to create tables</Tooltip.Content>
                  )}
                </Tooltip.Root>
              </div>
            </div>
            {/* Content */}
            <div className='mt-[24px] grid grid-cols-1 gap-[20px] md:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4'>
              {isLoading ? (
                <LoadingState />
              ) : error ? (
                <ErrorState error={error} />
              ) : filteredTables.length === 0 ? (
                // Use the debounced query so the empty-state message agrees
                // with the filter that produced the empty result.
                <EmptyState hasSearchQuery={!!debouncedSearchQuery} />
              ) : (
                filteredTables.map((table) => (
                  <TableCard key={table.id} table={table} workspaceId={workspaceId} />
                ))
              )}
            </div>
          </div>
        </div>
      </div>
      <TablesListContextMenu
        isOpen={isListContextMenuOpen}
        position={listContextMenuPosition}
        menuRef={listMenuRef}
        onClose={closeListContextMenu}
        onCreateTable={() => setIsCreateModalOpen(true)}
        disableCreate={userPermissions.canEdit !== true}
      />
      <CreateModal isOpen={isCreateModalOpen} onClose={() => setIsCreateModalOpen(false)} />
    </>
  )
}

View File

@@ -1,41 +0,0 @@
'use client'
import { useEffect } from 'react'
import { createLogger } from '@sim/logger'
import { AlertTriangle, RefreshCw } from 'lucide-react'
import { Button } from '@/components/emcn'
const logger = createLogger('TablesError')
interface TablesErrorProps {
  error: Error & { digest?: string } // Next.js error boundary payload
  reset: () => void // re-renders the segment to retry
}

/**
 * Error boundary UI for the tables route. Logs the error (with its Next.js
 * digest when present) and offers a retry button that resets the boundary.
 */
export default function TablesError({ error, reset }: TablesErrorProps) {
  useEffect(() => {
    logger.error('Tables error:', { error: error.message, digest: error.digest })
  }, [error])
  return (
    <div className='flex h-full flex-1 items-center justify-center bg-white dark:bg-[var(--bg)]'>
      <div className='flex flex-col items-center gap-[16px] text-center'>
        <div className='flex h-[48px] w-[48px] items-center justify-center rounded-full bg-[var(--surface-4)]'>
          <AlertTriangle className='h-[24px] w-[24px] text-[var(--text-error)]' />
        </div>
        <div className='flex flex-col gap-[8px]'>
          <h2 className='font-semibold text-[16px] text-[var(--text-primary)]'>
            Failed to load tables
          </h2>
          <p className='max-w-[300px] text-[13px] text-[var(--text-tertiary)]'>
            Something went wrong while loading the tables. Please try again.
          </p>
        </div>
        <Button variant='default' size='sm' onClick={reset}>
          <RefreshCw className='mr-[6px] h-[14px] w-[14px]' />
          Try again
        </Button>
      </div>
    </div>
  )
}

View File

@@ -1,7 +0,0 @@
/** Layout for the tables routes; offsets content by the sidebar width. */
export default function TablesLayout({ children }: { children: React.ReactNode }) {
  return (
    <div className='flex h-full flex-1 flex-col overflow-hidden pl-[var(--sidebar-width)]'>
      {children}
    </div>
  )
}

View File

@@ -1 +0,0 @@
/** Barrel export for the tables lib utilities. */
export * from './utils'

View File

@@ -1,32 +0,0 @@
/**
 * Formats a date as relative time (e.g., "5m ago", "2d ago").
 * Accepts an ISO string or a Date; anything under a minute is "just now".
 */
export function formatRelativeTime(dateValue: string | Date): string {
  const iso = typeof dateValue === 'string' ? dateValue : dateValue.toISOString()
  const elapsedSeconds = Math.floor((Date.now() - new Date(iso).getTime()) / 1000)

  if (elapsedSeconds < 60) return 'just now'

  // [upper bound in seconds, divisor, unit suffix] — checked in ascending order.
  const units: Array<[number, number, string]> = [
    [3600, 60, 'm'],
    [86400, 3600, 'h'],
    [604800, 86400, 'd'],
    [2592000, 604800, 'w'],
    [31536000, 2592000, 'mo'],
  ]
  for (const [limit, divisor, suffix] of units) {
    if (elapsedSeconds < limit) {
      return `${Math.floor(elapsedSeconds / divisor)}${suffix} ago`
    }
  }
  return `${Math.floor(elapsedSeconds / 31536000)}y ago`
}
/**
 * Formats a date as an absolute, human-readable en-US string
 * (e.g., "Jan 15, 2024, 10:30 AM"). Accepts an ISO string or a Date.
 */
export function formatAbsoluteDate(dateValue: string | Date): string {
  const iso = typeof dateValue === 'string' ? dateValue : dateValue.toISOString()
  const formatOptions: Intl.DateTimeFormatOptions = {
    year: 'numeric',
    month: 'short',
    day: 'numeric',
    hour: '2-digit',
    minute: '2-digit',
  }
  return new Date(iso).toLocaleDateString('en-US', formatOptions)
}

View File

@@ -1,26 +0,0 @@
import { redirect } from 'next/navigation'
import { getSession } from '@/lib/auth'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
import { TablesView } from './components'
interface TablesPageProps {
  params: Promise<{
    workspaceId: string
  }>
}

/**
 * Server entry for the workspace tables page. Requires an authenticated
 * session and workspace membership; otherwise redirects to the root.
 */
export default async function TablesPage({ params }: TablesPageProps) {
  const { workspaceId } = await params
  const session = await getSession()
  if (!session?.user?.id) {
    redirect('/')
  }
  const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
  if (!hasPermission) {
    redirect('/')
  }
  return <TablesView />
}

View File

@@ -259,7 +259,6 @@ export const Code = memo(function Code({
case 'json-schema':
return 'Describe the JSON schema to generate...'
case 'json-object':
case 'table-schema':
return 'Describe the JSON object to generate...'
default:
return 'Describe the JavaScript code to generate...'
@@ -284,14 +283,9 @@ export const Code = memo(function Code({
return wandConfig
}, [wandConfig, languageValue])
const [tableIdValue] = useSubBlockValue<string>(blockId, 'tableId')
const wandHook = useWand({
wandConfig: dynamicWandConfig || { enabled: false, prompt: '' },
currentValue: code,
contextParams: {
tableId: typeof tableIdValue === 'string' ? tableIdValue : null,
},
onStreamStart: () => handleStreamStartRef.current?.(),
onStreamChunk: (chunk: string) => handleStreamChunkRef.current?.(chunk),
onGeneratedContent: (content: string) => handleGeneratedContentRef.current?.(content),

View File

@@ -1,19 +0,0 @@
import { Plus } from 'lucide-react'
import { Button } from '@/components/emcn'
interface EmptyStateProps {
  onAdd: () => void
  disabled: boolean
  label: string // button text, e.g. "Add filter"
}

/**
 * Dashed placeholder with a single add button, shown when a sub-block list
 * (e.g. filter rules) is empty.
 */
export function EmptyState({ onAdd, disabled, label }: EmptyStateProps) {
  return (
    <div className='flex items-center justify-center rounded-[4px] border border-[var(--border-1)] border-dashed py-[16px]'>
      <Button variant='ghost' size='sm' onClick={onAdd} disabled={disabled}>
        <Plus className='mr-[4px] h-[12px] w-[12px]' />
        {label}
      </Button>
    </div>
  )
}

View File

@@ -1,137 +0,0 @@
import { X } from 'lucide-react'
import { Button, Combobox, type ComboboxOption, Input } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { FilterRule } from '@/lib/table/query-builder/constants'
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
import { SubBlockInputController } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/sub-block-input-controller'
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
interface FilterRuleRowProps {
blockId: string
subBlockId: string
rule: FilterRule
index: number
columns: ComboboxOption[]
comparisonOptions: ComboboxOption[]
logicalOptions: ComboboxOption[]
isReadOnly: boolean
isPreview: boolean
disabled: boolean
onRemove: (id: string) => void
onUpdate: (id: string, field: keyof FilterRule, value: string) => void
}
export function FilterRuleRow({
blockId,
subBlockId,
rule,
index,
columns,
comparisonOptions,
logicalOptions,
isReadOnly,
isPreview,
disabled,
onRemove,
onUpdate,
}: FilterRuleRowProps) {
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
return (
<div className='flex items-center gap-[6px]'>
<Button
variant='ghost'
size='sm'
onClick={() => onRemove(rule.id)}
disabled={isReadOnly}
className='h-[24px] w-[24px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
>
<X className='h-[12px] w-[12px]' />
</Button>
<div className='w-[80px] shrink-0'>
{index === 0 ? (
<Combobox
size='sm'
options={[{ value: 'where', label: 'where' }]}
value='where'
disabled
/>
) : (
<Combobox
size='sm'
options={logicalOptions}
value={rule.logicalOperator}
onChange={(v) => onUpdate(rule.id, 'logicalOperator', v as 'and' | 'or')}
disabled={isReadOnly}
/>
)}
</div>
<div className='w-[100px] shrink-0'>
<Combobox
size='sm'
options={columns}
value={rule.column}
onChange={(v) => onUpdate(rule.id, 'column', v)}
placeholder='Column'
disabled={isReadOnly}
/>
</div>
<div className='w-[110px] shrink-0'>
<Combobox
size='sm'
options={comparisonOptions}
value={rule.operator}
onChange={(v) => onUpdate(rule.id, 'operator', v)}
disabled={isReadOnly}
/>
</div>
<div className='relative min-w-[80px] flex-1'>
<SubBlockInputController
blockId={blockId}
subBlockId={`${subBlockId}_filter_${rule.id}`}
config={{ id: `filter_value_${rule.id}`, type: 'short-input' }}
value={rule.value}
onChange={(newValue) => onUpdate(rule.id, 'value', newValue)}
isPreview={isPreview}
disabled={disabled}
>
{({ ref, value: ctrlValue, onChange, onKeyDown, onDrop, onDragOver }) => {
const formattedText = formatDisplayText(ctrlValue, {
accessiblePrefixes,
highlightAll: !accessiblePrefixes,
})
return (
<div className='relative'>
<Input
ref={ref as React.RefObject<HTMLInputElement>}
className='h-[28px] w-full overflow-auto text-[12px] text-transparent caret-foreground [-ms-overflow-style:none] [scrollbar-width:none] placeholder:text-muted-foreground/50 [&::-webkit-scrollbar]:hidden'
value={ctrlValue}
onChange={onChange as (e: React.ChangeEvent<HTMLInputElement>) => void}
onKeyDown={onKeyDown as (e: React.KeyboardEvent<HTMLInputElement>) => void}
onDrop={onDrop as (e: React.DragEvent<HTMLInputElement>) => void}
onDragOver={onDragOver as (e: React.DragEvent<HTMLInputElement>) => void}
placeholder='Value'
disabled={isReadOnly}
autoComplete='off'
/>
<div
className={cn(
'pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-[8px] py-[6px] font-medium font-sans text-[12px] text-foreground [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden',
(isPreview || disabled) && 'opacity-50'
)}
>
<div className='min-w-fit whitespace-pre'>{formattedText}</div>
</div>
</div>
)
}}
</SubBlockInputController>
</div>
</div>
)
}

View File

@@ -1,90 +0,0 @@
'use client'
import { useMemo } from 'react'
import { Plus } from 'lucide-react'
import { Button } from '@/components/emcn'
import { useTableColumns } from '@/lib/table/hooks'
import type { FilterRule } from '@/lib/table/query-builder/constants'
import { useFilterBuilder } from '@/lib/table/query-builder/use-query-builder'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { EmptyState } from './components/empty-state'
import { FilterRuleRow } from './components/filter-rule-row'
interface FilterBuilderProps {
blockId: string
subBlockId: string
isPreview?: boolean
previewValue?: FilterRule[] | null
disabled?: boolean
columns?: Array<{ value: string; label: string }>
tableIdSubBlockId?: string
}
/** Visual builder for table filter rules in workflow blocks. */
export function FilterBuilder({
blockId,
subBlockId,
isPreview = false,
previewValue,
disabled = false,
columns: propColumns,
tableIdSubBlockId = 'tableId',
}: FilterBuilderProps) {
const [storeValue, setStoreValue] = useSubBlockValue<FilterRule[]>(blockId, subBlockId)
const [tableIdValue] = useSubBlockValue<string>(blockId, tableIdSubBlockId)
const dynamicColumns = useTableColumns({ tableId: tableIdValue })
const columns = useMemo(() => {
if (propColumns && propColumns.length > 0) return propColumns
return dynamicColumns
}, [propColumns, dynamicColumns])
const value = isPreview ? previewValue : storeValue
const rules: FilterRule[] = Array.isArray(value) && value.length > 0 ? value : []
const isReadOnly = isPreview || disabled
const { comparisonOptions, logicalOptions, addRule, removeRule, updateRule } = useFilterBuilder({
columns,
rules,
setRules: setStoreValue,
isReadOnly,
})
return (
<div className='flex flex-col gap-[8px]'>
{rules.length === 0 ? (
<EmptyState onAdd={addRule} disabled={isReadOnly} label='Add filter rule' />
) : (
<>
{rules.map((rule, index) => (
<FilterRuleRow
key={rule.id}
blockId={blockId}
subBlockId={subBlockId}
rule={rule}
index={index}
columns={columns}
comparisonOptions={comparisonOptions}
logicalOptions={logicalOptions}
isReadOnly={isReadOnly}
isPreview={isPreview}
disabled={disabled}
onRemove={removeRule}
onUpdate={updateRule}
/>
))}
<Button
variant='ghost'
size='sm'
onClick={addRule}
disabled={isReadOnly}
className='self-start'
>
<Plus className='mr-[4px] h-[12px] w-[12px]' />
Add rule
</Button>
</>
)}
</div>
)
}

View File

@@ -9,7 +9,6 @@ export { Dropdown } from './dropdown/dropdown'
export { EvalInput } from './eval-input/eval-input'
export { FileSelectorInput } from './file-selector/file-selector-input'
export { FileUpload } from './file-upload/file-upload'
export { FilterBuilder } from './filter-builder/filter-builder'
export { FolderSelectorInput } from './folder-selector/components/folder-selector-input'
export { GroupedCheckboxList } from './grouped-checkbox-list/grouped-checkbox-list'
export { InputMapping } from './input-mapping/input-mapping'
@@ -28,12 +27,10 @@ export { ShortInput } from './short-input/short-input'
export { SkillInput } from './skill-input/skill-input'
export { SlackSelectorInput } from './slack-selector/slack-selector-input'
export { SliderInput } from './slider-input/slider-input'
export { SortBuilder } from './sort-builder/sort-builder'
export { InputFormat } from './starter/input-format'
export { SubBlockInputController } from './sub-block-input-controller'
export { Switch } from './switch/switch'
export { Table } from './table/table'
export { TableSelector } from './table-selector/table-selector'
export { Text } from './text/text'
export { TimeInput } from './time-input/time-input'
export { ToolInput } from './tool-input/tool-input'

View File

@@ -1,19 +0,0 @@
import { Plus } from 'lucide-react'
import { Button } from '@/components/emcn'
interface EmptyStateProps {
onAdd: () => void
disabled: boolean
label: string
}
export function EmptyState({ onAdd, disabled, label }: EmptyStateProps) {
return (
<div className='flex items-center justify-center rounded-[4px] border border-[var(--border-1)] border-dashed py-[16px]'>
<Button variant='ghost' size='sm' onClick={onAdd} disabled={disabled}>
<Plus className='mr-[4px] h-[12px] w-[12px]' />
{label}
</Button>
</div>
)
}

View File

@@ -1,67 +0,0 @@
import { X } from 'lucide-react'
import { Button, Combobox, type ComboboxOption } from '@/components/emcn'
import type { SortRule } from '@/lib/table/query-builder/constants'
interface SortRuleRowProps {
rule: SortRule
index: number
columns: ComboboxOption[]
directionOptions: ComboboxOption[]
isReadOnly: boolean
onRemove: (id: string) => void
onUpdate: (id: string, field: keyof SortRule, value: string) => void
}
export function SortRuleRow({
rule,
index,
columns,
directionOptions,
isReadOnly,
onRemove,
onUpdate,
}: SortRuleRowProps) {
return (
<div className='flex items-center gap-[6px]'>
<Button
variant='ghost'
size='sm'
onClick={() => onRemove(rule.id)}
disabled={isReadOnly}
className='h-[24px] w-[24px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
>
<X className='h-[12px] w-[12px]' />
</Button>
<div className='w-[90px] shrink-0'>
<Combobox
size='sm'
options={[{ value: String(index + 1), label: index === 0 ? 'order by' : 'then by' }]}
value={String(index + 1)}
disabled
/>
</div>
<div className='min-w-[120px] flex-1'>
<Combobox
size='sm'
options={columns}
value={rule.column}
onChange={(v) => onUpdate(rule.id, 'column', v)}
placeholder='Column'
disabled={isReadOnly}
/>
</div>
<div className='w-[110px] shrink-0'>
<Combobox
size='sm'
options={directionOptions}
value={rule.direction}
onChange={(v) => onUpdate(rule.id, 'direction', v as 'asc' | 'desc')}
disabled={isReadOnly}
/>
</div>
</div>
)
}

View File

@@ -1,110 +0,0 @@
'use client'
import { useCallback, useMemo } from 'react'
import { Plus } from 'lucide-react'
import { nanoid } from 'nanoid'
import { Button, type ComboboxOption } from '@/components/emcn'
import { useTableColumns } from '@/lib/table/hooks'
import { SORT_DIRECTIONS, type SortRule } from '@/lib/table/query-builder/constants'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { EmptyState } from './components/empty-state'
import { SortRuleRow } from './components/sort-rule-row'
interface SortBuilderProps {
blockId: string
subBlockId: string
isPreview?: boolean
previewValue?: SortRule[] | null
disabled?: boolean
columns?: Array<{ value: string; label: string }>
tableIdSubBlockId?: string
}
const createDefaultRule = (columns: ComboboxOption[]): SortRule => ({
id: nanoid(),
column: columns[0]?.value || '',
direction: 'asc',
})
/** Visual builder for table sort rules in workflow blocks. */
export function SortBuilder({
blockId,
subBlockId,
isPreview = false,
previewValue,
disabled = false,
columns: propColumns,
tableIdSubBlockId = 'tableId',
}: SortBuilderProps) {
const [storeValue, setStoreValue] = useSubBlockValue<SortRule[]>(blockId, subBlockId)
const [tableIdValue] = useSubBlockValue<string>(blockId, tableIdSubBlockId)
const dynamicColumns = useTableColumns({ tableId: tableIdValue, includeBuiltIn: true })
const columns = useMemo(() => {
if (propColumns && propColumns.length > 0) return propColumns
return dynamicColumns
}, [propColumns, dynamicColumns])
const directionOptions = useMemo(
() => SORT_DIRECTIONS.map((dir) => ({ value: dir.value, label: dir.label })),
[]
)
const value = isPreview ? previewValue : storeValue
const rules: SortRule[] = Array.isArray(value) && value.length > 0 ? value : []
const isReadOnly = isPreview || disabled
const addRule = useCallback(() => {
if (isReadOnly) return
setStoreValue([...rules, createDefaultRule(columns)])
}, [isReadOnly, rules, columns, setStoreValue])
const removeRule = useCallback(
(id: string) => {
if (isReadOnly) return
setStoreValue(rules.filter((r) => r.id !== id))
},
[isReadOnly, rules, setStoreValue]
)
const updateRule = useCallback(
(id: string, field: keyof SortRule, newValue: string) => {
if (isReadOnly) return
setStoreValue(rules.map((r) => (r.id === id ? { ...r, [field]: newValue } : r)))
},
[isReadOnly, rules, setStoreValue]
)
return (
<div className='flex flex-col gap-[8px]'>
{rules.length === 0 ? (
<EmptyState onAdd={addRule} disabled={isReadOnly} label='Add sort rule' />
) : (
<>
{rules.map((rule, index) => (
<SortRuleRow
key={rule.id}
rule={rule}
index={index}
columns={columns}
directionOptions={directionOptions}
isReadOnly={isReadOnly}
onRemove={removeRule}
onUpdate={updateRule}
/>
))}
<Button
variant='ghost'
size='sm'
onClick={addRule}
disabled={isReadOnly}
className='self-start'
>
<Plus className='mr-[4px] h-[12px] w-[12px]' />
Add sort
</Button>
</>
)}
</div>
)
}

View File

@@ -1,78 +0,0 @@
'use client'
import { useCallback, useMemo } from 'react'
import { useParams } from 'next/navigation'
import { Combobox, type ComboboxOption } from '@/components/emcn'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'
import { useTablesList } from '@/hooks/queries/tables'
interface TableSelectorProps {
blockId: string
subBlock: SubBlockConfig
disabled?: boolean
isPreview?: boolean
previewValue?: string | null
}
/**
* Table selector component with dropdown for selecting workspace tables
*
* @remarks
* Provides a dropdown to select workspace tables.
* Uses React Query for efficient data fetching and caching.
* The external link to view the table is rendered in the label row by the parent SubBlock.
*/
export function TableSelector({
blockId,
subBlock,
disabled = false,
isPreview = false,
previewValue,
}: TableSelectorProps) {
const params = useParams()
const workspaceId = params.workspaceId as string
const [storeValue, setStoreValue] = useSubBlockValue<string>(blockId, subBlock.id)
const {
data: tables = [],
isLoading,
error,
} = useTablesList(isPreview || disabled ? undefined : workspaceId)
const value = isPreview ? previewValue : storeValue
const tableId = typeof value === 'string' ? value : null
const options = useMemo<ComboboxOption[]>(() => {
return tables.map((table) => ({
label: table.name.toLowerCase(),
value: table.id,
}))
}, [tables])
const handleChange = useCallback(
(selectedValue: string) => {
if (isPreview || disabled) return
setStoreValue(selectedValue)
},
[isPreview, disabled, setStoreValue]
)
const errorMessage = error instanceof Error ? error.message : error ? String(error) : undefined
return (
<Combobox
options={options}
value={tableId ?? undefined}
onChange={handleChange}
placeholder={subBlock.placeholder || 'Select a table'}
disabled={disabled || isPreview}
editable={false}
isLoading={isLoading}
error={errorMessage}
searchable={options.length > 5}
searchPlaceholder='Search...'
/>
)
}

View File

@@ -19,11 +19,11 @@ interface TableProps {
subBlockId: string
columns: string[]
isPreview?: boolean
previewValue?: WorkflowTableRow[] | null
previewValue?: TableRow[] | null
disabled?: boolean
}
interface WorkflowTableRow {
interface TableRow {
id: string
cells: Record<string, string>
}
@@ -38,7 +38,7 @@ export function Table({
}: TableProps) {
const params = useParams()
const workspaceId = params.workspaceId as string
const [storeValue, setStoreValue] = useSubBlockValue<WorkflowTableRow[]>(blockId, subBlockId)
const [storeValue, setStoreValue] = useSubBlockValue<TableRow[]>(blockId, subBlockId)
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
// Use the extended hook for field-level management
@@ -73,7 +73,7 @@ export function Table({
*/
useEffect(() => {
if (!isPreview && !disabled && (!Array.isArray(storeValue) || storeValue.length === 0)) {
const initialRow: WorkflowTableRow = {
const initialRow: TableRow = {
id: crypto.randomUUID(),
cells: { ...emptyCellsTemplate },
}
@@ -110,7 +110,7 @@ export function Table({
}
})
return validatedRows as WorkflowTableRow[]
return validatedRows as TableRow[]
}, [value, emptyCellsTemplate])
// Helper to update a cell value
@@ -164,12 +164,7 @@ export function Table({
</thead>
)
const renderCell = (
row: WorkflowTableRow,
rowIndex: number,
column: string,
cellIndex: number
) => {
const renderCell = (row: TableRow, rowIndex: number, column: string, cellIndex: number) => {
// Defensive programming: ensure row.cells exists and has the expected structure
const hasValidCells = row.cells && typeof row.cells === 'object'
if (!hasValidCells) logger.warn('Table row has malformed cells data:', row)

View File

@@ -357,7 +357,6 @@ const BUILT_IN_TOOL_TYPES = new Set([
'tts',
'stt',
'memory',
'table',
'webhook_request',
'workflow',
])
@@ -615,8 +614,7 @@ export const ToolInput = memo(function ToolInput({
block.type === 'workflow' ||
block.type === 'workflow_input' ||
block.type === 'knowledge' ||
block.type === 'function' ||
block.type === 'table') &&
block.type === 'function') &&
block.type !== 'evaluator' &&
block.type !== 'mcp' &&
block.type !== 'file'

View File

@@ -1,17 +1,8 @@
import { type JSX, type MouseEvent, memo, useCallback, useMemo, useRef, useState } from 'react'
import { type JSX, type MouseEvent, memo, useCallback, useRef, useState } from 'react'
import isEqual from 'lodash/isEqual'
import {
AlertTriangle,
ArrowLeftRight,
ArrowUp,
Check,
Clipboard,
ExternalLink,
} from 'lucide-react'
import { useParams } from 'next/navigation'
import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react'
import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn'
import type { FilterRule, SortRule } from '@/lib/table/query-builder/constants'
import {
CheckboxList,
Code,
@@ -24,7 +15,6 @@ import {
EvalInput,
FileSelectorInput,
FileUpload,
FilterBuilder,
FolderSelectorInput,
GroupedCheckboxList,
InputFormat,
@@ -44,10 +34,8 @@ import {
SkillInput,
SlackSelectorInput,
SliderInput,
SortBuilder,
Switch,
Table,
TableSelector,
Text,
TimeInput,
ToolInput,
@@ -214,12 +202,7 @@ const renderLabel = (
copied: boolean
onCopy: () => void
},
labelSuffix?: React.ReactNode,
externalLink?: {
show: boolean
onClick: () => void
tooltip: string
}
labelSuffix?: React.ReactNode
): JSX.Element | null => {
if (config.type === 'switch') return null
if (!config.title) return null
@@ -228,7 +211,6 @@ const renderLabel = (
const showWand = wandState?.isWandEnabled && !wandState.isPreview && !wandState.disabled
const showCanonicalToggle = !!canonicalToggle && !wandState?.isPreview
const showCopy = copyState?.showCopyButton && !wandState?.isPreview
const showExternalLink = externalLink?.show && !wandState?.isPreview
const canonicalToggleDisabledResolved = canonicalToggleIsDisabled ?? canonicalToggle?.disabled
return (
@@ -369,23 +351,6 @@ const renderLabel = (
</Tooltip.Content>
</Tooltip.Root>
)}
{showExternalLink && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
type='button'
className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0'
onClick={externalLink?.onClick}
aria-label={externalLink?.tooltip}
>
<ExternalLink className='!h-[12px] !w-[12px] text-[var(--text-secondary)]' />
</button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<p>{externalLink?.tooltip}</p>
</Tooltip.Content>
</Tooltip.Root>
)}
</div>
</div>
)
@@ -450,9 +415,6 @@ function SubBlockComponent({
labelSuffix,
dependencyContext,
}: SubBlockProps): JSX.Element {
const params = useParams()
const workspaceId = params.workspaceId as string
const [isValidJson, setIsValidJson] = useState(true)
const [isSearchActive, setIsSearchActive] = useState(false)
const [searchQuery, setSearchQuery] = useState('')
@@ -489,30 +451,6 @@ function SubBlockComponent({
}
}, [webhookManagement?.webhookUrl])
const tableId =
config.type === 'table-selector' && subBlockValues
? (subBlockValues[config.id]?.value as string | null)
: null
const hasSelectedTable = tableId && !tableId.startsWith('<')
const handleNavigateToTable = useCallback(() => {
if (tableId && workspaceId) {
window.open(`/workspace/${workspaceId}/tables/${tableId}`, '_blank')
}
}, [workspaceId, tableId])
const externalLink = useMemo(
() =>
config.type === 'table-selector' && hasSelectedTable
? {
show: true,
onClick: handleNavigateToTable,
tooltip: 'View table',
}
: undefined,
[config.type, hasSelectedTable, handleNavigateToTable]
)
/**
* Handles wand icon click to activate inline prompt mode.
* Focuses the input after a brief delay to ensure DOM is ready.
@@ -646,19 +584,6 @@ function SubBlockComponent({
</div>
)
case 'table-selector':
return (
<div onMouseDown={handleMouseDown}>
<TableSelector
blockId={blockId}
subBlock={config}
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue as string | null}
/>
</div>
)
case 'combobox':
return (
<div onMouseDown={handleMouseDown}>
@@ -1019,28 +944,6 @@ function SubBlockComponent({
/>
)
case 'filter-builder':
return (
<FilterBuilder
blockId={blockId}
subBlockId={config.id}
isPreview={isPreview}
previewValue={previewValue as FilterRule[] | null | undefined}
disabled={isDisabled}
/>
)
case 'sort-builder':
return (
<SortBuilder
blockId={blockId}
subBlockId={config.id}
isPreview={isPreview}
previewValue={previewValue as SortRule[] | null | undefined}
disabled={isDisabled}
/>
)
case 'channel-selector':
case 'user-selector':
return (
@@ -1157,8 +1060,7 @@ function SubBlockComponent({
copied,
onCopy: handleCopy,
},
labelSuffix,
externalLink
labelSuffix
)}
{renderInput()}
</div>

View File

@@ -9,7 +9,6 @@ import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createMcpToolId } from '@/lib/mcp/shared'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import type { FilterRule, SortRule } from '@/lib/table/types'
import { BLOCK_DIMENSIONS, HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
import {
buildCanonicalIndex,
@@ -42,7 +41,6 @@ import { useMcpServers, useMcpToolsQuery } from '@/hooks/queries/mcp'
import { useCredentialName } from '@/hooks/queries/oauth-credentials'
import { useReactivateSchedule, useScheduleInfo } from '@/hooks/queries/schedules'
import { useSkills } from '@/hooks/queries/skills'
import { useTablesList } from '@/hooks/queries/tables'
import { useDeployChildWorkflow } from '@/hooks/queries/workflows'
import { useSelectorDisplayName } from '@/hooks/use-selector-display-name'
import { useVariablesStore } from '@/stores/panel'
@@ -57,9 +55,9 @@ const logger = createLogger('WorkflowBlock')
const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any>
/**
* Type guard for workflow table row structure (sub-block table inputs)
* Type guard for table row structure
*/
interface WorkflowTableRow {
interface TableRow {
id: string
cells: Record<string, string>
}
@@ -78,7 +76,7 @@ interface FieldFormat {
/**
* Checks if a value is a table row array
*/
const isTableRowArray = (value: unknown): value is WorkflowTableRow[] => {
const isTableRowArray = (value: unknown): value is TableRow[] => {
if (!Array.isArray(value) || value.length === 0) return false
const firstItem = value[0]
return (
@@ -97,11 +95,7 @@ const isFieldFormatArray = (value: unknown): value is FieldFormat[] => {
if (!Array.isArray(value) || value.length === 0) return false
const firstItem = value[0]
return (
typeof firstItem === 'object' &&
firstItem !== null &&
'id' in firstItem &&
'name' in firstItem &&
typeof firstItem.name === 'string'
typeof firstItem === 'object' && firstItem !== null && 'id' in firstItem && 'name' in firstItem
)
}
@@ -167,8 +161,7 @@ const isTagFilterArray = (value: unknown): value is TagFilterItem[] => {
typeof firstItem === 'object' &&
firstItem !== null &&
'tagName' in firstItem &&
'tagValue' in firstItem &&
typeof firstItem.tagName === 'string'
'tagValue' in firstItem
)
}
@@ -190,40 +183,7 @@ const isDocumentTagArray = (value: unknown): value is DocumentTagItem[] => {
firstItem !== null &&
'tagName' in firstItem &&
'value' in firstItem &&
!('tagValue' in firstItem) && // Distinguish from tag filters
typeof firstItem.tagName === 'string'
)
}
/**
* Type guard for filter condition array (used in table block filter builder)
*/
const isFilterConditionArray = (value: unknown): value is FilterRule[] => {
if (!Array.isArray(value) || value.length === 0) return false
const firstItem = value[0]
return (
typeof firstItem === 'object' &&
firstItem !== null &&
'column' in firstItem &&
'operator' in firstItem &&
'logicalOperator' in firstItem &&
typeof firstItem.column === 'string'
)
}
/**
* Type guard for sort condition array (used in table block sort builder)
*/
const isSortConditionArray = (value: unknown): value is SortRule[] => {
if (!Array.isArray(value) || value.length === 0) return false
const firstItem = value[0]
return (
typeof firstItem === 'object' &&
firstItem !== null &&
'column' in firstItem &&
'direction' in firstItem &&
typeof firstItem.column === 'string' &&
(firstItem.direction === 'asc' || firstItem.direction === 'desc')
!('tagValue' in firstItem) // Distinguish from tag filters
)
}
@@ -271,9 +231,7 @@ export const getDisplayValue = (value: unknown): string => {
}
if (isTagFilterArray(parsedValue)) {
const validFilters = parsedValue.filter(
(f) => typeof f.tagName === 'string' && f.tagName.trim() !== ''
)
const validFilters = parsedValue.filter((f) => f.tagName?.trim())
if (validFilters.length === 0) return '-'
if (validFilters.length === 1) return validFilters[0].tagName
if (validFilters.length === 2) return `${validFilters[0].tagName}, ${validFilters[1].tagName}`
@@ -281,54 +239,13 @@ export const getDisplayValue = (value: unknown): string => {
}
if (isDocumentTagArray(parsedValue)) {
const validTags = parsedValue.filter(
(t) => typeof t.tagName === 'string' && t.tagName.trim() !== ''
)
const validTags = parsedValue.filter((t) => t.tagName?.trim())
if (validTags.length === 0) return '-'
if (validTags.length === 1) return validTags[0].tagName
if (validTags.length === 2) return `${validTags[0].tagName}, ${validTags[1].tagName}`
return `${validTags[0].tagName}, ${validTags[1].tagName} +${validTags.length - 2}`
}
if (isFilterConditionArray(parsedValue)) {
const validConditions = parsedValue.filter(
(c) => typeof c.column === 'string' && c.column.trim() !== ''
)
if (validConditions.length === 0) return '-'
const formatCondition = (c: FilterRule) => {
const opLabels: Record<string, string> = {
eq: '=',
ne: '≠',
gt: '>',
gte: '≥',
lt: '<',
lte: '≤',
contains: '~',
in: 'in',
}
const op = opLabels[c.operator] || c.operator
return `${c.column} ${op} ${c.value || '?'}`
}
if (validConditions.length === 1) return formatCondition(validConditions[0])
if (validConditions.length === 2) {
return `${formatCondition(validConditions[0])}, ${formatCondition(validConditions[1])}`
}
return `${formatCondition(validConditions[0])}, ${formatCondition(validConditions[1])} +${validConditions.length - 2}`
}
if (isSortConditionArray(parsedValue)) {
const validConditions = parsedValue.filter(
(c) => typeof c.column === 'string' && c.column.trim() !== ''
)
if (validConditions.length === 0) return '-'
const formatSort = (c: SortRule) => `${c.column} ${c.direction === 'desc' ? '↓' : '↑'}`
if (validConditions.length === 1) return formatSort(validConditions[0])
if (validConditions.length === 2) {
return `${formatSort(validConditions[0])}, ${formatSort(validConditions[1])}`
}
return `${formatSort(validConditions[0])}, ${formatSort(validConditions[1])} +${validConditions.length - 2}`
}
if (isTableRowArray(parsedValue)) {
const nonEmptyRows = parsedValue.filter((row) => {
const cellValues = Object.values(row.cells)
@@ -350,9 +267,7 @@ export const getDisplayValue = (value: unknown): string => {
}
if (isFieldFormatArray(parsedValue)) {
const namedFields = parsedValue.filter(
(field) => typeof field.name === 'string' && field.name.trim() !== ''
)
const namedFields = parsedValue.filter((field) => field.name && field.name.trim() !== '')
if (namedFields.length === 0) return '-'
if (namedFields.length === 1) return namedFields[0].name
if (namedFields.length === 2) return `${namedFields[0].name}, ${namedFields[1].name}`
@@ -598,15 +513,6 @@ const SubBlockRow = memo(function SubBlockRow({
return tool?.name ?? null
}, [subBlock?.type, rawValue, mcpToolsData])
const { data: tables = [] } = useTablesList(workspaceId || '')
const tableDisplayName = useMemo(() => {
if (subBlock?.id !== 'tableId' || typeof rawValue !== 'string') {
return null
}
const table = tables.find((t) => t.id === rawValue)
return table?.name ?? null
}, [subBlock?.id, rawValue, tables])
const webhookUrlDisplayValue = useMemo(() => {
if (subBlock?.id !== 'webhookUrlDisplay' || !blockId) {
return null
@@ -713,27 +619,6 @@ const SubBlockRow = memo(function SubBlockRow({
return `${toolNames[0]}, ${toolNames[1]} +${toolNames.length - 2}`
}, [subBlock?.type, rawValue, customTools, workspaceId])
const filterDisplayValue = useMemo(() => {
const isFilterField =
subBlock?.id === 'filter' || subBlock?.id === 'filterCriteria' || subBlock?.id === 'sort'
if (!isFilterField || !rawValue) return null
const parsedValue = tryParseJson(rawValue)
if (isPlainObject(parsedValue) || Array.isArray(parsedValue)) {
try {
const jsonStr = JSON.stringify(parsedValue, null, 0)
if (jsonStr.length <= 35) return jsonStr
return `${jsonStr.slice(0, 32)}...`
} catch {
return null
}
}
return null
}, [subBlock?.id, rawValue])
/**
* Hydrates skill references to display names.
* Resolves skill IDs to their current names from the skills query.
@@ -778,21 +663,18 @@ const SubBlockRow = memo(function SubBlockRow({
const isPasswordField = subBlock?.password === true
const maskedValue = isPasswordField && value && value !== '-' ? '•••' : null
const isMonospaceField = Boolean(filterDisplayValue)
const isSelectorType = subBlock?.type && SELECTOR_TYPES_HYDRATION_REQUIRED.includes(subBlock.type)
const hydratedName =
credentialName ||
dropdownLabel ||
variablesDisplayValue ||
filterDisplayValue ||
toolsDisplayValue ||
skillsDisplayValue ||
knowledgeBaseDisplayName ||
workflowSelectionName ||
mcpServerDisplayName ||
mcpToolDisplayName ||
tableDisplayName ||
webhookUrlDisplayValue ||
selectorDisplayName
const displayValue = maskedValue || hydratedName || (isSelectorType && value ? '-' : value)
@@ -807,10 +689,7 @@ const SubBlockRow = memo(function SubBlockRow({
</span>
{displayValue !== undefined && (
<span
className={cn(
'flex-1 truncate text-right text-[14px] text-[var(--text-primary)]',
isMonospaceField && 'font-mono'
)}
className='flex-1 truncate text-right text-[14px] text-[var(--text-primary)]'
title={displayValue}
>
{displayValue}

View File

@@ -4,37 +4,23 @@ import { useQueryClient } from '@tanstack/react-query'
import { readSSEStream } from '@/lib/core/utils/sse'
import type { GenerationType } from '@/blocks/types'
import { subscriptionKeys } from '@/hooks/queries/subscription'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
const logger = createLogger('useWand')
interface ChatMessage {
role: 'user' | 'assistant' | 'system'
content: string
}
interface BuildWandContextInfoOptions {
currentValue?: string
generationType?: string
}
/**
* Builds rich context information based on current content and generation type.
* Note: Table schema context is now fetched server-side in /api/wand for simplicity.
* Builds rich context information based on current content and generation type
*/
function buildWandContextInfo({
currentValue,
generationType,
}: BuildWandContextInfoOptions): string {
const hasContent = Boolean(currentValue && currentValue.trim() !== '')
const contentLength = currentValue?.length ?? 0
const lineCount = currentValue ? currentValue.split('\n').length : 0
function buildContextInfo(currentValue?: string, generationType?: string): string {
if (!currentValue || currentValue.trim() === '') {
return 'no current content'
}
let contextInfo = hasContent
? `Current content (${contentLength} characters, ${lineCount} lines):\n${currentValue}`
: 'no current content'
const contentLength = currentValue.length
const lineCount = currentValue.split('\n').length
if (generationType && currentValue) {
let contextInfo = `Current content (${contentLength} characters, ${lineCount} lines):\n${currentValue}`
if (generationType) {
switch (generationType) {
case 'javascript-function-body':
case 'typescript-function-body': {
@@ -47,7 +33,6 @@ function buildWandContextInfo({
case 'json-schema':
case 'json-object':
case 'table-schema':
try {
const parsed = JSON.parse(currentValue)
const keys = Object.keys(parsed)
@@ -62,6 +47,11 @@ function buildWandContextInfo({
return contextInfo
}
interface ChatMessage {
role: 'user' | 'assistant' | 'system'
content: string
}
export interface WandConfig {
enabled: boolean
prompt: string
@@ -73,9 +63,6 @@ export interface WandConfig {
interface UseWandProps {
wandConfig?: WandConfig
currentValue?: string
contextParams?: {
tableId?: string | null
}
onGeneratedContent: (content: string) => void
onStreamChunk?: (chunk: string) => void
onStreamStart?: () => void
@@ -85,14 +72,12 @@ interface UseWandProps {
export function useWand({
wandConfig,
currentValue,
contextParams,
onGeneratedContent,
onStreamChunk,
onStreamStart,
onGenerationComplete,
}: UseWandProps) {
const queryClient = useQueryClient()
const workflowId = useWorkflowRegistry((state) => state.hydration.workflowId)
const [isLoading, setIsLoading] = useState(false)
const [isPromptVisible, setIsPromptVisible] = useState(false)
const [promptInputValue, setPromptInputValue] = useState('')
@@ -163,10 +148,7 @@ export function useWand({
}
try {
const contextInfo = buildWandContextInfo({
currentValue,
generationType: wandConfig?.generationType,
})
const contextInfo = buildContextInfo(currentValue, wandConfig?.generationType)
let systemPrompt = wandConfig?.prompt || ''
if (systemPrompt.includes('{context}')) {
@@ -189,8 +171,6 @@ export function useWand({
stream: true,
history: wandConfig?.maintainHistory ? conversationHistory : [],
generationType: wandConfig?.generationType,
workflowId,
wandContext: contextParams?.tableId ? { tableId: contextParams.tableId } : undefined,
}),
signal: abortControllerRef.current.signal,
cache: 'no-store',
@@ -255,8 +235,6 @@ export function useWand({
onStreamStart,
onGenerationComplete,
queryClient,
contextParams?.tableId,
workflowId,
]
)

View File

@@ -227,7 +227,7 @@ export function Integrations({ onOpenChange, registerCloseHandler }: Integration
(acc, service) => {
if (
permissionConfig.allowedIntegrations !== null &&
!permissionConfig.allowedIntegrations.includes(service.id.replace(/-/g, '_').toLowerCase())
!permissionConfig.allowedIntegrations.includes(service.id.replace(/-/g, '_'))
) {
return acc
}

View File

@@ -7,8 +7,6 @@ export interface SubscriptionPermissions {
canCancelSubscription: boolean
showTeamMemberView: boolean
showUpgradePlans: boolean
isEnterpriseMember: boolean
canViewUsageInfo: boolean
}
export interface SubscriptionState {
@@ -33,9 +31,6 @@ export function getSubscriptionPermissions(
const { isFree, isPro, isTeam, isEnterprise, isPaid } = subscription
const { isTeamAdmin } = userRole
const isEnterpriseMember = isEnterprise && !isTeamAdmin
const canViewUsageInfo = !isEnterpriseMember
return {
canUpgradeToPro: isFree,
canUpgradeToTeam: isFree || (isPro && !isTeam),
@@ -45,8 +40,6 @@ export function getSubscriptionPermissions(
canCancelSubscription: isPaid && !isEnterprise && !(isTeam && !isTeamAdmin), // Team members can't cancel
showTeamMemberView: isTeam && !isTeamAdmin,
showUpgradePlans: isFree || (isPro && !isTeam) || (isTeam && isTeamAdmin), // Free users, Pro users, Team owners see plans
isEnterpriseMember,
canViewUsageInfo,
}
}

View File

@@ -300,16 +300,12 @@ export function Subscription() {
)
const showBadge =
!permissions.isEnterpriseMember &&
((permissions.canEditUsageLimit && !permissions.showTeamMemberView) ||
permissions.showTeamMemberView ||
subscription.isEnterprise ||
isBlocked)
(permissions.canEditUsageLimit && !permissions.showTeamMemberView) ||
permissions.showTeamMemberView ||
subscription.isEnterprise ||
isBlocked
const getBadgeConfig = (): { text: string; variant: 'blue-secondary' | 'red' } => {
if (permissions.isEnterpriseMember) {
return { text: '', variant: 'blue-secondary' }
}
if (permissions.showTeamMemberView || subscription.isEnterprise) {
return { text: `${subscription.seats} seats`, variant: 'blue-secondary' }
}
@@ -447,75 +443,67 @@ export function Subscription() {
return (
<div className='flex h-full flex-col gap-[20px]'>
{/* Current Plan & Usage Overview - hidden from enterprise members (non-admin) */}
{permissions.canViewUsageInfo ? (
<UsageHeader
title={formatPlanName(subscription.plan)}
showBadge={showBadge}
badgeText={badgeConfig.text}
badgeVariant={badgeConfig.variant}
onBadgeClick={permissions.showTeamMemberView ? undefined : handleBadgeClick}
seatsText={
permissions.canManageTeam || subscription.isEnterprise
? `${subscription.seats} seats`
: undefined
}
current={usage.current}
limit={
subscription.isEnterprise || subscription.isTeam
? organizationBillingData?.data?.totalUsageLimit
: !subscription.isFree &&
(permissions.canEditUsageLimit || permissions.showTeamMemberView)
? usage.current // placeholder; rightContent will render UsageLimit
: usage.limit
}
isBlocked={isBlocked}
progressValue={Math.min(usage.percentUsed, 100)}
rightContent={
!subscription.isFree &&
(permissions.canEditUsageLimit || permissions.showTeamMemberView) ? (
<UsageLimit
ref={usageLimitRef}
currentLimit={
(subscription.isTeam || subscription.isEnterprise) &&
isTeamAdmin &&
organizationBillingData?.data
? organizationBillingData.data.totalUsageLimit
: usageLimitData.currentLimit || usage.limit
}
currentUsage={usage.current}
canEdit={permissions.canEditUsageLimit}
minimumLimit={
(subscription.isTeam || subscription.isEnterprise) &&
isTeamAdmin &&
organizationBillingData?.data
? organizationBillingData.data.minimumBillingAmount
: usageLimitData.minimumLimit || (subscription.isPro ? 20 : 40)
}
context={
(subscription.isTeam || subscription.isEnterprise) && isTeamAdmin
? 'organization'
: 'user'
}
organizationId={
(subscription.isTeam || subscription.isEnterprise) && isTeamAdmin
? activeOrgId
: undefined
}
onLimitUpdated={() => {
logger.info('Usage limit updated')
}}
/>
) : undefined
}
/>
) : (
<div className='flex items-center'>
<span className='font-medium text-[14px] text-[var(--text-primary)]'>
{formatPlanName(subscription.plan)}
</span>
</div>
)}
{/* Current Plan & Usage Overview */}
<UsageHeader
title={formatPlanName(subscription.plan)}
showBadge={showBadge}
badgeText={badgeConfig.text}
badgeVariant={badgeConfig.variant}
onBadgeClick={permissions.showTeamMemberView ? undefined : handleBadgeClick}
seatsText={
permissions.canManageTeam || subscription.isEnterprise
? `${subscription.seats} seats`
: undefined
}
current={usage.current}
limit={
subscription.isEnterprise || subscription.isTeam
? organizationBillingData?.data?.totalUsageLimit
: !subscription.isFree &&
(permissions.canEditUsageLimit || permissions.showTeamMemberView)
? usage.current // placeholder; rightContent will render UsageLimit
: usage.limit
}
isBlocked={isBlocked}
progressValue={Math.min(usage.percentUsed, 100)}
rightContent={
!subscription.isFree &&
(permissions.canEditUsageLimit || permissions.showTeamMemberView) ? (
<UsageLimit
ref={usageLimitRef}
currentLimit={
(subscription.isTeam || subscription.isEnterprise) &&
isTeamAdmin &&
organizationBillingData?.data
? organizationBillingData.data.totalUsageLimit
: usageLimitData.currentLimit || usage.limit
}
currentUsage={usage.current}
canEdit={permissions.canEditUsageLimit}
minimumLimit={
(subscription.isTeam || subscription.isEnterprise) &&
isTeamAdmin &&
organizationBillingData?.data
? organizationBillingData.data.minimumBillingAmount
: usageLimitData.minimumLimit || (subscription.isPro ? 20 : 40)
}
context={
(subscription.isTeam || subscription.isEnterprise) && isTeamAdmin
? 'organization'
: 'user'
}
organizationId={
(subscription.isTeam || subscription.isEnterprise) && isTeamAdmin
? activeOrgId
: undefined
}
onLimitUpdated={() => {
logger.info('Usage limit updated')
}}
/>
) : undefined
}
/>
{/* Upgrade Plans */}
{permissions.showUpgradePlans && (
@@ -551,8 +539,8 @@ export function Subscription() {
</div>
)}
{/* Credit Balance - hidden from enterprise members (non-admin) */}
{subscription.isPaid && permissions.canViewUsageInfo && (
{/* Credit Balance */}
{subscription.isPaid && (
<CreditBalance
balance={subscriptionData?.data?.creditBalance ?? 0}
canPurchase={permissions.canEditUsageLimit}
@@ -566,11 +554,10 @@ export function Subscription() {
<ReferralCode onRedeemComplete={() => refetchSubscription()} />
)}
{/* Next Billing Date - hidden from team members and enterprise members (non-admin) */}
{/* Next Billing Date - hidden from team members */}
{subscription.isPaid &&
subscriptionData?.data?.periodEnd &&
!permissions.showTeamMemberView &&
!permissions.isEnterpriseMember && (
!permissions.showTeamMemberView && (
<div className='flex items-center justify-between'>
<Label>Next Billing Date</Label>
<span className='text-[12px] text-[var(--text-secondary)]'>
@@ -579,8 +566,8 @@ export function Subscription() {
</div>
)}
{/* Usage notifications - hidden from enterprise members (non-admin) */}
{subscription.isPaid && permissions.canViewUsageInfo && <BillingUsageNotificationsToggle />}
{/* Usage notifications */}
{subscription.isPaid && <BillingUsageNotificationsToggle />}
{/* Cancel Subscription */}
{permissions.canCancelSubscription && (

View File

@@ -285,7 +285,6 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
const isPro = planType === 'pro'
const isTeam = planType === 'team'
const isEnterprise = planType === 'enterprise'
const isEnterpriseMember = isEnterprise && !userCanManageBilling
const handleUpgradeToPro = useCallback(async () => {
try {
@@ -464,18 +463,6 @@ export function UsageIndicator({ onClick }: UsageIndicatorProps) {
}
}
if (isEnterpriseMember) {
return (
<div className='flex flex-shrink-0 flex-col border-t px-[13.5px] pt-[8px] pb-[10px]'>
<div className='flex h-[18px] items-center'>
<span className='font-medium text-[12px] text-[var(--text-primary)]'>
{PLAN_NAMES[planType]}
</span>
</div>
</div>
)
}
return (
<>
<div

View File

@@ -2,7 +2,7 @@
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Database, HelpCircle, Layout, Plus, Search, Settings, Table } from 'lucide-react'
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
import Link from 'next/link'
import { useParams, usePathname, useRouter } from 'next/navigation'
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
@@ -268,12 +268,6 @@ export const Sidebar = memo(function Sidebar() {
href: `/workspace/${workspaceId}/knowledge`,
hidden: permissionConfig.hideKnowledgeBaseTab,
},
{
id: 'tables',
label: 'Tables',
icon: Table,
href: `/workspace/${workspaceId}/tables`,
},
{
id: 'help',
label: 'Help',

View File

@@ -408,9 +408,6 @@ describe.concurrent('Blocks Module', () => {
'workflow-input-mapper',
'text',
'router-input',
'table-selector',
'filter-builder',
'sort-builder',
'skill-input',
]

File diff suppressed because it is too large Load Diff

View File

@@ -1,597 +0,0 @@
import { MicrosoftDataverseIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { DataverseResponse } from '@/tools/microsoft_dataverse/types'
/**
 * Block configuration for the Microsoft Dataverse integration.
 *
 * Exposes CRUD, upsert, bulk create/update, FetchXML queries, relevance
 * search, action/function execution, file upload/download, record
 * association/disassociation, and WhoAmI operations against the Dataverse
 * Web API. The selected `operation` value drives both which sub-block
 * inputs are visible/required and which `microsoft_dataverse_*` tool runs.
 */
export const MicrosoftDataverseBlock: BlockConfig<DataverseResponse> = {
  type: 'microsoft_dataverse',
  name: 'Microsoft Dataverse',
  description: 'Manage records in Microsoft Dataverse tables',
  authMode: AuthMode.OAuth,
  longDescription:
    'Integrate Microsoft Dataverse into your workflow. Create, read, update, delete, upsert, associate, query, search, and execute actions and functions against Dataverse tables using the Web API. Supports bulk operations, FetchXML, file uploads, and relevance search. Works with Dynamics 365, Power Platform, and custom Dataverse environments.',
  docsLink: 'https://docs.sim.ai/tools/microsoft_dataverse',
  category: 'tools',
  bgColor: '#E0E0E0',
  icon: MicrosoftDataverseIcon,
  subBlocks: [
    // Operation selector — each id maps 1:1 to a `microsoft_dataverse_<id>`
    // tool (see tools.config.tool below).
    {
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      options: [
        { label: 'List Records', id: 'list_records' },
        { label: 'Get Record', id: 'get_record' },
        { label: 'Create Record', id: 'create_record' },
        { label: 'Update Record', id: 'update_record' },
        { label: 'Upsert Record', id: 'upsert_record' },
        { label: 'Delete Record', id: 'delete_record' },
        { label: 'Create Multiple', id: 'create_multiple' },
        { label: 'Update Multiple', id: 'update_multiple' },
        { label: 'FetchXML Query', id: 'fetchxml_query' },
        { label: 'Search', id: 'search' },
        { label: 'Execute Action', id: 'execute_action' },
        { label: 'Execute Function', id: 'execute_function' },
        { label: 'Upload File', id: 'upload_file' },
        { label: 'Download File', id: 'download_file' },
        { label: 'Associate Records', id: 'associate' },
        { label: 'Disassociate Records', id: 'disassociate' },
        { label: 'WhoAmI', id: 'whoami' },
      ],
      value: () => 'list_records',
    },
    // OAuth credential — requests the Dynamics user_impersonation scope plus
    // offline_access for refresh tokens.
    {
      id: 'credential',
      title: 'Microsoft Account',
      type: 'oauth-input',
      serviceId: 'microsoft-dataverse',
      requiredScopes: [
        'openid',
        'profile',
        'email',
        'https://dynamics.microsoft.com/user_impersonation',
        'offline_access',
      ],
      placeholder: 'Select Microsoft account',
      required: true,
    },
    // Base URL of the target Dataverse environment.
    {
      id: 'environmentUrl',
      title: 'Environment URL',
      type: 'short-input',
      placeholder: 'https://myorg.crm.dynamics.com',
      required: true,
    },
    // Plural entity set name used in Web API resource paths.
    // Hidden for whoami/search; also not required for execute_action /
    // execute_function (`not: true` presumably inverts the operation match —
    // confirm against the sub-block condition semantics).
    {
      id: 'entitySetName',
      title: 'Entity Set Name',
      type: 'short-input',
      placeholder: 'Plural table name (e.g., accounts, contacts)',
      condition: {
        field: 'operation',
        value: ['whoami', 'search'],
        not: true,
      },
      required: {
        field: 'operation',
        value: ['whoami', 'search', 'execute_action', 'execute_function'],
        not: true,
      },
    },
    // Primary-key GUID of the record being addressed. Shown (but optional)
    // for execute_action / execute_function, which can be bound or unbound.
    {
      id: 'recordId',
      title: 'Record ID',
      type: 'short-input',
      placeholder: 'Record GUID (e.g., 00000000-0000-0000-0000-000000000000)',
      condition: {
        field: 'operation',
        value: [
          'get_record',
          'update_record',
          'upsert_record',
          'delete_record',
          'associate',
          'disassociate',
          'upload_file',
          'download_file',
          'execute_action',
          'execute_function',
        ],
      },
      required: {
        field: 'operation',
        value: [
          'get_record',
          'update_record',
          'upsert_record',
          'delete_record',
          'associate',
          'disassociate',
          'upload_file',
          'download_file',
        ],
      },
    },
    // JSON column-value payload for single-record writes.
    {
      id: 'data',
      title: 'Record Data',
      type: 'long-input',
      placeholder:
        'JSON object with column values (e.g., {"name": "Contoso", "telephone1": "555-0100"})',
      condition: { field: 'operation', value: ['create_record', 'update_record', 'upsert_record'] },
      required: { field: 'operation', value: ['create_record', 'update_record', 'upsert_record'] },
      wandConfig: {
        enabled: true,
        prompt: `Generate a Dataverse record JSON object based on the user's description.
The JSON should contain column logical names as keys and appropriate values.
Common Dataverse column naming conventions:
- Text: "name", "description", "emailaddress1", "telephone1"
- Lookup: "_primarycontactid_value" (read-only), use "primarycontactid@odata.bind": "/contacts(guid)" for setting
- Choice/OptionSet: integer values (e.g., "statecode": 0, "statuscode": 1)
- Date: ISO 8601 format (e.g., "createdon": "2024-01-15T00:00:00Z")
- Currency: decimal numbers (e.g., "revenue": 1000000.00)
Return ONLY valid JSON - no explanations, no markdown code blocks.`,
        placeholder: 'Describe the record data you want to create or update...',
        generationType: 'json-object',
      },
    },
    // FetchXML Query
    {
      id: 'fetchXml',
      title: 'FetchXML',
      type: 'long-input',
      placeholder:
        '<fetch top="50"><entity name="account"><attribute name="name"/><filter><condition attribute="statecode" operator="eq" value="0"/></filter></entity></fetch>',
      condition: { field: 'operation', value: 'fetchxml_query' },
      required: { field: 'operation', value: 'fetchxml_query' },
      wandConfig: {
        enabled: true,
        prompt: `Generate a FetchXML query for the Microsoft Dataverse Web API based on the user's description.
FetchXML structure:
- Root: <fetch top="N" aggregate="true|false" distinct="true|false">
- Entity: <entity name="logical_name"> (singular table name, e.g., "account")
- Attributes: <attribute name="column"/> or <all-attributes/>
- Filter: <filter type="and|or"><condition attribute="name" operator="eq" value="val"/></filter>
- Order: <order attribute="name" descending="true|false"/>
- Link-entity: <link-entity name="contact" from="parentcustomerid" to="accountid" alias="c">
- Aggregation: <attribute name="revenue" aggregate="sum" alias="total"/>
Operators: eq, ne, gt, ge, lt, le, like, not-like, in, not-in, null, not-null, between, not-between, contains, not-contain
Return ONLY valid FetchXML - no explanations, no markdown code blocks.`,
        placeholder: 'Describe the query you want to run...',
        generationType: 'json-object',
      },
    },
    // Search
    {
      id: 'searchTerm',
      title: 'Search Term',
      type: 'short-input',
      placeholder: 'Search text (e.g., Contoso)',
      condition: { field: 'operation', value: 'search' },
      required: { field: 'operation', value: 'search' },
    },
    // Mapped to the tool's `entities` param in tools.config.params below.
    {
      id: 'searchEntities',
      title: 'Search Entities',
      type: 'long-input',
      placeholder:
        'JSON array of entity configs (e.g., [{"Name":"account","SelectColumns":["name"],"SearchColumns":["name"]}])',
      condition: { field: 'operation', value: 'search' },
      mode: 'advanced',
    },
    {
      id: 'searchMode',
      title: 'Search Mode',
      type: 'dropdown',
      options: [
        { label: 'Any (match any term)', id: 'any' },
        { label: 'All (match all terms)', id: 'all' },
      ],
      value: () => 'any',
      condition: { field: 'operation', value: 'search' },
      mode: 'advanced',
    },
    {
      id: 'searchType',
      title: 'Query Type',
      type: 'dropdown',
      options: [
        { label: 'Simple (default)', id: 'simple' },
        { label: 'Lucene (regex, fuzzy, proximity)', id: 'lucene' },
      ],
      value: () => 'simple',
      condition: { field: 'operation', value: 'search' },
      mode: 'advanced',
    },
    // Execute Action
    {
      id: 'actionName',
      title: 'Action Name',
      type: 'short-input',
      placeholder: 'e.g., Merge, GrantAccess, SendEmail',
      condition: { field: 'operation', value: 'execute_action' },
      required: { field: 'operation', value: 'execute_action' },
    },
    // Execute Function
    {
      id: 'functionName',
      title: 'Function Name',
      type: 'short-input',
      placeholder: 'e.g., RetrievePrincipalAccess, RetrieveTotalRecordCount',
      condition: { field: 'operation', value: 'execute_function' },
      required: { field: 'operation', value: 'execute_function' },
    },
    // Mapped to the tool's `parameters` param in tools.config.params below.
    {
      id: 'functionParameters',
      title: 'Function Parameters',
      type: 'short-input',
      placeholder: "e.g., LocalizedStandardName='Pacific Standard Time',LocaleId=1033",
      condition: { field: 'operation', value: 'execute_function' },
      mode: 'advanced',
    },
    // Action/Function parameters (shared JSON body for actions)
    {
      id: 'parameters',
      title: 'Action Parameters',
      type: 'long-input',
      placeholder:
        'JSON object with action parameters (e.g., {"Target": {"@odata.type": "Microsoft.Dynamics.CRM.account", "accountid": "..."}})',
      condition: { field: 'operation', value: 'execute_action' },
      wandConfig: {
        enabled: true,
        prompt: `Generate a JSON object containing parameters for a Microsoft Dataverse action based on the user's description.
For entity references, include @odata.type annotations:
- {"Target": {"@odata.type": "Microsoft.Dynamics.CRM.account", "accountid": "guid"}}
- {"EntityMoniker": {"@odata.type": "Microsoft.Dynamics.CRM.contact", "contactid": "guid"}}
For simple values, just use the parameter name and value.
Return ONLY valid JSON - no explanations, no markdown code blocks.`,
        placeholder: 'Describe the action parameters...',
        generationType: 'json-object',
      },
    },
    // Bulk operations
    {
      id: 'entityLogicalName',
      title: 'Table Logical Name',
      type: 'short-input',
      placeholder: 'Singular table name (e.g., account, contact)',
      condition: { field: 'operation', value: ['create_multiple', 'update_multiple'] },
      required: { field: 'operation', value: ['create_multiple', 'update_multiple'] },
    },
    {
      id: 'records',
      title: 'Records',
      type: 'long-input',
      placeholder: 'JSON array of records (e.g., [{"name": "Contoso"}, {"name": "Fabrikam"}])',
      condition: { field: 'operation', value: ['create_multiple', 'update_multiple'] },
      required: { field: 'operation', value: ['create_multiple', 'update_multiple'] },
      wandConfig: {
        enabled: true,
        prompt: `Generate a JSON array of Dataverse records based on the user's description.
Each record should be an object with column logical names as keys.
For UpdateMultiple, each record must include its primary key (e.g., accountid).
Common column naming conventions:
- Text: "name", "description", "emailaddress1", "telephone1"
- Choice/OptionSet: integer values (e.g., "statecode": 0)
- Date: ISO 8601 format (e.g., "2024-01-15T00:00:00Z")
Return ONLY a valid JSON array - no explanations, no markdown code blocks.`,
        placeholder: 'Describe the records you want to create or update...',
        generationType: 'json-object',
      },
    },
    // File operations
    {
      id: 'fileColumn',
      title: 'File Column',
      type: 'short-input',
      placeholder: 'File column logical name (e.g., entityimage, cr_document)',
      condition: { field: 'operation', value: ['upload_file', 'download_file'] },
      required: { field: 'operation', value: ['upload_file', 'download_file'] },
    },
    {
      id: 'fileName',
      title: 'File Name',
      type: 'short-input',
      placeholder: 'e.g., document.pdf',
      condition: { field: 'operation', value: 'upload_file' },
      required: { field: 'operation', value: 'upload_file' },
    },
    // Basic-mode file input; shares the canonical `file` param with
    // fileReference below — normalizeFileInput reconciles the two.
    {
      id: 'uploadFile',
      title: 'File',
      type: 'file-upload',
      canonicalParamId: 'file',
      placeholder: 'Upload a file',
      condition: { field: 'operation', value: 'upload_file' },
      mode: 'basic',
      multiple: false,
      required: { field: 'operation', value: 'upload_file' },
    },
    // Advanced-mode counterpart of uploadFile (block-reference input).
    {
      id: 'fileReference',
      title: 'File',
      type: 'short-input',
      canonicalParamId: 'file',
      placeholder: 'Reference a file from previous blocks (e.g., {{block_1.output.file}})',
      condition: { field: 'operation', value: 'upload_file' },
      mode: 'advanced',
      required: { field: 'operation', value: 'upload_file' },
    },
    // OData query options (list_records)
    {
      id: 'select',
      title: 'Select Columns',
      type: 'short-input',
      placeholder: 'Comma-separated columns (e.g., name,telephone1,emailaddress1)',
      condition: { field: 'operation', value: ['list_records', 'get_record'] },
      mode: 'advanced',
      wandConfig: {
        enabled: true,
        prompt: `Generate a comma-separated list of Dataverse column logical names based on the user's description.
Use lowercase logical names without spaces.
Common columns by table:
- Accounts: name, accountnumber, telephone1, emailaddress1, address1_city, revenue, industrycode
- Contacts: firstname, lastname, fullname, emailaddress1, telephone1, jobtitle, birthdate
- General: statecode, statuscode, createdon, modifiedon, ownerid, createdby
Return ONLY the comma-separated column names - no explanations.`,
        placeholder: 'Describe which columns you want to retrieve...',
        generationType: 'odata-expression',
      },
    },
    {
      id: 'filter',
      title: 'Filter',
      type: 'short-input',
      placeholder: "OData filter (e.g., statecode eq 0 and contains(name,'Contoso'))",
      condition: { field: 'operation', value: ['list_records', 'search'] },
      wandConfig: {
        enabled: true,
        prompt: `Generate an OData $filter expression for the Dataverse Web API based on the user's description.
OData filter syntax:
- Comparison: eq, ne, gt, ge, lt, le (e.g., "revenue gt 1000000")
- Logical: and, or, not (e.g., "statecode eq 0 and revenue gt 1000000")
- String functions: contains(name,'value'), startswith(name,'value'), endswith(name,'value')
- Date functions: year(createdon) eq 2024, month(createdon) eq 1
- Null check: fieldname eq null, fieldname ne null
- Status: statecode eq 0 (active), statecode eq 1 (inactive)
Return ONLY the filter expression - no $filter= prefix, no explanations.`,
        placeholder: 'Describe which records you want to filter for...',
        generationType: 'odata-expression',
      },
    },
    {
      id: 'orderBy',
      title: 'Order By',
      type: 'short-input',
      placeholder: 'e.g., name asc, createdon desc',
      condition: { field: 'operation', value: ['list_records', 'search'] },
      mode: 'advanced',
      wandConfig: {
        enabled: true,
        prompt: `Generate an OData $orderby expression for sorting Dataverse records based on the user's description.
Format: column_name asc|desc, separated by commas for multi-column sort.
Examples:
- "name asc" - Sort by name alphabetically
- "createdon desc" - Sort by creation date, newest first
- "name asc, createdon desc" - Sort by name, then by date
Return ONLY the orderby expression - no $orderby= prefix, no explanations.`,
        placeholder: 'Describe how you want to sort the results...',
        generationType: 'odata-expression',
      },
    },
    {
      id: 'top',
      title: 'Max Results',
      type: 'short-input',
      placeholder: 'Maximum number of records (default: 5000)',
      condition: { field: 'operation', value: ['list_records', 'search'] },
      mode: 'advanced',
    },
    {
      id: 'expand',
      title: 'Expand',
      type: 'short-input',
      placeholder: 'Navigation properties to expand (e.g., primarycontactid)',
      condition: { field: 'operation', value: ['list_records', 'get_record'] },
      mode: 'advanced',
      wandConfig: {
        enabled: true,
        prompt: `Generate an OData $expand expression for the Dataverse Web API based on the user's description.
$expand retrieves related records through navigation properties.
Examples:
- "primarycontactid" - Expand the primary contact lookup
- "contact_customer_accounts" - Expand related contacts for an account
- "primarycontactid($select=fullname,emailaddress1)" - Expand with selected columns
- "contact_customer_accounts($select=fullname;$top=5;$orderby=fullname asc)" - Expand with query options
Return ONLY the expand expression - no $expand= prefix, no explanations.`,
        placeholder: 'Describe which related records you want to include...',
        generationType: 'odata-expression',
      },
    },
    // Associate/Disassociate
    {
      id: 'navigationProperty',
      title: 'Navigation Property',
      type: 'short-input',
      placeholder: 'e.g., contact_customer_accounts',
      condition: { field: 'operation', value: ['associate', 'disassociate'] },
      required: { field: 'operation', value: ['associate', 'disassociate'] },
    },
    {
      id: 'navigationType',
      title: 'Navigation Type',
      type: 'dropdown',
      options: [
        { label: 'Collection-valued (default)', id: 'collection' },
        { label: 'Single-valued (lookup)', id: 'single' },
      ],
      value: () => 'collection',
      condition: { field: 'operation', value: 'associate' },
      mode: 'advanced',
    },
    {
      id: 'targetEntitySetName',
      title: 'Target Entity Set',
      type: 'short-input',
      placeholder: 'Target table name (e.g., contacts)',
      condition: { field: 'operation', value: 'associate' },
      required: { field: 'operation', value: 'associate' },
    },
    {
      id: 'targetRecordId',
      title: 'Target Record ID',
      type: 'short-input',
      placeholder: 'Target record GUID',
      condition: { field: 'operation', value: ['associate', 'disassociate'] },
      required: { field: 'operation', value: 'associate' },
    },
  ],
  tools: {
    // One tool per operation id; keep in sync with the operation dropdown above.
    access: [
      'microsoft_dataverse_associate',
      'microsoft_dataverse_create_multiple',
      'microsoft_dataverse_create_record',
      'microsoft_dataverse_delete_record',
      'microsoft_dataverse_disassociate',
      'microsoft_dataverse_download_file',
      'microsoft_dataverse_execute_action',
      'microsoft_dataverse_execute_function',
      'microsoft_dataverse_fetchxml_query',
      'microsoft_dataverse_get_record',
      'microsoft_dataverse_list_records',
      'microsoft_dataverse_search',
      'microsoft_dataverse_update_multiple',
      'microsoft_dataverse_update_record',
      'microsoft_dataverse_upload_file',
      'microsoft_dataverse_upsert_record',
      'microsoft_dataverse_whoami',
    ],
    config: {
      // Tool id is derived directly from the selected operation.
      tool: (params) => `microsoft_dataverse_${params.operation}`,
      // Translates sub-block values into the tool's request params:
      // normalizes the file input, renames mismatched sub-block ids, and
      // drops empty values.
      params: (params) => {
        const { credential, operation, file, ...rest } = params
        const cleanParams: Record<string, unknown> = {
          credential,
        }

        // Normalize file input from basic (uploadFile) or advanced (fileReference) mode
        const normalizedFile = normalizeFileInput(file, { single: true })
        if (normalizedFile) {
          cleanParams.file = normalizedFile
        }

        // Map block subBlock IDs to tool param names where they differ
        if (operation === 'search' && rest.searchEntities) {
          cleanParams.entities = rest.searchEntities
          rest.searchEntities = undefined
        }
        if (operation === 'execute_function' && rest.functionParameters) {
          cleanParams.parameters = rest.functionParameters
          rest.functionParameters = undefined
          // Prevent stale action parameters from overwriting mapped function parameters
          rest.parameters = undefined
        }

        // Always clean up mapped subBlock IDs so they don't leak through the loop below
        // NOTE(review): this makes the conditional clears above redundant, but harmless.
        rest.searchEntities = undefined
        rest.functionParameters = undefined

        // Forward every remaining non-empty value unchanged.
        Object.entries(rest).forEach(([key, value]) => {
          if (value !== undefined && value !== null && value !== '') {
            cleanParams[key] = value
          }
        })

        return cleanParams
      },
    },
  },
  // Declared input schema for the block (superset across all operations).
  inputs: {
    operation: { type: 'string', description: 'Operation to perform' },
    credential: { type: 'string', description: 'Microsoft Dataverse OAuth credential' },
    environmentUrl: { type: 'string', description: 'Dataverse environment URL' },
    entitySetName: { type: 'string', description: 'Entity set name (plural table name)' },
    recordId: { type: 'string', description: 'Record GUID' },
    data: { type: 'json', description: 'Record data as JSON object' },
    select: { type: 'string', description: 'Columns to return (comma-separated)' },
    filter: { type: 'string', description: 'OData $filter expression' },
    orderBy: { type: 'string', description: 'OData $orderby expression' },
    top: { type: 'string', description: 'Maximum number of records' },
    expand: { type: 'string', description: 'Navigation properties to expand' },
    navigationProperty: {
      type: 'string',
      description: 'Navigation property name for associations',
    },
    navigationType: {
      type: 'string',
      description:
        'Navigation property type: "collection" (default) or "single" (for lookup fields)',
    },
    targetEntitySetName: { type: 'string', description: 'Target entity set for association' },
    targetRecordId: { type: 'string', description: 'Target record GUID for association' },
    fetchXml: { type: 'string', description: 'FetchXML query string' },
    searchTerm: { type: 'string', description: 'Search text for relevance search' },
    searchEntities: { type: 'string', description: 'JSON array of search entity configurations' },
    searchMode: { type: 'string', description: 'Search mode: "any" or "all"' },
    searchType: { type: 'string', description: 'Query type: "simple" or "lucene"' },
    actionName: { type: 'string', description: 'Dataverse action name to execute' },
    functionName: { type: 'string', description: 'Dataverse function name to execute' },
    functionParameters: {
      type: 'string',
      description: 'Function parameters as URL-encoded string',
    },
    parameters: { type: 'json', description: 'Action parameters as JSON object' },
    entityLogicalName: { type: 'string', description: 'Table logical name for @odata.type' },
    records: { type: 'json', description: 'Array of record objects for bulk operations' },
    fileColumn: { type: 'string', description: 'File or image column logical name' },
    fileName: { type: 'string', description: 'Name of the file to upload' },
    file: { type: 'json', description: 'File to upload (canonical param)' },
  },
  // Declared output schema (union across all operations; each tool fills a subset).
  outputs: {
    records: { type: 'json', description: 'Array of records (list/fetchxml/search)' },
    record: { type: 'json', description: 'Single record data' },
    recordId: { type: 'string', description: 'Record ID' },
    count: { type: 'number', description: 'Number of records returned in the current page' },
    totalCount: {
      type: 'number',
      description: 'Total matching records server-side',
    },
    nextLink: { type: 'string', description: 'URL for next page of results' },
    created: { type: 'boolean', description: 'Whether a new record was created (upsert)' },
    userId: { type: 'string', description: 'Authenticated user ID (WhoAmI)' },
    businessUnitId: { type: 'string', description: 'Business unit ID (WhoAmI)' },
    organizationId: { type: 'string', description: 'Organization ID (WhoAmI)' },
    entitySetName: {
      type: 'string',
      description: 'Source entity set name (associate/disassociate)',
    },
    navigationProperty: {
      type: 'string',
      description: 'Navigation property used (associate/disassociate)',
    },
    targetEntitySetName: { type: 'string', description: 'Target entity set name (associate)' },
    targetRecordId: { type: 'string', description: 'Target record GUID (associate/disassociate)' },
    success: { type: 'boolean', description: 'Operation success status' },
    result: { type: 'json', description: 'Action/function result data' },
    ids: { type: 'json', description: 'Array of created record IDs (create multiple)' },
    fetchXmlPagingCookie: { type: 'string', description: 'Paging cookie for FetchXML pagination' },
    moreRecords: { type: 'boolean', description: 'Whether more records are available (FetchXML)' },
    results: { type: 'json', description: 'Search results array' },
    facets: { type: 'json', description: 'Facet results for search (when facets requested)' },
    fileContent: { type: 'string', description: 'Base64-encoded downloaded file content' },
    fileName: { type: 'string', description: 'Downloaded file name' },
    fileSize: { type: 'number', description: 'File size in bytes' },
    mimeType: { type: 'string', description: 'File MIME type' },
    fileColumn: { type: 'string', description: 'File column name' },
  },
}

View File

@@ -1,679 +0,0 @@
import { TableIcon } from '@/components/icons'
import { TABLE_LIMITS } from '@/lib/table/constants'
import { filterRulesToFilter, sortRulesToSort } from '@/lib/table/query-builder/converters'
import type { BlockConfig } from '@/blocks/types'
import type { TableQueryResponse } from '@/tools/table/types'
/**
 * Parses a string as JSON, surfacing an actionable hint when it fails.
 *
 * A frequent authoring mistake is embedding an unquoted block reference as a
 * value (e.g. {"field": <ref>} instead of {"field": "<ref>"}); the thrown
 * error calls that case out specifically.
 *
 * @param value - Raw value; non-strings are returned untouched
 * @param fieldName - Field label used in the error message
 * @returns The parsed JSON value
 * @throws Error describing the parse failure plus a fix-it hint
 */
function parseJSON(value: string | unknown, fieldName: string): unknown {
  if (typeof value !== 'string') {
    return value
  }
  try {
    return JSON.parse(value)
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error)
    // A bare word in value position (": someRef}" / ": someRef,") trips this pattern.
    const looksUnquoted = /:\s*([a-zA-Z][a-zA-Z0-9_\s]*[a-zA-Z0-9]|[a-zA-Z])\s*[,}]/.test(value)
    const hint = looksUnquoted
      ? 'It looks like a string value is not quoted. When using block references in JSON, wrap them in double quotes: {"field": "<blockName.output>"} not {"field": <blockName.output>}.'
      : 'Make sure all property names are in double quotes (e.g., {"name": "value"} not {name: "value"}).'
    throw new Error(`Invalid JSON in ${fieldName}: ${reason}. ${hint}`)
  }
}
/** Raw params from block UI before JSON parsing and type conversion */
interface TableBlockParams {
  // Selected operation id (e.g. 'query_rows', 'insert_row')
  operation: string
  // Target table identifier from the table selector
  tableId?: string
  // Row id for the ID-based get/update/delete operations
  rowId?: string
  // Single-row data: JSON string from the code editor, or an already-parsed object
  data?: string | unknown
  // Batch rows: JSON string or already-parsed array (batch_insert_rows)
  rows?: string | unknown
  // Filter criteria: JSON string or already-parsed object
  filter?: string | unknown
  // Sort order: JSON string or already-parsed object
  sort?: string | unknown
  // Numeric strings as entered in the short-input fields
  limit?: string
  offset?: string
  // 'builder' or 'json' — input mode for query_rows filter/sort
  builderMode?: string
  // Visual builder rule payloads (shape owned by the builder components)
  filterBuilder?: unknown
  sortBuilder?: unknown
  // 'builder' or 'json' — input mode for bulk update/delete filters
  bulkFilterMode?: string
  bulkFilterBuilder?: unknown
}
/** Normalized params after parsing, ready for tool request body */
interface ParsedParams {
  tableId?: string
  rowId?: string
  // Parsed JSON row data
  data?: unknown
  // Parsed array of rows for batch insert
  rows?: unknown
  // Parsed (or builder-derived) filter object
  filter?: unknown
  // Parsed (or builder-derived) sort object
  sort?: unknown
  // Numeric limit/offset converted from the string inputs
  limit?: number
  offset?: number
}
/**
 * Resolves the filter for bulk update/delete operations.
 * Builder mode converts the visual filter rules; otherwise the raw JSON
 * editor value is parsed (when present).
 */
function resolveBulkFilter(params: TableBlockParams): unknown {
  if (params.bulkFilterMode === 'builder' && params.bulkFilterBuilder) {
    return (
      filterRulesToFilter(params.bulkFilterBuilder as Parameters<typeof filterRulesToFilter>[0]) ||
      undefined
    )
  }
  return params.filter ? parseJSON(params.filter, 'Filter') : undefined
}

/**
 * Parses an optional numeric string with an explicit base-10 radix,
 * falling back to `fallback` (undefined by default) when the string is empty.
 */
function toInt(value: string | undefined, fallback?: number): number | undefined {
  return value ? Number.parseInt(value, 10) : fallback
}

/** Transforms raw block params into tool request params for each operation */
const paramTransformers: Record<string, (params: TableBlockParams) => ParsedParams> = {
  insert_row: (params) => ({
    tableId: params.tableId,
    data: parseJSON(params.data, 'Row Data'),
  }),
  upsert_row: (params) => ({
    tableId: params.tableId,
    data: parseJSON(params.data, 'Row Data'),
  }),
  batch_insert_rows: (params) => ({
    tableId: params.tableId,
    rows: parseJSON(params.rows, 'Rows Data'),
  }),
  update_row: (params) => ({
    tableId: params.tableId,
    rowId: params.rowId,
    data: parseJSON(params.data, 'Row Data'),
  }),
  update_rows_by_filter: (params) => ({
    tableId: params.tableId,
    filter: resolveBulkFilter(params),
    data: parseJSON(params.data, 'Row Data'),
    limit: toInt(params.limit),
  }),
  delete_row: (params) => ({
    tableId: params.tableId,
    rowId: params.rowId,
  }),
  delete_rows_by_filter: (params) => ({
    tableId: params.tableId,
    filter: resolveBulkFilter(params),
    limit: toInt(params.limit),
  }),
  get_row: (params) => ({
    tableId: params.tableId,
    rowId: params.rowId,
  }),
  get_schema: (params) => ({
    tableId: params.tableId,
  }),
  query_rows: (params) => {
    // Builder mode takes precedence for both filter and sort; otherwise the
    // raw JSON editor values are parsed when present.
    let filter: unknown
    if (params.builderMode === 'builder' && params.filterBuilder) {
      filter =
        filterRulesToFilter(params.filterBuilder as Parameters<typeof filterRulesToFilter>[0]) ||
        undefined
    } else if (params.filter) {
      filter = parseJSON(params.filter, 'Filter')
    }
    let sort: unknown
    if (params.builderMode === 'builder' && params.sortBuilder) {
      sort =
        sortRulesToSort(params.sortBuilder as Parameters<typeof sortRulesToSort>[0]) || undefined
    } else if (params.sort) {
      sort = parseJSON(params.sort, 'Sort')
    }
    return {
      tableId: params.tableId,
      filter,
      sort,
      // query_rows has UI defaults (100 / 0) applied when fields are empty
      limit: toInt(params.limit, 100),
      offset: toInt(params.offset, 0),
    }
  },
}
/**
 * Table block: create, query, update, and delete rows in user-defined data
 * tables. The selected `operation` routes to the matching `table_*` tool and
 * raw UI params are normalized by `paramTransformers` before being sent as
 * the tool request body.
 */
export const TableBlock: BlockConfig<TableQueryResponse> = {
  type: 'table',
  name: 'Table',
  description: 'User-defined data tables',
  longDescription:
    'Create and manage custom data tables. Store, query, and manipulate structured data within workflows.',
  docsLink: 'https://docs.simstudio.ai/tools/table',
  category: 'blocks',
  bgColor: '#10B981',
  icon: TableIcon,
  subBlocks: [
    // Operation picker; its value drives the `condition` visibility of every other subblock
    {
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      options: [
        { label: 'Query Rows', id: 'query_rows' },
        { label: 'Insert Row', id: 'insert_row' },
        { label: 'Upsert Row', id: 'upsert_row' },
        { label: 'Batch Insert Rows', id: 'batch_insert_rows' },
        { label: 'Update Rows by Filter', id: 'update_rows_by_filter' },
        { label: 'Delete Rows by Filter', id: 'delete_rows_by_filter' },
        { label: 'Update Row by ID', id: 'update_row' },
        { label: 'Delete Row by ID', id: 'delete_row' },
        { label: 'Get Row by ID', id: 'get_row' },
        { label: 'Get Schema', id: 'get_schema' },
      ],
      value: () => 'query_rows',
    },
    // Table selector (for all operations)
    {
      id: 'tableId',
      title: 'Table',
      type: 'table-selector',
      placeholder: 'Select a table',
      required: true,
    },
    // Row ID for get/update/delete
    {
      id: 'rowId',
      title: 'Row ID',
      type: 'short-input',
      placeholder: 'row_xxxxx',
      condition: { field: 'operation', value: ['get_row', 'update_row', 'delete_row'] },
      required: true,
    },
    // Insert/Update/Upsert Row data (single row); wand generates JSON from the table schema
    {
      id: 'data',
      title: 'Row Data (JSON)',
      type: 'code',
      placeholder: '{"column_name": "value"}',
      condition: {
        field: 'operation',
        value: ['insert_row', 'upsert_row', 'update_row', 'update_rows_by_filter'],
      },
      required: true,
      wandConfig: {
        enabled: true,
        maintainHistory: true,
        prompt: `Generate row data as a JSON object matching the table's column schema.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON object with field values based on the table's columns. No explanations or markdown.
IMPORTANT: Reference the table schema visible in the table selector to know which columns exist and their types.
### EXAMPLES
Table with columns: email (string), name (string), age (number)
"user with email john@example.com and age 25"
→ {"email": "john@example.com", "name": "John", "age": 25}
Table with columns: customer_id (string), total (number), status (string)
"order with customer ID 123, total 99.99, status pending"
→ {"customer_id": "123", "total": 99.99, "status": "pending"}
Return ONLY the data JSON:`,
        generationType: 'table-schema',
      },
    },
    // Batch Insert - multiple rows
    {
      id: 'rows',
      title: 'Rows Data (Array of JSON)',
      type: 'code',
      placeholder: '[{"col1": "val1"}, {"col1": "val2"}]',
      condition: { field: 'operation', value: 'batch_insert_rows' },
      required: true,
      wandConfig: {
        enabled: true,
        maintainHistory: true,
        prompt: `Generate an array of row data objects matching the table's column schema.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON array of objects. Each object represents one row. No explanations or markdown.
Maximum ${TABLE_LIMITS.MAX_BATCH_INSERT_SIZE} rows per batch.
IMPORTANT: Reference the table schema to know which columns exist and their types.
### EXAMPLES
Table with columns: email (string), name (string), age (number)
"3 users: john@example.com age 25, jane@example.com age 30, bob@example.com age 28"
→ [
{"email": "john@example.com", "name": "John", "age": 25},
{"email": "jane@example.com", "name": "Jane", "age": 30},
{"email": "bob@example.com", "name": "Bob", "age": 28}
]
Return ONLY the rows array:`,
        generationType: 'table-schema',
      },
    },
    // Filter mode selector for bulk operations
    {
      id: 'bulkFilterMode',
      title: 'Filter Mode',
      type: 'dropdown',
      options: [
        { label: 'Builder', id: 'builder' },
        { label: 'Editor', id: 'json' },
      ],
      value: () => 'builder',
      condition: {
        field: 'operation',
        value: ['update_rows_by_filter', 'delete_rows_by_filter'],
      },
    },
    // Filter builder for bulk operations (visual)
    {
      id: 'bulkFilterBuilder',
      title: 'Filter Conditions',
      type: 'filter-builder',
      required: {
        field: 'operation',
        value: ['update_rows_by_filter', 'delete_rows_by_filter'],
      },
      condition: {
        field: 'operation',
        value: ['update_rows_by_filter', 'delete_rows_by_filter'],
        and: { field: 'bulkFilterMode', value: 'builder' },
      },
    },
    // Filter for update/delete operations (JSON editor - bulk ops)
    {
      id: 'filter',
      title: 'Filter',
      type: 'code',
      placeholder: '{"column_name": {"$eq": "value"}}',
      condition: {
        field: 'operation',
        value: ['update_rows_by_filter', 'delete_rows_by_filter'],
        and: { field: 'bulkFilterMode', value: 'json' },
      },
      required: true,
      wandConfig: {
        enabled: true,
        maintainHistory: true,
        prompt: `Generate filter criteria for selecting rows in a table.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON filter object. No explanations or markdown.
IMPORTANT: Reference the table schema to know which columns exist and their types.
### OPERATORS
- **$eq**: Equals - {"column": {"$eq": "value"}} or {"column": "value"}
- **$ne**: Not equals - {"column": {"$ne": "value"}}
- **$gt**: Greater than - {"column": {"$gt": 18}}
- **$gte**: Greater than or equal - {"column": {"$gte": 100}}
- **$lt**: Less than - {"column": {"$lt": 90}}
- **$lte**: Less than or equal - {"column": {"$lte": 5}}
- **$in**: In array - {"column": {"$in": ["value1", "value2"]}}
- **$nin**: Not in array - {"column": {"$nin": ["value1", "value2"]}}
- **$contains**: String contains - {"column": {"$contains": "text"}}
### EXAMPLES
"rows where status is active"
→ {"status": "active"}
"rows where age is over 18 and status is pending"
→ {"age": {"$gte": 18}, "status": "pending"}
"rows where email contains gmail.com"
→ {"email": {"$contains": "gmail.com"}}
Return ONLY the filter JSON:`,
        generationType: 'table-schema',
      },
    },
    // Builder mode selector for query_rows (controls both filter and sort)
    {
      id: 'builderMode',
      title: 'Input Mode',
      type: 'dropdown',
      options: [
        { label: 'Builder', id: 'builder' },
        { label: 'Editor', id: 'json' },
      ],
      value: () => 'builder',
      condition: { field: 'operation', value: 'query_rows' },
    },
    // Filter builder (visual)
    {
      id: 'filterBuilder',
      title: 'Filter Conditions',
      type: 'filter-builder',
      condition: {
        field: 'operation',
        value: 'query_rows',
        and: { field: 'builderMode', value: 'builder' },
      },
    },
    // Sort builder (visual)
    {
      id: 'sortBuilder',
      title: 'Sort Order',
      type: 'sort-builder',
      condition: {
        field: 'operation',
        value: 'query_rows',
        and: { field: 'builderMode', value: 'builder' },
      },
    },
    // Filter for query_rows (JSON editor mode or tool call context)
    {
      id: 'filter',
      title: 'Filter',
      type: 'code',
      placeholder: '{"column_name": {"$eq": "value"}}',
      condition: {
        field: 'operation',
        value: 'query_rows',
        and: { field: 'builderMode', value: 'builder', not: true },
      },
      wandConfig: {
        enabled: true,
        maintainHistory: true,
        prompt: `Generate filter criteria for selecting rows in a table.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON filter object. No explanations or markdown.
IMPORTANT: Reference the table schema to know which columns exist and their types.
### OPERATORS
- **$eq**: Equals - {"column": {"$eq": "value"}} or {"column": "value"}
- **$ne**: Not equals - {"column": {"$ne": "value"}}
- **$gt**: Greater than - {"column": {"$gt": 18}}
- **$gte**: Greater than or equal - {"column": {"$gte": 100}}
- **$lt**: Less than - {"column": {"$lt": 90}}
- **$lte**: Less than or equal - {"column": {"$lte": 5}}
- **$in**: In array - {"column": {"$in": ["value1", "value2"]}}
- **$nin**: Not in array - {"column": {"$nin": ["value1", "value2"]}}
- **$contains**: String contains - {"column": {"$contains": "text"}}
### EXAMPLES
"rows where status is active"
→ {"status": "active"}
"rows where age is over 18 and status is pending"
→ {"age": {"$gte": 18}, "status": "pending"}
"rows where email contains gmail.com"
→ {"email": {"$contains": "gmail.com"}}
Return ONLY the filter JSON:`,
        generationType: 'table-schema',
      },
    },
    // Sort (JSON editor or tool call context)
    {
      id: 'sort',
      title: 'Sort',
      type: 'code',
      placeholder: '{"column_name": "desc"}',
      condition: {
        field: 'operation',
        value: 'query_rows',
        and: { field: 'builderMode', value: 'builder', not: true },
      },
      wandConfig: {
        enabled: true,
        maintainHistory: true,
        prompt: `Generate sort order for table query results.
### CONTEXT
{context}
### INSTRUCTION
Return ONLY a valid JSON object specifying sort order. No explanations or markdown.
IMPORTANT: Reference the table schema to know which columns exist. You can sort by any column or the built-in columns (createdAt, updatedAt).
### FORMAT
{"column_name": "asc" or "desc"}
You can specify multiple columns for multi-level sorting.
### EXAMPLES
Table with columns: name (string), age (number), email (string), createdAt (date)
"sort by newest first"
→ {"createdAt": "desc"}
"sort by name alphabetically"
→ {"name": "asc"}
"sort by age descending"
→ {"age": "desc"}
"sort by age descending, then name ascending"
→ {"age": "desc", "name": "asc"}
"sort by oldest created first"
→ {"createdAt": "asc"}
Return ONLY the sort JSON:`,
        generationType: 'table-schema',
      },
    },
    // Limit applies to queries and to bulk update/delete
    {
      id: 'limit',
      title: 'Limit',
      type: 'short-input',
      placeholder: '100',
      condition: {
        field: 'operation',
        value: ['query_rows', 'update_rows_by_filter', 'delete_rows_by_filter'],
      },
    },
    // Offset applies only to query pagination
    {
      id: 'offset',
      title: 'Offset',
      type: 'short-input',
      placeholder: '0',
      condition: { field: 'operation', value: 'query_rows' },
      value: () => '0',
    },
  ],
  // Tool routing: map the selected operation to its table_* tool and
  // normalize UI params via paramTransformers before dispatch.
  tools: {
    access: [
      'table_insert_row',
      'table_batch_insert_rows',
      'table_upsert_row',
      'table_update_row',
      'table_update_rows_by_filter',
      'table_delete_row',
      'table_delete_rows_by_filter',
      'table_query_rows',
      'table_get_row',
      'table_get_schema',
    ],
    config: {
      tool: (params) => {
        const toolMap: Record<string, string> = {
          insert_row: 'table_insert_row',
          batch_insert_rows: 'table_batch_insert_rows',
          upsert_row: 'table_upsert_row',
          update_row: 'table_update_row',
          update_rows_by_filter: 'table_update_rows_by_filter',
          delete_row: 'table_delete_row',
          delete_rows_by_filter: 'table_delete_rows_by_filter',
          query_rows: 'table_query_rows',
          get_row: 'table_get_row',
          get_schema: 'table_get_schema',
        }
        // Fall back to querying when the operation is unrecognized
        return toolMap[params.operation] || 'table_query_rows'
      },
      params: (params) => {
        const { operation, ...rest } = params
        const transformer = paramTransformers[operation]
        if (transformer) {
          return transformer(rest as TableBlockParams)
        }
        // No transformer registered: pass params through unchanged
        return rest
      },
    },
  },
  // Input descriptors for the block's parameters
  inputs: {
    operation: { type: 'string', description: 'Table operation to perform' },
    tableId: { type: 'string', description: 'Table identifier' },
    data: { type: 'json', description: 'Row data for insert/update' },
    rows: { type: 'array', description: 'Array of row data for batch insert' },
    rowId: { type: 'string', description: 'Row identifier for ID-based operations' },
    bulkFilterMode: {
      type: 'string',
      description: 'Filter input mode for bulk operations (builder or json)',
    },
    bulkFilterBuilder: {
      type: 'json',
      description: 'Visual filter builder conditions for bulk operations',
    },
    filter: { type: 'json', description: 'Filter criteria for query/update/delete operations' },
    limit: { type: 'number', description: 'Query or bulk operation limit' },
    builderMode: {
      type: 'string',
      description: 'Input mode for filter and sort (builder or json)',
    },
    filterBuilder: { type: 'json', description: 'Visual filter builder conditions' },
    sortBuilder: { type: 'json', description: 'Visual sort builder conditions' },
    sort: { type: 'json', description: 'Sort order (JSON)' },
    offset: { type: 'number', description: 'Query result offset' },
  },
  // Output descriptors; each `condition` limits which outputs surface per operation
  outputs: {
    success: { type: 'boolean', description: 'Operation success status' },
    row: {
      type: 'json',
      description: 'Single row data',
      condition: {
        field: 'operation',
        value: ['get_row', 'insert_row', 'upsert_row', 'update_row'],
      },
    },
    operation: {
      type: 'string',
      description: 'Operation performed (insert or update)',
      condition: { field: 'operation', value: 'upsert_row' },
    },
    rows: {
      type: 'array',
      description: 'Array of rows',
      condition: { field: 'operation', value: ['query_rows', 'batch_insert_rows'] },
    },
    rowCount: {
      type: 'number',
      description: 'Number of rows returned',
      condition: { field: 'operation', value: 'query_rows' },
    },
    totalCount: {
      type: 'number',
      description: 'Total rows matching filter',
      condition: { field: 'operation', value: 'query_rows' },
    },
    insertedCount: {
      type: 'number',
      description: 'Number of rows inserted',
      condition: { field: 'operation', value: 'batch_insert_rows' },
    },
    updatedCount: {
      type: 'number',
      description: 'Number of rows updated',
      condition: { field: 'operation', value: 'update_rows_by_filter' },
    },
    updatedRowIds: {
      type: 'array',
      description: 'IDs of updated rows',
      condition: { field: 'operation', value: 'update_rows_by_filter' },
    },
    deletedCount: {
      type: 'number',
      description: 'Number of rows deleted',
      condition: { field: 'operation', value: ['delete_row', 'delete_rows_by_filter'] },
    },
    deletedRowIds: {
      type: 'array',
      description: 'IDs of deleted rows',
      condition: { field: 'operation', value: 'delete_rows_by_filter' },
    },
    name: {
      type: 'string',
      description: 'Table name',
      condition: { field: 'operation', value: 'get_schema' },
    },
    columns: {
      type: 'array',
      description: 'Column definitions',
      condition: { field: 'operation', value: 'get_schema' },
    },
    message: { type: 'string', description: 'Operation status message' },
  },
}

File diff suppressed because it is too large Load Diff

View File

@@ -50,7 +50,6 @@ export const YouTubeBlock: BlockConfig<YouTubeResponse> = {
step: 1,
integer: true,
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'pageToken',
@@ -58,7 +57,6 @@ export const YouTubeBlock: BlockConfig<YouTubeResponse> = {
type: 'short-input',
placeholder: 'Token for pagination (from nextPageToken)',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'channelId',
@@ -66,7 +64,6 @@ export const YouTubeBlock: BlockConfig<YouTubeResponse> = {
type: 'short-input',
placeholder: 'Filter results to a specific channel',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'eventType',
@@ -80,7 +77,6 @@ export const YouTubeBlock: BlockConfig<YouTubeResponse> = {
],
value: () => '',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'publishedAfter',
@@ -88,7 +84,6 @@ export const YouTubeBlock: BlockConfig<YouTubeResponse> = {
type: 'short-input',
placeholder: '2024-01-01T00:00:00Z',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
wandConfig: {
enabled: true,
prompt: `Generate an ISO 8601 timestamp based on the user's description.
@@ -111,7 +106,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: '2024-12-31T23:59:59Z',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
wandConfig: {
enabled: true,
prompt: `Generate an ISO 8601 timestamp based on the user's description.
@@ -140,7 +134,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
],
value: () => 'any',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'order',
@@ -155,7 +148,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
],
value: () => 'relevance',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'videoCategoryId',
@@ -163,7 +155,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: 'Use Get Video Categories to find IDs',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'videoDefinition',
@@ -176,7 +167,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
],
value: () => 'any',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'videoCaption',
@@ -189,7 +179,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
],
value: () => 'any',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'regionCode',
@@ -200,7 +189,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
field: 'operation',
value: ['youtube_search', 'youtube_trending', 'youtube_video_categories'],
},
mode: 'advanced',
},
{
id: 'relevanceLanguage',
@@ -208,7 +196,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: 'en, es, fr',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
{
id: 'safeSearch',
@@ -221,7 +208,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
],
value: () => 'moderate',
condition: { field: 'operation', value: 'youtube_search' },
mode: 'advanced',
},
// Get Trending Videos operation inputs
{
@@ -240,7 +226,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: 'Use Get Video Categories to find IDs',
condition: { field: 'operation', value: 'youtube_trending' },
mode: 'advanced',
},
{
id: 'pageToken',
@@ -248,7 +233,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: 'Token for pagination (from nextPageToken)',
condition: { field: 'operation', value: 'youtube_trending' },
mode: 'advanced',
},
// Get Video Details operation inputs
{
@@ -266,7 +250,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: 'en, es, fr (for category names)',
condition: { field: 'operation', value: 'youtube_video_categories' },
mode: 'advanced',
},
// Get Channel Info operation inputs
{
@@ -315,7 +298,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
],
value: () => 'date',
condition: { field: 'operation', value: 'youtube_channel_videos' },
mode: 'advanced',
},
{
id: 'pageToken',
@@ -323,7 +305,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: 'Token for pagination (from nextPageToken)',
condition: { field: 'operation', value: 'youtube_channel_videos' },
mode: 'advanced',
},
// Get Channel Playlists operation inputs
{
@@ -350,7 +331,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: 'Token for pagination (from nextPageToken)',
condition: { field: 'operation', value: 'youtube_channel_playlists' },
mode: 'advanced',
},
// Get Playlist Items operation inputs
{
@@ -377,7 +357,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: 'Token for pagination (from nextPageToken)',
condition: { field: 'operation', value: 'youtube_playlist_items' },
mode: 'advanced',
},
// Get Video Comments operation inputs
{
@@ -408,7 +387,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
],
value: () => 'relevance',
condition: { field: 'operation', value: 'youtube_comments' },
mode: 'advanced',
},
{
id: 'pageToken',
@@ -416,7 +394,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'short-input',
placeholder: 'Token for pagination (from nextPageToken)',
condition: { field: 'operation', value: 'youtube_comments' },
mode: 'advanced',
},
// API Key (common to all operations)
{

View File

@@ -16,7 +16,6 @@ import { ChatTriggerBlock } from '@/blocks/blocks/chat_trigger'
import { CirclebackBlock } from '@/blocks/blocks/circleback'
import { ClayBlock } from '@/blocks/blocks/clay'
import { ClerkBlock } from '@/blocks/blocks/clerk'
import { CloudflareBlock } from '@/blocks/blocks/cloudflare'
import { ConditionBlock } from '@/blocks/blocks/condition'
import { ConfluenceBlock, ConfluenceV2Block } from '@/blocks/blocks/confluence'
import { CursorBlock, CursorV2Block } from '@/blocks/blocks/cursor'
@@ -79,7 +78,6 @@ import { ManualTriggerBlock } from '@/blocks/blocks/manual_trigger'
import { McpBlock } from '@/blocks/blocks/mcp'
import { Mem0Block } from '@/blocks/blocks/mem0'
import { MemoryBlock } from '@/blocks/blocks/memory'
import { MicrosoftDataverseBlock } from '@/blocks/blocks/microsoft_dataverse'
import { MicrosoftExcelBlock, MicrosoftExcelV2Block } from '@/blocks/blocks/microsoft_excel'
import { MicrosoftPlannerBlock } from '@/blocks/blocks/microsoft_planner'
import { MicrosoftTeamsBlock } from '@/blocks/blocks/microsoft_teams'
@@ -136,7 +134,6 @@ import { StarterBlock } from '@/blocks/blocks/starter'
import { StripeBlock } from '@/blocks/blocks/stripe'
import { SttBlock, SttV2Block } from '@/blocks/blocks/stt'
import { SupabaseBlock } from '@/blocks/blocks/supabase'
import { TableBlock } from '@/blocks/blocks/table'
import { TavilyBlock } from '@/blocks/blocks/tavily'
import { TelegramBlock } from '@/blocks/blocks/telegram'
import { TextractBlock, TextractV2Block } from '@/blocks/blocks/textract'
@@ -149,7 +146,6 @@ import { TwilioSMSBlock } from '@/blocks/blocks/twilio'
import { TwilioVoiceBlock } from '@/blocks/blocks/twilio_voice'
import { TypeformBlock } from '@/blocks/blocks/typeform'
import { VariablesBlock } from '@/blocks/blocks/variables'
import { VercelBlock } from '@/blocks/blocks/vercel'
import { VideoGeneratorBlock, VideoGeneratorV2Block } from '@/blocks/blocks/video_generator'
import { VisionBlock, VisionV2Block } from '@/blocks/blocks/vision'
import { WaitBlock } from '@/blocks/blocks/wait'
@@ -186,7 +182,6 @@ export const registry: Record<string, BlockConfig> = {
calendly: CalendlyBlock,
chat_trigger: ChatTriggerBlock,
circleback: CirclebackBlock,
cloudflare: CloudflareBlock,
clay: ClayBlock,
clerk: ClerkBlock,
condition: ConditionBlock,
@@ -263,7 +258,6 @@ export const registry: Record<string, BlockConfig> = {
mcp: McpBlock,
mem0: Mem0Block,
memory: MemoryBlock,
microsoft_dataverse: MicrosoftDataverseBlock,
microsoft_excel: MicrosoftExcelBlock,
microsoft_excel_v2: MicrosoftExcelV2Block,
microsoft_planner: MicrosoftPlannerBlock,
@@ -324,7 +318,6 @@ export const registry: Record<string, BlockConfig> = {
stt: SttBlock,
stt_v2: SttV2Block,
supabase: SupabaseBlock,
table: TableBlock,
tavily: TavilyBlock,
telegram: TelegramBlock,
textract: TextractBlock,
@@ -337,7 +330,6 @@ export const registry: Record<string, BlockConfig> = {
twilio_sms: TwilioSMSBlock,
twilio_voice: TwilioVoiceBlock,
typeform: TypeformBlock,
vercel: VercelBlock,
variables: VariablesBlock,
video_generator: VideoGeneratorBlock,
video_generator_v2: VideoGeneratorV2Block,

View File

@@ -27,7 +27,6 @@ export type GenerationType =
| 'typescript-function-body'
| 'json-schema'
| 'json-object'
| 'table-schema'
| 'system-prompt'
| 'custom-tool-schema'
| 'sql-query'
@@ -42,7 +41,6 @@ export type GenerationType =
| 'timestamp'
| 'timezone'
| 'cron-expression'
| 'odata-expression'
export type SubBlockType =
| 'short-input' // Single line input
@@ -78,8 +76,6 @@ export type SubBlockType =
| 'mcp-dynamic-args' // MCP dynamic arguments based on tool schema
| 'input-format' // Input structure format
| 'response-format' // Response structure format
| 'filter-builder' // Filter conditions builder
| 'sort-builder' // Sort conditions builder
/**
* @deprecated Legacy trigger save subblock type.
*/
@@ -92,7 +88,6 @@ export type SubBlockType =
| 'workflow-input-mapper' // Dynamic workflow input mapper based on selected workflow
| 'text' // Read-only text display
| 'router-input' // Router route definitions with descriptions
| 'table-selector' // Table selector with link to view table
/**
* Selector types that require display name hydration
@@ -112,7 +107,6 @@ export const SELECTOR_TYPES_HYDRATION_REQUIRED: SubBlockType[] = [
'variables-input',
'mcp-server-selector',
'mcp-tool-selector',
'table-selector',
] as const
export type ExtractToolOutput<T> = T extends ToolResponse ? T['output'] : never

View File

@@ -4407,161 +4407,6 @@ export function DatadogIcon(props: SVGProps<SVGSVGElement>) {
)
}
/**
 * Microsoft Dataverse brand icon (96x96 viewBox).
 *
 * Derives instance-unique ids with React's `useId` for the clip paths and
 * radial gradients declared in <defs>, so multiple copies of this icon can
 * render on one page without SVG id collisions. All other SVG attributes are
 * forwarded via `{...props}` (spread first, so callers can override
 * width/height/viewBox).
 */
export function MicrosoftDataverseIcon(props: SVGProps<SVGSVGElement>) {
  const id = useId()
  // Clip-path ids: outer frame plus two nested groups.
  const clip0 = `dataverse_clip0_${id}`
  const clip1 = `dataverse_clip1_${id}`
  const clip2 = `dataverse_clip2_${id}`
  // Radial-gradient fill ids referenced by the <path> elements below.
  const paint0 = `dataverse_paint0_${id}`
  const paint1 = `dataverse_paint1_${id}`
  const paint2 = `dataverse_paint2_${id}`
  const paint3 = `dataverse_paint3_${id}`
  const paint4 = `dataverse_paint4_${id}`
  const paint5 = `dataverse_paint5_${id}`
  const paint6 = `dataverse_paint6_${id}`
  return (
    <svg
      {...props}
      width='96'
      height='96'
      viewBox='0 0 96 96'
      fill='none'
      xmlns='http://www.w3.org/2000/svg'
    >
      <g clipPath={`url(#${clip0})`}>
        <g clipPath={`url(#${clip1})`}>
          <g clipPath={`url(#${clip2})`}>
            {/* Lower-left swoosh: base gradient plus a translucent overlay. */}
            <path
              d='M13.8776 21.8242C29.1033 8.13791 49.7501 8.1861 62.955 18.9134C74.9816 28.6836 77.4697 44.3159 70.851 55.7801C64.2321 67.2443 52.5277 70.1455 39.5011 62.6247L31.7286 76.087L31.7234 76.0862C27.4181 83.5324 17.8937 86.0828 10.4437 81.7817C7.45394 80.0556 5.25322 77.4879 3.96665 74.551L3.96096 74.5511C-4.07832 55.7804 0.200745 34.1184 13.8776 21.8242Z'
              fill={`url(#${paint0})`}
            />
            <path
              d='M13.8776 21.8242C29.1033 8.13791 49.7501 8.1861 62.955 18.9134C74.9816 28.6836 77.4697 44.3159 70.851 55.7801C64.2321 67.2443 52.5277 70.1455 39.5011 62.6247L31.7286 76.087L31.7234 76.0862C27.4181 83.5324 17.8937 86.0828 10.4437 81.7817C7.45394 80.0556 5.25322 77.4879 3.96665 74.551L3.96096 74.5511C-4.07832 55.7804 0.200745 34.1184 13.8776 21.8242Z'
              fill={`url(#${paint1})`}
              fillOpacity='0.8'
            />
            {/* Upper-right swoosh: base gradient plus a translucent overlay. */}
            <path
              d='M85.4327 14.2231C88.4528 15.9668 90.6686 18.569 91.9494 21.5433L91.9533 21.5444C99.9406 40.2943 95.6533 61.9068 81.9983 74.1814C66.7726 87.8677 46.1257 87.8196 32.9209 77.0923C20.8945 67.3221 18.4062 51.6897 25.0249 40.2256C31.6438 28.7614 43.3482 25.8601 56.3748 33.381L64.1434 19.9255L64.1482 19.9249C68.4516 12.4736 77.9805 9.92084 85.4327 14.2231Z'
              fill={`url(#${paint2})`}
            />
            <path
              d='M85.4327 14.2231C88.4528 15.9668 90.6686 18.569 91.9494 21.5433L91.9533 21.5444C99.9406 40.2943 95.6533 61.9068 81.9983 74.1814C66.7726 87.8677 46.1257 87.8196 32.9209 77.0923C20.8945 67.3221 18.4062 51.6897 25.0249 40.2256C31.6438 28.7614 43.3482 25.8601 56.3748 33.381L64.1434 19.9255L64.1482 19.9249C68.4516 12.4736 77.9805 9.92084 85.4327 14.2231Z'
              fill={`url(#${paint3})`}
              fillOpacity='0.9'
            />
            {/* Inner ring halves and center disc. */}
            <path
              d='M39.5041 62.6261C52.5307 70.1469 64.2352 67.2456 70.8541 55.7814C77.2488 44.7055 75.1426 29.7389 64.147 19.9271L56.3791 33.3814L39.5041 62.6261Z'
              fill={`url(#${paint4})`}
            />
            <path
              d='M56.3794 33.3815C43.3528 25.8607 31.6482 28.762 25.0294 40.2262C18.6347 51.3021 20.7409 66.2687 31.7364 76.0806L39.5043 62.6262L56.3794 33.3815Z'
              fill={`url(#${paint5})`}
            />
            <path
              d='M33.3215 56.4453C37.9837 64.5204 48.3094 67.2872 56.3846 62.625C64.4598 57.9628 67.2266 47.6371 62.5643 39.5619C57.9021 31.4867 47.5764 28.72 39.5013 33.3822C31.4261 38.0444 28.6593 48.3701 33.3215 56.4453Z'
              fill={`url(#${paint6})`}
            />
          </g>
        </g>
      </g>
      <defs>
        <radialGradient
          id={paint0}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(46.0001 49.4996) rotate(-148.717) scale(46.2195 47.5359)'
        >
          <stop offset='0.465088' stopColor='#09442A' />
          <stop offset='0.70088' stopColor='#136C6C' />
          <stop offset='1' stopColor='#22918B' />
        </radialGradient>
        <radialGradient
          id={paint1}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(50.0001 32.4996) rotate(123.57) scale(66.0095 46.5498)'
        >
          <stop offset='0.718705' stopColor='#1A7F7C' stopOpacity='0' />
          <stop offset='1' stopColor='#16BBDA' />
        </radialGradient>
        <radialGradient
          id={paint2}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(50.4999 44.5001) rotate(30.75) scale(45.9618 44.5095)'
        >
          <stop offset='0.358097' stopColor='#136C6C' />
          <stop offset='0.789474' stopColor='#42B870' />
          <stop offset='1' stopColor='#76D45E' />
        </radialGradient>
        <radialGradient
          id={paint3}
          cx='0'
          cy='0'
          r='1'
          gradientTransform='matrix(42.5 -36.0002 31.1824 36.8127 49.4998 55.5001)'
          gradientUnits='userSpaceOnUse'
        >
          <stop offset='0.583166' stopColor='#76D45E' stopOpacity='0' />
          <stop offset='1' stopColor='#C8F5B7' />
        </radialGradient>
        <radialGradient
          id={paint4}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(47.5 48) rotate(-58.9042) scale(32.6898)'
        >
          <stop offset='0.486266' stopColor='#22918B' />
          <stop offset='0.729599' stopColor='#42B870' />
          <stop offset='1' stopColor='#43E5CA' />
        </radialGradient>
        <radialGradient
          id={paint5}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(47.3833 49.0077) rotate(119.859) scale(31.1328 29.4032)'
        >
          <stop offset='0.459553' stopColor='#08494E' />
          <stop offset='0.742242' stopColor='#1A7F7C' />
          <stop offset='1' stopColor='#309C61' />
        </radialGradient>
        <radialGradient
          id={paint6}
          cx='0'
          cy='0'
          r='1'
          gradientUnits='userSpaceOnUse'
          gradientTransform='translate(52.5 40) rotate(120.784) scale(27.3542)'
        >
          <stop stopColor='#C8F5B7' />
          <stop offset='0.24583' stopColor='#98F0B0' />
          <stop offset='0.643961' stopColor='#52D17C' />
          <stop offset='1' stopColor='#119FC5' />
        </radialGradient>
        <clipPath id={clip0}>
          <rect width='96' height='96' fill='white' />
        </clipPath>
        <clipPath id={clip1}>
          <rect width='96' height='96' fill='white' />
        </clipPath>
        <clipPath id={clip2}>
          <rect width='95.9998' height='96' fill='white' />
        </clipPath>
      </defs>
    </svg>
  )
}
export function KalshiIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} viewBox='0 0 78 20' fill='currentColor' xmlns='http://www.w3.org/2000/svg'>
@@ -4964,26 +4809,6 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
)
}
/**
 * Generic table icon: a rounded 18x18 outline with two horizontal and two
 * vertical dividers, stroked in `currentColor` (lucide-style). Props are
 * spread last so callers can override any default attribute.
 */
export function TableIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      xmlns='http://www.w3.org/2000/svg'
      viewBox='0 0 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth={2}
      strokeLinecap='round'
      strokeLinejoin='round'
      {...props}
    >
      {/* Outer frame */}
      <rect width='18' height='18' x='3' y='3' rx='2' />
      {/* Row dividers */}
      <path d='M3 9h18' />
      <path d='M3 15h18' />
      {/* Column dividers */}
      <path d='M9 3v18' />
      <path d='M15 3v18' />
    </svg>
  )
}
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
@@ -5707,33 +5532,3 @@ export function OnePasswordIcon(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
/**
 * Vercel brand icon: the single light triangle, scaled to 85% and centered
 * inside the 256x222 viewBox by the group transform.
 */
export function VercelIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      {...props}
      viewBox='0 0 256 222'
      xmlns='http://www.w3.org/2000/svg'
      preserveAspectRatio='xMidYMid'
    >
      {/* translate + scale keeps the triangle centered after shrinking */}
      <g transform='translate(19.2 16.63) scale(0.85)'>
        <polygon fill='#fafafa' points='128 0 256 221.705007 0 221.705007' />
      </g>
    </svg>
  )
}
/**
 * Cloudflare brand icon: the two-tone orange cloud drawn as two filled paths
 * (dark-orange body plus a lighter lobe on the right).
 */
export function CloudflareIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 512 512'>
      {/* Main cloud body */}
      <path
        fill='#f38020'
        d='M331 326c11-26-4-38-19-38l-148-2c-4 0-4-6 1-7l150-2c17-1 37-15 43-33 0 0 10-21 9-24a97 97 0 0 0-187-11c-38-25-78 9-69 46-48 3-65 46-60 72 0 1 1 2 3 2h274c1 0 3-1 3-3z'
      />
      {/* Lighter right lobe */}
      <path
        fill='#faae40'
        d='M381 224c-4 0-6-1-7 1l-5 21c-5 16 3 30 20 31l32 2c4 0 4 6-1 7l-33 1c-36 4-46 39-46 39 0 2 0 3 2 3h113l3-2a81 81 0 0 0-78-103'
      />
    </svg>
  )
}

View File

@@ -73,7 +73,6 @@ const DialogContent = React.forwardRef<
}}
{...props}
>
<DialogPrimitive.Title>Dialog</DialogPrimitive.Title>
{children}
{!hideCloseButton && (
<DialogPrimitive.Close

View File

@@ -1,177 +0,0 @@
/**
 * Unit tests for optimistic "insert at top" sort ordering when creating or
 * duplicating folders through the folder query hooks.
 *
 * All collaborators (logger, react-query client, folder/workflow stores) are
 * replaced with hoisted mocks so each hook's onMutate handler can be driven
 * directly, without rendering or a real query client.
 */
import { beforeEach, describe, expect, it, vi } from 'vitest'

// Hoisted so the vi.mock factories below can reference these before the
// mocked modules are imported.
const { mockLogger, queryClient, useFolderStoreMock, useWorkflowRegistryMock } = vi.hoisted(() => ({
  mockLogger: {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  },
  queryClient: {
    cancelQueries: vi.fn().mockResolvedValue(undefined),
    invalidateQueries: vi.fn().mockResolvedValue(undefined),
  },
  useFolderStoreMock: Object.assign(vi.fn(), {
    getState: vi.fn(),
    setState: vi.fn(),
  }),
  useWorkflowRegistryMock: Object.assign(vi.fn(), {
    getState: vi.fn(),
    setState: vi.fn(),
  }),
}))

// Mutable zustand-style state snapshots backing the store mocks; reset in
// beforeEach and mutated through the mocked setState below.
let folderState: {
  folders: Record<string, any>
}
let workflowRegistryState: {
  workflows: Record<string, any>
}

vi.mock('@sim/logger', () => ({
  createLogger: vi.fn(() => mockLogger),
}))

vi.mock('@tanstack/react-query', () => ({
  keepPreviousData: {},
  useQuery: vi.fn(),
  useQueryClient: vi.fn(() => queryClient),
  // Returning the options object lets tests invoke onMutate directly.
  useMutation: vi.fn((options) => options),
}))

vi.mock('@/stores/folders/store', () => ({
  useFolderStore: useFolderStoreMock,
}))

vi.mock('@/stores/workflows/registry/store', () => ({
  useWorkflowRegistry: useWorkflowRegistryMock,
}))

vi.mock('@/hooks/queries/workflows', () => ({
  workflowKeys: {
    list: (workspaceId: string | undefined) => ['workflows', 'list', workspaceId ?? ''],
  },
}))

// Imported after the vi.mock calls so the hooks pick up the mocked modules.
import { useCreateFolder, useDuplicateFolderMutation } from '@/hooks/queries/folders'

// Finds the optimistically inserted folder by display name.
function getOptimisticFolderByName(name: string) {
  return Object.values(folderState.folders).find((folder: any) => folder.name === name) as
    | { sortOrder: number }
    | undefined
}

describe('folder optimistic top insertion ordering', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    useFolderStoreMock.getState.mockImplementation(() => folderState)
    // Emulate zustand setState: accept either an updater fn or a partial.
    useFolderStoreMock.setState.mockImplementation((updater: any) => {
      if (typeof updater === 'function') {
        const next = updater(folderState)
        if (next) {
          folderState = { ...folderState, ...next }
        }
        return
      }
      folderState = { ...folderState, ...updater }
    })
    useWorkflowRegistryMock.getState.mockImplementation(() => workflowRegistryState)
    // Fixture: one folder sibling in parent-1 (sortOrder 5) and one in an
    // unrelated parent-2 scope (sortOrder -100, must be ignored).
    folderState = {
      folders: {
        'folder-parent-match': {
          id: 'folder-parent-match',
          name: 'Existing sibling folder',
          userId: 'user-1',
          workspaceId: 'ws-1',
          parentId: 'parent-1',
          color: '#808080',
          isExpanded: false,
          sortOrder: 5,
          createdAt: new Date(),
          updatedAt: new Date(),
        },
        'folder-other-parent': {
          id: 'folder-other-parent',
          name: 'Other parent folder',
          userId: 'user-1',
          workspaceId: 'ws-1',
          parentId: 'parent-2',
          color: '#808080',
          isExpanded: false,
          sortOrder: -100,
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      },
    }
    // Fixture: workflow siblings mirroring the folder scopes above; the
    // parent-1 workflow (sortOrder 2) is the minimum in the mixed scope.
    workflowRegistryState = {
      workflows: {
        'workflow-parent-match': {
          id: 'workflow-parent-match',
          name: 'Existing sibling workflow',
          workspaceId: 'ws-1',
          folderId: 'parent-1',
          sortOrder: 2,
        },
        'workflow-other-parent': {
          id: 'workflow-other-parent',
          name: 'Other parent workflow',
          workspaceId: 'ws-1',
          folderId: 'parent-2',
          sortOrder: -50,
        },
      },
    }
  })

  it('creates folders at top of mixed non-root siblings', async () => {
    const mutation = useCreateFolder()
    await mutation.onMutate({
      workspaceId: 'ws-1',
      name: 'New child folder',
      parentId: 'parent-1',
    })
    const optimisticFolder = getOptimisticFolderByName('New child folder')
    expect(optimisticFolder).toBeDefined()
    // min sibling order in parent-1 is 2 (workflow), so top insert is 1.
    expect(optimisticFolder?.sortOrder).toBe(1)
  })

  it('duplicates folders at top of mixed non-root siblings', async () => {
    const mutation = useDuplicateFolderMutation()
    await mutation.onMutate({
      workspaceId: 'ws-1',
      id: 'folder-parent-match',
      name: 'Duplicated child folder',
      parentId: 'parent-1',
    })
    const optimisticFolder = getOptimisticFolderByName('Duplicated child folder')
    expect(optimisticFolder).toBeDefined()
    expect(optimisticFolder?.sortOrder).toBe(1)
  })

  it('uses source parent scope when duplicate parentId is undefined', async () => {
    const mutation = useDuplicateFolderMutation()
    await mutation.onMutate({
      workspaceId: 'ws-1',
      id: 'folder-parent-match',
      name: 'Duplicated with inherited parent',
      // parentId intentionally omitted to mirror duplicate fallback behavior
    })
    const optimisticFolder = getOptimisticFolderByName('Duplicated with inherited parent') as
      | { parentId: string | null; sortOrder: number }
      | undefined
    expect(optimisticFolder).toBeDefined()
    expect(optimisticFolder?.parentId).toBe('parent-1')
    expect(optimisticFolder?.sortOrder).toBe(1)
  })
})

View File

@@ -5,11 +5,9 @@ import {
createOptimisticMutationHandlers,
generateTempId,
} from '@/hooks/queries/utils/optimistic-mutation'
import { getTopInsertionSortOrder } from '@/hooks/queries/utils/top-insertion-sort-order'
import { workflowKeys } from '@/hooks/queries/workflows'
import { useFolderStore } from '@/stores/folders/store'
import type { WorkflowFolder } from '@/stores/folders/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
const logger = createLogger('FolderQueries')
@@ -135,35 +133,40 @@ function createFolderMutationHandlers<TVariables extends { workspaceId: string }
})
}
/**
 * Returns the sort order to assign to a folder inserted under `parentId`
 * within `workspaceId`: one past the largest sibling sortOrder, or 0 when
 * the scope has no sibling folders.
 */
function getNextSortOrder(
  folders: Record<string, WorkflowFolder>,
  workspaceId: string,
  parentId: string | null | undefined
): number {
  const scope = parentId || null
  let highest = -1
  for (const folder of Object.values(folders)) {
    if (folder.workspaceId === workspaceId && folder.parentId === scope) {
      highest = Math.max(highest, folder.sortOrder)
    }
  }
  return highest + 1
}
export function useCreateFolder() {
const queryClient = useQueryClient()
const handlers = createFolderMutationHandlers<CreateFolderVariables>(
queryClient,
'CreateFolder',
(variables, tempId, previousFolders) => {
const currentWorkflows = useWorkflowRegistry.getState().workflows
return {
id: tempId,
name: variables.name,
userId: '',
workspaceId: variables.workspaceId,
parentId: variables.parentId || null,
color: variables.color || '#808080',
isExpanded: false,
sortOrder:
variables.sortOrder ??
getTopInsertionSortOrder(
currentWorkflows,
previousFolders,
variables.workspaceId,
variables.parentId
),
createdAt: new Date(),
updatedAt: new Date(),
}
}
(variables, tempId, previousFolders) => ({
id: tempId,
name: variables.name,
userId: '',
workspaceId: variables.workspaceId,
parentId: variables.parentId || null,
color: variables.color || '#808080',
isExpanded: false,
sortOrder:
variables.sortOrder ??
getNextSortOrder(previousFolders, variables.workspaceId, variables.parentId),
createdAt: new Date(),
updatedAt: new Date(),
})
)
return useMutation({
@@ -239,25 +242,17 @@ export function useDuplicateFolderMutation() {
queryClient,
'DuplicateFolder',
(variables, tempId, previousFolders) => {
const currentWorkflows = useWorkflowRegistry.getState().workflows
// Get source folder info if available
const sourceFolder = previousFolders[variables.id]
const targetParentId = variables.parentId ?? sourceFolder?.parentId ?? null
return {
id: tempId,
name: variables.name,
userId: sourceFolder?.userId || '',
workspaceId: variables.workspaceId,
parentId: targetParentId,
parentId: variables.parentId ?? sourceFolder?.parentId ?? null,
color: variables.color || sourceFolder?.color || '#808080',
isExpanded: false,
sortOrder: getTopInsertionSortOrder(
currentWorkflows,
previousFolders,
variables.workspaceId,
targetParentId
),
sortOrder: getNextSortOrder(previousFolders, variables.workspaceId, variables.parentId),
createdAt: new Date(),
updatedAt: new Date(),
}

View File

@@ -1,375 +0,0 @@
/**
* React Query hooks for managing user-defined tables.
*/
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type { Filter, Sort, TableDefinition, TableRow } from '@/lib/table'
/**
 * React Query cache-key factory for user-defined tables.
 * Keys are hierarchical (all → list per workspace → detail per table → rows),
 * so invalidation can target a workspace list, one table, or its row pages.
 */
export const tableKeys = {
  all: ['tables'] as const,
  lists: () => [...tableKeys.all, 'list'] as const,
  list: (workspaceId?: string) => [...tableKeys.lists(), workspaceId ?? ''] as const,
  details: () => [...tableKeys.all, 'detail'] as const,
  detail: (tableId: string) => [...tableKeys.details(), tableId] as const,
  rowsRoot: (tableId: string) => [...tableKeys.detail(tableId), 'rows'] as const,
  // paramsKey is the serialized pagination/filter/sort (see createRowsParamsKey)
  rows: (tableId: string, paramsKey: string) =>
    [...tableKeys.rowsRoot(tableId), paramsKey] as const,
}
/** Arguments for fetching one page of rows from a table. */
interface TableRowsParams {
  workspaceId: string
  tableId: string
  limit: number // page size
  offset: number // zero-based row offset
  filter?: Filter | null
  sort?: Sort | null
}

/** Row page plus total row count, after unwrapping the API envelope. */
interface TableRowsResponse {
  rows: TableRow[]
  totalCount: number
}

/** Workspace/table pair shared by all row-mutation hooks. */
interface RowMutationContext {
  workspaceId: string
  tableId: string
}

/** Payload for updating a single row by id. */
interface UpdateTableRowParams {
  rowId: string
  data: Record<string, unknown>
}

/** Result of a bulk delete: ids that were actually removed. */
interface TableRowsDeleteResult {
  deletedRowIds: string[]
}
/**
 * Serializes pagination/filter/sort params into a stable cache-key string.
 * Nullish filter/sort are normalized to null so equivalent queries share a
 * cache entry regardless of whether the caller passed null or undefined.
 */
function createRowsParamsKey(
  params: Omit<TableRowsParams, 'workspaceId' | 'tableId'>
): string {
  const normalized = {
    limit: params.limit,
    offset: params.offset,
    filter: params.filter ?? null,
    sort: params.sort ?? null,
  }
  return JSON.stringify(normalized)
}
/**
 * Fetches one table definition from the API.
 *
 * On a non-OK response, throws with the server-provided `error` message when
 * the body is JSON, or a generic fallback otherwise. Successful responses may
 * arrive wrapped in a `data` envelope or flat; both shapes are unwrapped.
 */
async function fetchTable(workspaceId: string, tableId: string): Promise<TableDefinition> {
  const response = await fetch(
    `/api/table/${tableId}?workspaceId=${encodeURIComponent(workspaceId)}`
  )
  if (!response.ok) {
    const body = await response.json().catch(() => ({}))
    throw new Error(body.error || 'Failed to fetch table')
  }
  const payload: { data?: { table: TableDefinition }; table?: TableDefinition } =
    await response.json()
  const unwrapped = payload.data || payload
  return (unwrapped as { table: TableDefinition }).table
}
/**
 * Fetches one page of rows for a table.
 *
 * filter/sort are JSON-encoded into query params only when truthy. The
 * response may arrive wrapped in a `data` envelope or flat; both shapes are
 * unwrapped, with missing fields defaulting to an empty page.
 */
async function fetchTableRows({
  workspaceId,
  tableId,
  limit,
  offset,
  filter,
  sort,
}: TableRowsParams): Promise<TableRowsResponse> {
  const searchParams = new URLSearchParams({
    workspaceId,
    limit: String(limit),
    offset: String(offset),
  })
  if (filter) {
    searchParams.set('filter', JSON.stringify(filter))
  }
  if (sort) {
    searchParams.set('sort', JSON.stringify(sort))
  }
  const res = await fetch(`/api/table/${tableId}/rows?${searchParams}`)
  if (!res.ok) {
    // Error body may not be JSON; fall back to a generic message.
    const error = await res.json().catch(() => ({}))
    throw new Error(error.error || 'Failed to fetch rows')
  }
  const json: {
    data?: { rows: TableRow[]; totalCount: number }
    rows?: TableRow[]
    totalCount?: number
  } = await res.json()
  const data = json.data || json
  return {
    rows: (data.rows || []) as TableRow[],
    totalCount: data.totalCount || 0,
  }
}
/**
 * Invalidates every cached query affected by a table mutation: the workspace
 * table list, the table's detail entry, and all of its row pages.
 */
function invalidateTableData(
  queryClient: ReturnType<typeof useQueryClient>,
  workspaceId: string,
  tableId: string
) {
  queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
  queryClient.invalidateQueries({ queryKey: tableKeys.detail(tableId) })
  // rowsRoot matches every paramsKey variant under this table.
  queryClient.invalidateQueries({ queryKey: tableKeys.rowsRoot(tableId) })
}
/**
 * Fetch all tables for a workspace.
 *
 * Disabled until a workspaceId is provided; results are considered fresh for
 * 30 seconds. Throws with the server `error` message on failure.
 */
export function useTablesList(workspaceId?: string) {
  return useQuery({
    queryKey: tableKeys.list(workspaceId),
    queryFn: async () => {
      // Guard for type narrowing; `enabled` below prevents this in practice.
      if (!workspaceId) throw new Error('Workspace ID required')
      const res = await fetch(`/api/table?workspaceId=${encodeURIComponent(workspaceId)}`)
      if (!res.ok) {
        const error = await res.json()
        throw new Error(error.error || 'Failed to fetch tables')
      }
      const response = await res.json()
      return (response.data?.tables || []) as TableDefinition[]
    },
    enabled: Boolean(workspaceId),
    staleTime: 30 * 1000,
  })
}
/**
 * Fetch a single table by id.
 *
 * Disabled until both workspaceId and tableId are provided (the casts below
 * are safe because `enabled` gates execution); fresh for 30 seconds.
 */
export function useTable(workspaceId: string | undefined, tableId: string | undefined) {
  return useQuery({
    queryKey: tableKeys.detail(tableId ?? ''),
    queryFn: () => fetchTable(workspaceId as string, tableId as string),
    enabled: Boolean(workspaceId && tableId),
    staleTime: 30 * 1000,
  })
}
/**
 * Fetch rows for a table with pagination/filter/sort.
 *
 * Uses keepPreviousData so the previous page stays visible while a new page
 * loads. The serialized params form the final query-key segment, so each
 * distinct page/filter/sort combination is cached separately.
 */
export function useTableRows({
  workspaceId,
  tableId,
  limit,
  offset,
  filter,
  sort,
  enabled = true,
}: TableRowsParams & { enabled?: boolean }) {
  const paramsKey = createRowsParamsKey({ limit, offset, filter, sort })
  return useQuery({
    queryKey: tableKeys.rows(tableId, paramsKey),
    queryFn: () =>
      fetchTableRows({
        workspaceId,
        tableId,
        limit,
        offset,
        filter,
        sort,
      }),
    enabled: Boolean(workspaceId && tableId) && enabled,
    placeholderData: keepPreviousData,
  })
}
/**
 * Create a new table in a workspace.
 * Invalidates the workspace table list on success so it refetches.
 */
export function useCreateTable(workspaceId: string) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (params: {
      name: string
      description?: string
      schema: { columns: Array<{ name: string; type: string; required?: boolean }> }
    }) => {
      const res = await fetch('/api/table', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ ...params, workspaceId }),
      })
      if (!res.ok) {
        const error = await res.json()
        throw new Error(error.error || 'Failed to create table')
      }
      return res.json()
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
    },
  })
}
/**
 * Delete a table from a workspace.
 * Invalidates the workspace table list on success so it refetches.
 */
export function useDeleteTable(workspaceId: string) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (tableId: string) => {
      const res = await fetch(
        `/api/table/${tableId}?workspaceId=${encodeURIComponent(workspaceId)}`,
        {
          method: 'DELETE',
        }
      )
      if (!res.ok) {
        const error = await res.json()
        throw new Error(error.error || 'Failed to delete table')
      }
      return res.json()
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: tableKeys.list(workspaceId) })
    },
  })
}
/**
 * Create a row in a table.
 * On success invalidates the table list, detail, and all row pages.
 */
export function useCreateTableRow({ workspaceId, tableId }: RowMutationContext) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (data: Record<string, unknown>) => {
      const res = await fetch(`/api/table/${tableId}/rows`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ workspaceId, data }),
      })
      if (!res.ok) {
        // Error body may not be JSON; fall back to a generic message.
        const error = await res.json().catch(() => ({}))
        throw new Error(error.error || 'Failed to add row')
      }
      return res.json()
    },
    onSuccess: () => {
      invalidateTableData(queryClient, workspaceId, tableId)
    },
  })
}
/**
 * Update a single row in a table (PATCH semantics: only supplied fields).
 * On success invalidates the table list, detail, and all row pages.
 */
export function useUpdateTableRow({ workspaceId, tableId }: RowMutationContext) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async ({ rowId, data }: UpdateTableRowParams) => {
      const res = await fetch(`/api/table/${tableId}/rows/${rowId}`, {
        method: 'PATCH',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ workspaceId, data }),
      })
      if (!res.ok) {
        const error = await res.json().catch(() => ({}))
        throw new Error(error.error || 'Failed to update row')
      }
      return res.json()
    },
    onSuccess: () => {
      invalidateTableData(queryClient, workspaceId, tableId)
    },
  })
}
/**
 * Delete a single row from a table.
 * On success invalidates the table list, detail, and all row pages.
 */
export function useDeleteTableRow({ workspaceId, tableId }: RowMutationContext) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (rowId: string) => {
      const res = await fetch(`/api/table/${tableId}/rows/${rowId}`, {
        method: 'DELETE',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ workspaceId }),
      })
      if (!res.ok) {
        const error = await res.json().catch(() => ({}))
        throw new Error(error.error || 'Failed to delete row')
      }
      return res.json()
    },
    onSuccess: () => {
      invalidateTableData(queryClient, workspaceId, tableId)
    },
  })
}
/**
 * Delete multiple rows from a table.
 * Returns both deleted ids and failure details for partial-failure UI.
 *
 * Throws with a per-count message when the server reports rows it could not
 * find; caches are refreshed via onSettled so even partial deletions are
 * reflected in the UI.
 */
export function useDeleteTableRows({ workspaceId, tableId }: RowMutationContext) {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (rowIds: string[]): Promise<TableRowsDeleteResult> => {
      // De-dupe client-side so the counts reported below are accurate.
      const uniqueRowIds = Array.from(new Set(rowIds))
      const res = await fetch(`/api/table/${tableId}/rows`, {
        method: 'DELETE',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ workspaceId, rowIds: uniqueRowIds }),
      })
      const json: {
        error?: string
        data?: { deletedRowIds?: string[]; missingRowIds?: string[]; requestedCount?: number }
      } = await res.json().catch(() => ({}))
      if (!res.ok) {
        throw new Error(json.error || 'Failed to delete rows')
      }
      const deletedRowIds = json.data?.deletedRowIds || []
      const missingRowIds = json.data?.missingRowIds || []
      if (missingRowIds.length > 0) {
        // Partial failure: surface counts and the first missing id.
        const failureCount = missingRowIds.length
        const totalCount = json.data?.requestedCount ?? uniqueRowIds.length
        const successCount = deletedRowIds.length
        const firstMissing = missingRowIds[0]
        throw new Error(
          `Failed to delete ${failureCount} of ${totalCount} row(s)${successCount > 0 ? ` (${successCount} deleted successfully)` : ''}. Row not found: ${firstMissing}`
        )
      }
      return { deletedRowIds }
    },
    // onSettled (not onSuccess): refresh caches even after partial failure.
    onSettled: () => {
      invalidateTableData(queryClient, workspaceId, tableId)
    },
  })
}

View File

@@ -1,44 +0,0 @@
/** Minimal workflow shape needed for sibling-order computation. */
interface SortableWorkflow {
  workspaceId?: string
  folderId?: string | null
  sortOrder?: number
}

/** Minimal folder shape needed for sibling-order computation. */
interface SortableFolder {
  workspaceId?: string
  parentId?: string | null
  sortOrder: number
}

/**
 * Computes the sort order that places a newly inserted item above every
 * existing folder and workflow sharing the same workspace/parent scope.
 * Returns 0 when the scope has no siblings; otherwise one less than the
 * smallest sibling sort order (missing workflow orders count as 0).
 */
export function getTopInsertionSortOrder(
  workflows: Record<string, SortableWorkflow>,
  folders: Record<string, SortableFolder>,
  workspaceId: string,
  parentId: string | null | undefined
): number {
  const scope = parentId ?? null
  let lowest: number | undefined

  for (const workflow of Object.values(workflows)) {
    if (workflow.workspaceId === workspaceId && (workflow.folderId ?? null) === scope) {
      const order = workflow.sortOrder ?? 0
      lowest = lowest === undefined ? order : Math.min(lowest, order)
    }
  }
  for (const folder of Object.values(folders)) {
    if (folder.workspaceId === workspaceId && (folder.parentId ?? null) === scope) {
      lowest = lowest === undefined ? folder.sortOrder : Math.min(lowest, folder.sortOrder)
    }
  }

  return lowest === undefined ? 0 : lowest - 1
}

View File

@@ -8,8 +8,6 @@ import {
createOptimisticMutationHandlers,
generateTempId,
} from '@/hooks/queries/utils/optimistic-mutation'
import { getTopInsertionSortOrder } from '@/hooks/queries/utils/top-insertion-sort-order'
import { useFolderStore } from '@/stores/folders/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
import { generateCreativeWorkflowName } from '@/stores/workflows/registry/utils'
@@ -225,13 +223,11 @@ export function useCreateWorkflow() {
sortOrder = variables.sortOrder
} else {
const currentWorkflows = useWorkflowRegistry.getState().workflows
const currentFolders = useFolderStore.getState().folders
sortOrder = getTopInsertionSortOrder(
currentWorkflows,
currentFolders,
variables.workspaceId,
variables.folderId
const targetFolderId = variables.folderId || null
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
sortOrder = workflowsInFolder.reduce((min, w) => Math.min(min, w.sortOrder ?? 0), 1) - 1
}
return {
@@ -327,8 +323,11 @@ export function useDuplicateWorkflowMutation() {
'DuplicateWorkflow',
(variables, tempId) => {
const currentWorkflows = useWorkflowRegistry.getState().workflows
const currentFolders = useFolderStore.getState().folders
const targetFolderId = variables.folderId ?? null
const targetFolderId = variables.folderId || null
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
const minSortOrder = workflowsInFolder.reduce((min, w) => Math.min(min, w.sortOrder ?? 0), 1)
return {
id: tempId,
@@ -339,12 +338,7 @@ export function useDuplicateWorkflowMutation() {
color: variables.color,
workspaceId: variables.workspaceId,
folderId: targetFolderId,
sortOrder: getTopInsertionSortOrder(
currentWorkflows,
currentFolders,
variables.workspaceId,
targetFolderId
),
sortOrder: minSortOrder - 1,
}
}
)

View File

@@ -44,7 +44,7 @@ function useAllowedIntegrationsFromEnv() {
*/
function intersectAllowlists(a: string[] | null, b: string[] | null): string[] | null {
if (a === null) return b
if (b === null) return a.map((i) => i.toLowerCase())
if (b === null) return a
return a.map((i) => i.toLowerCase()).filter((i) => b.includes(i))
}

View File

@@ -395,7 +395,6 @@ export const auth = betterAuth({
'google-groups',
'vertex-ai',
'github-repo',
'microsoft-dataverse',
'microsoft-teams',
'microsoft-excel',
'microsoft-planner',
@@ -1154,54 +1153,6 @@ export const auth = betterAuth({
}
},
},
{
providerId: 'microsoft-dataverse',
clientId: env.MICROSOFT_CLIENT_ID as string,
clientSecret: env.MICROSOFT_CLIENT_SECRET as string,
authorizationUrl: 'https://login.microsoftonline.com/common/oauth2/v2.0/authorize',
tokenUrl: 'https://login.microsoftonline.com/common/oauth2/v2.0/token',
userInfoUrl: 'https://graph.microsoft.com/v1.0/me',
scopes: [
'openid',
'profile',
'email',
'https://dynamics.microsoft.com/user_impersonation',
'offline_access',
],
responseType: 'code',
accessType: 'offline',
authentication: 'basic',
pkce: true,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/microsoft-dataverse`,
getUserInfo: async (tokens) => {
// Dataverse access tokens target dynamics.microsoft.com, not graph.microsoft.com,
// so we cannot call the Graph API /me endpoint. Instead, we decode the ID token JWT
// which is always returned when the openid scope is requested.
const idToken = (tokens as Record<string, unknown>).idToken as string | undefined
if (!idToken) {
logger.error(
'Microsoft Dataverse OAuth: no ID token received. Ensure openid scope is requested.'
)
throw new Error('Microsoft Dataverse OAuth requires an ID token (openid scope)')
}
const parts = idToken.split('.')
if (parts.length !== 3) {
throw new Error('Microsoft Dataverse OAuth: malformed ID token')
}
const payload = JSON.parse(Buffer.from(parts[1], 'base64url').toString('utf-8'))
const now = new Date()
return {
id: `${payload.oid || payload.sub}-${crypto.randomUUID()}`,
name: payload.name || 'Microsoft User',
email: payload.preferred_username || payload.email || payload.upn,
emailVerified: true,
createdAt: now,
updatedAt: now,
}
},
},
{
providerId: 'microsoft-planner',
clientId: env.MICROSOFT_CLIENT_ID as string,

View File

@@ -1,4 +1,7 @@
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { authenticateApiKeyFromHeader, updateApiKeyLastUsed } from '@/lib/api-key/service'
import { getSession } from '@/lib/auth'
@@ -16,6 +19,25 @@ export interface AuthResult {
error?: string
}
/**
 * Looks up a user's name and email by ID. Returns empty values on failure
 * so auth is never blocked by a lookup error.
 *
 * @param userId - Primary key of the `user` row to resolve.
 * @returns The user's name/email, or nulls when the row is missing or the
 *   query throws (errors are deliberately swallowed).
 */
async function lookupUserInfo(
  userId: string
): Promise<{ userName: string | null; userEmail: string | null }> {
  try {
    const [row] = await db
      .select({ name: user.name, email: user.email })
      .from(user)
      .where(eq(user.id, userId))
      .limit(1)
    return { userName: row?.name ?? null, userEmail: row?.email ?? null }
  } catch {
    // Best-effort enrichment only: never fail auth over a name/email lookup.
    return { userName: null, userEmail: null }
  }
}
/**
* Resolves userId from a verified internal JWT token.
* Extracts userId from the JWT payload, URL search params, or POST body.
@@ -46,7 +68,8 @@ async function resolveUserFromJwt(
}
if (userId) {
return { success: true, userId, authType: 'internal_jwt' }
const { userName, userEmail } = await lookupUserInfo(userId)
return { success: true, userId, userName, userEmail, authType: 'internal_jwt' }
}
if (options.requireWorkflowId !== false) {
@@ -205,9 +228,12 @@ export async function checkHybridAuth(
const result = await authenticateApiKeyFromHeader(apiKeyHeader)
if (result.success) {
await updateApiKeyLastUsed(result.keyId!)
const { userName, userEmail } = await lookupUserInfo(result.userId!)
return {
success: true,
userId: result.userId!,
userName,
userEmail,
authType: 'api_key',
apiKeyType: result.keyType,
}

View File

@@ -2,7 +2,6 @@ export const MICROSOFT_REFRESH_TOKEN_LIFETIME_DAYS = 90
export const PROACTIVE_REFRESH_THRESHOLD_DAYS = 7
export const MICROSOFT_PROVIDERS = new Set([
'microsoft-dataverse',
'microsoft-excel',
'microsoft-planner',
'microsoft-teams',

View File

@@ -18,7 +18,6 @@ import {
JiraIcon,
LinearIcon,
LinkedInIcon,
MicrosoftDataverseIcon,
MicrosoftExcelIcon,
MicrosoftIcon,
MicrosoftOneDriveIcon,
@@ -155,20 +154,6 @@ export const OAUTH_PROVIDERS: Record<string, OAuthProviderConfig> = {
name: 'Microsoft',
icon: MicrosoftIcon,
services: {
'microsoft-dataverse': {
name: 'Microsoft Dataverse',
description: 'Connect to Microsoft Dataverse and manage records.',
providerId: 'microsoft-dataverse',
icon: MicrosoftDataverseIcon,
baseProviderIcon: MicrosoftIcon,
scopes: [
'openid',
'profile',
'email',
'https://dynamics.microsoft.com/user_impersonation',
'offline_access',
],
},
'microsoft-excel': {
name: 'Microsoft Excel',
description: 'Connect to Microsoft Excel and manage spreadsheets.',

View File

@@ -20,7 +20,6 @@ export type OAuthProvider =
| 'jira'
| 'dropbox'
| 'microsoft'
| 'microsoft-dataverse'
| 'microsoft-excel'
| 'microsoft-planner'
| 'microsoft-teams'
@@ -62,7 +61,6 @@ export type OAuthService =
| 'notion'
| 'jira'
| 'dropbox'
| 'microsoft-dataverse'
| 'microsoft-excel'
| 'microsoft-teams'
| 'microsoft-planner'

Some files were not shown because too many files have changed in this diff Show More