Compare commits


189 Commits

Author SHA1 Message Date
waleed
b03d1f5d58 styling 2026-01-21 20:14:30 -08:00
Vikhyath Mondreti
be757a4f1e adhere to size limits for tables 2026-01-21 17:20:33 -08:00
Vikhyath Mondreti
1938818027 address bugbot concerns 2026-01-21 17:13:58 -08:00
Vikhyath Mondreti
2818b745d1 migrate enrichment logic to general abstraction 2026-01-21 17:08:20 -08:00
Vikhyath Mondreti
2d49de76ea add back missed code 2026-01-21 16:37:28 -08:00
Vikhyath Mondreti
1f682eb343 readd migrations 2026-01-21 16:34:32 -08:00
Vikhyath Mondreti
8d43947eb5 Merge staging into lakees/db
- Resolve merge conflicts in input-format.tsx, workflow-block.tsx, providers/utils.ts
- Fix tests to use blockData/blockNameMapping for tag variable resolution
- Add getBlockOutputs mock to block.test.ts for schema validation tests
- Fix normalizeName import path in utils.test.ts
- Add sql.raw and sql.join to drizzle-orm mock for sql.test.ts
- Add new subBlock types (table-selector, filter-builder, sort-builder) to blocks.test.ts
2026-01-21 16:33:31 -08:00
Vikhyath Mondreti
107679bf41 prepare merge 2026-01-21 16:25:13 -08:00
Waleed
103b31a569 fix(stores): remove dead code causing log spam on startup (#2927)
* fix(stores): remove dead code causing log spam on startup

* fix(stores): replace custom tools zustand store with react query cache
2026-01-21 16:08:26 -08:00
Waleed
004e058353 fix(messages-input): fix cursor alignment and auto-resize with overlay (#2926)
* fix(messages-input): fix cursor alignment and auto-resize with overlay

* fixed remaining zustand warnings
2026-01-21 15:30:13 -08:00
Vikhyath Mondreti
5157f0bbb2 fix(resolver): agent response format, input formats, root level (#2925)
* fix(resolvers): agent response format, input formats, root level

* fix response block initial seeding

* fix tests
2026-01-21 14:55:23 -08:00
Waleed
8bbcf31b83 fix(action-bar): duplicate subflows with children (#2923)
* fix(action-bar): duplicate subflows with children

* fix(action-bar): add validateTriggerPaste for subflow duplicate
2026-01-21 14:54:29 -08:00
Waleed
9e814315dd fix(auth): improve reset password flow and consolidate brand detection (#2924)
* fix(auth): improve reset password flow and consolidate brand detection

* fix(auth): set errorHandled for EMAIL_NOT_VERIFIED to prevent duplicate error

* fix(auth): clear success message on login errors

* chore(auth): fix import order per lint
2026-01-21 14:42:14 -08:00
Waleed
0ea0256623 chore(helm): add env vars for Vertex AI, orgs, and telemetry (#2922) 2026-01-21 11:36:16 -08:00
Waleed
fb8868c854 fix(notifications): text overflow with line-clamp (#2921) 2026-01-21 10:20:21 -08:00
Waleed
ea4964052d fix(logger): use direct env access for webpack inlining (#2920) 2026-01-21 10:14:40 -08:00
Waleed
268e2f114f fix(zustand): updated to useShallow from deprecated createWithEqualityFn (#2919) 2026-01-21 09:47:48 -08:00
Vikhyath Mondreti
5988d0e46f fix(ring): duplicate should clear original block (#2916)
* fix(ring): duplicate should clear original block

* rename correctly
2026-01-21 02:40:58 -08:00
Vikhyath Mondreti
145db9d8c3 fix(http): options not parsed accurately (#2914)
* fix(http): options not parsed accurately

* fix lint

* remove boilerplate code
2026-01-21 01:36:29 -08:00
Emir Karabeg
294b168ed9 feat(broadcast): email v0.5 (#2905) 2026-01-20 23:42:48 -08:00
Waleed
0dc2c1fe0d improvement(logs): improved logs ui bugs, added subflow disable UI (#2910)
* improvement(logs): improved logs ui bugs, added subflow disable UI

* added duplicate to action bar for subflows
2026-01-20 23:13:05 -08:00
Vikhyath Mondreti
fb90c4e9b1 fix(change-detection): copilot diffs have extra field (#2913) 2026-01-20 22:04:08 -08:00
Vikhyath Mondreti
0af96d06c6 fix(a2a): canonical merge (#2912)
* fix canonical merge

* fix empty array case
2026-01-20 21:58:13 -08:00
Vikhyath Mondreti
1d450578c8 fix(copilot): legacy tool display names (#2911) 2026-01-20 21:16:48 -08:00
Waleed
c6d408c65b fix(canvas): removed invite to workspace from canvas popover (#2908)
* fix(canvas): removed invite to workspace

* removed unused props
2026-01-20 20:29:53 -08:00
Waleed
16716ea26a fix(ui): change add inputs button to match output selector (#2907) 2026-01-20 19:24:59 -08:00
Waleed
563098ca0a feat(tools): added textract, added v2 for mistral, updated tag dropdown (#2904)
* feat(tools): added textract

* cleanup

* ack pr comments

* reorder

* removed upload for textract async version

* fix additional fields dropdown in editor, update parser to leave validation to be done on the server

* added mistral v2, files v2, and finalized textract

* updated the rest of the old file patterns, updated mistral outputs for v2

* updated tag dropdown to parse non-operation fields as well

* updated extension finder

* cleanup

* added description for inputs to workflow

* use helper for internal route check

* fix tag dropdown merge conflict change

* remove duplicate code

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
2026-01-20 18:41:26 -08:00
Vikhyath Mondreti
1f1f015031 improvement(files): update execution for passing base64 strings (#2906)
* progress

* improvement(execution): update execution for passing base64 strings

* fix types

* cleanup comments

* path security vuln

* reject promise correctly

* fix redirect case

* remove proxy routes

* fix tests

* use ipaddr
2026-01-20 17:49:00 -08:00
Waleed
4afb245fa2 improvement(executor): upgraded abort controller to handle aborts for loops and parallels (#2880)
* improvement(executor): upgraded abort controller to handle aborts for loops and parallels

* comments
2026-01-20 15:40:37 -08:00
Vikhyath Mondreti
8344d68ca8 improvement(browseruse): add profile id param (#2903)
* improvement(browseruse): add profile id param

* make request a stub since we have directExec
2026-01-20 11:08:47 -08:00
Waleed
a26a1a9737 fix(rss): add top-level title, link, pubDate fields to RSS trigger output (#2902)
* fix(rss): add top-level title, link, pubDate fields to RSS trigger output

* fix(imap): add top-level fields to IMAP trigger output
2026-01-20 10:06:13 -08:00
Vikhyath Mondreti
689037a300 fix(canonical): copilot path + update parent (#2901) 2026-01-20 09:43:41 -08:00
Waleed
07f0c01dc4 fix(google): wrap primitive tool responses for Gemini API compatibility (#2900) 2026-01-20 09:27:45 -08:00
Waleed
e4ad31bb6b fix(kb): align bulk chunk operation with API response (#2899)
* fix(kb): align bulk chunk operation with API response

* fix(kb): skip local state update for failed chunks

* fix(kb): correct errors type and refresh on partial failure
2026-01-20 00:24:50 -08:00
Waleed
84691fc873 improvement(modal): fixed popover issue in custom tools modal, removed the ability to update if no changes made (#2897)
* improvement(modal): fixed popover issue in custom tools modal, removed the ability to update if no changes made

* improvement(modal): fixed popover issue in custom tools modal, removed the ability to update if no changes made

* popover fixes, color picker keyboard nav, code simplification

* color standardization

* fix color picker

* set discard alert state when closing modal
2026-01-19 23:52:07 -08:00
Emir Karabeg
2daf34386e fix(copilot): ui/ux (#2891)
* feat(claude): added rules

* fix(copilot): chat loading; refactor(copilot): components, utils, hooks

* fix(copilot): options selection strikethrough

* fix(copilot): options render inside thinking

* fix(copilot): checkpoints, user-input; improvement(code): colors

* fix(copilot): scrolling, tool-call truncation, thinking ui

* fix(copilot): tool call spacing and shimmer/actions on previous messages

* improvement(copilot): queue

* addressed comments
2026-01-19 23:23:21 -08:00
Waleed
ac991d4b54 fix(sso): removed provider specific OIDC logic from SSO registration & deregistration scripts (#2896)
* fix(sso): updated registration & deregistration script for explicit support for Entra ID

* cleanup

* ack PR comment

* ack PR comment

* tested edge cases, ack'd PR comments

* remove trailing slash
2026-01-19 19:23:50 -08:00
Waleed
69614d2d93 improvement(kb): migrate manual fetches in kb module to use reactquery (#2894)
* improvement(kb): migrate manual fetches in kb module to use reactquery

* converted remaining manual kb fetches

* unwrap kb tags before API call, added more query invalidation for chunks

* added resetMutation calls after modal closes
2026-01-19 17:25:17 -08:00
Waleed
6cbadd7110 feat(api): added workflows api route for dynamic discovery (#2892)
* feat(api): added workflows api route for dynamic discovery

* added ability to edit parameter and workflow descriptions

* added new rate limit category, ack PR comments

* fix hasChanges logic

* added whitespace trimming before hasChanges check
2026-01-19 17:21:51 -08:00
Vikhyath Mondreti
9efd3d5b4c improvement(stats): should track mcp and a2a executions like other trigger types (#2895)
* improvement(stats): should track mcp and a2a executions like other trigger types

* update types
2026-01-19 16:29:37 -08:00
Siddharth Ganesan
e575ba2965 feat(settings): add debug mode for superusers (#2893)
* Superuser debug

* Fix

* update templates routes to use helper

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
2026-01-19 16:28:43 -08:00
Siddharth Ganesan
5f45db4343 improvement(copilot): variables, conditions, router (#2887)
* Temp

* Condition and router copilot syntax updates

* Plan respond plan
2026-01-19 15:24:50 -08:00
Waleed
81cbfe7af4 feat(browseruse): upgraded browseruse endpoints to v2 (#2890) 2026-01-19 14:47:19 -08:00
Waleed
739341b08e improvement(router): add resizable textareas for router conditions (#2888) 2026-01-19 13:59:13 -08:00
Waleed
3c43779ba3 feat(search): added operations to search modal in main app, updated retrieval in docs to use RRF (#2889) 2026-01-19 13:57:56 -08:00
Waleed
1861f77283 feat(terminal): add fix in copilot for errors (#2885) 2026-01-19 13:42:34 -08:00
Vikhyath Mondreti
72c2ba7443 fix(linear): team selector in tool input (#2886) 2026-01-19 12:40:45 -08:00
Waleed
037dad6975 fix(undo-redo): preserve subblock values during undo/redo cycles (#2884)
* fix(undo-redo): preserve subblock values during undo/redo cycles

* added tests
2026-01-19 12:19:51 -08:00
Waleed
408597e12b feat(notifs): added block name to error notifications (#2883) 2026-01-19 09:54:19 -08:00
Waleed
932f8fd654 feat(mcp): updated mcp subblocks for mcp tools to match subblocks (#2882)
* feat(mcp): updated mcp subblocks for mcp tools to match subblocks

* updated trigger descriptions
2026-01-19 09:50:03 -08:00
Waleed
b4c2294e67 improvement(emails): update unsub page, standardize unsub process (#2881) 2026-01-18 20:42:04 -08:00
Vikhyath Mondreti
1dbf92db3f fix(api): tool input parsing into table from agent output (#2879)
* fix(api): transformTable to map agent output to table subblock format

* fix api

* add test
2026-01-18 14:43:02 -08:00
Waleed
3a923648cb feat(ux): more explicit verbiage on some dialog menus, google drive updates, advanced to additional fields, remove general settings store sync in favor of tanstack (#2875)
* fix(verbiage): more explicit verbiage on some dialog menus, google drive updates, advanced to additional fields, remove general settings store sync in favor of tanstack

* updated docs

* nested tag dropdown, more well-defined nested outputs, keyboard nav for context menus, etc

* cleanup

* allow canonical toggle even if dependsOn is not satisfied

* remove smooth scroll in tag drop

* fix selection

* fix

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
2026-01-18 13:40:59 -08:00
Vikhyath Mondreti
5e2468cfd3 improvement(slides): add missing properties definitions (#2877) 2026-01-18 12:35:58 -08:00
Vikhyath Mondreti
7c0f43305b fix(resolver): tool configs must take precedence (#2876) 2026-01-18 10:11:57 -08:00
Waleed
ee7572185a improvement(tools): added visibility for tools that were missing it, added new google and github tools (#2874)
* improvement(tools): added visibility for tools that were missing it, added new google tools

* fixed the name for google forms

* revert schema enrichers change

* fixed block ordering
2026-01-17 20:51:15 -08:00
Waleed
19a8daedf7 improvement(performance): used react scan to identify rerendering issues and react issues (#2873) 2026-01-17 19:20:52 -08:00
Vikhyath Mondreti
0fcd52683a improvement(tool-input): general abstraction to enrich agent context, reuse visibility helpers (#2872)
* add abstraction for schema enrichment, improve agent KB block experience for tags, fix visibility of subblocks

* cleanup code

* consolidate

* fix workflow tool react query

* fix deployed context propagation

* fix tests
2026-01-17 19:13:27 -08:00
Waleed
b8b20576d3 improvement(ui): modal style standardization, select drop improvement, duplication selection fixes (#2871)
* improvement(ui): modal style standardization, select drop improvement

* consolidation, fixed canvas issues

* more
2026-01-17 13:31:46 -08:00
Waleed
4b8534ebd0 feat(oauth): upgraded all generic oauth plugin providers to use unique account ids (#2870) 2026-01-17 13:09:54 -08:00
Lakee Sivaraya
a8e413a999 fix 2026-01-17 13:04:07 -08:00
Lakee Sivaraya
f05f5bbc6d fix 2026-01-17 12:57:37 -08:00
Lakee Sivaraya
87f8fcdbf2 fix 2026-01-17 12:49:36 -08:00
Waleed
f6960a4bd4 fix(wand): improved flickering for invalid JSON icon while streaming (#2868) 2026-01-17 12:43:22 -08:00
Vikhyath Mondreti
8740566f6a fix(block-resolver): path lookup check (#2869)
* fix(block-resolver): path lookup check

* remove comments
2026-01-17 12:17:55 -08:00
Lakee Sivaraya
6e8dc771fe fix 2026-01-17 10:16:45 -08:00
Lakee Sivaraya
d0c3c6aec7 updates 2026-01-17 10:02:52 -08:00
Lakee Sivaraya
8574d66aac uncook 2026-01-17 09:58:48 -08:00
Lakee Sivaraya
e79e9e7367 Merge origin/main into lakees/db
Resolved conflicts:
- workflow-block.tsx: Kept both table types and schedule hooks
- types.ts: Kept both filter-builder/sort-builder and deprecated comment
- icons.tsx (both apps): Kept TableIcon and added ReductoIcon/PulseIcon
- Migration files: Accepted main branch versions
2026-01-17 09:11:51 -08:00
Vikhyath Mondreti
a8bb0db660 v0.5.62: webhook bug fixes, seeding default subblock values, block selection fixes 2026-01-16 20:27:06 -08:00
Waleed
5de7228dd9 improvement(avatar): use selection-update as the source of truth for presence, ignore other socket ops (#2866)
* improvement(avatar): use selection-update as the source of truth for presence, ignore other socket ops

* added logs
2026-01-16 20:17:07 -08:00
Vikhyath Mondreti
75898c69ed fix(start): seed initial subblock values on batch add (#2864) 2026-01-16 20:07:20 -08:00
Vikhyath Mondreti
b14672887b fix(sockets): webhooks logic removal from copilot ops (#2862)
* fix(sockets): dying on deployed webhooks

* fix edit workflow
2026-01-16 19:53:14 -08:00
Waleed
d024c1e489 fix(shift): fix shift select blue ring fading (#2863) 2026-01-16 19:52:51 -08:00
Lakee Sivaraya
4b6de03a62 revert 2026-01-16 18:40:03 -08:00
Lakee Sivaraya
37b50cbce6 dedupe 2026-01-16 18:40:03 -08:00
Lakee Sivaraya
7ca628db13 rename 2026-01-16 18:40:03 -08:00
Lakee Sivaraya
118e4f65f0 updates 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
292cd39cfb docs 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
ea77790484 docs 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
895591514a updates 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
0e1133fc42 fix error handling 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
4357230a9d fix 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
e7f45166af type fix 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
c662a31ac8 db updates 2026-01-16 18:40:02 -08:00
Lakee Sivaraya
51d1b958e2 updates 2026-01-16 18:39:17 -08:00
Lakee Sivaraya
3d81c1cc14 revert 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
94c6795efc updates 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
86c5e1b4ff updates 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
cca1772ae1 simplify 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
e4dd14df7a undo 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
448b8f056c undo changes 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
abb671e61b rename 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
f90c9c7593 undo 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
2e624c20b5 reduced type confusion 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
7093209bce refactor 2026-01-16 18:38:51 -08:00
Lakee Sivaraya
897891ee1e updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
42aa794713 updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
ea72ab5aa9 simplification 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
5173320bb5 clean comments 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
26d96624af comments 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
271375df9b rename 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
a940dd6351 updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
e69500726b rm 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
c94bb5acda updating prompt to make it user sort 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
fef2d2cc82 fix appearance 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
44909964b7 fix sorting 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
1a13762617 updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
cfffd050a2 updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
d00997c5ea updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
466559578e validation 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
0a6312dbac better comments 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
e503408825 renames 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
ed543a71f9 updates 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
7f894ec023 simplify comments 2026-01-16 18:38:50 -08:00
Lakee Sivaraya
57fbd2aa1c fixes 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
80270ce7b2 fix comments 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
fdc3af994c updates 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
5a69d16e65 wand 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
c3afbaebce update db 2026-01-16 18:38:49 -08:00
Lakee Sivaraya
793c888808 undo 2026-01-16 18:37:59 -08:00
Lakee Sivaraya
ffad20efc5 updates 2026-01-16 18:37:59 -08:00
Lakee Sivaraya
b08ce03409 refactoring 2026-01-16 18:37:58 -08:00
Lakee Sivaraya
c9373c7b3e renames & refactors 2026-01-16 18:37:58 -08:00
Lakee Sivaraya
cbb93c65b6 refactoring 2026-01-16 18:37:58 -08:00
Lakee Sivaraya
96a3fe59ff updates 2026-01-16 18:37:57 -08:00
Lakee Sivaraya
df3e869f22 updates 2026-01-16 18:37:57 -08:00
Lakee Sivaraya
b3ca0c947c updates 2026-01-16 18:37:54 -08:00
Lakee Sivaraya
cfbc8d7211 dedupe 2026-01-16 18:37:54 -08:00
Lakee Sivaraya
15bef489f2 updates 2026-01-16 18:37:53 -08:00
Lakee Sivaraya
4422a69a17 revert 2026-01-16 18:37:52 -08:00
Lakee Sivaraya
8f9cf93231 changes 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
22f89cf67d comments 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
dfa018f2d4 updates 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
e287388b03 update comments with ai 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
4d176c0717 breaking down file 2026-01-16 18:37:51 -08:00
Lakee Sivaraya
c155d8ac6c doc strings 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
48250f5ed8 changes 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
fc6dbcf066 updates 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
a537ca7ebe updates 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
c1eef30578 improved errors 2026-01-16 18:37:50 -08:00
Lakee Sivaraya
6605c887ed fix lints 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
a919816bff format 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
8a8589e18d one input mode 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
ed807bebf2 updates 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
48ecb19af7 updates 2026-01-16 18:37:49 -08:00
Lakee Sivaraya
9a3d5631f2 updates 2026-01-16 18:37:47 -08:00
Lakee Sivaraya
0872314fbf filtering ui 2026-01-16 18:37:45 -08:00
Lakee Sivaraya
7e4fc32d82 updates 2026-01-16 18:37:45 -08:00
Lakee Sivaraya
4316f45175 updates 2026-01-16 18:37:45 -08:00
Lakee Sivaraya
e80660f218 trashy table viewer 2026-01-16 18:37:45 -08:00
Lakee Sivaraya
5dddb03eac required 2026-01-16 18:37:44 -08:00
Lakee Sivaraya
6386e6b437 updates 2026-01-16 18:37:44 -08:00
Waleed
d75ea37b3c chore(readme): updated readme (#2861) 2026-01-16 18:18:40 -08:00
Waleed
af82820a28 v0.5.61: webhook improvements, workflow controls, react query for deployment status, chat fixes, reducto and pulse OCR, linear fixes 2026-01-16 18:06:23 -08:00
Vikhyath Mondreti
fd23220cc3 fix(slack): tool params should be in line with block (#2860)
* env var pattern outside loop

* fix(slack): tool params should line up with block

* remove comments
2026-01-16 18:00:44 -08:00
Adam Gough
a8d81097fc fix(google-vault): error handling improvement and more params (#2735)
* new error throw and improvement

* fixed critical issues

* restore error throwing

* restore

* added handler for vault

* updated docs

* restored

* removed google vault from executor

* updated translations

* updated docs

* fixed inputs and outputs

---------

Co-authored-by: aadamgough <adam@sim.ai>
Co-authored-by: waleed <walif6@gmail.com>
2026-01-16 17:59:17 -08:00
Waleed
3768c6379c feat(readme): added deepwiki to readme, consolidated utils (#2856)
* feat(readme): added deepwiki to readme, consolidated utils

* standardized all modals

* updated modal copy

* standardized modals

* streamlined all error msg patterns
2026-01-16 16:07:31 -08:00
Siddharth Ganesan
aa80116b99 fix(copilot): copilot edit router block accepts semantic handles (#2857)
* Fix copilot diff controls

* Fix router block for copilot

* Fix queue

* Fix lint

* Get block options and config for subflows

* Lint
2026-01-16 15:54:28 -08:00
Vikhyath Mondreti
78e4ca9d45 improvement(serializer): canonical subblock, serialization cleanups, schedules/webhooks are deployment version friendly (#2848)
* hide form deployment tab from docs

* progress

* fix resolution

* cleanup code

* fix positioning

* cleanup dead sockets adv mode ops

* address greptile comments

* fix tests plus more simplification

* fix cleanup

* bring back advanced mode with specific definition

* revert feature flags

* improvement(subblock): ui

* resolver change to make all var references optional chaining

* fix(webhooks/schedules): deployment version friendly

* fix tests

* fix credential sets with new lifecycle

* prep merge

* add back migration

* fix display check for adv fields

* fix trigger vs block scoping

---------

Co-authored-by: Emir Karabeg <emirkarabeg@berkeley.edu>
2026-01-16 15:23:43 -08:00
Waleed
ce3ddb6ba0 improvement(deployed-mcp): added the ability to make the visibility for deployed mcp tools public, updated UX (#2853)
* improvement(deployed-mcp): added the ability to make the visibility for deployed mcp tools public, updated UX

* use reactquery

* migrated chats to use reactquery, upgraded entire deploymodal to use reactquery instead of manual state management

* added hooks for chat chats and updated callers to all use reactquery

* fix

* updated comments

* consolidated utils
2026-01-16 14:18:39 -08:00
Siddharth Ganesan
8361931cdf fix(copilot): fix copilot bugs (#2855)
* Fix edit workflow returning bad state

* Fix block id edit, slash commands at end, thinking tag resolution, add continue button

* Clean up autosend and continue options and enable mention menu

* Cleanup

* Fix thinking tags

* Fix thinking text

* Fix get block options text

* Fix bugs

* Fix redeploy

* Fix loading indicators

* User input expansion

* Normalize copilot subblock ids

* Fix handlecancelcheckpoint
2026-01-16 13:57:55 -08:00
Waleed
4372841797 v0.5.60: invitation flow improvements, chat fixes, a2a improvements, additional copilot actions 2026-01-15 00:02:18 -08:00
Waleed
5e8c843241 v0.5.59: a2a support, documentation 2026-01-13 13:21:21 -08:00
Waleed
7bf3d73ee6 v0.5.58: export folders, new tools, permissions groups enhancements 2026-01-13 00:56:59 -08:00
Vikhyath Mondreti
7ffc11a738 v0.5.57: subagents, context menu improvements, bug fixes 2026-01-11 11:38:40 -08:00
Waleed
be578e2ed7 v0.5.56: batch operations, access control and permission groups, billing fixes 2026-01-10 00:31:34 -08:00
Waleed
f415e5edc4 v0.5.55: polling groups, bedrock provider, devcontainer fixes, workflow preview enhancements 2026-01-08 23:36:56 -08:00
Waleed
13a6e6c3fa v0.5.54: seo, model blacklist, helm chart updates, fireflies integration, autoconnect improvements, billing fixes 2026-01-07 16:09:45 -08:00
Waleed
f5ab7f21ae v0.5.53: hotkey improvements, added redis fallback, fixes for workflow tool 2026-01-06 23:34:52 -08:00
Waleed
bfb6fffe38 v0.5.52: new port-based router block, combobox expression and variable support 2026-01-06 16:14:10 -08:00
Waleed
4fbec0a43f v0.5.51: triggers, kb, condition block improvements, supabase and grain integration updates 2026-01-06 14:26:46 -08:00
Waleed
585f5e365b v0.5.50: import improvements, ui upgrades, kb styling and performance improvements 2026-01-05 00:35:55 -08:00
Waleed
3792bdd252 v0.5.49: hitl improvements, new email styles, imap trigger, logs context menu (#2672)
* feat(logs-context-menu): consolidated logs utils and types, added logs record context menu (#2659)

* feat(email): welcome email; improvement(emails): ui/ux (#2658)

* feat(email): welcome email; improvement(emails): ui/ux

* improvement(emails): links, accounts, preview

* refactor(emails): file structure and wrapper components

* added envvar for personal emails sent, added isHosted gate

* fixed failing tests, added env mock

* fix: removed comment

---------

Co-authored-by: waleed <walif6@gmail.com>

* fix(logging): hitl + trigger dev crash protection (#2664)

* hitl gaps

* deal with trigger worker crashes

* cleanup import structure

* feat(imap): added support for imap trigger (#2663)

* feat(tools): added support for imap trigger

* feat(imap): added parity, tested

* ack PR comments

* final cleanup

* feat(i18n): update translations (#2665)

Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* fix(grain): updated grain trigger to auto-establish trigger (#2666)

Co-authored-by: aadamgough <adam@sim.ai>

* feat(admin): routes to manage deployments (#2667)

* feat(admin): routes to manage deployments

* fix naming of deployed by

* feat(time-picker): added timepicker emcn component, added to playground, added searchable prop for dropdown, added more timezones for schedule, updated license and notice date (#2668)

* feat(time-picker): added timepicker emcn component, added to playground, added searchable prop for dropdown, added more timezones for schedule, updated license and notice date

* removed unused params, cleaned up redundant utils

* improvement(invite): aligned styling (#2669)

* improvement(invite): aligned with rest of app

* fix(invite): error handling

* fix: addressed comments

---------

Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>
Co-authored-by: Adam Gough <77861281+aadamgough@users.noreply.github.com>
Co-authored-by: aadamgough <adam@sim.ai>
2026-01-03 13:19:18 -08:00
Waleed
eb5d1f3e5b v0.5.48: copy-paste workflow blocks, docs updates, mcp tool fixes 2025-12-31 18:00:04 -08:00
Waleed
54ab82c8dd v0.5.47: deploy workflow as mcp, kb chunks tokenizer, UI improvements, jira service management tools 2025-12-30 23:18:58 -08:00
Waleed
f895bf469b v0.5.46: build improvements, greptile, light mode improvements 2025-12-29 02:17:52 -08:00
Waleed
dd3209af06 v0.5.45: light mode fixes, realtime usage indicator, docker build improvements 2025-12-27 19:57:42 -08:00
Waleed
b6ba3b50a7 v0.5.44: keyboard shortcuts, autolayout, light mode, byok, testing improvements 2025-12-26 21:25:19 -08:00
Waleed
b304233062 v0.5.43: export logs, circleback, grain, vertex, code hygiene, schedule improvements 2025-12-23 19:19:18 -08:00
Vikhyath Mondreti
57e4b49bd6 v0.5.42: fix memory migration 2025-12-23 01:24:54 -08:00
Vikhyath Mondreti
e12dd204ed v0.5.41: memory fixes, copilot improvements, knowledgebase improvements, LLM providers standardization 2025-12-23 00:15:18 -08:00
Vikhyath Mondreti
3d9d9cbc54 v0.5.40: supabase ops to allow non-public schemas, jira uuid 2025-12-21 22:28:05 -08:00
Waleed
0f4ec962ad v0.5.39: notion, workflow variables fixes 2025-12-20 20:44:00 -08:00
Waleed
4827866f9a v0.5.38: snap to grid, copilot ux improvements, billing line items 2025-12-20 17:24:38 -08:00
Waleed
3e697d9ed9 v0.5.37: redaction utils consolidation, logs updates, autoconnect improvements, additional kb tag types 2025-12-19 22:31:55 -08:00
Martin Yankov
4431a1a484 fix(helm): add custom egress rules to realtime network policy (#2481)
The realtime service network policy was missing the custom egress rules section
that allows configuration of additional egress rules via values.yaml. This caused
the realtime pods to be unable to connect to external databases (e.g., PostgreSQL
on port 5432) when using external database configurations.

The app network policy already had this section, but the realtime network policy
was missing it, creating an inconsistency and preventing the realtime service
from accessing external databases configured via networkPolicy.egress values.

This fix adds the same custom egress rules template section to the realtime
network policy, matching the app network policy behavior and allowing users to
configure database connectivity via values.yaml.
2025-12-19 18:59:08 -08:00
Waleed
4d1a9a3f22 v0.5.36: hitl improvements, opengraph, slack fixes, one-click unsubscribe, auth checks, new db indexes 2025-12-19 01:27:49 -08:00
Vikhyath Mondreti
eb07a080fb v0.5.35: helm updates, copilot improvements, 404 for docs, salesforce fixes, subflow resize clamping 2025-12-18 16:23:19 -08:00
922 changed files with 101801 additions and 15476 deletions


@@ -0,0 +1,35 @@
---
paths:
- "apps/sim/components/emcn/**"
---
# EMCN Components
Import from `@/components/emcn`, never from subpaths (except CSS files).
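For example (the `Button` subpath below is illustrative):
```tsx
// ✓ Good - import from the barrel
import { Button } from '@/components/emcn'
// ✗ Bad - subpath import (only CSS files may be imported from subpaths)
import { Button } from '@/components/emcn/button/button'
```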
## CVA vs Direct Styles
**Use CVA when:** 2+ variants (primary/secondary, sm/md/lg)
```tsx
const buttonVariants = cva('base-classes', {
variants: { variant: { default: '...', primary: '...' } }
})
export { Button, buttonVariants }
```
**Use direct className when:** Single consistent style, no variations
```tsx
function Label({ className, ...props }) {
return <Primitive className={cn('style-classes', className)} {...props} />
}
```
## Rules
- Use Radix UI primitives for accessibility
- Export component and variants (if using CVA)
- TSDoc with usage examples
- Consistent tokens: `font-medium`, `text-[12px]`, `rounded-[4px]`
- `transition-colors` for hover states

.claude/rules/global.md Normal file

@@ -0,0 +1,13 @@
# Global Standards
## Logging
Import `createLogger` from `@sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`.
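A minimal usage sketch (the logger name and the structured-context second argument are assumptions for illustration):
```typescript
import { createLogger } from '@sim/logger'

// 'Feature' is a hypothetical logger name
const logger = createLogger('Feature')
logger.info('sync started')
logger.error('sync failed', { cause: 'timeout' }) // assumes structured context is supported
```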
## Comments
Use TSDoc for documentation. No `====` separators. No non-TSDoc comments.
## Styling
Never update global styles. Keep all styling local to components.
## Package Manager
Use `bun` and `bunx`, not `npm` and `npx`.


@@ -0,0 +1,56 @@
---
paths:
- "apps/sim/**"
---
# Sim App Architecture
## Core Principles
1. **Single Responsibility**: Each component, hook, store has one clear purpose
2. **Composition Over Complexity**: Break down complex logic into smaller pieces
3. **Type Safety First**: TypeScript interfaces for all props, state, return types
4. **Predictable State**: Zustand for global state, useState for UI-only concerns
## Root-Level Structure
```
apps/sim/
├── app/ # Next.js app router (pages, API routes)
├── blocks/ # Block definitions and registry
├── components/ # Shared UI (emcn/, ui/)
├── executor/ # Workflow execution engine
├── hooks/ # Shared hooks (queries/, selectors/)
├── lib/ # App-wide utilities
├── providers/ # LLM provider integrations
├── stores/ # Zustand stores
├── tools/ # Tool definitions
└── triggers/ # Trigger definitions
```
## Feature Organization
Features live under `app/workspace/[workspaceId]/`:
```
feature/
├── components/ # Feature components
├── hooks/ # Feature-scoped hooks
├── utils/ # Feature-scoped utilities (2+ consumers)
├── feature.tsx # Main component
└── page.tsx # Next.js page entry
```
## Naming Conventions
- **Components**: PascalCase (`WorkflowList`)
- **Hooks**: `use` prefix (`useWorkflowOperations`)
- **Files**: kebab-case (`workflow-list.tsx`)
- **Stores**: `stores/feature/store.ts`
- **Constants**: SCREAMING_SNAKE_CASE
- **Interfaces**: PascalCase with suffix (`WorkflowListProps`)
## Utils Rules
- **Never create `utils.ts` for single consumer** - inline it
- **Create `utils.ts` when** 2+ files need the same helper
- **Check existing sources** before duplicating (`lib/` has many utilities)
- **Location**: `lib/` (app-wide) → `feature/utils/` (feature-scoped) → inline (single-use)


@@ -0,0 +1,48 @@
---
paths:
- "apps/sim/**/*.tsx"
---
# Component Patterns
## Structure Order
```typescript
'use client' // Only if using hooks
// Imports (external → internal)
// Constants at module level
const CONFIG = { SPACING: 8 } as const
// Props interface
interface ComponentProps {
requiredProp: string
optionalProp?: boolean
}
export function Component({ requiredProp, optionalProp = false }: ComponentProps) {
// a. Refs
// b. External hooks (useParams, useRouter)
// c. Store hooks
// d. Custom hooks
// e. Local state
// f. useMemo
// g. useCallback
// h. useEffect
// i. Return JSX
}
```
## Rules
1. `'use client'` only when using React hooks
2. Always define props interface
3. Extract constants with `as const`
4. Semantic HTML (`aside`, `nav`, `article`)
5. Optional chain callbacks: `onAction?.(id)`
## Component Extraction
**Extract when:** 50+ lines, used in 2+ files, or has own state/logic
**Keep inline when:** < 10 lines, single use, purely presentational


@@ -0,0 +1,55 @@
---
paths:
- "apps/sim/**/use-*.ts"
- "apps/sim/**/hooks/**/*.ts"
---
# Hook Patterns
## Structure
```typescript
interface UseFeatureProps {
id: string
onSuccess?: (result: Result) => void
}
export function useFeature({ id, onSuccess }: UseFeatureProps) {
// 1. Refs for stable dependencies
const idRef = useRef(id)
const onSuccessRef = useRef(onSuccess)
// 2. State
const [data, setData] = useState<Data | null>(null)
const [isLoading, setIsLoading] = useState(false)
// 3. Sync refs
useEffect(() => {
idRef.current = id
onSuccessRef.current = onSuccess
}, [id, onSuccess])
// 4. Operations (useCallback with empty deps when using refs)
const fetchData = useCallback(async () => {
setIsLoading(true)
try {
const result = await fetch(`/api/${idRef.current}`).then(r => r.json())
setData(result)
onSuccessRef.current?.(result)
} finally {
setIsLoading(false)
}
}, [])
return { data, isLoading, fetchData }
}
```
## Rules
1. Single responsibility per hook
2. Props interface required
3. Refs for stable callback dependencies
4. Wrap returned functions in useCallback
5. Always try/catch async operations
6. Track loading/error states


@@ -0,0 +1,62 @@
---
paths:
- "apps/sim/**/*.ts"
- "apps/sim/**/*.tsx"
---
# Import Patterns
## Absolute Imports
**Always use absolute imports.** Never use relative imports.
```typescript
// ✓ Good
import { useWorkflowStore } from '@/stores/workflows/store'
import { Button } from '@/components/ui/button'
// ✗ Bad
import { useWorkflowStore } from '../../../stores/workflows/store'
```
## Barrel Exports
Use barrel exports (`index.ts`) when a folder has 3+ exports. Import from barrel, not individual files.
```typescript
// ✓ Good
import { Dashboard, Sidebar } from '@/app/workspace/[workspaceId]/logs/components'
// ✗ Bad
import { Dashboard } from '@/app/workspace/[workspaceId]/logs/components/dashboard/dashboard'
```
## No Re-exports
Do not re-export from non-barrel files. Import directly from the source.
```typescript
// ✓ Good - import from where it's declared
import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types'
// ✗ Bad - re-exporting in utils.ts then importing from there
import { CORE_TRIGGER_TYPES } from '@/app/workspace/.../utils'
```
## Import Order
1. React/core libraries
2. External libraries
3. UI components (`@/components/emcn`, `@/components/ui`)
4. Utilities (`@/lib/...`)
5. Stores (`@/stores/...`)
6. Feature imports
7. CSS imports
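Putting the order together (paths are drawn from the examples in this guide; the CSS path is hypothetical):
```typescript
import { useMemo } from 'react' // 1. React/core
import { useQuery } from '@tanstack/react-query' // 2. External libraries
import { Button } from '@/components/ui/button' // 3. UI components
import { cn } from '@/lib/utils' // 4. Utilities
import { useWorkflowStore } from '@/stores/workflows/store' // 5. Stores
import { Dashboard } from '@/app/workspace/[workspaceId]/logs/components' // 6. Feature imports
import '@/components/emcn/styles.css' // 7. CSS imports (hypothetical path)
```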
## Type Imports
Use `type` keyword for type-only imports:
```typescript
import type { WorkflowLog } from '@/stores/logs/types'
```


@@ -0,0 +1,209 @@
---
paths:
- "apps/sim/tools/**"
- "apps/sim/blocks/**"
- "apps/sim/triggers/**"
---
# Adding Integrations
## Overview
Adding a new integration typically requires:
1. **Tools** - API operations (`tools/{service}/`)
2. **Block** - UI component (`blocks/blocks/{service}.ts`)
3. **Icon** - SVG icon (`components/icons.tsx`)
4. **Trigger** (optional) - Webhooks/polling (`triggers/{service}/`)
Always look up the service's API docs first.
## 1. Tools (`tools/{service}/`)
```
tools/{service}/
├── index.ts # Export all tools
├── types.ts # Params/response types
├── {action}.ts # Individual tool (e.g., send_message.ts)
└── ...
```
**Tool file structure:**
```typescript
// tools/{service}/{action}.ts
import type { {Service}Params, {Service}Response } from '@/tools/{service}/types'
import type { ToolConfig } from '@/tools/types'
export const {service}{Action}Tool: ToolConfig<{Service}Params, {Service}Response> = {
id: '{service}_{action}',
name: '{Service} {Action}',
description: 'What this tool does',
version: '1.0.0',
oauth: { required: true, provider: '{service}' }, // if OAuth
params: { /* param definitions */ },
request: {
url: '/api/tools/{service}/{action}',
method: 'POST',
headers: () => ({ 'Content-Type': 'application/json' }),
body: (params) => ({ ...params }),
},
transformResponse: async (response) => {
const data = await response.json()
if (!data.success) throw new Error(data.error)
return { success: true, output: data.output }
},
outputs: { /* output definitions */ },
}
```
**Register in `tools/registry.ts`:**
```typescript
import { {service}{Action}Tool } from '@/tools/{service}'
// Add to registry object
{service}_{action}: {service}{Action}Tool,
```
## 2. Block (`blocks/blocks/{service}.ts`)
```typescript
import { {Service}Icon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { {Service}Response } from '@/tools/{service}/types'
export const {Service}Block: BlockConfig<{Service}Response> = {
type: '{service}',
name: '{Service}',
description: 'Short description',
longDescription: 'Detailed description',
category: 'tools',
bgColor: '#hexcolor',
icon: {Service}Icon,
subBlocks: [ /* see SubBlock Properties below */ ],
tools: {
access: ['{service}_{action}', ...],
config: {
tool: (params) => `{service}_${params.operation}`,
params: (params) => ({ ...params }),
},
},
inputs: { /* input definitions */ },
outputs: { /* output definitions */ },
}
```
### SubBlock Properties
```typescript
{
id: 'fieldName', // Unique identifier
title: 'Field Label', // UI label
type: 'short-input', // See SubBlock Types below
placeholder: 'Hint text',
required: true, // See Required below
condition: { ... }, // See Condition below
dependsOn: ['otherField'], // See DependsOn below
mode: 'basic', // 'basic' | 'advanced' | 'both' | 'trigger'
}
```
**SubBlock Types:** `short-input`, `long-input`, `dropdown`, `code`, `switch`, `slider`, `oauth-input`, `channel-selector`, `user-selector`, `file-upload`, etc.
### `condition` - Show/hide based on another field
```typescript
// Show when operation === 'send'
condition: { field: 'operation', value: 'send' }
// Show when operation is 'send' OR 'read'
condition: { field: 'operation', value: ['send', 'read'] }
// Show when operation !== 'send'
condition: { field: 'operation', value: 'send', not: true }
// Complex: NOT in list AND another condition
condition: {
field: 'operation',
value: ['list_channels', 'list_users'],
not: true,
and: { field: 'destinationType', value: 'dm', not: true }
}
```
### `required` - Field validation
```typescript
// Always required
required: true
// Conditionally required (same syntax as condition)
required: { field: 'operation', value: 'send' }
```
### `dependsOn` - Clear field when dependencies change
```typescript
// Clear when credential changes
dependsOn: ['credential']
// Clear when authMethod changes AND (credential OR botToken) changes
dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] }
```
### `mode` - When to show field
- `'basic'` - Only in basic mode (default UI)
- `'advanced'` - Only in advanced mode (manual input)
- `'both'` - Show in both modes (default)
- `'trigger'` - Only when block is used as trigger
**Register in `blocks/registry.ts`:**
```typescript
import { {Service}Block } from '@/blocks/blocks/{service}'
// Add to registry object (alphabetically)
{service}: {Service}Block,
```
## 3. Icon (`components/icons.tsx`)
```typescript
export function {Service}Icon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
{/* SVG path from service's brand assets */}
</svg>
)
}
```
## 4. Trigger (`triggers/{service}/`) - Optional
```
triggers/{service}/
├── index.ts # Export all triggers
├── webhook.ts # Webhook handler
├── utils.ts # Shared utilities
└── {event}.ts # Specific event handlers
```
**Register in `triggers/registry.ts`:**
```typescript
import { {service}WebhookTrigger } from '@/triggers/{service}'
// Add to TRIGGER_REGISTRY
{service}_webhook: {service}WebhookTrigger,
```
## Checklist
- [ ] Look up API docs for the service
- [ ] Create `tools/{service}/types.ts` with proper types
- [ ] Create tool files for each operation
- [ ] Create `tools/{service}/index.ts` barrel export
- [ ] Register tools in `tools/registry.ts`
- [ ] Add icon to `components/icons.tsx`
- [ ] Create block in `blocks/blocks/{service}.ts`
- [ ] Register block in `blocks/registry.ts`
- [ ] (Optional) Create triggers in `triggers/{service}/`
- [ ] (Optional) Register triggers in `triggers/registry.ts`


@@ -0,0 +1,66 @@
---
paths:
- "apps/sim/hooks/queries/**/*.ts"
---
# React Query Patterns
All React Query hooks live in `hooks/queries/`.
## Query Key Factory
Every query file defines a keys factory:
```typescript
export const entityKeys = {
all: ['entity'] as const,
list: (workspaceId?: string) => [...entityKeys.all, 'list', workspaceId ?? ''] as const,
detail: (id?: string) => [...entityKeys.all, 'detail', id ?? ''] as const,
}
```
## File Structure
```typescript
// 1. Query keys factory
// 2. Types (if needed)
// 3. Private fetch functions
// 4. Exported hooks
```
## Query Hook
```typescript
export function useEntityList(workspaceId?: string, options?: { enabled?: boolean }) {
return useQuery({
queryKey: entityKeys.list(workspaceId),
queryFn: () => fetchEntities(workspaceId as string),
enabled: Boolean(workspaceId) && (options?.enabled ?? true),
staleTime: 60 * 1000,
placeholderData: keepPreviousData,
})
}
```
## Mutation Hook
```typescript
export function useCreateEntity() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (variables) => { /* fetch POST */ },
onSuccess: () => queryClient.invalidateQueries({ queryKey: entityKeys.all }),
})
}
```
## Optimistic Updates
For optimistic mutations syncing with Zustand, use `createOptimisticMutationHandlers` from `@/hooks/queries/utils/optimistic-mutation`.
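The helper's signature isn't shown here; as a generic sketch of the shape it presumably encapsulates, a hand-rolled optimistic update with plain React Query callbacks (reusing the `entityKeys` factory above; the endpoint is a placeholder) looks like this:
```typescript
import { useMutation, useQueryClient } from '@tanstack/react-query'

export function useRenameEntity() {
  const queryClient = useQueryClient()
  return useMutation({
    mutationFn: async (vars: { id: string; name: string }) => {
      const res = await fetch(`/api/entity/${vars.id}`, {
        method: 'PATCH',
        body: JSON.stringify({ name: vars.name }),
      })
      return res.json()
    },
    // Snapshot the cache, apply the change optimistically, return the snapshot for rollback
    onMutate: async (vars) => {
      await queryClient.cancelQueries({ queryKey: entityKeys.detail(vars.id) })
      const previous = queryClient.getQueryData(entityKeys.detail(vars.id))
      queryClient.setQueryData(entityKeys.detail(vars.id), (old: { name?: string } | undefined) => ({
        ...old,
        name: vars.name,
      }))
      return { previous }
    },
    // Roll back on failure; refetch either way
    onError: (_err, vars, ctx) => queryClient.setQueryData(entityKeys.detail(vars.id), ctx?.previous),
    onSettled: (_data, _err, vars) =>
      queryClient.invalidateQueries({ queryKey: entityKeys.detail(vars.id) }),
  })
}
```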
## Naming
- **Keys**: `entityKeys`
- **Query hooks**: `useEntity`, `useEntityList`
- **Mutation hooks**: `useCreateEntity`, `useUpdateEntity`
- **Fetch functions**: `fetchEntity` (private)


@@ -0,0 +1,71 @@
---
paths:
- "apps/sim/**/store.ts"
- "apps/sim/**/stores/**/*.ts"
---
# Zustand Store Patterns
Stores live in `stores/`. Complex stores split into `store.ts` + `types.ts`.
## Basic Store
```typescript
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import type { FeatureState } from '@/stores/feature/types'
const initialState = { items: [] as Item[], activeId: null as string | null }
export const useFeatureStore = create<FeatureState>()(
devtools(
(set, get) => ({
...initialState,
setItems: (items) => set({ items }),
addItem: (item) => set((state) => ({ items: [...state.items, item] })),
reset: () => set(initialState),
}),
{ name: 'feature-store' }
)
)
```
## Persisted Store
```typescript
import { create } from 'zustand'
import { persist } from 'zustand/middleware'
export const useFeatureStore = create<FeatureState>()(
persist(
(set) => ({
width: 300,
setWidth: (width) => set({ width }),
_hasHydrated: false,
setHasHydrated: (v) => set({ _hasHydrated: v }),
}),
{
name: 'feature-state',
partialize: (state) => ({ width: state.width }),
onRehydrateStorage: () => (state) => state?.setHasHydrated(true),
}
)
)
```
## Rules
1. Use `devtools` middleware (named stores)
2. Use `persist` only when data should survive reload
3. `partialize` to persist only necessary state
4. `_hasHydrated` pattern for persisted stores needing hydration tracking
5. Immutable updates only
6. `set((state) => ...)` when depending on previous state
7. Provide `reset()` action
## Outside React
```typescript
const items = useFeatureStore.getState().items
useFeatureStore.setState({ items: newItems })
```


@@ -0,0 +1,41 @@
---
paths:
- "apps/sim/**/*.tsx"
- "apps/sim/**/*.css"
---
# Styling Rules
## Tailwind
1. **No inline styles** - Use Tailwind classes
2. **No duplicate dark classes** - Skip `dark:` when value matches light mode
3. **Exact values** - `text-[14px]`, `h-[26px]`
4. **Transitions** - `transition-colors` for interactive states
## Conditional Classes
```typescript
import { cn } from '@/lib/utils'
<div className={cn(
'base-classes',
isActive && 'active-classes',
disabled ? 'opacity-60' : 'hover:bg-accent'
)} />
```
## CSS Variables
For dynamic values (widths, heights) synced with stores:
```typescript
// In store
setWidth: (width) => {
set({ width })
document.documentElement.style.setProperty('--sidebar-width', `${width}px`)
}
// In component
<aside style={{ width: 'var(--sidebar-width)' }} />
```


@@ -0,0 +1,58 @@
---
paths:
- "apps/sim/**/*.test.ts"
- "apps/sim/**/*.test.tsx"
---
# Testing Patterns
Use Vitest. Test files: `feature.ts` → `feature.test.ts`
## Structure
```typescript
/**
* @vitest-environment node
*/
import { databaseMock, loggerMock } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('@sim/db', () => databaseMock)
vi.mock('@sim/logger', () => loggerMock)
import { myFunction } from '@/lib/feature'
describe('myFunction', () => {
beforeEach(() => vi.clearAllMocks())
it.concurrent('isolated tests run in parallel', () => { ... })
})
```
## @sim/testing Package
Always prefer over local mocks.
| Category | Utilities |
|----------|-----------|
| **Mocks** | `loggerMock`, `databaseMock`, `setupGlobalFetchMock()` |
| **Factories** | `createSession()`, `createWorkflowRecord()`, `createBlock()`, `createExecutorContext()` |
| **Builders** | `WorkflowBuilder`, `ExecutionContextBuilder` |
| **Assertions** | `expectWorkflowAccessGranted()`, `expectBlockExecuted()` |
## Rules
1. `@vitest-environment node` directive at file top
2. `vi.mock()` calls before importing mocked modules
3. `@sim/testing` utilities over local mocks
4. `it.concurrent` for isolated tests (no shared mutable state)
5. `beforeEach(() => vi.clearAllMocks())` to reset state
## Hoisted Mocks
For mutable mock references:
```typescript
const mockFn = vi.hoisted(() => vi.fn())
vi.mock('@/lib/module', () => ({ myFunction: mockFn }))
mockFn.mockResolvedValue({ data: 'test' })
```


@@ -0,0 +1,21 @@
---
paths:
- "apps/sim/**/*.ts"
- "apps/sim/**/*.tsx"
---
# TypeScript Rules
1. **No `any`** - Use proper types or `unknown` with type guards
2. **Props interface** - Always define for components
3. **Const assertions** - `as const` for constant objects/arrays
4. **Ref types** - Explicit: `useRef<HTMLDivElement>(null)`
5. **Type imports** - `import type { X }` for type-only imports
```typescript
// ✗ Bad
const handleClick = (e: any) => {}
// ✓ Good
const handleClick = (e: React.MouseEvent<HTMLButtonElement>) => {}
```


@@ -8,7 +8,7 @@ alwaysApply: true
You are a professional software engineer. All code must follow best practices: accurate, readable, clean, and efficient.
## Logging
Import `createLogger` from `sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`.
Import `createLogger` from `@sim/logger`. Use `logger.info`, `logger.warn`, `logger.error` instead of `console.log`.
## Comments
Use TSDoc for documentation. No `====` separators. No non-TSDoc comments.


@@ -9,12 +9,12 @@
<p align="center">
<a href="https://sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/sim.ai-6F3DFA" alt="Sim.ai"></a>
<a href="https://discord.gg/Hr4UWYEcTT" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Discord-Join%20Server-5865F2?logo=discord&logoColor=white" alt="Discord"></a>
<a href="https://x.com/simdotai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/twitter/follow/simstudioai?style=social" alt="Twitter"></a>
<a href="https://x.com/simdotai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/twitter/follow/simdotai?style=social" alt="Twitter"></a>
<a href="https://docs.sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Docs-6F3DFA.svg" alt="Documentation"></a>
</p>
<p align="center">
<a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20Studio%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
<a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a> <a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
</p>
### Build Workflows with Ease


@@ -86,27 +86,112 @@ export async function GET(request: NextRequest) {
)
.limit(candidateLimit)
const seenIds = new Set<string>()
const mergedResults = []
const knownLocales = ['en', 'es', 'fr', 'de', 'ja', 'zh']
for (let i = 0; i < Math.max(vectorResults.length, keywordResults.length); i++) {
if (i < vectorResults.length && !seenIds.has(vectorResults[i].chunkId)) {
mergedResults.push(vectorResults[i])
seenIds.add(vectorResults[i].chunkId)
}
if (i < keywordResults.length && !seenIds.has(keywordResults[i].chunkId)) {
mergedResults.push(keywordResults[i])
seenIds.add(keywordResults[i].chunkId)
const vectorRankMap = new Map<string, number>()
vectorResults.forEach((r, idx) => vectorRankMap.set(r.chunkId, idx + 1))
const keywordRankMap = new Map<string, number>()
keywordResults.forEach((r, idx) => keywordRankMap.set(r.chunkId, idx + 1))
const allChunkIds = new Set([
...vectorResults.map((r) => r.chunkId),
...keywordResults.map((r) => r.chunkId),
])
const k = 60
type ResultWithRRF = (typeof vectorResults)[0] & { rrfScore: number }
const scoredResults: ResultWithRRF[] = []
for (const chunkId of allChunkIds) {
const vectorRank = vectorRankMap.get(chunkId) ?? Number.POSITIVE_INFINITY
const keywordRank = keywordRankMap.get(chunkId) ?? Number.POSITIVE_INFINITY
const rrfScore = 1 / (k + vectorRank) + 1 / (k + keywordRank)
const result =
vectorResults.find((r) => r.chunkId === chunkId) ||
keywordResults.find((r) => r.chunkId === chunkId)
if (result) {
scoredResults.push({ ...result, rrfScore })
}
}
const filteredResults = mergedResults.slice(0, limit)
const searchResults = filteredResults.map((result) => {
scoredResults.sort((a, b) => b.rrfScore - a.rrfScore)
const localeFilteredResults = scoredResults.filter((result) => {
const firstPart = result.sourceDocument.split('/')[0]
if (knownLocales.includes(firstPart)) {
return firstPart === locale
}
return locale === 'en'
})
const queryLower = query.toLowerCase()
const getTitleBoost = (result: ResultWithRRF): number => {
const fileName = result.sourceDocument
.replace('.mdx', '')
.split('/')
.pop()
?.toLowerCase()
?.replace(/_/g, ' ')
if (fileName === queryLower) return 0.01
if (fileName?.includes(queryLower)) return 0.005
return 0
}
localeFilteredResults.sort((a, b) => {
return b.rrfScore + getTitleBoost(b) - (a.rrfScore + getTitleBoost(a))
})
const pageMap = new Map<string, ResultWithRRF>()
for (const result of localeFilteredResults) {
const pageKey = result.sourceDocument
const existing = pageMap.get(pageKey)
if (!existing || result.rrfScore > existing.rrfScore) {
pageMap.set(pageKey, result)
}
}
const deduplicatedResults = Array.from(pageMap.values())
.sort((a, b) => b.rrfScore + getTitleBoost(b) - (a.rrfScore + getTitleBoost(a)))
.slice(0, limit)
const searchResults = deduplicatedResults.map((result) => {
const title = result.headerText || result.sourceDocument.replace('.mdx', '')
const pathParts = result.sourceDocument
.replace('.mdx', '')
.split('/')
.map((part) => part.charAt(0).toUpperCase() + part.slice(1))
.filter((part) => part !== 'index' && !knownLocales.includes(part))
.map((part) => {
return part
.replace(/_/g, ' ')
.split(' ')
.map((word) => {
const acronyms = [
'api',
'mcp',
'sdk',
'url',
'http',
'json',
'xml',
'html',
'css',
'ai',
]
if (acronyms.includes(word.toLowerCase())) {
return word.toUpperCase()
}
return word.charAt(0).toUpperCase() + word.slice(1)
})
.join(' ')
})
return {
id: result.chunkId,


@@ -1739,12 +1739,12 @@ export function BrowserUseIcon(props: SVGProps<SVGSVGElement>) {
{...props}
version='1.0'
xmlns='http://www.w3.org/2000/svg'
width='150pt'
height='150pt'
width='28'
height='28'
viewBox='0 0 150 150'
preserveAspectRatio='xMidYMid meet'
>
<g transform='translate(0,150) scale(0.05,-0.05)' fill='#000000' stroke='none'>
<g transform='translate(0,150) scale(0.05,-0.05)' fill='currentColor' stroke='none'>
<path
d='M786 2713 c-184 -61 -353 -217 -439 -405 -76 -165 -65 -539 19 -666
l57 -85 -48 -124 c-203 -517 -79 -930 346 -1155 159 -85 441 -71 585 28 l111
@@ -4093,6 +4093,23 @@ export function SQSIcon(props: SVGProps<SVGSVGElement>) {
)
}
export function TextractIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
viewBox='10 14 60 52'
version='1.1'
xmlns='http://www.w3.org/2000/svg'
xmlnsXlink='http://www.w3.org/1999/xlink'
>
<path
d='M22.0624102,50 C24.3763895,53.603 28.4103535,56 33.0003125,56 C40.1672485,56 45.9991964,50.168 45.9991964,43 C45.9991964,35.832 40.1672485,30 33.0003125,30 C27.6033607,30 22.9664021,33.307 21.0024196,38 L23.2143999,38 C25.0393836,34.444 28.7363506,32 33.0003125,32 C39.0652583,32 43.9992143,36.935 43.9992143,43 C43.9992143,49.065 39.0652583,54 33.0003125,54 C29.5913429,54 26.5413702,52.441 24.5213882,50 L22.0624102,50 Z M37.0002768,45 L37.0002768,43 L41.9992321,43 C41.9992321,38.038 37.9622682,34 33.0003125,34 C28.0373568,34 23.9993929,38.038 23.9993929,43 L28.9993482,43 L28.9993482,45 L24.2313908,45 C25.1443826,49.002 28.7253507,52 33.0003125,52 C35.1362934,52 37.0992759,51.249 38.6442621,50 L34.0003036,50 L34.0003036,48 L40.4782457,48 C41.0812403,47.102 41.5202364,46.087 41.7682342,45 L37.0002768,45 Z M21.0024196,48 L23.2143999,48 C22.4434068,46.498 22.0004107,44.801 22.0004107,43 C22.0004107,41.959 22.1554093,40.955 22.4264069,40 L20.3634253,40 C20.1344274,40.965 19.9994286,41.966 19.9994286,43 C19.9994286,44.771 20.3584254,46.46 21.0024196,48 L21.0024196,48 Z M19.7434309,50 L17.0004554,50 L17.0004554,48 L18.8744386,48 C18.5344417,47.04 18.2894438,46.038 18.1494451,45 L15.4144695,45 L16.707458,46.293 L15.2924706,47.707 L12.2924974,44.707 C11.9025009,44.316 11.9025009,43.684 12.2924974,43.293 L15.2924706,40.293 L16.707458,41.707 L15.4144695,43 L18.0004464,43 C18.0004464,41.973 18.1044455,40.97 18.3024437,40 L17.0004554,40 L17.0004554,38 L18.8744386,38 C20.9404202,32.184 26.4833707,28 33.0003125,28 C37.427273,28 41.4002375,29.939 44.148213,33 L59.0000804,33 L59.0000804,35 L45.6661994,35 C47.1351863,37.318 47.9991786,40.058 47.9991786,43 L59.0000804,43 L59.0000804,45 L47.8501799,45 C46.8681887,52.327 40.5912447,58 33.0003125,58 C27.2563638,58 22.2624084,54.752 19.7434309,50 L19.7434309,50 Z M37.0002768,39 C37.0002768,38.448 36.5522808,38 36.0002857,38 L29.9993482,38 C29.4473442,38 28.9993482,38.448 28.9993482,39 L28.9993482,41 L31.0003304,41 L31.0003304,40 L32.0003214,40 L32.0003214,43 L31.0003304,43 L31.0003304,45 L35.0002946,45 L35.0002946,43 L34.0003036,43 L34.0003036,40 L35.0002946,40 L35.0002946,41 L37.0002768,41 L37.0002768,39 Z M49.0001696,40 L59.0000804,40 L59.0000804,38 L49.0001696,38 L49.0001696,40 Z M49.0001696,50 L59.0000804,50 L59.0000804,48 L49.0001696,48 L49.0001696,50 Z M57.0000982,27 L60.5850662,27 L57.0000982,23.414 L57.0000982,27 Z M63.7070383,27.293 C63.8940367,27.48 64.0000357,27.735 64.0000357,28 L64.0000357,63 C64.0000357,63.552 63.5520397,64 63.0000446,64 L32.0003304,64 C31.4473264,64 31.0003304,63.552 31.0003304,63 L31.0003304,59 L33.0003125,59 L33.0003125,62 L62.0000536,62 L62.0000536,29 L56.0001071,29 C55.4471121,29 55.0001161,28.552 55.0001161,28 L55.0001161,22 L33.0003125,22 L33.0003125,27 L31.0003304,27 L31.0003304,21 C31.0003304,20.448 31.4473264,20 32.0003304,20 L56.0001071,20 C56.2651048,20 56.5191025,20.105 56.7071008,20.293 L63.7070383,27.293 Z M68,24.166 L68,61 C68,61.552 67.552004,62 67.0000089,62 L65.0000268,62 L65.0000268,60 L66.0000179,60 L66.0000179,24.612 L58.6170838,18 L36.0002857,18 L36.0002857,19 L34.0003036,19 L34.0003036,17 C34.0003036,16.448 34.4472996,16 35.0003036,16 L59.0000804,16 C59.2460782,16 59.483076,16.091 59.6660744,16.255 L67.666003,23.42 C67.8780011,23.61 68,23.881 68,24.166 L68,24.166 Z'
fill='currentColor'
/>
</svg>
)
}
export function McpIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
@@ -4679,6 +4696,26 @@ export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
)
}
export function TableIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth={2}
strokeLinecap='round'
strokeLinejoin='round'
{...props}
>
<rect width='18' height='18' x='3' y='3' rx='2' />
<path d='M3 9h18' />
<path d='M3 15h18' />
<path d='M9 3v18' />
<path d='M15 3v18' />
</svg>
)
}
export function ReductoIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg

View File

@@ -108,8 +108,10 @@ import {
StagehandIcon,
StripeIcon,
SupabaseIcon,
TableIcon,
TavilyIcon,
TelegramIcon,
TextractIcon,
TinybirdIcon,
TranslateIcon,
TrelloIcon,
@@ -143,7 +145,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
calendly: CalendlyIcon,
circleback: CirclebackIcon,
clay: ClayIcon,
confluence: ConfluenceIcon,
confluence_v2: ConfluenceIcon,
cursor_v2: CursorIcon,
datadog: DatadogIcon,
discord: DiscordIcon,
@@ -153,7 +155,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
elasticsearch: ElasticsearchIcon,
elevenlabs: ElevenLabsIcon,
exa: ExaAIIcon,
file: DocumentIcon,
file_v2: DocumentIcon,
firecrawl: FirecrawlIcon,
fireflies: FirefliesIcon,
github_v2: GithubIcon,
@@ -195,7 +197,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
microsoft_excel_v2: MicrosoftExcelIcon,
microsoft_planner: MicrosoftPlannerIcon,
microsoft_teams: MicrosoftTeamsIcon,
mistral_parse: MistralIcon,
mistral_parse_v2: MistralIcon,
mongodb: MongoDBIcon,
mysql: MySQLIcon,
neo4j: Neo4jIcon,
@@ -235,8 +237,10 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
stripe: StripeIcon,
stt: STTIcon,
supabase: SupabaseIcon,
table: TableIcon,
tavily: TavilyIcon,
telegram: TelegramIcon,
textract: TextractIcon,
tinybird: TinybirdIcon,
translate: TranslateIcon,
trello: TrelloIcon,
@@ -244,7 +248,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
twilio_sms: TwilioIcon,
twilio_voice: TwilioIcon,
typeform: TypeformIcon,
video_generator: VideoIcon,
video_generator_v2: VideoIcon,
vision: EyeIcon,
wealthbox: WealthboxIcon,
webflow: WebflowIcon,
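The entries above resolve a block's type string to its icon component, with versioned block types (`confluence_v2`, `file_v2`, `mistral_parse_v2`, `video_generator_v2`) keyed separately. A minimal lookup sketch against the `Record<string, IconComponent>` typing shown in the hunk header; the map is declared rather than imported since its module path is not shown here:

```typescript
import type { ComponentType, SVGProps } from 'react'

// Assumed alias matching the Record<string, IconComponent> typing above.
type IconComponent = ComponentType<SVGProps<SVGSVGElement>>
declare const blockTypeToIconMap: Record<string, IconComponent>

// Resolve a block type to its icon; returns undefined for unknown
// types so callers can fall back to a default icon.
function iconForBlockType(blockType: string): IconComponent | undefined {
  return blockTypeToIconMap[blockType]
}
```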

View File

@@ -1,3 +1,3 @@
{
"pages": ["index", "basics", "api", "form", "logging", "costs"]
"pages": ["index", "basics", "api", "logging", "costs"]
}

View File

@@ -7,7 +7,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="browser_use"
color="#E0E0E0"
color="#181C1E"
/>
{/* MANUAL-CONTENT-START:intro */}

View File

@@ -6,7 +6,7 @@ description: Interact with Confluence
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="confluence"
type="confluence_v2"
color="#E0E0E0"
/>

View File

@@ -6,7 +6,7 @@ description: Read and parse multiple files
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="file"
type="file_v2"
color="#40916C"
/>
@@ -48,7 +48,7 @@ Parse one or more uploaded files or files from URLs (text, PDF, CSV, images, etc
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `files` | array | Array of parsed files |
| `combinedContent` | string | Combined content of all parsed files |
| `files` | array | Array of parsed files with content, metadata, and file properties |
| `combinedContent` | string | All file contents merged into a single text string |

File diff suppressed because it is too large

View File

@@ -119,6 +119,145 @@ Get a specific event from Google Calendar. Returns API-aligned fields only.
| `creator` | json | Event creator |
| `organizer` | json | Event organizer |
### `google_calendar_update`
Update an existing event in Google Calendar. Returns API-aligned fields only.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `calendarId` | string | No | Calendar ID \(defaults to primary\) |
| `eventId` | string | Yes | Event ID to update |
| `summary` | string | No | New event title/summary |
| `description` | string | No | New event description |
| `location` | string | No | New event location |
| `startDateTime` | string | No | New start date and time. MUST include timezone offset \(e.g., 2025-06-03T10:00:00-08:00\) OR provide timeZone parameter |
| `endDateTime` | string | No | New end date and time. MUST include timezone offset \(e.g., 2025-06-03T11:00:00-08:00\) OR provide timeZone parameter |
| `timeZone` | string | No | Time zone \(e.g., America/Los_Angeles\). Required if datetime does not include offset. |
| `attendees` | array | No | Array of attendee email addresses \(replaces existing attendees\) |
| `sendUpdates` | string | No | How to send updates to attendees: all, externalOnly, or none |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Event ID |
| `htmlLink` | string | Event link |
| `status` | string | Event status |
| `summary` | string | Event title |
| `description` | string | Event description |
| `location` | string | Event location |
| `start` | json | Event start |
| `end` | json | Event end |
| `attendees` | json | Event attendees |
| `creator` | json | Event creator |
| `organizer` | json | Event organizer |
### `google_calendar_delete`
Delete an event from Google Calendar. Returns API-aligned fields only.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `calendarId` | string | No | Calendar ID \(defaults to primary\) |
| `eventId` | string | Yes | Event ID to delete |
| `sendUpdates` | string | No | How to send updates to attendees: all, externalOnly, or none |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `eventId` | string | Deleted event ID |
| `deleted` | boolean | Whether deletion was successful |
### `google_calendar_move`
Move an event to a different calendar. Returns API-aligned fields only.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `calendarId` | string | No | Source calendar ID \(defaults to primary\) |
| `eventId` | string | Yes | Event ID to move |
| `destinationCalendarId` | string | Yes | Destination calendar ID |
| `sendUpdates` | string | No | How to send updates to attendees: all, externalOnly, or none |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Event ID |
| `htmlLink` | string | Event link |
| `status` | string | Event status |
| `summary` | string | Event title |
| `description` | string | Event description |
| `location` | string | Event location |
| `start` | json | Event start |
| `end` | json | Event end |
| `attendees` | json | Event attendees |
| `creator` | json | Event creator |
| `organizer` | json | Event organizer |
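As a sketch of how these parameters fit together, the call below moves an event and notifies attendees. `executeTool` is a hypothetical generic runner (not Sim's actual invocation API) and the IDs are placeholders:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<Record<string, unknown>>

async function moveEvent() {
  const result = await executeTool('google_calendar_move', {
    calendarId: 'primary', // optional source; defaults to primary
    eventId: 'evt_123', // placeholder event ID
    destinationCalendarId: 'team-calendar@example.com', // placeholder
    sendUpdates: 'all', // notify all attendees of the move
  })
  console.log(result.htmlLink) // link to the event in its new calendar
}
```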
### `google_calendar_instances`
Get instances of a recurring event from Google Calendar. Returns API-aligned fields only.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `calendarId` | string | No | Calendar ID \(defaults to primary\) |
| `eventId` | string | Yes | Recurring event ID to get instances of |
| `timeMin` | string | No | Lower bound for instances \(RFC3339 timestamp, e.g., 2025-06-03T00:00:00Z\) |
| `timeMax` | string | No | Upper bound for instances \(RFC3339 timestamp, e.g., 2025-06-04T00:00:00Z\) |
| `maxResults` | number | No | Maximum number of instances to return \(default 250, max 2500\) |
| `pageToken` | string | No | Token for retrieving subsequent pages of results |
| `showDeleted` | boolean | No | Include deleted instances |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `nextPageToken` | string | Next page token |
| `timeZone` | string | Calendar time zone |
| `instances` | json | List of recurring event instances |
### `google_calendar_list_calendars`
List all calendars in the user's calendar list
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `minAccessRole` | string | No | Minimum access role for returned calendars: freeBusyReader, reader, writer, or owner |
| `maxResults` | number | No | Maximum number of calendars to return \(default 100, max 250\) |
| `pageToken` | string | No | Token for retrieving subsequent pages of results |
| `showDeleted` | boolean | No | Include deleted calendars |
| `showHidden` | boolean | No | Include hidden calendars |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `nextPageToken` | string | Next page token |
| `calendars` | array | List of calendars |
| ↳ `id` | string | Calendar ID |
| ↳ `summary` | string | Calendar title |
| ↳ `description` | string | Calendar description |
| ↳ `location` | string | Calendar location |
| ↳ `timeZone` | string | Calendar time zone |
| ↳ `accessRole` | string | Access role for the calendar |
| ↳ `backgroundColor` | string | Calendar background color |
| ↳ `foregroundColor` | string | Calendar foreground color |
| ↳ `primary` | boolean | Whether this is the primary calendar |
| ↳ `hidden` | boolean | Whether the calendar is hidden |
| ↳ `selected` | boolean | Whether the calendar is selected |
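Because results are paged, fetching every calendar means looping on `nextPageToken`. A sketch under the same hypothetical `executeTool` runner:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{
  calendars: Array<{ id: string; summary: string }>
  nextPageToken?: string
}>

async function listAllCalendars() {
  const all: Array<{ id: string; summary: string }> = []
  let pageToken: string | undefined
  do {
    const page = await executeTool('google_calendar_list_calendars', {
      maxResults: 250, // documented per-page maximum
      ...(pageToken ? { pageToken } : {}),
    })
    all.push(...page.calendars)
    pageToken = page.nextPageToken
  } while (pageToken)
  return all
}
```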
### `google_calendar_quick_add`
Create events from natural language text. Returns API-aligned fields only.

View File

@@ -1,6 +1,6 @@
---
title: Google Drive
description: Create, upload, and list files
description: Manage files, folders, and permissions
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -40,217 +40,12 @@ In Sim, the Google Drive integration enables your agents to interact directly wi
## Usage Instructions
Integrate Google Drive into the workflow. Can create, upload, and list files.
Integrate Google Drive into the workflow. Can create, upload, download, copy, move, delete, and share files, and manage permissions.
## Tools
### `google_drive_upload`
Upload a file to Google Drive with complete metadata returned
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileName` | string | Yes | The name of the file to upload |
| `file` | file | No | Binary file to upload \(UserFile object\) |
| `content` | string | No | Text content to upload \(use this OR file, not both\) |
| `mimeType` | string | No | The MIME type of the file to upload \(auto-detected from file if not provided\) |
| `folderSelector` | string | No | Select the folder to upload the file to |
| `folderId` | string | No | The ID of the folder to upload the file to \(internal use\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Complete uploaded file metadata from Google Drive |
| ↳ `id` | string | Google Drive file ID |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type |
| ↳ `kind` | string | Resource type identifier |
| ↳ `description` | string | File description |
| ↳ `originalFilename` | string | Original uploaded filename |
| ↳ `fullFileExtension` | string | Full file extension |
| ↳ `fileExtension` | string | File extension |
| ↳ `owners` | json | List of file owners |
| ↳ `permissions` | json | File permissions |
| ↳ `permissionIds` | json | Permission IDs |
| ↳ `shared` | boolean | Whether file is shared |
| ↳ `ownedByMe` | boolean | Whether owned by current user |
| ↳ `writersCanShare` | boolean | Whether writers can share |
| ↳ `viewersCanCopyContent` | boolean | Whether viewers can copy |
| ↳ `copyRequiresWriterPermission` | boolean | Whether copy requires writer permission |
| ↳ `sharingUser` | json | User who shared the file |
| ↳ `starred` | boolean | Whether file is starred |
| ↳ `trashed` | boolean | Whether file is in trash |
| ↳ `explicitlyTrashed` | boolean | Whether explicitly trashed |
| ↳ `appProperties` | json | App-specific properties |
| ↳ `createdTime` | string | File creation time |
| ↳ `modifiedTime` | string | Last modification time |
| ↳ `modifiedByMeTime` | string | When modified by current user |
| ↳ `viewedByMeTime` | string | When last viewed by current user |
| ↳ `sharedWithMeTime` | string | When shared with current user |
| ↳ `lastModifyingUser` | json | User who last modified the file |
| ↳ `viewedByMe` | boolean | Whether viewed by current user |
| ↳ `modifiedByMe` | boolean | Whether modified by current user |
| ↳ `webViewLink` | string | URL to view in browser |
| ↳ `webContentLink` | string | Direct download URL |
| ↳ `iconLink` | string | URL to file icon |
| ↳ `thumbnailLink` | string | URL to thumbnail |
| ↳ `exportLinks` | json | Export format links |
| ↳ `size` | string | File size in bytes |
| ↳ `quotaBytesUsed` | string | Storage quota used |
| ↳ `md5Checksum` | string | MD5 hash |
| ↳ `sha1Checksum` | string | SHA-1 hash |
| ↳ `sha256Checksum` | string | SHA-256 hash |
| ↳ `parents` | json | Parent folder IDs |
| ↳ `spaces` | json | Spaces containing file |
| ↳ `driveId` | string | Shared drive ID |
| ↳ `capabilities` | json | User capabilities on file |
| ↳ `version` | string | Version number |
| ↳ `headRevisionId` | string | Head revision ID |
| ↳ `hasThumbnail` | boolean | Whether has thumbnail |
| ↳ `thumbnailVersion` | string | Thumbnail version |
| ↳ `imageMediaMetadata` | json | Image-specific metadata |
| ↳ `videoMediaMetadata` | json | Video-specific metadata |
| ↳ `isAppAuthorized` | boolean | Whether created by requesting app |
| ↳ `contentRestrictions` | json | Content restrictions |
| ↳ `linkShareMetadata` | json | Link share metadata |
### `google_drive_create_folder`
Create a new folder in Google Drive with complete metadata returned
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileName` | string | Yes | Name of the folder to create |
| `folderSelector` | string | No | Select the parent folder to create the folder in |
| `folderId` | string | No | ID of the parent folder \(internal use\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Complete created folder metadata from Google Drive |
| ↳ `id` | string | Google Drive folder ID |
| ↳ `name` | string | Folder name |
| ↳ `mimeType` | string | MIME type \(application/vnd.google-apps.folder\) |
| ↳ `kind` | string | Resource type identifier |
| ↳ `description` | string | Folder description |
| ↳ `owners` | json | List of folder owners |
| ↳ `permissions` | json | Folder permissions |
| ↳ `permissionIds` | json | Permission IDs |
| ↳ `shared` | boolean | Whether folder is shared |
| ↳ `ownedByMe` | boolean | Whether owned by current user |
| ↳ `writersCanShare` | boolean | Whether writers can share |
| ↳ `viewersCanCopyContent` | boolean | Whether viewers can copy |
| ↳ `copyRequiresWriterPermission` | boolean | Whether copy requires writer permission |
| ↳ `sharingUser` | json | User who shared the folder |
| ↳ `starred` | boolean | Whether folder is starred |
| ↳ `trashed` | boolean | Whether folder is in trash |
| ↳ `explicitlyTrashed` | boolean | Whether explicitly trashed |
| ↳ `appProperties` | json | App-specific properties |
| ↳ `folderColorRgb` | string | Folder color |
| ↳ `createdTime` | string | Folder creation time |
| ↳ `modifiedTime` | string | Last modification time |
| ↳ `modifiedByMeTime` | string | When modified by current user |
| ↳ `viewedByMeTime` | string | When last viewed by current user |
| ↳ `sharedWithMeTime` | string | When shared with current user |
| ↳ `lastModifyingUser` | json | User who last modified the folder |
| ↳ `viewedByMe` | boolean | Whether viewed by current user |
| ↳ `modifiedByMe` | boolean | Whether modified by current user |
| ↳ `webViewLink` | string | URL to view in browser |
| ↳ `iconLink` | string | URL to folder icon |
| ↳ `parents` | json | Parent folder IDs |
| ↳ `spaces` | json | Spaces containing folder |
| ↳ `driveId` | string | Shared drive ID |
| ↳ `capabilities` | json | User capabilities on folder |
| ↳ `version` | string | Version number |
| ↳ `isAppAuthorized` | boolean | Whether created by requesting app |
| ↳ `contentRestrictions` | json | Content restrictions |
| ↳ `linkShareMetadata` | json | Link share metadata |
### `google_drive_download`
Download a file from Google Drive with complete metadata (exports Google Workspace files automatically)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to download |
| `mimeType` | string | No | The MIME type to export Google Workspace files to \(optional\) |
| `fileName` | string | No | Optional filename override |
| `includeRevisions` | boolean | No | Whether to include revision history in the metadata \(default: true, returns first 100 revisions\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Downloaded file data |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type of the file |
| ↳ `data` | string | File content as base64-encoded string |
| ↳ `size` | number | File size in bytes |
| `metadata` | object | Complete file metadata from Google Drive |
| ↳ `id` | string | Google Drive file ID |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type |
| ↳ `kind` | string | Resource type identifier |
| ↳ `description` | string | File description |
| ↳ `originalFilename` | string | Original uploaded filename |
| ↳ `fullFileExtension` | string | Full file extension |
| ↳ `fileExtension` | string | File extension |
| ↳ `owners` | json | List of file owners |
| ↳ `permissions` | json | File permissions |
| ↳ `permissionIds` | json | Permission IDs |
| ↳ `shared` | boolean | Whether file is shared |
| ↳ `ownedByMe` | boolean | Whether owned by current user |
| ↳ `writersCanShare` | boolean | Whether writers can share |
| ↳ `viewersCanCopyContent` | boolean | Whether viewers can copy |
| ↳ `copyRequiresWriterPermission` | boolean | Whether copy requires writer permission |
| ↳ `sharingUser` | json | User who shared the file |
| ↳ `starred` | boolean | Whether file is starred |
| ↳ `trashed` | boolean | Whether file is in trash |
| ↳ `explicitlyTrashed` | boolean | Whether explicitly trashed |
| ↳ `appProperties` | json | App-specific properties |
| ↳ `createdTime` | string | File creation time |
| ↳ `modifiedTime` | string | Last modification time |
| ↳ `modifiedByMeTime` | string | When modified by current user |
| ↳ `viewedByMeTime` | string | When last viewed by current user |
| ↳ `sharedWithMeTime` | string | When shared with current user |
| ↳ `lastModifyingUser` | json | User who last modified the file |
| ↳ `viewedByMe` | boolean | Whether viewed by current user |
| ↳ `modifiedByMe` | boolean | Whether modified by current user |
| ↳ `webViewLink` | string | URL to view in browser |
| ↳ `webContentLink` | string | Direct download URL |
| ↳ `iconLink` | string | URL to file icon |
| ↳ `thumbnailLink` | string | URL to thumbnail |
| ↳ `exportLinks` | json | Export format links |
| ↳ `size` | string | File size in bytes |
| ↳ `quotaBytesUsed` | string | Storage quota used |
| ↳ `md5Checksum` | string | MD5 hash |
| ↳ `sha1Checksum` | string | SHA-1 hash |
| ↳ `sha256Checksum` | string | SHA-256 hash |
| ↳ `parents` | json | Parent folder IDs |
| ↳ `spaces` | json | Spaces containing file |
| ↳ `driveId` | string | Shared drive ID |
| ↳ `capabilities` | json | User capabilities on file |
| ↳ `version` | string | Version number |
| ↳ `headRevisionId` | string | Head revision ID |
| ↳ `hasThumbnail` | boolean | Whether has thumbnail |
| ↳ `thumbnailVersion` | string | Thumbnail version |
| ↳ `imageMediaMetadata` | json | Image-specific metadata |
| ↳ `videoMediaMetadata` | json | Video-specific metadata |
| ↳ `isAppAuthorized` | boolean | Whether created by requesting app |
| ↳ `contentRestrictions` | json | Content restrictions |
| ↳ `linkShareMetadata` | json | Link share metadata |
| ↳ `revisions` | json | File revision history \(first 100 revisions only\) |
### `google_drive_list`
List files and folders in Google Drive with complete metadata
@@ -271,9 +66,9 @@ List files and folders in Google Drive with complete metadata
| --------- | ---- | ----------- |
| `files` | array | Array of file metadata objects from Google Drive |
| ↳ `id` | string | Google Drive file ID |
| ↳ `kind` | string | Resource type identifier |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type |
| ↳ `kind` | string | Resource type identifier |
| ↳ `description` | string | File description |
| ↳ `originalFilename` | string | Original uploaded filename |
| ↳ `fullFileExtension` | string | Full file extension |
@@ -324,4 +119,455 @@ List files and folders in Google Drive with complete metadata
| ↳ `linkShareMetadata` | json | Link share metadata |
| `nextPageToken` | string | Token for fetching the next page of results |
### `google_drive_get_file`
Get metadata for a specific file in Google Drive by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | json | The file metadata |
| ↳ `id` | string | Google Drive file ID |
| ↳ `kind` | string | Resource type identifier |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type |
| ↳ `description` | string | File description |
| ↳ `size` | string | File size in bytes |
| ↳ `starred` | boolean | Whether file is starred |
| ↳ `trashed` | boolean | Whether file is in trash |
| ↳ `webViewLink` | string | URL to view in browser |
| ↳ `webContentLink` | string | Direct download URL |
| ↳ `iconLink` | string | URL to file icon |
| ↳ `thumbnailLink` | string | URL to thumbnail |
| ↳ `parents` | json | Parent folder IDs |
| ↳ `owners` | json | List of file owners |
| ↳ `permissions` | json | File permissions |
| ↳ `createdTime` | string | File creation time |
| ↳ `modifiedTime` | string | Last modification time |
| ↳ `lastModifyingUser` | json | User who last modified the file |
| ↳ `shared` | boolean | Whether file is shared |
| ↳ `ownedByMe` | boolean | Whether owned by current user |
| ↳ `capabilities` | json | User capabilities on file |
| ↳ `md5Checksum` | string | MD5 hash |
| ↳ `version` | string | Version number |
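A brief sketch of a metadata lookup with the hypothetical `executeTool` runner used in the other examples:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{ file: { id: string; name: string; mimeType: string } }>

async function inspectFile(fileId: string) {
  const { file } = await executeTool('google_drive_get_file', { fileId })
  console.log(`${file.name} (${file.mimeType})`) // e.g. "report.pdf (application/pdf)"
  return file
}
```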
### `google_drive_create_folder`
Create a new folder in Google Drive with complete metadata returned
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileName` | string | Yes | Name of the folder to create |
| `folderSelector` | string | No | Select the parent folder to create the folder in |
| `folderId` | string | No | ID of the parent folder \(internal use\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Complete created folder metadata from Google Drive |
| ↳ `id` | string | Google Drive folder ID |
| ↳ `kind` | string | Resource type identifier |
| ↳ `name` | string | Folder name |
| ↳ `mimeType` | string | MIME type \(application/vnd.google-apps.folder\) |
| ↳ `description` | string | Folder description |
| ↳ `owners` | json | List of folder owners |
| ↳ `permissions` | json | Folder permissions |
| ↳ `permissionIds` | json | Permission IDs |
| ↳ `shared` | boolean | Whether folder is shared |
| ↳ `ownedByMe` | boolean | Whether owned by current user |
| ↳ `writersCanShare` | boolean | Whether writers can share |
| ↳ `viewersCanCopyContent` | boolean | Whether viewers can copy |
| ↳ `copyRequiresWriterPermission` | boolean | Whether copy requires writer permission |
| ↳ `sharingUser` | json | User who shared the folder |
| ↳ `starred` | boolean | Whether folder is starred |
| ↳ `trashed` | boolean | Whether folder is in trash |
| ↳ `explicitlyTrashed` | boolean | Whether explicitly trashed |
| ↳ `appProperties` | json | App-specific properties |
| ↳ `folderColorRgb` | string | Folder color |
| ↳ `createdTime` | string | Folder creation time |
| ↳ `modifiedTime` | string | Last modification time |
| ↳ `modifiedByMeTime` | string | When modified by current user |
| ↳ `viewedByMeTime` | string | When last viewed by current user |
| ↳ `sharedWithMeTime` | string | When shared with current user |
| ↳ `lastModifyingUser` | json | User who last modified the folder |
| ↳ `viewedByMe` | boolean | Whether viewed by current user |
| ↳ `modifiedByMe` | boolean | Whether modified by current user |
| ↳ `webViewLink` | string | URL to view in browser |
| ↳ `iconLink` | string | URL to folder icon |
| ↳ `parents` | json | Parent folder IDs |
| ↳ `spaces` | json | Spaces containing folder |
| ↳ `driveId` | string | Shared drive ID |
| ↳ `capabilities` | json | User capabilities on folder |
| ↳ `version` | string | Version number |
| ↳ `isAppAuthorized` | boolean | Whether created by requesting app |
| ↳ `contentRestrictions` | json | Content restrictions |
| ↳ `linkShareMetadata` | json | Link share metadata |
### `google_drive_upload`
Upload a file to Google Drive with complete metadata returned
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileName` | string | Yes | The name of the file to upload |
| `file` | file | No | Binary file to upload \(UserFile object\) |
| `content` | string | No | Text content to upload \(use this OR file, not both\) |
| `mimeType` | string | No | The MIME type of the file to upload \(auto-detected from file if not provided\) |
| `folderSelector` | string | No | Select the folder to upload the file to |
| `folderId` | string | No | The ID of the folder to upload the file to \(internal use\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Complete uploaded file metadata from Google Drive |
| ↳ `id` | string | Google Drive file ID |
| ↳ `kind` | string | Resource type identifier |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type |
| ↳ `description` | string | File description |
| ↳ `originalFilename` | string | Original uploaded filename |
| ↳ `fullFileExtension` | string | Full file extension |
| ↳ `fileExtension` | string | File extension |
| ↳ `owners` | json | List of file owners |
| ↳ `permissions` | json | File permissions |
| ↳ `permissionIds` | json | Permission IDs |
| ↳ `shared` | boolean | Whether file is shared |
| ↳ `ownedByMe` | boolean | Whether owned by current user |
| ↳ `writersCanShare` | boolean | Whether writers can share |
| ↳ `viewersCanCopyContent` | boolean | Whether viewers can copy |
| ↳ `copyRequiresWriterPermission` | boolean | Whether copy requires writer permission |
| ↳ `sharingUser` | json | User who shared the file |
| ↳ `starred` | boolean | Whether file is starred |
| ↳ `trashed` | boolean | Whether file is in trash |
| ↳ `explicitlyTrashed` | boolean | Whether explicitly trashed |
| ↳ `appProperties` | json | App-specific properties |
| ↳ `createdTime` | string | File creation time |
| ↳ `modifiedTime` | string | Last modification time |
| ↳ `modifiedByMeTime` | string | When modified by current user |
| ↳ `viewedByMeTime` | string | When last viewed by current user |
| ↳ `sharedWithMeTime` | string | When shared with current user |
| ↳ `lastModifyingUser` | json | User who last modified the file |
| ↳ `viewedByMe` | boolean | Whether viewed by current user |
| ↳ `modifiedByMe` | boolean | Whether modified by current user |
| ↳ `webViewLink` | string | URL to view in browser |
| ↳ `webContentLink` | string | Direct download URL |
| ↳ `iconLink` | string | URL to file icon |
| ↳ `thumbnailLink` | string | URL to thumbnail |
| ↳ `exportLinks` | json | Export format links |
| ↳ `size` | string | File size in bytes |
| ↳ `quotaBytesUsed` | string | Storage quota used |
| ↳ `md5Checksum` | string | MD5 hash |
| ↳ `sha1Checksum` | string | SHA-1 hash |
| ↳ `sha256Checksum` | string | SHA-256 hash |
| ↳ `parents` | json | Parent folder IDs |
| ↳ `spaces` | json | Spaces containing file |
| ↳ `driveId` | string | Shared drive ID |
| ↳ `capabilities` | json | User capabilities on file |
| ↳ `version` | string | Version number |
| ↳ `headRevisionId` | string | Head revision ID |
| ↳ `hasThumbnail` | boolean | Whether has thumbnail |
| ↳ `thumbnailVersion` | string | Thumbnail version |
| ↳ `imageMediaMetadata` | json | Image-specific metadata |
| ↳ `videoMediaMetadata` | json | Video-specific metadata |
| ↳ `isAppAuthorized` | boolean | Whether created by requesting app |
| ↳ `contentRestrictions` | json | Content restrictions |
| ↳ `linkShareMetadata` | json | Link share metadata |
### `google_drive_download`
Download a file from Google Drive with complete metadata (exports Google Workspace files automatically)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to download |
| `mimeType` | string | No | The MIME type to export Google Workspace files to \(optional\) |
| `fileName` | string | No | Optional filename override |
| `includeRevisions` | boolean | No | Whether to include revision history in the metadata \(default: true, returns first 100 revisions\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Downloaded file data |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type of the file |
| ↳ `data` | string | File content as base64-encoded string |
| ↳ `size` | number | File size in bytes |
| `metadata` | object | Complete file metadata from Google Drive |
| ↳ `id` | string | Google Drive file ID |
| ↳ `kind` | string | Resource type identifier |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type |
| ↳ `description` | string | File description |
| ↳ `originalFilename` | string | Original uploaded filename |
| ↳ `fullFileExtension` | string | Full file extension |
| ↳ `fileExtension` | string | File extension |
| ↳ `owners` | json | List of file owners |
| ↳ `permissions` | json | File permissions |
| ↳ `permissionIds` | json | Permission IDs |
| ↳ `shared` | boolean | Whether file is shared |
| ↳ `ownedByMe` | boolean | Whether owned by current user |
| ↳ `writersCanShare` | boolean | Whether writers can share |
| ↳ `viewersCanCopyContent` | boolean | Whether viewers can copy |
| ↳ `copyRequiresWriterPermission` | boolean | Whether copy requires writer permission |
| ↳ `sharingUser` | json | User who shared the file |
| ↳ `starred` | boolean | Whether file is starred |
| ↳ `trashed` | boolean | Whether file is in trash |
| ↳ `explicitlyTrashed` | boolean | Whether explicitly trashed |
| ↳ `appProperties` | json | App-specific properties |
| ↳ `createdTime` | string | File creation time |
| ↳ `modifiedTime` | string | Last modification time |
| ↳ `modifiedByMeTime` | string | When modified by current user |
| ↳ `viewedByMeTime` | string | When last viewed by current user |
| ↳ `sharedWithMeTime` | string | When shared with current user |
| ↳ `lastModifyingUser` | json | User who last modified the file |
| ↳ `viewedByMe` | boolean | Whether viewed by current user |
| ↳ `modifiedByMe` | boolean | Whether modified by current user |
| ↳ `webViewLink` | string | URL to view in browser |
| ↳ `webContentLink` | string | Direct download URL |
| ↳ `iconLink` | string | URL to file icon |
| ↳ `thumbnailLink` | string | URL to thumbnail |
| ↳ `exportLinks` | json | Export format links |
| ↳ `size` | string | File size in bytes |
| ↳ `quotaBytesUsed` | string | Storage quota used |
| ↳ `md5Checksum` | string | MD5 hash |
| ↳ `sha1Checksum` | string | SHA-1 hash |
| ↳ `sha256Checksum` | string | SHA-256 hash |
| ↳ `parents` | json | Parent folder IDs |
| ↳ `spaces` | json | Spaces containing file |
| ↳ `driveId` | string | Shared drive ID |
| ↳ `capabilities` | json | User capabilities on file |
| ↳ `version` | string | Version number |
| ↳ `headRevisionId` | string | Head revision ID |
| ↳ `hasThumbnail` | boolean | Whether has thumbnail |
| ↳ `thumbnailVersion` | string | Thumbnail version |
| ↳ `imageMediaMetadata` | json | Image-specific metadata |
| ↳ `videoMediaMetadata` | json | Video-specific metadata |
| ↳ `isAppAuthorized` | boolean | Whether created by requesting app |
| ↳ `contentRestrictions` | json | Content restrictions |
| ↳ `linkShareMetadata` | json | Link share metadata |
| ↳ `revisions` | json | File revision history \(first 100 revisions only\) |
### `google_drive_copy`
Create a copy of a file in Google Drive
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to copy |
| `newName` | string | No | Name for the copied file \(defaults to "Copy of \[original name\]"\) |
| `destinationFolderId` | string | No | ID of the folder to place the copy in \(defaults to same location as original\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | json | The copied file metadata |
| ↳ `id` | string | Google Drive file ID of the copy |
| ↳ `kind` | string | Resource type identifier |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type |
| ↳ `webViewLink` | string | URL to view in browser |
| ↳ `parents` | json | Parent folder IDs |
| ↳ `createdTime` | string | File creation time |
| ↳ `modifiedTime` | string | Last modification time |
| ↳ `owners` | json | List of file owners |
| ↳ `size` | string | File size in bytes |
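A sketch of copying a file into a backup folder; the runner is hypothetical and the folder ID is a placeholder:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{ file: { id: string; name: string } }>

async function backupFile(fileId: string): Promise<string> {
  const { file } = await executeTool('google_drive_copy', {
    fileId,
    newName: 'Backup of quarterly report', // optional; defaults to "Copy of [original name]"
    destinationFolderId: 'folder_abc123', // placeholder; defaults to original location
  })
  return file.id // ID of the newly created copy
}
```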
### `google_drive_update`
Update file metadata in Google Drive (rename, move, star, add description)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to update |
| `name` | string | No | New name for the file |
| `description` | string | No | New description for the file |
| `addParents` | string | No | Comma-separated list of parent folder IDs to add \(moves file to these folders\) |
| `removeParents` | string | No | Comma-separated list of parent folder IDs to remove |
| `starred` | boolean | No | Whether to star or unstar the file |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | json | The updated file metadata |
| ↳ `id` | string | Google Drive file ID |
| ↳ `kind` | string | Resource type identifier |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type |
| ↳ `description` | string | File description |
| ↳ `starred` | boolean | Whether file is starred |
| ↳ `webViewLink` | string | URL to view in browser |
| ↳ `parents` | json | Parent folder IDs |
| ↳ `modifiedTime` | string | Last modification time |
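Moving a file between folders is expressed as one update that adds the destination parent and removes the source parent. A sketch with the hypothetical runner:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{ file: { id: string; parents?: string[] } }>

async function moveFile(fileId: string, fromFolderId: string, toFolderId: string) {
  return executeTool('google_drive_update', {
    fileId,
    addParents: toFolderId, // comma-separated folder IDs to add
    removeParents: fromFolderId, // comma-separated folder IDs to remove
  })
}
```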
### `google_drive_trash`
Move a file to the trash in Google Drive (can be restored later)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to move to trash |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | json | The trashed file metadata |
| ↳ `id` | string | Google Drive file ID |
| ↳ `kind` | string | Resource type identifier |
| ↳ `name` | string | File name |
| ↳ `mimeType` | string | MIME type |
| ↳ `trashed` | boolean | Whether file is in trash \(should be true\) |
| ↳ `trashedTime` | string | When file was trashed |
| ↳ `webViewLink` | string | URL to view in browser |
### `google_drive_delete`
Permanently delete a file from Google Drive (bypasses trash)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to permanently delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the file was successfully deleted |
| `fileId` | string | The ID of the deleted file |
### `google_drive_share`
Share a file with a user, group, domain, or make it public
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to share |
| `type` | string | Yes | Type of grantee: user, group, domain, or anyone |
| `role` | string | Yes | Permission role: owner \(transfer ownership\), organizer \(shared drive only\), fileOrganizer \(shared drive only\), writer \(edit\), commenter \(view and comment\), reader \(view only\) |
| `email` | string | No | Email address of the user or group \(required for type=user or type=group\) |
| `domain` | string | No | Domain to share with \(required for type=domain\) |
| `transferOwnership` | boolean | No | Required when role is owner. Transfers ownership to the specified user. |
| `moveToNewOwnersRoot` | boolean | No | When transferring ownership, move the file to the new owner's My Drive root folder. |
| `sendNotification` | boolean | No | Whether to send an email notification \(default: true\) |
| `emailMessage` | string | No | Custom message to include in the notification email |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `permission` | json | The created permission details |
| ↳ `id` | string | Permission ID |
| ↳ `type` | string | Grantee type \(user, group, domain, anyone\) |
| ↳ `role` | string | Permission role |
| ↳ `emailAddress` | string | Email of the grantee |
| ↳ `displayName` | string | Display name of the grantee |
| ↳ `domain` | string | Domain of the grantee |
| ↳ `expirationTime` | string | Expiration time |
| ↳ `deleted` | boolean | Whether grantee is deleted |
### `google_drive_unshare`
Remove a permission from a file (revoke access)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to modify permissions on |
| `permissionId` | string | Yes | The ID of the permission to remove \(use list_permissions to find this\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `removed` | boolean | Whether the permission was successfully removed |
| `fileId` | string | The ID of the file |
| `permissionId` | string | The ID of the removed permission |
### `google_drive_list_permissions`
List all permissions (who has access) for a file in Google Drive
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `fileId` | string | Yes | The ID of the file to list permissions for |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `permissions` | array | List of permissions on the file |
| ↳ `id` | string | Permission ID \(use to remove permission\) |
| ↳ `type` | string | Grantee type \(user, group, domain, anyone\) |
| ↳ `role` | string | Permission role \(owner, organizer, fileOrganizer, writer, commenter, reader\) |
| ↳ `emailAddress` | string | Email of the grantee |
| ↳ `displayName` | string | Display name of the grantee |
| ↳ `photoLink` | string | Photo URL of the grantee |
| ↳ `domain` | string | Domain of the grantee |
| ↳ `expirationTime` | string | When permission expires |
| ↳ `deleted` | boolean | Whether grantee account is deleted |
| ↳ `allowFileDiscovery` | boolean | Whether file is discoverable by grantee |
| ↳ `pendingOwner` | boolean | Whether ownership transfer is pending |
| ↳ `permissionDetails` | json | Details about inherited permissions |
| `nextPageToken` | string | Token for fetching the next page of permissions |
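Revoking one grantee's access is a two-step flow: list the file's permissions to find the matching permission ID (as the `google_drive_unshare` input notes), then remove it. A sketch with the hypothetical runner:

```typescript
interface Permission {
  id: string
  emailAddress?: string
  role: string
}

// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{ permissions?: Permission[]; removed?: boolean }>

async function revokeAccess(fileId: string, email: string): Promise<boolean> {
  // Step 1: find the permission ID for the grantee's email.
  const { permissions = [] } = await executeTool('google_drive_list_permissions', {
    fileId,
  })
  const match = permissions.find((p) => p.emailAddress === email)
  if (!match) return false
  // Step 2: remove that permission to revoke access.
  const { removed = false } = await executeTool('google_drive_unshare', {
    fileId,
    permissionId: match.id,
  })
  return removed
}
```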
### `google_drive_get_about`
Get information about the user and their Google Drive (storage quota, capabilities)
#### Input
This tool takes no input parameters.
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `user` | json | Information about the authenticated user |
| ↳ `displayName` | string | User display name |
| ↳ `emailAddress` | string | User email address |
| ↳ `photoLink` | string | URL to user profile photo |
| ↳ `permissionId` | string | User permission ID |
| ↳ `me` | boolean | Whether this is the authenticated user |
| `storageQuota` | json | Storage quota information in bytes |
| ↳ `limit` | string | Total storage limit in bytes \(null for unlimited\) |
| ↳ `usage` | string | Total storage used in bytes |
| ↳ `usageInDrive` | string | Storage used by Drive files in bytes |
| ↳ `usageInDriveTrash` | string | Storage used by trashed files in bytes |
| `canCreateDrives` | boolean | Whether user can create shared drives |
| `importFormats` | json | Map of MIME types that can be imported and their target formats |
| `exportFormats` | json | Map of Google Workspace MIME types and their exportable formats |
| `maxUploadSize` | string | Maximum upload size in bytes |
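Quota fields are returned as byte-count strings, so arithmetic needs an explicit conversion; `limit` is null for unlimited plans. A sketch:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{ storageQuota: { limit: string | null; usage: string } }>

async function quotaUsedFraction(): Promise<number> {
  const { storageQuota } = await executeTool('google_drive_get_about', {})
  if (storageQuota.limit == null) return 0 // unlimited quota
  return Number(storageQuota.usage) / Number(storageQuota.limit)
}
```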

View File

@@ -1,6 +1,6 @@
---
title: Google Forms
description: Read responses from a Google Form
description: Manage Google Forms and responses
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -29,7 +29,7 @@ In Sim, the Google Forms integration enables your agents to programmatically acc
## Usage Instructions
Integrate Google Forms into your workflow. Provide a Form ID to list responses, or specify a Response ID to fetch a single response. Requires OAuth.
Integrate Google Forms into your workflow. Read form structure, get responses, create forms, update content, and manage notification watches.
@@ -37,15 +37,246 @@ Integrate Google Forms into your workflow. Provide a Form ID to list responses,
### `google_forms_get_responses`
Retrieve a single response or list responses from a Google Form
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `formId` | string | Yes | The ID of the Google Form |
| `responseId` | string | No | If provided, returns this specific response |
| `pageSize` | number | No | Maximum number of responses to return \(service may return fewer\). Defaults to 5000. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `data` | json | Response or list of responses |
| `responses` | array | Array of form responses \(when no responseId provided\) |
| ↳ `responseId` | string | Unique response ID |
| ↳ `createTime` | string | When the response was created |
| ↳ `lastSubmittedTime` | string | When the response was last submitted |
| ↳ `answers` | json | Map of question IDs to answer values |
| `response` | object | Single form response \(when responseId is provided\) |
| ↳ `responseId` | string | Unique response ID |
| ↳ `createTime` | string | When the response was created |
| ↳ `lastSubmittedTime` | string | When the response was last submitted |
| ↳ `answers` | json | Map of question IDs to answer values |
| `raw` | json | Raw API response data |
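A sketch of pulling recent answers from a form; the form ID is a placeholder and the runner is hypothetical:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{
  responses?: Array<{ responseId: string; answers: Record<string, unknown> }>
}>

async function latestAnswers(formId: string) {
  const { responses = [] } = await executeTool('google_forms_get_responses', {
    formId,
    pageSize: 100, // defaults to 5000 when omitted
  })
  return responses.map((r) => r.answers) // question ID -> answer value maps
}
```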
### `google_forms_get_form`
Retrieve a form structure including its items, settings, and metadata
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `formId` | string | Yes | The ID of the Google Form to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `formId` | string | The form ID |
| `title` | string | The form title visible to responders |
| `description` | string | The form description |
| `documentTitle` | string | The document title visible in Drive |
| `responderUri` | string | The URI to share with responders |
| `linkedSheetId` | string | The ID of the linked Google Sheet |
| `revisionId` | string | The revision ID of the form |
| `items` | array | The form items \(questions, sections, etc.\) |
| ↳ `itemId` | string | Item ID |
| ↳ `title` | string | Item title |
| ↳ `description` | string | Item description |
| `settings` | json | Form settings |
| `publishSettings` | json | Form publish settings |
### `google_forms_create_form`
Create a new Google Form with a title
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `title` | string | Yes | The title of the form visible to responders |
| `documentTitle` | string | No | The document title visible in Drive \(defaults to form title\) |
| `unpublished` | boolean | No | If true, create an unpublished form that does not accept responses |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `formId` | string | The ID of the created form |
| `title` | string | The form title |
| `documentTitle` | string | The document title in Drive |
| `responderUri` | string | The URI to share with responders |
| `revisionId` | string | The revision ID of the form |
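A sketch of creating a draft form that does not yet accept responses:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{ formId: string; responderUri: string }>

async function createSurvey(): Promise<string> {
  const form = await executeTool('google_forms_create_form', {
    title: 'Customer Feedback', // visible to responders
    documentTitle: 'Feedback (Q1)', // visible in Drive; defaults to title
    unpublished: true, // create without accepting responses
  })
  return form.responderUri // share once published
}
```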
### `google_forms_batch_update`
Apply multiple updates to a form (add items, update info, change settings, etc.)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `formId` | string | Yes | The ID of the Google Form to update |
| `requests` | json | Yes | Array of update requests \(updateFormInfo, updateSettings, createItem, updateItem, moveItem, deleteItem\) |
| `includeFormInResponse` | boolean | No | Whether to return the updated form in the response |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `replies` | array | The replies from each update request |
| `writeControl` | object | Write control information with revision IDs |
| ↳ `requiredRevisionId` | string | Required revision ID for conflict detection |
| ↳ `targetRevisionId` | string | Target revision ID |
| `form` | object | The updated form \(if includeFormInResponse was true\) |
| ↳ `formId` | string | The form ID |
| ↳ `info` | object | Form info containing title and description |
| ↳ `title` | string | The form title visible to responders |
| ↳ `description` | string | The form description |
| ↳ `documentTitle` | string | The document title visible in Drive |
| ↳ `settings` | object | Form settings |
| ↳ `quizSettings` | object | Quiz settings |
| ↳ `isQuiz` | boolean | Whether the form is a quiz |
| ↳ `emailCollectionType` | string | Email collection type |
| ↳ `itemId` | string | Item ID |
| ↳ `questionItem` | json | Question item configuration |
| ↳ `questionGroupItem` | json | Question group configuration |
| ↳ `pageBreakItem` | json | Page break configuration |
| ↳ `textItem` | json | Text item configuration |
| ↳ `imageItem` | json | Image item configuration |
| ↳ `videoItem` | json | Video item configuration |
| ↳ `revisionId` | string | The revision ID of the form |
| ↳ `responderUri` | string | The URI to share with responders |
| ↳ `linkedSheetId` | string | The ID of the linked Google Sheet |
| ↳ `publishSettings` | object | Form publish settings |
| ↳ `publishState` | object | Current publish state |
| ↳ `isPublished` | boolean | Whether the form is published |
| ↳ `isAcceptingResponses` | boolean | Whether the form is accepting responses |
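The `requests` array follows the Google Forms API batchUpdate request shapes named in the input table. A sketch that inserts a short-answer question at the top of a form (the runner is hypothetical):

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{ replies: unknown[] }>

async function addTextQuestion(formId: string) {
  return executeTool('google_forms_batch_update', {
    formId,
    requests: [
      {
        createItem: {
          item: {
            title: 'How did you hear about us?',
            questionItem: { question: { textQuestion: {} } },
          },
          location: { index: 0 }, // insert at the top of the form
        },
      },
    ],
    includeFormInResponse: false,
  })
}
```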
### `google_forms_set_publish_settings`
Update the publish settings of a form (publish/unpublish, accept responses)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `formId` | string | Yes | The ID of the Google Form |
| `isPublished` | boolean | Yes | Whether the form is published and visible to others |
| `isAcceptingResponses` | boolean | No | Whether the form accepts responses \(forced to false if isPublished is false\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `formId` | string | The form ID |
| `publishSettings` | json | The updated publish settings |
| ↳ `publishState` | object | The publish state |
| ↳ `isPublished` | boolean | Whether the form is published |
| ↳ `isAcceptingResponses` | boolean | Whether the form accepts responses |
### `google_forms_create_watch`
Create a notification watch for form changes (schema changes or new responses)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `formId` | string | Yes | The ID of the Google Form to watch |
| `eventType` | string | Yes | Event type to watch: SCHEMA \(form changes\) or RESPONSES \(new submissions\) |
| `topicName` | string | Yes | The Cloud Pub/Sub topic name \(format: projects/\{project\}/topics/\{topic\}\) |
| `watchId` | string | No | Custom watch ID \(4-63 chars, lowercase letters, numbers, hyphens\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | The watch ID |
| `eventType` | string | The event type being watched |
| `topicName` | string | The Cloud Pub/Sub topic |
| `createTime` | string | When the watch was created |
| `expireTime` | string | When the watch expires \(7 days after creation\) |
| `state` | string | The watch state \(ACTIVE, SUSPENDED\) |
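A sketch of subscribing to new submissions; the Pub/Sub topic is a placeholder and must exist in your project:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{ id: string; expireTime: string; state: string }>

async function watchNewResponses(formId: string) {
  const watch = await executeTool('google_forms_create_watch', {
    formId,
    eventType: 'RESPONSES', // or SCHEMA for form structure changes
    topicName: 'projects/my-project/topics/form-events', // placeholder topic
  })
  // Watches expire after 7 days; extend with google_forms_renew_watch.
  return watch.expireTime
}
```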
### `google_forms_list_watches`
List all notification watches for a form
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `formId` | string | Yes | The ID of the Google Form |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `watches` | array | List of watches for the form |
| ↳ `id` | string | Watch ID |
| ↳ `eventType` | string | Event type \(SCHEMA or RESPONSES\) |
| ↳ `createTime` | string | When the watch was created |
| ↳ `expireTime` | string | When the watch expires |
| ↳ `state` | string | Watch state |
### `google_forms_delete_watch`
Delete a notification watch from a form
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `formId` | string | Yes | The ID of the Google Form |
| `watchId` | string | Yes | The ID of the watch to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the watch was successfully deleted |
### `google_forms_renew_watch`
Renew a notification watch for another 7 days
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `formId` | string | Yes | The ID of the Google Form |
| `watchId` | string | Yes | The ID of the watch to renew |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | The watch ID |
| `eventType` | string | The event type being watched |
| `expireTime` | string | The new expiration time |
| `state` | string | The watch state |

View File

@@ -215,4 +215,191 @@ Check if a user is a member of a Google Group
| --------- | ---- | ----------- |
| `isMember` | boolean | Whether the user is a member of the group |
### `google_groups_list_aliases`
List all email aliases for a Google Group
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `groupKey` | string | Yes | Group email address or unique group ID |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `aliases` | array | List of email aliases for the group |
| ↳ `id` | string | Unique group identifier |
| ↳ `primaryEmail` | string | Group's primary email address |
| ↳ `alias` | string | Alias email address |
| ↳ `kind` | string | API resource type |
| ↳ `etag` | string | Resource version identifier |
### `google_groups_add_alias`
Add an email alias to a Google Group
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `groupKey` | string | Yes | Group email address or unique group ID |
| `alias` | string | Yes | The email alias to add to the group |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Unique group identifier |
| `primaryEmail` | string | Group's primary email address |
| `alias` | string | The alias that was added |
| `kind` | string | API resource type |
| `etag` | string | Resource version identifier |
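A sketch of attaching an alias to a group; both addresses are placeholders:

```typescript
// Hypothetical runner; Sim's real tool invocation API may differ.
declare function executeTool(
  tool: string,
  params: Record<string, unknown>
): Promise<{ alias: string; primaryEmail: string }>

async function addSupportAlias() {
  return executeTool('google_groups_add_alias', {
    groupKey: 'team@example.com', // group email or unique group ID
    alias: 'support@example.com', // alias to attach
  })
}
```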
### `google_groups_remove_alias`
Remove an email alias from a Google Group
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `groupKey` | string | Yes | Group email address or unique group ID |
| `alias` | string | Yes | The email alias to remove from the group |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the alias was successfully deleted |
### `google_groups_get_settings`
Get the settings for a Google Group including access permissions, moderation, and posting options
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `groupEmail` | string | Yes | The email address of the group |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | The group's email address |
| `name` | string | The group name \(max 75 characters\) |
| `description` | string | The group description \(max 4096 characters\) |
| `whoCanJoin` | string | Who can join the group \(ANYONE_CAN_JOIN, ALL_IN_DOMAIN_CAN_JOIN, INVITED_CAN_JOIN, CAN_REQUEST_TO_JOIN\) |
| `whoCanViewMembership` | string | Who can view group membership |
| `whoCanViewGroup` | string | Who can view group messages |
| `whoCanPostMessage` | string | Who can post messages to the group |
| `allowExternalMembers` | string | Whether external users can be members |
| `allowWebPosting` | string | Whether web posting is allowed |
| `primaryLanguage` | string | The group's primary language |
| `isArchived` | string | Whether messages are archived |
| `archiveOnly` | string | Whether the group is archive-only \(inactive\) |
| `messageModerationLevel` | string | Message moderation level |
| `spamModerationLevel` | string | Spam handling level \(ALLOW, MODERATE, SILENTLY_MODERATE, REJECT\) |
| `replyTo` | string | Default reply destination |
| `customReplyTo` | string | Custom email for replies |
| `includeCustomFooter` | string | Whether to include custom footer |
| `customFooterText` | string | Custom footer text \(max 1000 characters\) |
| `sendMessageDenyNotification` | string | Whether to send rejection notifications |
| `defaultMessageDenyNotificationText` | string | Default rejection message text |
| `membersCanPostAsTheGroup` | string | Whether members can post as the group |
| `includeInGlobalAddressList` | string | Whether included in Global Address List |
| `whoCanLeaveGroup` | string | Who can leave the group |
| `whoCanContactOwner` | string | Who can contact the group owner |
| `favoriteRepliesOnTop` | string | Whether favorite replies appear at top |
| `whoCanApproveMembers` | string | Who can approve new members |
| `whoCanBanUsers` | string | Who can ban users |
| `whoCanModerateMembers` | string | Who can manage members |
| `whoCanModerateContent` | string | Who can moderate content |
| `whoCanAssistContent` | string | Who can assist with content metadata |
| `enableCollaborativeInbox` | string | Whether collaborative inbox is enabled |
| `whoCanDiscoverGroup` | string | Who can discover the group |
| `defaultSender` | string | Default sender identity \(DEFAULT_SELF or GROUP\) |
### `google_groups_update_settings`
Update the settings for a Google Group including access permissions, moderation, and posting options
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `groupEmail` | string | Yes | The email address of the group |
| `name` | string | No | The group name \(max 75 characters\) |
| `description` | string | No | The group description \(max 4096 characters\) |
| `whoCanJoin` | string | No | Who can join: ANYONE_CAN_JOIN, ALL_IN_DOMAIN_CAN_JOIN, INVITED_CAN_JOIN, CAN_REQUEST_TO_JOIN |
| `whoCanViewMembership` | string | No | Who can view membership: ALL_IN_DOMAIN_CAN_VIEW, ALL_MEMBERS_CAN_VIEW, ALL_MANAGERS_CAN_VIEW |
| `whoCanViewGroup` | string | No | Who can view group messages: ANYONE_CAN_VIEW, ALL_IN_DOMAIN_CAN_VIEW, ALL_MEMBERS_CAN_VIEW, ALL_MANAGERS_CAN_VIEW |
| `whoCanPostMessage` | string | No | Who can post: NONE_CAN_POST, ALL_MANAGERS_CAN_POST, ALL_MEMBERS_CAN_POST, ALL_OWNERS_CAN_POST, ALL_IN_DOMAIN_CAN_POST, ANYONE_CAN_POST |
| `allowExternalMembers` | string | No | Whether external users can be members: true or false |
| `allowWebPosting` | string | No | Whether web posting is allowed: true or false |
| `primaryLanguage` | string | No | The group's primary language \(e.g., en\) |
| `isArchived` | string | No | Whether messages are archived: true or false |
| `archiveOnly` | string | No | Whether the group is archive-only \(inactive\): true or false |
| `messageModerationLevel` | string | No | Message moderation: MODERATE_ALL_MESSAGES, MODERATE_NON_MEMBERS, MODERATE_NEW_MEMBERS, MODERATE_NONE |
| `spamModerationLevel` | string | No | Spam handling: ALLOW, MODERATE, SILENTLY_MODERATE, REJECT |
| `replyTo` | string | No | Default reply: REPLY_TO_CUSTOM, REPLY_TO_SENDER, REPLY_TO_LIST, REPLY_TO_OWNER, REPLY_TO_IGNORE, REPLY_TO_MANAGERS |
| `customReplyTo` | string | No | Custom email for replies \(when replyTo is REPLY_TO_CUSTOM\) |
| `includeCustomFooter` | string | No | Whether to include custom footer: true or false |
| `customFooterText` | string | No | Custom footer text \(max 1000 characters\) |
| `sendMessageDenyNotification` | string | No | Whether to send rejection notifications: true or false |
| `defaultMessageDenyNotificationText` | string | No | Default rejection message text |
| `membersCanPostAsTheGroup` | string | No | Whether members can post as the group: true or false |
| `includeInGlobalAddressList` | string | No | Whether included in Global Address List: true or false |
| `whoCanLeaveGroup` | string | No | Who can leave: ALL_MANAGERS_CAN_LEAVE, ALL_MEMBERS_CAN_LEAVE, NONE_CAN_LEAVE |
| `whoCanContactOwner` | string | No | Who can contact owner: ALL_IN_DOMAIN_CAN_CONTACT, ALL_MANAGERS_CAN_CONTACT, ALL_MEMBERS_CAN_CONTACT, ANYONE_CAN_CONTACT |
| `favoriteRepliesOnTop` | string | No | Whether favorite replies appear at top: true or false |
| `whoCanApproveMembers` | string | No | Who can approve members: ALL_OWNERS_CAN_APPROVE, ALL_MANAGERS_CAN_APPROVE, ALL_MEMBERS_CAN_APPROVE, NONE_CAN_APPROVE |
| `whoCanBanUsers` | string | No | Who can ban users: OWNERS_ONLY, OWNERS_AND_MANAGERS, NONE |
| `whoCanModerateMembers` | string | No | Who can manage members: OWNERS_ONLY, OWNERS_AND_MANAGERS, ALL_MEMBERS, NONE |
| `whoCanModerateContent` | string | No | Who can moderate content: OWNERS_ONLY, OWNERS_AND_MANAGERS, ALL_MEMBERS, NONE |
| `whoCanAssistContent` | string | No | Who can assist with content metadata: OWNERS_ONLY, OWNERS_AND_MANAGERS, ALL_MEMBERS, NONE |
| `enableCollaborativeInbox` | string | No | Whether collaborative inbox is enabled: true or false |
| `whoCanDiscoverGroup` | string | No | Who can discover: ANYONE_CAN_DISCOVER, ALL_IN_DOMAIN_CAN_DISCOVER, ALL_MEMBERS_CAN_DISCOVER |
| `defaultSender` | string | No | Default sender: DEFAULT_SELF or GROUP |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | The group email address |
| `name` | string | The group name |
| `description` | string | The group description |
| `whoCanJoin` | string | Who can join the group |
| `whoCanViewMembership` | string | Who can view group membership |
| `whoCanViewGroup` | string | Who can view group messages |
| `whoCanPostMessage` | string | Who can post messages to the group |
| `allowExternalMembers` | string | Whether external users can be members |
| `allowWebPosting` | string | Whether web posting is allowed |
| `primaryLanguage` | string | The group's primary language |
| `isArchived` | string | Whether messages are archived |
| `archiveOnly` | string | Whether the group is archive-only |
| `messageModerationLevel` | string | Message moderation level |
| `spamModerationLevel` | string | Spam handling level |
| `replyTo` | string | Default reply destination |
| `customReplyTo` | string | Custom email for replies |
| `includeCustomFooter` | string | Whether to include custom footer |
| `customFooterText` | string | Custom footer text |
| `sendMessageDenyNotification` | string | Whether to send rejection notifications |
| `defaultMessageDenyNotificationText` | string | Default rejection message text |
| `membersCanPostAsTheGroup` | string | Whether members can post as the group |
| `includeInGlobalAddressList` | string | Whether included in Global Address List |
| `whoCanLeaveGroup` | string | Who can leave the group |
| `whoCanContactOwner` | string | Who can contact the group owner |
| `favoriteRepliesOnTop` | string | Whether favorite replies appear at top |
| `whoCanApproveMembers` | string | Who can approve new members |
| `whoCanBanUsers` | string | Who can ban users |
| `whoCanModerateMembers` | string | Who can manage members |
| `whoCanModerateContent` | string | Who can moderate content |
| `whoCanAssistContent` | string | Who can assist with content metadata |
| `enableCollaborativeInbox` | string | Whether collaborative inbox is enabled |
| `whoCanDiscoverGroup` | string | Who can discover the group |
| `defaultSender` | string | Default sender identity |
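A minimal sketch of an update payload, with an illustrative group address (note that boolean-style settings are passed as the strings "true"/"false", per the input table above):
```json
{
  "groupEmail": "team@example.com",
  "whoCanJoin": "INVITED_CAN_JOIN",
  "allowExternalMembers": "false",
  "messageModerationLevel": "MODERATE_NON_MEMBERS"
}
```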

View File

@@ -28,7 +28,7 @@ In Sim, the Google Sheets integration empowers your agents to automate reading f
## Usage Instructions
Integrate Google Sheets into the workflow with explicit sheet selection. Can read, write, append, and update data in specific sheets.
Integrate Google Sheets into the workflow with explicit sheet selection. Can read, write, append, update, clear data, create spreadsheets, get spreadsheet info, and copy sheets.
@@ -42,9 +42,8 @@ Read data from a specific sheet in a Google Sheets spreadsheet
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet |
| `sheetName` | string | Yes | The name of the sheet/tab to read from |
| `cellRange` | string | No | The cell range to read \(e.g. "A1:D10"\). Defaults to "A1:Z1000" if not specified. |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet \(found in the URL: docs.google.com/spreadsheets/d/\{SPREADSHEET_ID\}/edit\). |
| `range` | string | No | The A1 notation range to read \(e.g. "Sheet1!A1:D10", "A1:B5"\). Defaults to first sheet A1:Z1000 if not specified. |
#### Output
@@ -66,8 +65,7 @@ Write data to a specific sheet in a Google Sheets spreadsheet
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet |
| `sheetName` | string | Yes | The name of the sheet/tab to write to |
| `cellRange` | string | No | The cell range to write to \(e.g. "A1:D10", "A1"\). Defaults to "A1" if not specified. |
| `range` | string | No | The A1 notation range to write to \(e.g. "Sheet1!A1:D10", "A1:B5"\) |
| `values` | array | Yes | The data to write as a 2D array \(e.g. \[\["Name", "Age"\], \["Alice", 30\], \["Bob", 25\]\]\) or array of objects. |
| `valueInputOption` | string | No | The format of the data to write |
| `includeValuesInResponse` | boolean | No | Whether to include the written values in the response |
@@ -93,8 +91,7 @@ Update data in a specific sheet in a Google Sheets spreadsheet
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet to update |
| `sheetName` | string | Yes | The name of the sheet/tab to update |
| `cellRange` | string | No | The cell range to update \(e.g. "A1:D10", "A1"\). Defaults to "A1" if not specified. |
| `range` | string | No | The A1 notation range to update \(e.g. "Sheet1!A1:D10", "A1:B5"\) |
| `values` | array | Yes | The data to update as a 2D array \(e.g. \[\["Name", "Age"\], \["Alice", 30\]\]\) or array of objects. |
| `valueInputOption` | string | No | The format of the data to update |
| `includeValuesInResponse` | boolean | No | Whether to include the updated values in the response |
@@ -120,7 +117,7 @@ Append data to the end of a specific sheet in a Google Sheets spreadsheet
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet to append to |
| `sheetName` | string | Yes | The name of the sheet/tab to append to |
| `range` | string | No | The A1 notation range to append after \(e.g. "Sheet1", "Sheet1!A:D"\) |
| `values` | array | Yes | The data to append as a 2D array \(e.g. \[\["Alice", 30\], \["Bob", 25\]\]\) or array of objects. |
| `valueInputOption` | string | No | The format of the data to append |
| `insertDataOption` | string | No | How to insert the data \(OVERWRITE or INSERT_ROWS\) |
@@ -139,4 +136,180 @@ Append data to the end of a specific sheet in a Google Sheets spreadsheet
| ↳ `spreadsheetId` | string | Google Sheets spreadsheet ID |
| ↳ `spreadsheetUrl` | string | Spreadsheet URL |
### `google_sheets_clear`
Clear values from a specific range in a Google Sheets spreadsheet
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet |
| `sheetName` | string | Yes | The name of the sheet/tab to clear |
| `cellRange` | string | No | The cell range to clear \(e.g. "A1:D10"\). Clears entire sheet if not specified. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `clearedRange` | string | The range that was cleared |
| `sheetName` | string | Name of the sheet that was cleared |
| `metadata` | json | Spreadsheet metadata including ID and URL |
| ↳ `spreadsheetId` | string | Google Sheets spreadsheet ID |
| ↳ `spreadsheetUrl` | string | Spreadsheet URL |
### `google_sheets_get_spreadsheet`
Get metadata about a Google Sheets spreadsheet including title and sheet list
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet |
| `includeGridData` | boolean | No | Whether to include grid data \(cell values\). Defaults to false. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `spreadsheetId` | string | The spreadsheet ID |
| `title` | string | The title of the spreadsheet |
| `locale` | string | The locale of the spreadsheet |
| `timeZone` | string | The time zone of the spreadsheet |
| `spreadsheetUrl` | string | URL to the spreadsheet |
| `sheets` | array | List of sheets in the spreadsheet |
| ↳ `sheetId` | number | The sheet ID |
| ↳ `title` | string | The sheet title/name |
| ↳ `index` | number | The sheet index \(position\) |
| ↳ `rowCount` | number | Number of rows in the sheet |
| ↳ `columnCount` | number | Number of columns in the sheet |
| ↳ `hidden` | boolean | Whether the sheet is hidden |
### `google_sheets_create_spreadsheet`
Create a new Google Sheets spreadsheet
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `title` | string | Yes | The title of the new spreadsheet |
| `sheetTitles` | json | No | Array of sheet names to create \(e.g., \["Sheet1", "Data", "Summary"\]\). Defaults to a single "Sheet1". |
| `locale` | string | No | The locale of the spreadsheet \(e.g., "en_US"\) |
| `timeZone` | string | No | The time zone of the spreadsheet \(e.g., "America/New_York"\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `spreadsheetId` | string | The ID of the created spreadsheet |
| `title` | string | The title of the created spreadsheet |
| `spreadsheetUrl` | string | URL to the created spreadsheet |
| `sheets` | array | List of sheets created in the spreadsheet |
| ↳ `sheetId` | number | The sheet ID |
| ↳ `title` | string | The sheet title/name |
| ↳ `index` | number | The sheet index \(position\) |
### `google_sheets_batch_get`
Read multiple ranges from a Google Sheets spreadsheet in a single request
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet |
| `ranges` | json | Yes | Array of ranges to read \(e.g., \["Sheet1!A1:D10", "Sheet2!A1:B5"\]\). Each range should include sheet name. |
| `majorDimension` | string | No | The major dimension of values: "ROWS" \(default\) or "COLUMNS" |
| `valueRenderOption` | string | No | How values should be rendered: "FORMATTED_VALUE" \(default\), "UNFORMATTED_VALUE", or "FORMULA" |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `spreadsheetId` | string | The spreadsheet ID |
| `valueRanges` | array | Array of value ranges read from the spreadsheet |
| ↳ `range` | string | The range that was read |
| ↳ `majorDimension` | string | Major dimension \(ROWS or COLUMNS\) |
| ↳ `values` | array | The cell values as a 2D array |
| `metadata` | json | Spreadsheet metadata including ID and URL |
| ↳ `spreadsheetId` | string | Google Sheets spreadsheet ID |
| ↳ `spreadsheetUrl` | string | Spreadsheet URL |
### `google_sheets_batch_update`
Update multiple ranges in a Google Sheets spreadsheet in a single request
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet |
| `data` | json | Yes | Array of value ranges to update. Each item should have "range" \(e.g., "Sheet1!A1:D10"\) and "values" \(2D array\). |
| `valueInputOption` | string | No | How input data should be interpreted: "RAW" or "USER_ENTERED" \(default\). USER_ENTERED parses formulas. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `spreadsheetId` | string | The spreadsheet ID |
| `totalUpdatedRows` | number | Total number of rows updated |
| `totalUpdatedColumns` | number | Total number of columns updated |
| `totalUpdatedCells` | number | Total number of cells updated |
| `totalUpdatedSheets` | number | Total number of sheets updated |
| `responses` | array | Array of update responses for each range |
| ↳ `spreadsheetId` | string | The spreadsheet ID |
| ↳ `updatedRange` | string | The range that was updated |
| ↳ `updatedRows` | number | Number of rows updated in this range |
| ↳ `updatedColumns` | number | Number of columns updated in this range |
| ↳ `updatedCells` | number | Number of cells updated in this range |
| `metadata` | json | Spreadsheet metadata including ID and URL |
| ↳ `spreadsheetId` | string | Google Sheets spreadsheet ID |
| ↳ `spreadsheetUrl` | string | Spreadsheet URL |
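For illustration, a hypothetical `google_sheets_batch_update` request updating two ranges in one call (the spreadsheet ID is a placeholder):
```json
{
  "spreadsheetId": "YOUR_SPREADSHEET_ID",
  "valueInputOption": "USER_ENTERED",
  "data": [
    { "range": "Sheet1!A1:B2", "values": [["Name", "Age"], ["Alice", 30]] },
    { "range": "Sheet2!A1", "values": [["Total"]] }
  ]
}
```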
### `google_sheets_batch_clear`
Clear multiple ranges in a Google Sheets spreadsheet in a single request
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet |
| `ranges` | json | Yes | Array of ranges to clear \(e.g., \["Sheet1!A1:D10", "Sheet2!A1:B5"\]\). Each range should include sheet name. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `spreadsheetId` | string | The spreadsheet ID |
| `clearedRanges` | array | Array of ranges that were cleared |
| `metadata` | json | Spreadsheet metadata including ID and URL |
| ↳ `spreadsheetId` | string | Google Sheets spreadsheet ID |
| ↳ `spreadsheetUrl` | string | Spreadsheet URL |
### `google_sheets_copy_sheet`
Copy a sheet from one spreadsheet to another
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `sourceSpreadsheetId` | string | Yes | The ID of the source spreadsheet |
| `sheetId` | number | Yes | The ID of the sheet to copy \(numeric ID, not the sheet name\). Use Get Spreadsheet to find sheet IDs. |
| `destinationSpreadsheetId` | string | Yes | The ID of the destination spreadsheet where the sheet will be copied |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `sheetId` | number | The ID of the newly created sheet in the destination |
| `title` | string | The title of the copied sheet |
| `index` | number | The index \(position\) of the copied sheet |
| `sheetType` | string | The type of the sheet \(GRID, CHART, etc.\) |
| `destinationSpreadsheetId` | string | The ID of the destination spreadsheet |
| `destinationSpreadsheetUrl` | string | URL to the destination spreadsheet |

View File

@@ -30,7 +30,7 @@ In Sim, the Google Slides integration enables your agents to interact directly w
## Usage Instructions
Integrate Google Slides into the workflow. Can read, write, create presentations, replace text, add slides, add images, and get thumbnails.
Integrate Google Slides into the workflow. Can read, write, create presentations, replace text, add slides, add images, get thumbnails, get page details, delete objects, duplicate objects, reorder slides, create tables, create shapes, and insert text.
@@ -52,6 +52,15 @@ Read content from a Google Slides presentation
| --------- | ---- | ----------- |
| `slides` | json | Array of slides with their content |
| `metadata` | json | Presentation metadata including ID, title, and URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `title` | string | The presentation title |
| ↳ `pageSize` | object | Presentation page size |
| ↳ `width` | json | Page width as a Dimension object |
| ↳ `height` | json | Page height as a Dimension object |
| ↳ `mimeType` | string | The mime type of the presentation |
| ↳ `url` | string | URL to open the presentation |
### `google_slides_write`
@@ -71,6 +80,10 @@ Write or update content in a Google Slides presentation
| --------- | ---- | ----------- |
| `updatedContent` | boolean | Indicates if presentation content was updated successfully |
| `metadata` | json | Updated presentation metadata including ID, title, and URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `title` | string | The presentation title |
| ↳ `mimeType` | string | The mime type of the presentation |
| ↳ `url` | string | URL to open the presentation |
### `google_slides_create`
@@ -90,6 +103,10 @@ Create a new Google Slides presentation
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `metadata` | json | Created presentation metadata including ID, title, and URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `title` | string | The presentation title |
| ↳ `mimeType` | string | The mime type of the presentation |
| ↳ `url` | string | URL to open the presentation |
### `google_slides_replace_all_text`
@@ -111,6 +128,10 @@ Find and replace all occurrences of text throughout a Google Slides presentation
| --------- | ---- | ----------- |
| `occurrencesChanged` | number | Number of text occurrences that were replaced |
| `metadata` | json | Operation metadata including presentation ID and URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `findText` | string | The text that was searched for |
| ↳ `replaceText` | string | The text that replaced the matches |
| ↳ `url` | string | URL to open the presentation |
### `google_slides_add_slide`
@@ -131,6 +152,10 @@ Add a new slide to a Google Slides presentation with a specified layout
| --------- | ---- | ----------- |
| `slideId` | string | The object ID of the newly created slide |
| `metadata` | json | Operation metadata including presentation ID, layout, and URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `layout` | string | The layout used for the new slide |
| ↳ `insertionIndex` | number | The zero-based index where the slide was inserted |
| ↳ `url` | string | URL to open the presentation |
### `google_slides_add_image`
@@ -154,6 +179,10 @@ Insert an image into a specific slide in a Google Slides presentation
| --------- | ---- | ----------- |
| `imageId` | string | The object ID of the newly created image |
| `metadata` | json | Operation metadata including presentation ID and image URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `pageObjectId` | string | The page object ID where the image was inserted |
| ↳ `imageUrl` | string | The source image URL |
| ↳ `url` | string | URL to open the presentation |
### `google_slides_get_thumbnail`
@@ -176,5 +205,182 @@ Generate a thumbnail image of a specific slide in a Google Slides presentation
| `width` | number | Width of the thumbnail in pixels |
| `height` | number | Height of the thumbnail in pixels |
| `metadata` | json | Operation metadata including presentation ID and page object ID |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `pageObjectId` | string | The page object ID for the thumbnail |
| ↳ `thumbnailSize` | string | The requested thumbnail size |
| ↳ `mimeType` | string | The thumbnail MIME type |
### `google_slides_get_page`
Get detailed information about a specific slide/page in a Google Slides presentation
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `presentationId` | string | Yes | The ID of the presentation |
| `pageObjectId` | string | Yes | The object ID of the slide/page to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `objectId` | string | The object ID of the page |
| `pageType` | string | The type of page \(SLIDE, MASTER, LAYOUT, NOTES, NOTES_MASTER\) |
| `pageElements` | array | Array of page elements \(shapes, images, tables, etc.\) on this page |
| `slideProperties` | object | Properties specific to slides \(layout, master, notes\) |
| ↳ `layoutObjectId` | string | Object ID of the layout this slide is based on |
| ↳ `masterObjectId` | string | Object ID of the master this slide is based on |
| ↳ `notesPage` | json | The notes page associated with the slide |
| `metadata` | object | Operation metadata including presentation ID and URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `url` | string | URL to the presentation |
### `google_slides_delete_object`
Delete a page element (shape, image, table, etc.) or an entire slide from a Google Slides presentation
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `presentationId` | string | Yes | The ID of the presentation |
| `objectId` | string | Yes | The object ID of the element or slide to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the object was successfully deleted |
| `objectId` | string | The object ID that was deleted |
| `metadata` | object | Operation metadata including presentation ID and URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `url` | string | URL to the presentation |
### `google_slides_duplicate_object`
Duplicate an object (slide, shape, image, table, etc.) in a Google Slides presentation
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `presentationId` | string | Yes | The ID of the presentation |
| `objectId` | string | Yes | The object ID of the element or slide to duplicate |
| `objectIds` | string | No | Optional JSON object mapping source object IDs \(within the slide being duplicated\) to new object IDs for the duplicates. Format: \{"sourceId1":"newId1","sourceId2":"newId2"\} |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `duplicatedObjectId` | string | The object ID of the newly created duplicate |
| `metadata` | object | Operation metadata including presentation ID and source object ID |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `sourceObjectId` | string | The original object ID that was duplicated |
| ↳ `url` | string | URL to the presentation |
### `google_slides_update_slides_position`
Move one or more slides to a new position in a Google Slides presentation
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `presentationId` | string | Yes | The ID of the presentation |
| `slideObjectIds` | string | Yes | Comma-separated list of slide object IDs to move. The slides will maintain their relative order. |
| `insertionIndex` | number | Yes | The zero-based index where the slides should be moved. All slides with indices greater than or equal to this will be shifted right. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `moved` | boolean | Whether the slides were successfully moved |
| `slideObjectIds` | array | The slide object IDs that were moved |
| `insertionIndex` | number | The index where the slides were moved to |
| `metadata` | object | Operation metadata including presentation ID and URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `url` | string | URL to the presentation |
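For instance, a sketch of moving two slides to the front of the deck (IDs are placeholders); note that `slideObjectIds` is a single comma-separated string, not an array:
```json
{
  "presentationId": "YOUR_PRESENTATION_ID",
  "slideObjectIds": "slide_abc123,slide_def456",
  "insertionIndex": 0
}
```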
### `google_slides_create_table`
Create a new table on a slide in a Google Slides presentation
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `presentationId` | string | Yes | The ID of the presentation |
| `pageObjectId` | string | Yes | The object ID of the slide/page to add the table to |
| `rows` | number | Yes | Number of rows in the table \(minimum 1\) |
| `columns` | number | Yes | Number of columns in the table \(minimum 1\) |
| `width` | number | No | Width of the table in points \(default: 400\) |
| `height` | number | No | Height of the table in points \(default: 200\) |
| `positionX` | number | No | X position from the left edge in points \(default: 100\) |
| `positionY` | number | No | Y position from the top edge in points \(default: 100\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `tableId` | string | The object ID of the newly created table |
| `rows` | number | Number of rows in the table |
| `columns` | number | Number of columns in the table |
| `metadata` | object | Operation metadata including presentation ID and page object ID |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `pageObjectId` | string | The page object ID where the table was created |
| ↳ `url` | string | URL to the presentation |
### `google_slides_create_shape`
Create a shape (rectangle, ellipse, text box, arrow, etc.) on a slide in a Google Slides presentation
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `presentationId` | string | Yes | The ID of the presentation |
| `pageObjectId` | string | Yes | The object ID of the slide/page to add the shape to |
| `shapeType` | string | Yes | The type of shape to create. Common types: TEXT_BOX, RECTANGLE, ROUND_RECTANGLE, ELLIPSE, TRIANGLE, DIAMOND, STAR_5, ARROW_EAST, HEART, CLOUD |
| `width` | number | No | Width of the shape in points \(default: 200\) |
| `height` | number | No | Height of the shape in points \(default: 100\) |
| `positionX` | number | No | X position from the left edge in points \(default: 100\) |
| `positionY` | number | No | Y position from the top edge in points \(default: 100\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `shapeId` | string | The object ID of the newly created shape |
| `shapeType` | string | The type of shape that was created |
| `metadata` | object | Operation metadata including presentation ID and page object ID |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `pageObjectId` | string | The page object ID where the shape was created |
| ↳ `url` | string | URL to the presentation |
### `google_slides_insert_text`
Insert text into a shape or table cell in a Google Slides presentation. Use this to add text to text boxes, shapes, or table cells.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `presentationId` | string | Yes | The ID of the presentation |
| `objectId` | string | Yes | The object ID of the shape or table cell to insert text into. For table cells, use the cell object ID. |
| `text` | string | Yes | The text to insert |
| `insertionIndex` | number | No | The zero-based index at which to insert the text. If not specified, text is inserted at the beginning \(index 0\). |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `inserted` | boolean | Whether the text was successfully inserted |
| `objectId` | string | The object ID where text was inserted |
| `text` | string | The text that was inserted |
| `metadata` | object | Operation metadata including presentation ID and URL |
| ↳ `presentationId` | string | The presentation ID |
| ↳ `url` | string | URL to the presentation |

View File

@@ -36,43 +36,47 @@ Connect Google Vault to create exports, list exports, and manage holds within ma
### `google_vault_create_matters_export`
Create an export in a matter
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | The matter ID |
| `exportName` | string | Yes | Name for the export \(avoid special characters\) |
| `corpus` | string | Yes | Data corpus to export \(MAIL, DRIVE, GROUPS, HANGOUTS_CHAT, VOICE\) |
| `accountEmails` | string | No | Comma-separated list of user emails to scope export |
| `orgUnitId` | string | No | Organization unit ID to scope export \(alternative to emails\) |
| `startTime` | string | No | Start time for date filtering \(ISO 8601 format, e.g., 2024-01-01T00:00:00Z\) |
| `endTime` | string | No | End time for date filtering \(ISO 8601 format, e.g., 2024-12-31T23:59:59Z\) |
| `terms` | string | No | Search query terms to filter exported content |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
| `export` | json | Created export object |
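As an illustrative sketch, an export of two users' mail for Q1 2024 might be created with (IDs and addresses are placeholders):
```json
{
  "matterId": "MATTER_ID",
  "exportName": "Q1_2024_mail_export",
  "corpus": "MAIL",
  "accountEmails": "alice@example.com,bob@example.com",
  "startTime": "2024-01-01T00:00:00Z",
  "endTime": "2024-03-31T23:59:59Z"
}
```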
### `google_vault_list_matters_export`
List exports for a matter
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | The matter ID |
| `pageSize` | number | No | Number of exports to return per page |
| `pageToken` | string | No | Token for pagination |
| `exportId` | string | No | Optional export ID to fetch a specific export |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
| `exports` | json | Array of export objects |
| `export` | json | Single export object \(when exportId is provided\) |
| `nextPageToken` | string | Token for fetching next page of results |
### `google_vault_download_export_file`
@@ -82,10 +86,10 @@ Download a single file from a Google Vault export (GCS object)
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | No description |
| `bucketName` | string | Yes | No description |
| `objectName` | string | Yes | No description |
| `fileName` | string | No | No description |
| `matterId` | string | Yes | The matter ID |
| `bucketName` | string | Yes | GCS bucket name from cloudStorageSink.files.bucketName |
| `objectName` | string | Yes | GCS object name from cloudStorageSink.files.objectName |
| `fileName` | string | No | Optional filename override for the downloaded file |
#### Output
@@ -95,82 +99,84 @@ Download a single file from a Google Vault export (GCS object)
### `google_vault_create_matters_holds`
Create a hold in a matter
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | The matter ID |
| `holdName` | string | Yes | Name for the hold |
| `corpus` | string | Yes | Data corpus to hold \(MAIL, DRIVE, GROUPS, HANGOUTS_CHAT, VOICE\) |
| `accountEmails` | string | No | Comma-separated list of user emails to put on hold |
| `orgUnitId` | string | No | Organization unit ID to put on hold \(alternative to accounts\) |
| `terms` | string | No | Search terms to filter held content \(for MAIL and GROUPS corpus\) |
| `startTime` | string | No | Start time for date filtering \(ISO 8601 format, for MAIL and GROUPS corpus\) |
| `endTime` | string | No | End time for date filtering \(ISO 8601 format, for MAIL and GROUPS corpus\) |
| `includeSharedDrives` | boolean | No | Include files in shared drives \(for DRIVE corpus\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
| `hold` | json | Created hold object |
### `google_vault_list_matters_holds`
List holds for a matter
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `matterId` | string | Yes | The matter ID |
| `pageSize` | number | No | Number of holds to return per page |
| `pageToken` | string | No | Token for pagination |
| `holdId` | string | No | Optional hold ID to fetch a specific hold |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
| `holds` | json | Array of hold objects |
| `hold` | json | Single hold object \(when holdId is provided\) |
| `nextPageToken` | string | Token for fetching next page of results |
### `google_vault_create_matters`
Create a new matter in Google Vault
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | Yes | Name for the new matter |
| `description` | string | No | Optional description for the matter |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
| `matter` | json | Created matter object |
### `google_vault_list_matters`
List matters, or get a specific matter if matterId is provided
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `pageSize` | number | No | Number of matters to return per page |
| `pageToken` | string | No | Token for pagination |
| `matterId` | string | No | Optional matter ID to fetch a specific matter |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `matters` | json | Array of matter objects \(for list_matters\) |
| `exports` | json | Array of export objects \(for list_matters_export\) |
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
| `matter` | json | Created matter object \(for create_matters\) |
| `export` | json | Created export object \(for create_matters_export\) |
| `hold` | json | Created hold object \(for create_matters_holds\) |
| `file` | json | Downloaded export file \(UserFile\) from execution files |
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
| `matters` | json | Array of matter objects |
| `matter` | json | Single matter object \(when matterId is provided\) |
| `nextPageToken` | string | Token for fetching next page of results |

View File

@@ -51,6 +51,7 @@ Search for similar content in a knowledge base using vector similarity
| `properties` | string | No | No description |
| `tagName` | string | No | No description |
| `tagValue` | string | No | No description |
| `tagFilters` | string | No | No description |
#### Output
@@ -108,19 +109,8 @@ Create a new document in a knowledge base
| `knowledgeBaseId` | string | Yes | ID of the knowledge base containing the document |
| `name` | string | Yes | Name of the document |
| `content` | string | Yes | Content of the document |
| `tag1` | string | No | Tag 1 value for the document |
| `tag2` | string | No | Tag 2 value for the document |
| `tag3` | string | No | Tag 3 value for the document |
| `tag4` | string | No | Tag 4 value for the document |
| `tag5` | string | No | Tag 5 value for the document |
| `tag6` | string | No | Tag 6 value for the document |
| `tag7` | string | No | Tag 7 value for the document |
| `documentTagsData` | array | No | Structured tag data with names, types, and values |
| `items` | object | No | No description |
| `properties` | string | No | No description |
| `tagName` | string | No | No description |
| `tagValue` | string | No | No description |
| `tagType` | string | No | No description |
| `documentTags` | object | No | Document tags |
| `documentTags` | string | No | No description |
#### Output

View File

@@ -104,8 +104,10 @@
"stripe",
"stt",
"supabase",
"table",
"tavily",
"telegram",
"textract",
"tinybird",
"translate",
"trello",

View File

@@ -45,8 +45,7 @@ Read data from a specific sheet in a Microsoft Excel spreadsheet
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet to read from |
| `sheetName` | string | Yes | The name of the sheet/tab to read from |
| `cellRange` | string | No | The cell range to read \(e.g., "A1:D10"\). If not specified, reads the entire used range. |
| `range` | string | No | The range of cells to read from. Accepts "SheetName!A1:B2" for explicit ranges or just "SheetName" to read the used range of that sheet. If omitted, reads the used range of the first sheet. |
#### Output
@@ -68,9 +67,8 @@ Write data to a specific sheet in a Microsoft Excel spreadsheet
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `spreadsheetId` | string | Yes | The ID of the spreadsheet to write to |
| `sheetName` | string | Yes | The name of the sheet/tab to write to |
| `cellRange` | string | No | The cell range to write to \(e.g., "A1:D10", "A1"\). Defaults to "A1" if not specified. |
| `values` | array | Yes | The data to write as a 2D array \(e.g. \[\["Name", "Age"\], \["Alice", 30\], \["Bob", 25\]\]\) or array of objects. |
| `range` | string | No | The range of cells to write to |
| `values` | array | Yes | The data to write to the spreadsheet |
| `valueInputOption` | string | No | The format of the data to write |
| `includeValuesInResponse` | boolean | No | Whether to include the written values in the response |

View File

@@ -6,7 +6,7 @@ description: Extract text from PDF documents
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="mistral_parse"
type="mistral_parse_v2"
color="#000000"
/>
@@ -54,18 +54,37 @@ Parse PDF documents using Mistral OCR API
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the PDF was parsed successfully |
| `content` | string | Extracted content in the requested format \(markdown, text, or JSON\) |
| `metadata` | object | Processing metadata including jobId, fileType, pageCount, and usage info |
| ↳ `jobId` | string | Unique job identifier |
| ↳ `fileType` | string | File type \(e.g., pdf\) |
| ↳ `fileName` | string | Original file name |
| ↳ `source` | string | Source type \(url\) |
| ↳ `pageCount` | number | Number of pages processed |
| ↳ `model` | string | Mistral model used |
| ↳ `resultType` | string | Output format \(markdown, text, json\) |
| ↳ `processedAt` | string | Processing timestamp |
| ↳ `sourceUrl` | string | Source URL if applicable |
| ↳ `usageInfo` | object | Usage statistics from OCR processing |
| `pages` | array | Array of page objects from Mistral OCR |
| ↳ `index` | number | Page index \(zero-based\) |
| ↳ `markdown` | string | Extracted markdown content |
| ↳ `images` | array | Images extracted from this page with bounding boxes |
| ↳ `id` | string | Image identifier \(e.g., img-0.jpeg\) |
| ↳ `top_left_x` | number | Top-left X coordinate in pixels |
| ↳ `top_left_y` | number | Top-left Y coordinate in pixels |
| ↳ `bottom_right_x` | number | Bottom-right X coordinate in pixels |
| ↳ `bottom_right_y` | number | Bottom-right Y coordinate in pixels |
| ↳ `image_base64` | string | Base64-encoded image data \(when include_image_base64=true\) |
| ↳ `dimensions` | object | Page dimensions |
| ↳ `dpi` | number | Dots per inch |
| ↳ `height` | number | Page height in pixels |
| ↳ `width` | number | Page width in pixels |
| ↳ `tables` | array | Extracted tables as HTML/markdown \(when table_format is set\). Referenced via placeholders like \[tbl-0.html\] |
| ↳ `hyperlinks` | array | Array of URL strings detected in the page |
| ↳ `header` | string | Page header content \(when extract_header=true\) |
| ↳ `footer` | string | Page footer content \(when extract_footer=true\) |
| `model` | string | Mistral OCR model identifier \(e.g., mistral-ocr-latest\) |
| `usage_info` | object | Usage and processing statistics |
| ↳ `pages_processed` | number | Total number of pages processed |
| ↳ `doc_size_bytes` | number | Document file size in bytes |
| `document_annotation` | string | Structured annotation data as JSON string \(when applicable\) |

View File

@@ -58,6 +58,7 @@ Upload a file to an AWS S3 bucket
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `url` | string | URL of the uploaded S3 object |
| `uri` | string | S3 URI of the uploaded object \(s3://bucket/key\) |
| `metadata` | object | Upload metadata including ETag and location |
### `s3_get_object`
@@ -149,6 +150,7 @@ Copy an object within or between AWS S3 buckets
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `url` | string | URL of the copied S3 object |
| `uri` | string | S3 URI of the copied object \(s3://bucket/key\) |
| `metadata` | object | Copy operation metadata |

View File

@@ -84,9 +84,10 @@ Send messages to Slack channels or direct messages. Supports Slack mrkdwn format
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `authMethod` | string | No | Authentication method: oauth or bot_token |
| `destinationType` | string | No | Destination type: channel or dm |
| `botToken` | string | No | Bot token for Custom Bot |
| `channel` | string | No | Target Slack channel \(e.g., #general\) |
| `userId` | string | No | Target Slack user ID for direct messages \(e.g., U1234567890\) |
| `dmUserId` | string | No | Target Slack user for direct messages |
| `text` | string | Yes | Message text to send \(supports Slack mrkdwn formatting\) |
| `thread_ts` | string | No | Thread timestamp to reply to \(creates thread reply\) |
| `files` | file[] | No | Files to attach to the message |
@@ -132,9 +133,10 @@ Read the latest messages from Slack channels. Retrieve conversation history with
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `authMethod` | string | No | Authentication method: oauth or bot_token |
| `destinationType` | string | No | Destination type: channel or dm |
| `botToken` | string | No | Bot token for Custom Bot |
| `channel` | string | No | Slack channel to read messages from \(e.g., #general\) |
| `userId` | string | No | User ID for DM conversation \(e.g., U1234567890\) |
| `dmUserId` | string | No | Target Slack user for DM conversation |
| `limit` | number | No | Number of messages to retrieve \(default: 10, max: 15\) |
| `oldest` | string | No | Start of time range \(timestamp\) |
| `latest` | string | No | End of time range \(timestamp\) |

View File

@@ -0,0 +1,351 @@
---
title: Table
description: User-defined data tables for storing and querying structured data
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="table"
color="#10B981"
/>
Tables allow you to create and manage custom data tables directly within Sim. Store, query, and manipulate structured data within your workflows without needing external database integrations.
**Why Use Tables?**
- **No external setup**: Create tables instantly without configuring external databases
- **Workflow-native**: Data persists across workflow executions and is accessible from any workflow in your workspace
- **Flexible schema**: Define columns with types (string, number, boolean, date, json) and constraints (required, unique)
- **Powerful querying**: Filter, sort, and paginate data using MongoDB-style operators
- **Agent-friendly**: Tables can be used as tools by AI agents for dynamic data storage and retrieval
**Key Features:**
- Create tables with custom schemas
- Insert, update, upsert, and delete rows
- Query with filters and sorting
- Batch operations for bulk inserts
- Bulk updates and deletes by filter
- Up to 10,000 rows per table, 100 tables per workspace
## Creating Tables
Tables are created from the **Tables** section in the sidebar. Each table requires:
- **Name**: Alphanumeric with underscores (e.g., `customer_leads`)
- **Description**: Optional description of the table's purpose
- **Schema**: Define columns with name, type, and optional constraints
### Column Types
| Type | Description | Example Values |
|------|-------------|----------------|
| `string` | Text data | `"John Doe"`, `"active"` |
| `number` | Numeric data | `42`, `99.99` |
| `boolean` | True/false values | `true`, `false` |
| `date` | Date/time values | `"2024-01-15T10:30:00Z"` |
| `json` | Complex nested data | `{"address": {"city": "NYC"}}` |
### Column Constraints
- **Required**: Column must have a value (cannot be null)
- **Unique**: Values must be unique across all rows (enables upsert matching)
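Putting these together, a hypothetical `customer_leads` schema (field names shown here are illustrative; the sidebar UI builds this structure for you) could look like:
```json
{
  "name": "customer_leads",
  "columns": [
    { "name": "email", "type": "string", "required": true, "unique": true },
    { "name": "name", "type": "string", "required": true },
    { "name": "score", "type": "number" },
    { "name": "active", "type": "boolean" },
    { "name": "details", "type": "json" }
  ]
}
```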
## Usage Instructions
Create and manage custom data tables. Store, query, and manipulate structured data within workflows.
## Tools
### `table_query_rows`
Query rows from a table with filtering, sorting, and pagination
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | No | Filter conditions using MongoDB-style operators |
| `sort` | object | No | Sort order as \{column: "asc"\|"desc"\} |
| `limit` | number | No | Maximum rows to return \(default: 100, max: 1000\) |
| `offset` | number | No | Number of rows to skip \(default: 0\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether query succeeded |
| `rows` | array | Query result rows |
| `rowCount` | number | Number of rows returned |
| `totalCount` | number | Total rows matching filter |
| `limit` | number | Limit used in query |
| `offset` | number | Offset used in query |
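For example, a query for active rows with `age` of at least 18, newest first (the table ID is a placeholder):
```json
{
  "tableId": "YOUR_TABLE_ID",
  "filter": { "status": "active", "age": { "$gte": 18 } },
  "sort": { "createdAt": "desc" },
  "limit": 50
}
```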
### `table_insert_row`
Insert a new row into a table
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data as JSON object matching the table schema |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was inserted |
| `row` | object | Inserted row data including generated ID |
| `message` | string | Status message |
### `table_upsert_row`
Insert or update a row based on unique column constraints. If a row with a matching unique field exists, it is updated; otherwise a new row is inserted.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `data` | object | Yes | Row data to insert or update |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was upserted |
| `row` | object | Upserted row data |
| `operation` | string | Operation performed: "insert" or "update" |
| `message` | string | Status message |
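For instance, with a unique `email` column, this sketch updates the existing row for that address or inserts a new one (values are illustrative):
```json
{
  "tableId": "YOUR_TABLE_ID",
  "data": { "email": "alice@example.com", "status": "active" }
}
```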
### `table_batch_insert_rows`
Insert multiple rows at once (up to 1000 rows per batch)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rows` | array | Yes | Array of row data objects to insert |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether batch insert succeeded |
| `rows` | array | Array of inserted rows with IDs |
| `insertedCount` | number | Number of rows inserted |
| `message` | string | Status message |
### `table_update_row`
Update a specific row by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to update |
| `data` | object | Yes | Data to update \(partial update supported\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was updated |
| `row` | object | Updated row data |
| `message` | string | Status message |
### `table_update_rows_by_filter`
Update multiple rows matching a filter condition
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for update |
| `data` | object | Yes | Data to apply to matching rows |
| `limit` | number | No | Maximum rows to update \(default: 1000\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether update succeeded |
| `updatedCount` | number | Number of rows updated |
| `updatedRowIds` | array | IDs of updated rows |
| `message` | string | Status message |
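A hypothetical bulk update that marks stale pending rows as expired (table ID and cutoff date are illustrative):
```json
{
  "tableId": "YOUR_TABLE_ID",
  "filter": { "status": "pending", "createdAt": { "$lt": "2024-01-01T00:00:00Z" } },
  "data": { "status": "expired" },
  "limit": 500
}
```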
### `table_delete_row`
Delete a specific row by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was deleted |
| `deletedCount` | number | Number of rows deleted \(1 or 0\) |
| `message` | string | Status message |
### `table_delete_rows_by_filter`
Delete multiple rows matching a filter condition
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `filter` | object | Yes | Filter to match rows for deletion |
| `limit` | number | No | Maximum rows to delete \(default: 1000\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether delete succeeded |
| `deletedCount` | number | Number of rows deleted |
| `deletedRowIds` | array | IDs of deleted rows |
| `message` | string | Status message |
### `table_get_row`
Get a single row by its ID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
| `rowId` | string | Yes | Row ID to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether row was found |
| `row` | object | Row data |
| `message` | string | Status message |
### `table_get_schema`
Get the schema definition for a table
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `tableId` | string | Yes | Table ID |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether schema was retrieved |
| `name` | string | Table name |
| `columns` | array | Array of column definitions |
| `message` | string | Status message |
## Filter Operators
Filters use MongoDB-style operators for flexible querying:
| Operator | Description | Example |
|----------|-------------|---------|
| `$eq` | Equals | `{"status": {"$eq": "active"}}` or `{"status": "active"}` |
| `$ne` | Not equals | `{"status": {"$ne": "deleted"}}` |
| `$gt` | Greater than | `{"age": {"$gt": 18}}` |
| `$gte` | Greater than or equal | `{"score": {"$gte": 80}}` |
| `$lt` | Less than | `{"price": {"$lt": 100}}` |
| `$lte` | Less than or equal | `{"quantity": {"$lte": 10}}` |
| `$in` | In array | `{"status": {"$in": ["active", "pending"]}}` |
| `$nin` | Not in array | `{"type": {"$nin": ["spam", "blocked"]}}` |
| `$contains` | String contains | `{"email": {"$contains": "@gmail.com"}}` |
### Combining Filters
Multiple field conditions are combined with AND logic:
```json
{
"status": "active",
"age": {"$gte": 18}
}
```
Use `$or` for OR logic:
```json
{
"$or": [
{"status": "active"},
{"status": "pending"}
]
}
```
## Sort Specification
Specify sort order with column names and direction:
```json
{
"createdAt": "desc"
}
```
Multi-column sorting:
```json
{
"priority": "desc",
"name": "asc"
}
```
## Built-in Columns
Every row automatically includes:
| Column | Type | Description |
|--------|------|-------------|
| `id` | string | Unique row identifier |
| `createdAt` | date | When the row was created |
| `updatedAt` | date | When the row was last modified |
These can be used in filters and sorting.
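For example, to fetch rows created this year, most recently modified first (the date is illustrative):
```json
{
  "filter": { "createdAt": { "$gte": "2024-01-01T00:00:00Z" } },
  "sort": { "updatedAt": "desc" }
}
```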
## Limits
| Resource | Limit |
|----------|-------|
| Tables per workspace | 100 |
| Rows per table | 10,000 |
| Columns per table | 50 |
| Max row size | 100KB |
| String value length | 10,000 characters |
| Query limit | 1,000 rows |
| Batch insert size | 1,000 rows |
| Bulk update/delete | 1,000 rows |
## Notes
- Category: `blocks`
- Type: `table`
- Tables are scoped to workspaces and accessible from any workflow within that workspace
- Data persists across workflow executions
- Use unique constraints to enable upsert functionality
- The visual filter/sort builder provides an easy way to construct queries without writing JSON

View File

@@ -0,0 +1,120 @@
---
title: AWS Textract
description: Extract text, tables, and forms from documents
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="textract"
color="linear-gradient(135deg, #055F4E 0%, #56C0A7 100%)"
/>
{/* MANUAL-CONTENT-START:intro */}
[AWS Textract](https://aws.amazon.com/textract/) is a powerful AI service from Amazon Web Services designed to automatically extract printed text, handwriting, tables, forms, key-value pairs, and other structured data from scanned documents and images. Textract leverages advanced optical character recognition (OCR) and document analysis to transform documents into actionable data, enabling automation, analytics, compliance, and more.
With AWS Textract, you can:
- **Extract text from images and documents**: Recognize printed text and handwriting in formats such as PDF, JPEG, PNG, or TIFF
- **Detect and extract tables**: Automatically find tables and output their structured content
- **Parse forms and key-value pairs**: Pull structured data from forms, including fields and their corresponding values
- **Identify signatures and layout features**: Detect signatures, geometric layout, and relationships between document elements
- **Customize extraction with queries**: Extract specific fields and answers using query-based extraction (e.g., "What is the invoice number?")
In Sim, the AWS Textract integration empowers your agents to intelligently process documents as part of their workflows. This unlocks automation scenarios such as data entry from invoices, onboarding documents, contracts, receipts, and more. Your agents can extract relevant data, analyze structured forms, and generate summaries or reports directly from document uploads or URLs. By connecting Sim with AWS Textract, you can reduce manual effort, improve data accuracy, and streamline your business processes with robust document understanding.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate AWS Textract into your workflow to extract text, tables, forms, and key-value pairs from documents. Single-page mode supports JPEG, PNG, and single-page PDF. Multi-page mode supports multi-page PDF and TIFF.
## Tools
### `textract_parser`
Parse documents using AWS Textract OCR and document analysis
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `accessKeyId` | string | Yes | AWS Access Key ID |
| `secretAccessKey` | string | Yes | AWS Secret Access Key |
| `region` | string | Yes | AWS region for Textract service \(e.g., us-east-1\) |
| `processingMode` | string | No | Document type: single-page or multi-page. Defaults to single-page. |
| `filePath` | string | No | URL to a document to be processed \(JPEG, PNG, or single-page PDF\). |
| `s3Uri` | string | No | S3 URI for multi-page processing \(s3://bucket/key\). |
| `fileUpload` | object | No | File upload data from file-upload component |
| `featureTypes` | array | No | Feature types to detect: TABLES, FORMS, QUERIES, SIGNATURES, LAYOUT. If not specified, only text detection is performed. |
| ↳ `items` | string | No | Feature type |
| `queries` | array | No | Custom queries to extract specific information. Only used when featureTypes includes QUERIES. |
| ↳ `items` | object | No | Query configuration |
| ↳ `Text` | string | No | The query text |
| ↳ `Alias` | string | No | Query alias |
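As a sketch, a `queries` value follows the Textract Queries shape (query text plus an optional alias for the answer):
```json
[
  { "Text": "What is the invoice number?", "Alias": "INVOICE_NUMBER" }
]
```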
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `blocks` | array | Array of Block objects containing detected text, tables, forms, and other elements |
| ↳ `BlockType` | string | Type of block \(PAGE, LINE, WORD, TABLE, CELL, KEY_VALUE_SET, etc.\) |
| ↳ `Id` | string | Unique identifier for the block |
| ↳ `Text` | string | Detected text content |
| ↳ `TextType` | string | Type of text \(PRINTED or HANDWRITING\) |
| ↳ `Confidence` | number | Confidence score \(0-100\) |
| ↳ `Page` | number | Page number |
| ↳ `Geometry` | object | Location and bounding box information |
| ↳ `BoundingBox` | object | Bounding box position and size as ratios of document dimensions |
| ↳ `Height` | number | Height as ratio of document height |
| ↳ `Left` | number | Left position as ratio of document width |
| ↳ `Top` | number | Top position as ratio of document height |
| ↳ `Width` | number | Width as ratio of document width |
| ↳ `Polygon` | array | Polygon coordinates |
| ↳ `X` | number | X coordinate |
| ↳ `Y` | number | Y coordinate |
| ↳ `Relationships` | array | Relationships to other blocks |
| ↳ `Type` | string | Relationship type \(CHILD, VALUE, ANSWER, etc.\) |
| ↳ `Ids` | array | IDs of related blocks |
| ↳ `EntityTypes` | array | Entity types for KEY_VALUE_SET \(KEY or VALUE\) |
| ↳ `SelectionStatus` | string | For checkboxes: SELECTED or NOT_SELECTED |
| ↳ `RowIndex` | number | Row index for table cells |
| ↳ `ColumnIndex` | number | Column index for table cells |
| ↳ `RowSpan` | number | Row span for merged cells |
| ↳ `ColumnSpan` | number | Column span for merged cells |
| ↳ `Query` | object | Query information for QUERY blocks |
| ↳ `Text` | string | Query text |
| ↳ `Alias` | string | Query alias |
| ↳ `Pages` | array | Pages to search |
| `documentMetadata` | object | Metadata about the analyzed document |
| ↳ `pages` | number | Number of pages in the document |
| `modelVersion` | string | Version of the Textract model used for processing |
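To make the block structure concrete, an illustrative LINE block (all values made up for the example) could look like:
```json
{
  "BlockType": "LINE",
  "Id": "a1b2c3d4-example",
  "Text": "Invoice #12345",
  "TextType": "PRINTED",
  "Confidence": 99.2,
  "Page": 1,
  "Geometry": {
    "BoundingBox": { "Height": 0.02, "Left": 0.08, "Top": 0.11, "Width": 0.32 },
    "Polygon": [
      { "X": 0.08, "Y": 0.11 },
      { "X": 0.4, "Y": 0.11 },
      { "X": 0.4, "Y": 0.13 },
      { "X": 0.08, "Y": 0.13 }
    ]
  },
  "Relationships": [{ "Type": "CHILD", "Ids": ["word-1", "word-2"] }]
}
```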

View File

@@ -6,7 +6,7 @@ description: Generate videos from text using AI
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="video_generator"
type="video_generator_v2"
color="#181C1E"
/>

View File

@@ -11,10 +11,8 @@
"next-env.d.ts",
"**/*.ts",
"**/*.tsx",
".next/types/**/*.ts",
"content/docs/execution/index.mdx",
"content/docs/connections/index.mdx",
".next/dev/types/**/*.ts"
"content/docs/connections/index.mdx"
],
"exclude": ["node_modules"]
"exclude": ["node_modules", ".next"]
}

View File

@@ -24,6 +24,7 @@ import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
const logger = createLogger('LoginForm')
@@ -105,7 +106,7 @@ export default function LoginPage({
const [password, setPassword] = useState('')
const [passwordErrors, setPasswordErrors] = useState<string[]>([])
const [showValidationError, setShowValidationError] = useState(false)
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const buttonClass = useBrandedButtonClass()
const [isButtonHovered, setIsButtonHovered] = useState(false)
const [callbackUrl, setCallbackUrl] = useState('/workspace')
@@ -123,6 +124,7 @@ export default function LoginPage({
const [email, setEmail] = useState('')
const [emailErrors, setEmailErrors] = useState<string[]>([])
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
const [resetSuccessMessage, setResetSuccessMessage] = useState<string | null>(null)
useEffect(() => {
setMounted(true)
@@ -139,32 +141,12 @@ export default function LoginPage({
const inviteFlow = searchParams.get('invite_flow') === 'true'
setIsInviteFlow(inviteFlow)
}
const checkCustomBrand = () => {
const computedStyle = getComputedStyle(document.documentElement)
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
} else {
setButtonClass('branded-button-gradient')
const resetSuccess = searchParams.get('resetSuccess') === 'true'
if (resetSuccess) {
setResetSuccessMessage('Password reset successful. Please sign in with your new password.')
}
}
checkCustomBrand()
window.addEventListener('resize', checkCustomBrand)
const observer = new MutationObserver(checkCustomBrand)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['style', 'class'],
})
return () => {
window.removeEventListener('resize', checkCustomBrand)
observer.disconnect()
}
}, [searchParams])
useEffect(() => {
@@ -221,6 +203,7 @@ export default function LoginPage({
try {
const safeCallbackUrl = validateCallbackUrl(callbackUrl) ? callbackUrl : '/workspace'
let errorHandled = false
const result = await client.signIn.email(
{
@@ -231,11 +214,16 @@ export default function LoginPage({
{
onError: (ctx) => {
logger.error('Login error:', ctx.error)
const errorMessage: string[] = ['Invalid email or password']
// EMAIL_NOT_VERIFIED is handled by the catch block which redirects to /verify
if (ctx.error.code?.includes('EMAIL_NOT_VERIFIED')) {
errorHandled = true
return
}
errorHandled = true
const errorMessage: string[] = ['Invalid email or password']
if (
ctx.error.code?.includes('BAD_REQUEST') ||
ctx.error.message?.includes('Email and password sign in is not enabled')
@@ -271,6 +259,7 @@ export default function LoginPage({
errorMessage.push('Too many requests. Please wait a moment before trying again.')
}
setResetSuccessMessage(null)
setPasswordErrors(errorMessage)
setShowValidationError(true)
},
@@ -278,9 +267,22 @@ export default function LoginPage({
)
if (!result || result.error) {
// Show error if not already handled by onError callback
if (!errorHandled) {
setResetSuccessMessage(null)
const errorMessage = result?.error?.message || 'Login failed. Please try again.'
setPasswordErrors([errorMessage])
setShowValidationError(true)
}
setIsLoading(false)
return
}
// Clear reset success message on successful login
setResetSuccessMessage(null)
// Explicit redirect fallback if better-auth doesn't redirect
router.push(safeCallbackUrl)
} catch (err: any) {
if (err.message?.includes('not verified') || err.code?.includes('EMAIL_NOT_VERIFIED')) {
if (typeof window !== 'undefined') {
@@ -400,6 +402,13 @@ export default function LoginPage({
</div>
)}
{/* Password reset success message */}
{resetSuccessMessage && (
<div className={`${inter.className} mt-1 space-y-1 text-[#4CAF50] text-xs`}>
<p>{resetSuccessMessage}</p>
</div>
)}
{/* Email/Password Form - show unless explicitly disabled */}
{!isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED')) && (
<form onSubmit={onSubmit} className={`${inter.className} mt-8 space-y-8`}>

View File

@@ -1,12 +1,13 @@
'use client'
import { useEffect, useState } from 'react'
import { useState } from 'react'
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { cn } from '@/lib/core/utils/cn'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
interface RequestResetFormProps {
email: string
@@ -27,36 +28,9 @@ export function RequestResetForm({
statusMessage,
className,
}: RequestResetFormProps) {
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const buttonClass = useBrandedButtonClass()
const [isButtonHovered, setIsButtonHovered] = useState(false)
useEffect(() => {
const checkCustomBrand = () => {
const computedStyle = getComputedStyle(document.documentElement)
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
} else {
setButtonClass('branded-button-gradient')
}
}
checkCustomBrand()
window.addEventListener('resize', checkCustomBrand)
const observer = new MutationObserver(checkCustomBrand)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['style', 'class'],
})
return () => {
window.removeEventListener('resize', checkCustomBrand)
observer.disconnect()
}
}, [])
const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault()
onSubmit(email)
@@ -138,36 +112,9 @@ export function SetNewPasswordForm({
const [validationMessage, setValidationMessage] = useState('')
const [showPassword, setShowPassword] = useState(false)
const [showConfirmPassword, setShowConfirmPassword] = useState(false)
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const buttonClass = useBrandedButtonClass()
const [isButtonHovered, setIsButtonHovered] = useState(false)
useEffect(() => {
const checkCustomBrand = () => {
const computedStyle = getComputedStyle(document.documentElement)
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
} else {
setButtonClass('branded-button-gradient')
}
}
checkCustomBrand()
window.addEventListener('resize', checkCustomBrand)
const observer = new MutationObserver(checkCustomBrand)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['style', 'class'],
})
return () => {
window.removeEventListener('resize', checkCustomBrand)
observer.disconnect()
}
}, [])
const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault()

View File

@@ -16,6 +16,7 @@ import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
const logger = createLogger('SignupForm')
@@ -95,7 +96,7 @@ function SignupFormContent({
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
const [redirectUrl, setRedirectUrl] = useState('')
const [isInviteFlow, setIsInviteFlow] = useState(false)
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const buttonClass = useBrandedButtonClass()
const [isButtonHovered, setIsButtonHovered] = useState(false)
const [name, setName] = useState('')
@@ -126,31 +127,6 @@ function SignupFormContent({
if (inviteFlowParam === 'true') {
setIsInviteFlow(true)
}
const checkCustomBrand = () => {
const computedStyle = getComputedStyle(document.documentElement)
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
} else {
setButtonClass('branded-button-gradient')
}
}
checkCustomBrand()
window.addEventListener('resize', checkCustomBrand)
const observer = new MutationObserver(checkCustomBrand)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['style', 'class'],
})
return () => {
window.removeEventListener('resize', checkCustomBrand)
observer.disconnect()
}
}, [searchParams])
const validatePassword = (passwordValue: string): string[] => {

View File

@@ -13,6 +13,7 @@ import { cn } from '@/lib/core/utils/cn'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
const logger = createLogger('SSOForm')
@@ -57,7 +58,7 @@ export default function SSOForm() {
const [email, setEmail] = useState('')
const [emailErrors, setEmailErrors] = useState<string[]>([])
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const buttonClass = useBrandedButtonClass()
const [callbackUrl, setCallbackUrl] = useState('/workspace')
useEffect(() => {
@@ -90,31 +91,6 @@ export default function SSOForm() {
setShowEmailValidationError(true)
}
}
const checkCustomBrand = () => {
const computedStyle = getComputedStyle(document.documentElement)
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
} else {
setButtonClass('branded-button-gradient')
}
}
checkCustomBrand()
window.addEventListener('resize', checkCustomBrand)
const observer = new MutationObserver(checkCustomBrand)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['style', 'class'],
})
return () => {
window.removeEventListener('resize', checkCustomBrand)
observer.disconnect()
}
}, [searchParams])
const handleEmailChange = (e: React.ChangeEvent<HTMLInputElement>) => {

View File

@@ -8,6 +8,7 @@ import { cn } from '@/lib/core/utils/cn'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { useVerification } from '@/app/(auth)/verify/use-verification'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
interface VerifyContentProps {
hasEmailService: boolean
@@ -58,34 +59,7 @@ function VerificationForm({
setCountdown(30)
}
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
useEffect(() => {
const checkCustomBrand = () => {
const computedStyle = getComputedStyle(document.documentElement)
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
} else {
setButtonClass('branded-button-gradient')
}
}
checkCustomBrand()
window.addEventListener('resize', checkCustomBrand)
const observer = new MutationObserver(checkCustomBrand)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['style', 'class'],
})
return () => {
window.removeEventListener('resize', checkCustomBrand)
observer.disconnect()
}
}, [])
const buttonClass = useBrandedButtonClass()
return (
<>

View File

@@ -15,7 +15,8 @@ const resetPasswordSchema = z.object({
.max(100, 'Password must not exceed 100 characters')
.regex(/[A-Z]/, 'Password must contain at least one uppercase letter')
.regex(/[a-z]/, 'Password must contain at least one lowercase letter')
.regex(/[0-9]/, 'Password must contain at least one number'),
.regex(/[0-9]/, 'Password must contain at least one number')
.regex(/[^A-Za-z0-9]/, 'Password must contain at least one special character'),
})
export async function POST(request: NextRequest) {

View File

@@ -4,7 +4,7 @@ import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
const logger = createLogger('SSO-Providers')
const logger = createLogger('SSOProvidersRoute')
export async function GET() {
try {

View File

@@ -6,7 +6,7 @@ import { hasSSOAccess } from '@/lib/billing'
import { env } from '@/lib/core/config/env'
import { REDACTED_MARKER } from '@/lib/core/security/redaction'
const logger = createLogger('SSO-Register')
const logger = createLogger('SSORegisterRoute')
const mappingSchema = z
.object({
@@ -43,6 +43,10 @@ const ssoRegistrationSchema = z.discriminatedUnion('providerType', [
])
.default(['openid', 'profile', 'email']),
pkce: z.boolean().default(true),
authorizationEndpoint: z.string().url().optional(),
tokenEndpoint: z.string().url().optional(),
userInfoEndpoint: z.string().url().optional(),
jwksEndpoint: z.string().url().optional(),
}),
z.object({
providerType: z.literal('saml'),
@@ -64,12 +68,10 @@ const ssoRegistrationSchema = z.discriminatedUnion('providerType', [
export async function POST(request: NextRequest) {
try {
// SSO plugin must be enabled in Better Auth
if (!env.SSO_ENABLED) {
return NextResponse.json({ error: 'SSO is not enabled' }, { status: 400 })
}
// Check plan access (enterprise) or env var override
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
@@ -116,7 +118,16 @@ export async function POST(request: NextRequest) {
}
if (providerType === 'oidc') {
const { clientId, clientSecret, scopes, pkce } = body
const {
clientId,
clientSecret,
scopes,
pkce,
authorizationEndpoint,
tokenEndpoint,
userInfoEndpoint,
jwksEndpoint,
} = body
const oidcConfig: any = {
clientId,
@@ -127,50 +138,104 @@ export async function POST(request: NextRequest) {
pkce: pkce ?? true,
}
// Add manual endpoints for providers that might need them
// Common patterns for OIDC providers that don't support discovery properly
if (
issuer.includes('okta.com') ||
issuer.includes('auth0.com') ||
issuer.includes('identityserver')
) {
const baseUrl = issuer.includes('/oauth2/default')
? issuer.replace('/oauth2/default', '')
: issuer.replace('/oauth', '').replace('/v2.0', '').replace('/oauth2', '')
oidcConfig.authorizationEndpoint = authorizationEndpoint
oidcConfig.tokenEndpoint = tokenEndpoint
oidcConfig.userInfoEndpoint = userInfoEndpoint
oidcConfig.jwksEndpoint = jwksEndpoint
// Okta-style endpoints
if (issuer.includes('okta.com')) {
oidcConfig.authorizationEndpoint = `${baseUrl}/oauth2/default/v1/authorize`
oidcConfig.tokenEndpoint = `${baseUrl}/oauth2/default/v1/token`
oidcConfig.userInfoEndpoint = `${baseUrl}/oauth2/default/v1/userinfo`
oidcConfig.jwksEndpoint = `${baseUrl}/oauth2/default/v1/keys`
}
// Auth0-style endpoints
else if (issuer.includes('auth0.com')) {
oidcConfig.authorizationEndpoint = `${baseUrl}/authorize`
oidcConfig.tokenEndpoint = `${baseUrl}/oauth/token`
oidcConfig.userInfoEndpoint = `${baseUrl}/userinfo`
oidcConfig.jwksEndpoint = `${baseUrl}/.well-known/jwks.json`
}
// Generic OIDC endpoints (IdentityServer, etc.)
else {
oidcConfig.authorizationEndpoint = `${baseUrl}/connect/authorize`
oidcConfig.tokenEndpoint = `${baseUrl}/connect/token`
oidcConfig.userInfoEndpoint = `${baseUrl}/connect/userinfo`
oidcConfig.jwksEndpoint = `${baseUrl}/.well-known/jwks`
}
const needsDiscovery =
!oidcConfig.authorizationEndpoint || !oidcConfig.tokenEndpoint || !oidcConfig.jwksEndpoint
logger.info('Using manual OIDC endpoints for provider', {
if (needsDiscovery) {
const discoveryUrl = `${issuer.replace(/\/$/, '')}/.well-known/openid-configuration`
try {
logger.info('Fetching OIDC discovery document for missing endpoints', {
discoveryUrl,
hasAuthEndpoint: !!oidcConfig.authorizationEndpoint,
hasTokenEndpoint: !!oidcConfig.tokenEndpoint,
hasJwksEndpoint: !!oidcConfig.jwksEndpoint,
})
const discoveryResponse = await fetch(discoveryUrl, {
headers: { Accept: 'application/json' },
})
if (!discoveryResponse.ok) {
logger.error('Failed to fetch OIDC discovery document', {
status: discoveryResponse.status,
statusText: discoveryResponse.statusText,
})
return NextResponse.json(
{
error: `Failed to fetch OIDC discovery document from ${discoveryUrl}. Status: ${discoveryResponse.status}. Provide all endpoints explicitly or verify the issuer URL.`,
},
{ status: 400 }
)
}
const discovery = await discoveryResponse.json()
oidcConfig.authorizationEndpoint =
oidcConfig.authorizationEndpoint || discovery.authorization_endpoint
oidcConfig.tokenEndpoint = oidcConfig.tokenEndpoint || discovery.token_endpoint
oidcConfig.userInfoEndpoint = oidcConfig.userInfoEndpoint || discovery.userinfo_endpoint
oidcConfig.jwksEndpoint = oidcConfig.jwksEndpoint || discovery.jwks_uri
logger.info('Merged OIDC endpoints (user-provided + discovery)', {
providerId,
issuer,
authorizationEndpoint: oidcConfig.authorizationEndpoint,
tokenEndpoint: oidcConfig.tokenEndpoint,
userInfoEndpoint: oidcConfig.userInfoEndpoint,
jwksEndpoint: oidcConfig.jwksEndpoint,
})
} catch (error) {
logger.error('Error fetching OIDC discovery document', {
error: error instanceof Error ? error.message : 'Unknown error',
discoveryUrl,
})
return NextResponse.json(
{
error: `Failed to fetch OIDC discovery document from ${discoveryUrl}. Please verify the issuer URL is correct or provide all endpoints explicitly.`,
},
{ status: 400 }
)
}
} else {
logger.info('Using explicitly provided OIDC endpoints (all present)', {
providerId,
provider: issuer.includes('okta.com')
? 'Okta'
: issuer.includes('auth0.com')
? 'Auth0'
: 'Generic',
authEndpoint: oidcConfig.authorizationEndpoint,
issuer,
authorizationEndpoint: oidcConfig.authorizationEndpoint,
tokenEndpoint: oidcConfig.tokenEndpoint,
userInfoEndpoint: oidcConfig.userInfoEndpoint,
jwksEndpoint: oidcConfig.jwksEndpoint,
})
}
if (
!oidcConfig.authorizationEndpoint ||
!oidcConfig.tokenEndpoint ||
!oidcConfig.jwksEndpoint
) {
const missing: string[] = []
if (!oidcConfig.authorizationEndpoint) missing.push('authorizationEndpoint')
if (!oidcConfig.tokenEndpoint) missing.push('tokenEndpoint')
if (!oidcConfig.jwksEndpoint) missing.push('jwksEndpoint')
logger.error('Missing required OIDC endpoints after discovery merge', {
missing,
authorizationEndpoint: oidcConfig.authorizationEndpoint,
tokenEndpoint: oidcConfig.tokenEndpoint,
jwksEndpoint: oidcConfig.jwksEndpoint,
})
return NextResponse.json(
{
error: `Missing required OIDC endpoints: ${missing.join(', ')}. Please provide these explicitly or verify the issuer supports OIDC discovery.`,
},
{ status: 400 }
)
}
providerConfig.oidcConfig = oidcConfig
} else if (providerType === 'saml') {
const {

View File

@@ -8,6 +8,7 @@ import { getSession } from '@/lib/auth'
import { generateChatTitle } from '@/lib/copilot/chat-title'
import { getCopilotModel } from '@/lib/copilot/config'
import { SIM_AGENT_API_URL_DEFAULT, SIM_AGENT_VERSION } from '@/lib/copilot/constants'
import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
@@ -40,34 +41,8 @@ const ChatMessageSchema = z.object({
userMessageId: z.string().optional(), // ID from frontend for the user message
chatId: z.string().optional(),
workflowId: z.string().min(1, 'Workflow ID is required'),
model: z
.enum([
'gpt-5-fast',
'gpt-5',
'gpt-5-medium',
'gpt-5-high',
'gpt-5.1-fast',
'gpt-5.1',
'gpt-5.1-medium',
'gpt-5.1-high',
'gpt-5-codex',
'gpt-5.1-codex',
'gpt-5.2',
'gpt-5.2-codex',
'gpt-5.2-pro',
'gpt-4o',
'gpt-4.1',
'o3',
'claude-4-sonnet',
'claude-4.5-haiku',
'claude-4.5-sonnet',
'claude-4.5-opus',
'claude-4.1-opus',
'gemini-3-pro',
])
.optional()
.default('claude-4.5-opus'),
mode: z.enum(['ask', 'agent', 'plan']).optional().default('agent'),
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
prefetch: z.boolean().optional(),
createNewChat: z.boolean().optional().default(false),
stream: z.boolean().optional().default(true),
@@ -295,7 +270,8 @@ export async function POST(req: NextRequest) {
}
const defaults = getCopilotModel('chat')
const modelToUse = env.COPILOT_MODEL || defaults.model
const selectedModel = model || defaults.model
const envModel = env.COPILOT_MODEL || defaults.model
let providerConfig: CopilotProviderConfig | undefined
const providerEnv = env.COPILOT_PROVIDER as any
@@ -304,7 +280,7 @@ export async function POST(req: NextRequest) {
if (providerEnv === 'azure-openai') {
providerConfig = {
provider: 'azure-openai',
model: modelToUse,
model: envModel,
apiKey: env.AZURE_OPENAI_API_KEY,
apiVersion: 'preview',
endpoint: env.AZURE_OPENAI_ENDPOINT,
@@ -312,7 +288,7 @@ export async function POST(req: NextRequest) {
} else if (providerEnv === 'vertex') {
providerConfig = {
provider: 'vertex',
model: modelToUse,
model: envModel,
apiKey: env.COPILOT_API_KEY,
vertexProject: env.VERTEX_PROJECT,
vertexLocation: env.VERTEX_LOCATION,
@@ -320,12 +296,15 @@ export async function POST(req: NextRequest) {
} else {
providerConfig = {
provider: providerEnv,
model: modelToUse,
model: selectedModel,
apiKey: env.COPILOT_API_KEY,
}
}
}
const effectiveMode = mode === 'agent' ? 'build' : mode
const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
// Determine conversationId to use for this request
const effectiveConversationId =
(currentChat?.conversationId as string | undefined) || conversationId
@@ -345,7 +324,7 @@ export async function POST(req: NextRequest) {
}
} | null = null
if (mode === 'agent') {
if (effectiveMode === 'build') {
// Build base tools (executed locally, not deferred)
// Include function_execute for code execution capability
baseTools = [
@@ -452,8 +431,8 @@ export async function POST(req: NextRequest) {
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
model: model,
mode: mode,
model: selectedModel,
mode: transportMode,
messageId: userMessageIdToUse,
version: SIM_AGENT_VERSION,
...(providerConfig ? { provider: providerConfig } : {}),
@@ -477,7 +456,7 @@ export async function POST(req: NextRequest) {
hasConversationId: !!effectiveConversationId,
hasFileAttachments: processedFileContents.length > 0,
messageLength: message.length,
mode,
mode: effectiveMode,
hasTools: integrationTools.length > 0,
toolCount: integrationTools.length,
hasBaseTools: baseTools.length > 0,

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { COPILOT_MODES } from '@/lib/copilot/models'
import {
authenticateCopilotRequestSessionOnly,
createInternalServerErrorResponse,
@@ -45,7 +46,7 @@ const UpdateMessagesSchema = z.object({
planArtifact: z.string().nullable().optional(),
config: z
.object({
mode: z.enum(['ask', 'build', 'plan']).optional(),
mode: z.enum(COPILOT_MODES).optional(),
model: z.string().optional(),
})
.nullable()

View File

@@ -14,8 +14,7 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { REFERENCE } from '@/executor/constants'
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
import { executeTool } from '@/tools'
import { getTool, resolveToolId } from '@/tools/utils'
@@ -28,45 +27,6 @@ const ExecuteToolSchema = z.object({
workflowId: z.string().optional(),
})
/**
* Resolves all {{ENV_VAR}} references in a value recursively
* Works with strings, arrays, and objects
*/
function resolveEnvVarReferences(value: any, envVars: Record<string, string>): any {
if (typeof value === 'string') {
// Check for exact match: entire string is "{{VAR_NAME}}"
const exactMatchPattern = new RegExp(
`^\\${REFERENCE.ENV_VAR_START}([^}]+)\\${REFERENCE.ENV_VAR_END}$`
)
const exactMatch = exactMatchPattern.exec(value)
if (exactMatch) {
const envVarName = exactMatch[1].trim()
return envVars[envVarName] ?? value
}
// Check for embedded references: "prefix {{VAR}} suffix"
const envVarPattern = createEnvVarPattern()
return value.replace(envVarPattern, (match, varName) => {
const trimmedName = varName.trim()
return envVars[trimmedName] ?? match
})
}
if (Array.isArray(value)) {
return value.map((item) => resolveEnvVarReferences(item, envVars))
}
if (value !== null && typeof value === 'object') {
const resolved: Record<string, any> = {}
for (const [key, val] of Object.entries(value)) {
resolved[key] = resolveEnvVarReferences(val, envVars)
}
return resolved
}
return value
}
export async function POST(req: NextRequest) {
const tracker = createRequestTracker()
@@ -145,7 +105,17 @@ export async function POST(req: NextRequest) {
// Build execution params starting with LLM-provided arguments
// Resolve all {{ENV_VAR}} references in the arguments
const executionParams: Record<string, any> = resolveEnvVarReferences(toolArgs, decryptedEnvVars)
const executionParams: Record<string, any> = resolveEnvVarReferences(
toolArgs,
decryptedEnvVars,
{
resolveExactMatch: true,
allowEmbedded: true,
trimKeys: true,
onMissing: 'keep',
deep: true,
}
) as Record<string, any>
logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {
toolName,
@@ -254,7 +224,7 @@ export async function POST(req: NextRequest) {
hasApiKey: !!executionParams.apiKey,
})
const result = await executeTool(resolvedToolName, executionParams, true)
const result = await executeTool(resolvedToolName, executionParams)
logger.info(`[${tracker.requestId}] Tool execution complete`, {
toolName,

View File

@@ -2,12 +2,13 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import type { CopilotModelId } from '@/lib/copilot/models'
import { db } from '@/../../packages/db'
import { settings } from '@/../../packages/db/schema'
const logger = createLogger('CopilotUserModelsAPI')
const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
'gpt-4o': false,
'gpt-4.1': false,
'gpt-5-fast': false,
@@ -28,7 +29,7 @@ const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
'claude-4.5-haiku': true,
'claude-4.5-sonnet': true,
'claude-4.5-opus': true,
// 'claude-4.1-opus': true,
'claude-4.1-opus': false,
'gemini-3-pro': true,
}
@@ -54,7 +55,9 @@ export async function GET(request: NextRequest) {
const mergedModels = { ...DEFAULT_ENABLED_MODELS }
for (const [modelId, enabled] of Object.entries(userModelsMap)) {
mergedModels[modelId] = enabled
if (modelId in mergedModels) {
mergedModels[modelId as CopilotModelId] = enabled
}
}
const hasNewModels = Object.keys(DEFAULT_ENABLED_MODELS).some(

View File

@@ -1,10 +1,11 @@
import { db } from '@sim/db'
import { templateCreators, user } from '@sim/db/schema'
import { templateCreators } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
const logger = createLogger('CreatorVerificationAPI')
@@ -23,9 +24,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
if (!currentUser[0]?.isSuperUser) {
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to verify creator: ${id}`)
return NextResponse.json({ error: 'Only super users can verify creators' }, { status: 403 })
}
@@ -76,9 +76,8 @@ export async function DELETE(
}
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
if (!currentUser[0]?.isSuperUser) {
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to unverify creator: ${id}`)
return NextResponse.json({ error: 'Only super users can unverify creators' }, { status: 403 })
}

View File

@@ -6,9 +6,10 @@ import { createLogger } from '@sim/logger'
import binaryExtensionsList from 'binary-extensions'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
import { UPLOAD_DIR_SERVER } from '@/lib/uploads/core/setup.server'
import { getFileMetadataByKey } from '@/lib/uploads/server/metadata'
import {
@@ -21,6 +22,7 @@ import {
} from '@/lib/uploads/utils/file-utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
import type { UserFile } from '@/executor/types'
import '@/lib/uploads/core/setup.server'
export const dynamic = 'force-dynamic'
@@ -30,6 +32,12 @@ const logger = createLogger('FilesParseAPI')
const MAX_DOWNLOAD_SIZE_BYTES = 100 * 1024 * 1024 // 100 MB
const DOWNLOAD_TIMEOUT_MS = 30000 // 30 seconds
interface ExecutionContext {
workspaceId: string
workflowId: string
executionId: string
}
interface ParseResult {
success: boolean
content?: string
@@ -37,6 +45,7 @@ interface ParseResult {
filePath: string
originalName?: string // Original filename from database (for workspace files)
viewerUrl?: string | null // Viewer URL for the file if available
userFile?: UserFile // UserFile object for the raw file
metadata?: {
fileType: string
size: number
@@ -70,27 +79,45 @@ export async function POST(request: NextRequest) {
const userId = authResult.userId
const requestData = await request.json()
const { filePath, fileType, workspaceId } = requestData
const { filePath, fileType, workspaceId, workflowId, executionId } = requestData
if (!filePath || (typeof filePath === 'string' && filePath.trim() === '')) {
return NextResponse.json({ success: false, error: 'No file path provided' }, { status: 400 })
}
logger.info('File parse request received:', { filePath, fileType, workspaceId, userId })
// Build execution context if all required fields are present
const executionContext: ExecutionContext | undefined =
workspaceId && workflowId && executionId
? { workspaceId, workflowId, executionId }
: undefined
logger.info('File parse request received:', {
filePath,
fileType,
workspaceId,
userId,
hasExecutionContext: !!executionContext,
})
if (Array.isArray(filePath)) {
const results = []
for (const path of filePath) {
if (!path || (typeof path === 'string' && path.trim() === '')) {
for (const singlePath of filePath) {
if (!singlePath || (typeof singlePath === 'string' && singlePath.trim() === '')) {
results.push({
success: false,
error: 'Empty file path in array',
filePath: path || '',
filePath: singlePath || '',
})
continue
}
const result = await parseFileSingle(path, fileType, workspaceId, userId)
const result = await parseFileSingle(
singlePath,
fileType,
workspaceId,
userId,
executionContext
)
if (result.metadata) {
result.metadata.processingTime = Date.now() - startTime
}
@@ -106,6 +133,7 @@ export async function POST(request: NextRequest) {
fileType: result.metadata?.fileType || 'application/octet-stream',
size: result.metadata?.size || 0,
binary: false,
file: result.userFile,
},
filePath: result.filePath,
viewerUrl: result.viewerUrl,
@@ -121,7 +149,7 @@ export async function POST(request: NextRequest) {
})
}
const result = await parseFileSingle(filePath, fileType, workspaceId, userId)
const result = await parseFileSingle(filePath, fileType, workspaceId, userId, executionContext)
if (result.metadata) {
result.metadata.processingTime = Date.now() - startTime
@@ -137,6 +165,7 @@ export async function POST(request: NextRequest) {
fileType: result.metadata?.fileType || 'application/octet-stream',
size: result.metadata?.size || 0,
binary: false,
file: result.userFile,
},
filePath: result.filePath,
viewerUrl: result.viewerUrl,
@@ -164,7 +193,8 @@ async function parseFileSingle(
filePath: string,
fileType: string,
workspaceId: string,
userId: string
userId: string,
executionContext?: ExecutionContext
): Promise<ParseResult> {
logger.info('Parsing file:', filePath)
@@ -186,18 +216,18 @@ async function parseFileSingle(
}
if (filePath.includes('/api/files/serve/')) {
return handleCloudFile(filePath, fileType, undefined, userId)
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
}
if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
return handleExternalUrl(filePath, fileType, workspaceId, userId)
return handleExternalUrl(filePath, fileType, workspaceId, userId, executionContext)
}
if (isUsingCloudStorage()) {
return handleCloudFile(filePath, fileType, undefined, userId)
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
}
return handleLocalFile(filePath, fileType, userId)
return handleLocalFile(filePath, fileType, userId, executionContext)
}
/**
@@ -230,12 +260,14 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
/**
* Handle external URL
* If workspaceId is provided, checks if file already exists and saves to workspace if not
* If executionContext is provided, also stores the file in execution storage and returns UserFile
*/
async function handleExternalUrl(
url: string,
fileType: string,
workspaceId: string,
userId: string
userId: string,
executionContext?: ExecutionContext
): Promise<ParseResult> {
try {
logger.info('Fetching external URL:', url)
@@ -312,17 +344,13 @@ async function handleExternalUrl(
if (existingFile) {
const storageFilePath = `/api/files/serve/${existingFile.key}`
return handleCloudFile(storageFilePath, fileType, 'workspace', userId)
return handleCloudFile(storageFilePath, fileType, 'workspace', userId, executionContext)
}
}
}
const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!)
const response = await fetch(pinnedUrl, {
signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS),
headers: {
Host: urlValidation.originalHostname!,
},
const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
timeout: DOWNLOAD_TIMEOUT_MS,
})
if (!response.ok) {
throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`)
@@ -341,6 +369,19 @@ async function handleExternalUrl(
logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)
let userFile: UserFile | undefined
const mimeType = response.headers.get('content-type') || getMimeTypeFromExtension(extension)
if (executionContext) {
try {
userFile = await uploadExecutionFile(executionContext, buffer, filename, mimeType, userId)
logger.info(`Stored file in execution storage: ${filename}`, { key: userFile.key })
} catch (uploadError) {
logger.warn(`Failed to store file in execution storage:`, uploadError)
// Continue without userFile - parsing can still work
}
}
if (shouldCheckWorkspace) {
try {
const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
@@ -353,8 +394,6 @@ async function handleExternalUrl(
})
} else {
const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
const mimeType =
response.headers.get('content-type') || getMimeTypeFromExtension(extension)
await uploadWorkspaceFile(workspaceId, userId, buffer, filename, mimeType)
logger.info(`Saved URL file to workspace storage: ${filename}`)
}
@@ -363,17 +402,23 @@ async function handleExternalUrl(
}
}
let parseResult: ParseResult
if (extension === 'pdf') {
return await handlePdfBuffer(buffer, filename, fileType, url)
}
if (extension === 'csv') {
return await handleCsvBuffer(buffer, filename, fileType, url)
}
if (isSupportedFileType(extension)) {
return await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
parseResult = await handlePdfBuffer(buffer, filename, fileType, url)
} else if (extension === 'csv') {
parseResult = await handleCsvBuffer(buffer, filename, fileType, url)
} else if (isSupportedFileType(extension)) {
parseResult = await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
} else {
parseResult = handleGenericBuffer(buffer, filename, extension, fileType)
}
return handleGenericBuffer(buffer, filename, extension, fileType)
// Attach userFile to the result
if (userFile) {
parseResult.userFile = userFile
}
return parseResult
} catch (error) {
logger.error(`Error handling external URL ${url}:`, error)
return {
@@ -386,12 +431,15 @@ async function handleExternalUrl(
/**
* Handle file stored in cloud storage
* If executionContext is provided and file is not already from execution storage,
* copies the file to execution storage and returns UserFile
*/
async function handleCloudFile(
filePath: string,
fileType: string,
explicitContext: string | undefined,
userId: string
userId: string,
executionContext?: ExecutionContext
): Promise<ParseResult> {
try {
const cloudKey = extractStorageKey(filePath)
@@ -438,6 +486,7 @@ async function handleCloudFile(
const filename = originalFilename || cloudKey.split('/').pop() || cloudKey
const extension = path.extname(filename).toLowerCase().substring(1)
const mimeType = getMimeTypeFromExtension(extension)
const normalizedFilePath = `/api/files/serve/${encodeURIComponent(cloudKey)}?context=${context}`
let workspaceIdFromKey: string | undefined
@@ -453,6 +502,39 @@ async function handleCloudFile(
const viewerUrl = getViewerUrl(cloudKey, workspaceIdFromKey)
// Store file in execution storage if executionContext is provided
let userFile: UserFile | undefined
if (executionContext) {
// If file is already from execution context, create UserFile reference without re-uploading
if (context === 'execution') {
userFile = {
id: `file_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`,
name: filename,
url: normalizedFilePath,
size: fileBuffer.length,
type: mimeType,
key: cloudKey,
context: 'execution',
}
logger.info(`Created UserFile reference for existing execution file: ${filename}`)
} else {
// Copy from workspace/other storage to execution storage
try {
userFile = await uploadExecutionFile(
executionContext,
fileBuffer,
filename,
mimeType,
userId
)
logger.info(`Copied file to execution storage: ${filename}`, { key: userFile.key })
} catch (uploadError) {
logger.warn(`Failed to copy file to execution storage:`, uploadError)
}
}
}
let parseResult: ParseResult
if (extension === 'pdf') {
parseResult = await handlePdfBuffer(fileBuffer, filename, fileType, normalizedFilePath)
@@ -477,6 +559,11 @@ async function handleCloudFile(
parseResult.viewerUrl = viewerUrl
// Attach userFile to the result
if (userFile) {
parseResult.userFile = userFile
}
return parseResult
} catch (error) {
logger.error(`Error handling cloud file ${filePath}:`, error)
@@ -500,7 +587,8 @@ async function handleCloudFile(
async function handleLocalFile(
filePath: string,
fileType: string,
userId: string
userId: string,
executionContext?: ExecutionContext
): Promise<ParseResult> {
try {
const filename = filePath.split('/').pop() || filePath
@@ -540,13 +628,32 @@ async function handleLocalFile(
const hash = createHash('md5').update(fileBuffer).digest('hex')
const extension = path.extname(filename).toLowerCase().substring(1)
const mimeType = fileType || getMimeTypeFromExtension(extension)
// Store file in execution storage if executionContext is provided
let userFile: UserFile | undefined
if (executionContext) {
try {
userFile = await uploadExecutionFile(
executionContext,
fileBuffer,
filename,
mimeType,
userId
)
logger.info(`Stored local file in execution storage: ${filename}`, { key: userFile.key })
} catch (uploadError) {
logger.warn(`Failed to store local file in execution storage:`, uploadError)
}
}
return {
success: true,
content: result.content,
filePath,
userFile,
metadata: {
fileType: fileType || getMimeTypeFromExtension(extension),
fileType: mimeType,
size: stats.size,
hash,
processingTime: 0,

View File

@@ -11,6 +11,7 @@ import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -35,10 +36,7 @@ async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
const startBlock = blocks.find(
(block) =>
block.type === 'starter' || block.type === 'start_trigger' || block.type === 'input_trigger'
)
const startBlock = blocks.find((block) => isInputDefinitionTrigger(block.type))
if (!startBlock) {
return []

View File

@@ -276,8 +276,11 @@ describe('Function Execute API Route', () => {
it.concurrent('should resolve tag variables with <tag_name> syntax', async () => {
const req = createMockRequest('POST', {
code: 'return <email>',
params: {
email: { id: '123', subject: 'Test Email' },
blockData: {
'block-123': { id: '123', subject: 'Test Email' },
},
blockNameMapping: {
email: 'block-123',
},
})
@@ -305,9 +308,13 @@ describe('Function Execute API Route', () => {
it.concurrent('should only match valid variable names in angle brackets', async () => {
const req = createMockRequest('POST', {
code: 'return <validVar> + "<invalid@email.com>" + <another_valid>',
params: {
validVar: 'hello',
another_valid: 'world',
blockData: {
'block-1': 'hello',
'block-2': 'world',
},
blockNameMapping: {
validVar: 'block-1',
another_valid: 'block-2',
},
})
@@ -321,28 +328,22 @@ describe('Function Execute API Route', () => {
it.concurrent(
'should handle Gmail webhook data with email addresses containing angle brackets',
async () => {
const gmailData = {
email: {
id: '123',
from: 'Waleed Latif <waleed@sim.ai>',
to: 'User <user@example.com>',
subject: 'Test Email',
bodyText: 'Hello world',
},
rawEmail: {
id: '123',
payload: {
headers: [
{ name: 'From', value: 'Waleed Latif <waleed@sim.ai>' },
{ name: 'To', value: 'User <user@example.com>' },
],
},
},
const emailData = {
id: '123',
from: 'Waleed Latif <waleed@sim.ai>',
to: 'User <user@example.com>',
subject: 'Test Email',
bodyText: 'Hello world',
}
const req = createMockRequest('POST', {
code: 'return <email>',
params: gmailData,
blockData: {
'block-email': emailData,
},
blockNameMapping: {
email: 'block-email',
},
})
const response = await POST(req)
@@ -356,17 +357,20 @@ describe('Function Execute API Route', () => {
it.concurrent(
'should properly serialize complex email objects with special characters',
async () => {
const complexEmailData = {
email: {
from: 'Test User <test@example.com>',
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
bodyText: 'Text with\nnewlines\tand\ttabs',
},
const emailData = {
from: 'Test User <test@example.com>',
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
bodyText: 'Text with\nnewlines\tand\ttabs',
}
const req = createMockRequest('POST', {
code: 'return <email>',
params: complexEmailData,
blockData: {
'block-email': emailData,
},
blockNameMapping: {
email: 'block-email',
},
})
const response = await POST(req)
@@ -519,18 +523,23 @@ describe('Function Execute API Route', () => {
})
it.concurrent('should handle JSON serialization edge cases', async () => {
const complexData = {
special: 'chars"with\'quotes',
unicode: '🎉 Unicode content',
nested: {
deep: {
value: 'test',
},
},
}
const req = createMockRequest('POST', {
code: 'return <complexData>',
params: {
complexData: {
special: 'chars"with\'quotes',
unicode: '🎉 Unicode content',
nested: {
deep: {
value: 'test',
},
},
},
blockData: {
'block-complex': complexData,
},
blockNameMapping: {
complexData: 'block-complex',
},
})

View File

@@ -10,6 +10,7 @@ import {
createEnvVarPattern,
createWorkflowVariablePattern,
} from '@/executor/utils/reference-validation'
import { navigatePath } from '@/executor/variables/resolvers/reference'
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
@@ -17,8 +18,8 @@ export const MAX_DURATION = 210
const logger = createLogger('FunctionExecuteAPI')
const E2B_JS_WRAPPER_LINES = 3 // Lines before user code: ';(async () => {', ' try {', ' const __sim_result = await (async () => {'
const E2B_PYTHON_WRAPPER_LINES = 1 // Lines before user code: 'def __sim_main__():'
const E2B_JS_WRAPPER_LINES = 3
const E2B_PYTHON_WRAPPER_LINES = 1
type TypeScriptModule = typeof import('typescript')
@@ -133,33 +134,21 @@ function extractEnhancedError(
if (error.stack) {
enhanced.stack = error.stack
// Parse stack trace to extract line and column information
// Handle both compilation errors and runtime errors
const stackLines: string[] = error.stack.split('\n')
for (const line of stackLines) {
// Pattern 1: Compilation errors - "user-function.js:6"
let match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
// Pattern 2: Runtime errors - "at user-function.js:5:12"
if (!match) {
match = line.match(/at\s+user-function\.js:(\d+):(\d+)/)
}
// Pattern 3: Generic patterns for any line containing our filename
if (!match) {
match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
}
if (match) {
const stackLine = Number.parseInt(match[1], 10)
const stackColumn = match[2] ? Number.parseInt(match[2], 10) : undefined
// Adjust line number to account for wrapper code
// The user code starts at a specific line in our wrapper
const adjustedLine = stackLine - userCodeStartLine + 1
// Check if this is a syntax error in wrapper code caused by incomplete user code
const isWrapperSyntaxError =
stackLine > userCodeStartLine &&
error.name === 'SyntaxError' &&
@@ -167,7 +156,6 @@ function extractEnhancedError(
error.message.includes('Unexpected end of input'))
if (isWrapperSyntaxError && userCode) {
// Map wrapper syntax errors to the last line of user code
const codeLines = userCode.split('\n')
const lastUserLine = codeLines.length
enhanced.line = lastUserLine
@@ -180,7 +168,6 @@ function extractEnhancedError(
enhanced.line = adjustedLine
enhanced.column = stackColumn
// Extract the actual line content from user code
if (userCode) {
const codeLines = userCode.split('\n')
if (adjustedLine <= codeLines.length) {
@@ -191,7 +178,6 @@ function extractEnhancedError(
}
if (stackLine <= userCodeStartLine) {
// Error is in wrapper code itself
enhanced.line = stackLine
enhanced.column = stackColumn
break
@@ -199,7 +185,6 @@ function extractEnhancedError(
}
}
// Clean up stack trace to show user-relevant information
const cleanedStackLines: string[] = stackLines
.filter(
(line: string) =>
@@ -213,9 +198,6 @@ function extractEnhancedError(
}
}
// Keep original message without adding error type prefix
// The error type will be added later in createUserFriendlyErrorMessage
return enhanced
}
@@ -230,7 +212,6 @@ function formatE2BError(
userCode: string,
prologueLineCount: number
): { formattedError: string; cleanedOutput: string } {
// Calculate line offset based on language and prologue
const wrapperLines =
language === CodeLanguage.Python ? E2B_PYTHON_WRAPPER_LINES : E2B_JS_WRAPPER_LINES
const totalOffset = prologueLineCount + wrapperLines
@@ -240,27 +221,20 @@ function formatE2BError(
let cleanErrorMsg = ''
if (language === CodeLanguage.Python) {
// Python error format: "Cell In[X], line Y" followed by error details
// Extract line number from the Cell reference
const cellMatch = errorOutput.match(/Cell In\[\d+\], line (\d+)/)
if (cellMatch) {
const originalLine = Number.parseInt(cellMatch[1], 10)
userLine = originalLine - totalOffset
}
// Extract clean error message from the error string
// Remove file references like "(detected at line X) (file.py, line Y)"
cleanErrorMsg = errorMessage
.replace(/\s*\(detected at line \d+\)/g, '')
.replace(/\s*\([^)]+\.py, line \d+\)/g, '')
.trim()
} else if (language === CodeLanguage.JavaScript) {
// JavaScript error format from E2B: "SyntaxError: /path/file.ts: Message. (line:col)\n\n 9 | ..."
// First, extract the error type and message from the first line
const firstLineEnd = errorMessage.indexOf('\n')
const firstLine = firstLineEnd > 0 ? errorMessage.substring(0, firstLineEnd) : errorMessage
// Parse: "SyntaxError: /home/user/index.ts: Missing semicolon. (11:9)"
const jsErrorMatch = firstLine.match(/^(\w+Error):\s*[^:]+:\s*([^(]+)\.\s*\((\d+):(\d+)\)/)
if (jsErrorMatch) {
cleanErrorType = jsErrorMatch[1]
@@ -268,13 +242,11 @@ function formatE2BError(
const originalLine = Number.parseInt(jsErrorMatch[3], 10)
userLine = originalLine - totalOffset
} else {
// Fallback: look for line number in the arrow pointer line (> 11 |)
const arrowMatch = errorMessage.match(/^>\s*(\d+)\s*\|/m)
if (arrowMatch) {
const originalLine = Number.parseInt(arrowMatch[1], 10)
userLine = originalLine - totalOffset
}
// Try to extract error type and message
const errorMatch = firstLine.match(/^(\w+Error):\s*(.+)/)
if (errorMatch) {
cleanErrorType = errorMatch[1]
@@ -288,13 +260,11 @@ function formatE2BError(
}
}
// Build the final clean error message
const finalErrorMsg =
cleanErrorType && cleanErrorMsg
? `${cleanErrorType}: ${cleanErrorMsg}`
: cleanErrorMsg || errorMessage
// Format with line number if available
let formattedError = finalErrorMsg
if (userLine && userLine > 0) {
const codeLines = userCode.split('\n')
@@ -310,7 +280,6 @@ function formatE2BError(
}
}
// For stdout, just return the clean error message without the full traceback
const cleanedOutput = finalErrorMsg
return { formattedError, cleanedOutput }
@@ -326,7 +295,6 @@ function createUserFriendlyErrorMessage(
): string {
let errorMessage = enhanced.message
// Add line information if available
if (enhanced.line !== undefined) {
let lineInfo = `Line ${enhanced.line}`
@@ -337,18 +305,14 @@ function createUserFriendlyErrorMessage(
errorMessage = `${lineInfo} - ${errorMessage}`
} else {
// If no line number, try to extract it from stack trace for display
if (enhanced.stack) {
const stackMatch = enhanced.stack.match(/user-function\.js:(\d+)(?::(\d+))?/)
if (stackMatch) {
const line = Number.parseInt(stackMatch[1], 10)
let lineInfo = `Line ${line}`
// Try to get line content if we have userCode
if (userCode) {
const codeLines = userCode.split('\n')
// Note: stackMatch gives us VM line number, need to adjust
// This is a fallback case, so we might not have perfect line mapping
if (line <= codeLines.length) {
const lineContent = codeLines[line - 1]?.trim()
if (lineContent) {
@@ -362,7 +326,6 @@ function createUserFriendlyErrorMessage(
}
}
// Add error type prefix with consistent naming
if (enhanced.name !== 'Error') {
const errorTypePrefix =
enhanced.name === 'SyntaxError'
@@ -373,7 +336,6 @@ function createUserFriendlyErrorMessage(
? 'Reference Error'
: enhanced.name
// Only add prefix if not already present
if (!errorMessage.toLowerCase().includes(errorTypePrefix.toLowerCase())) {
errorMessage = `${errorTypePrefix}: ${errorMessage}`
}
@@ -382,9 +344,6 @@ function createUserFriendlyErrorMessage(
return errorMessage
}
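All of the helpers above hinge on one offset computation: the sandbox reports positions in the wrapped file, and subtracting the prologue and wrapper line counts recovers the user's line. A minimal sketch of that arithmetic, using a made-up wrapper size (the real E2B_PYTHON_WRAPPER_LINES / E2B_JS_WRAPPER_LINES constants are defined elsewhere in this file):
// Illustrative only: WRAPPER_LINES = 4 is an assumption, not the real constant.
const WRAPPER_LINES = 4
function toUserLine(reportedLine: number, prologueLineCount: number): number | null {
  const userLine = reportedLine - (prologueLineCount + WRAPPER_LINES)
  return userLine > 0 ? userLine : null // non-positive means the error is in wrapper code
}
// An error reported at line 11 with a 5-line prologue maps to user line 2:
console.log(toUserLine(11, 5)) // 2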
/**
* Resolves workflow variables with <variable.name> syntax
*/
function resolveWorkflowVariables(
code: string,
workflowVariables: Record<string, any>,
@@ -404,39 +363,35 @@ function resolveWorkflowVariables(
while ((match = regex.exec(code)) !== null) {
const variableName = match[1].trim()
// Find the variable by name (workflowVariables is indexed by ID, values are variable objects)
const foundVariable = Object.entries(workflowVariables).find(
([_, variable]) => normalizeName(variable.name || '') === variableName
)
if (!foundVariable) {
const availableVars = Object.values(workflowVariables)
.map((v) => v.name)
.filter(Boolean)
throw new Error(
`Variable "${variableName}" doesn't exist.` +
(availableVars.length > 0 ? ` Available: ${availableVars.join(', ')}` : '')
)
}
const variable = foundVariable[1]
let variableValue: unknown = variable.value
if (variable.value !== undefined && variable.value !== null) {
const type = variable.type === 'string' ? 'plain' : variable.type
if (type === 'number') {
variableValue = Number(variableValue)
} else if (type === 'boolean') {
variableValue = variableValue === 'true' || variableValue === true
} else if (type === 'json' && typeof variableValue === 'string') {
try {
variableValue = JSON.parse(variableValue)
} catch {
// Keep as-is
}
}
}
@@ -449,11 +404,9 @@ function resolveWorkflowVariables(
})
}
// Process replacements in reverse order to maintain correct indices
for (let i = replacements.length - 1; i >= 0; i--) {
const { match: matchStr, index, variableName, variableValue } = replacements[i]
// Use variable reference approach
const safeVarName = `__variable_${variableName.replace(/[^a-zA-Z0-9_]/g, '_')}`
contextVariables[safeVarName] = variableValue
resolvedCode =
@@ -463,9 +416,6 @@ function resolveWorkflowVariables(
return resolvedCode
}
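For illustration, given the lookup-then-coerce flow above, a reference like <variable.apiLimit> resolves as follows (the variable shape is assumed for the example):
// Assumed inputs, for illustration only:
const workflowVariables = {
  'var-id-1': { name: 'apiLimit', type: 'number', value: '25' },
}
const contextVariables: Record<string, any> = {}
// resolveWorkflowVariables('return <variable.apiLimit> * 2', workflowVariables, contextVariables)
// rewrites the code to:      return __variable_apiLimit * 2
// and sets contextVariables.__variable_apiLimit = 25 (via Number('25')).
// Referencing an unknown name now throws instead of silently resolving to '':
//   Variable "missing" doesn't exist. Available: apiLimit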
/**
* Resolves environment variables with {{var_name}} syntax
*/
function resolveEnvironmentVariables(
code: string,
params: Record<string, any>,
@@ -479,14 +429,30 @@ function resolveEnvironmentVariables(
const replacements: Array<{ match: string; index: number; varName: string; varValue: string }> =
[]
const resolverVars: Record<string, string> = {}
Object.entries(params).forEach(([key, value]) => {
if (value !== undefined && value !== null) {
resolverVars[key] = String(value)
}
})
Object.entries(envVars).forEach(([key, value]) => {
if (value !== undefined && value !== null) {
resolverVars[key] = value
}
})
while ((match = regex.exec(code)) !== null) {
const varName = match[1].trim()
if (!(varName in resolverVars)) {
continue
}
replacements.push({
match: match[0],
index: match.index,
varName,
varValue: resolverVars[varName],
})
}
@@ -502,12 +468,8 @@ function resolveEnvironmentVariables(
return resolvedCode
}
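One behavioral consequence worth noting: because envVars is copied into resolverVars after params, environment variables shadow same-named params, and unknown {{...}} references are now left in the code instead of resolving to an empty string. A small sketch:
// Illustrative inputs:
const params = { API_KEY: 'from-params', region: 'us-east-1' }
const envVars = { API_KEY: 'from-env' }
// resolverVars ends up as { API_KEY: 'from-env', region: 'us-east-1' },
// so '{{API_KEY}}' resolves to 'from-env', '{{region}}' to 'us-east-1',
// and '{{UNKNOWN}}' is skipped and left verbatim in the code.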
/**
* Resolves tags with <tag_name> syntax (including nested paths like <block.response.data>)
*/
function resolveTagVariables(
code: string,
blockData: Record<string, any>,
blockNameMapping: Record<string, string>,
contextVariables: Record<string, any>
@@ -522,27 +484,30 @@ function resolveTagVariables(
for (const match of tagMatches) {
const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
const blockName = pathParts[0]
const blockId = blockNameMapping[blockName]
if (!blockId) {
continue
}
const blockOutput = blockData[blockId]
if (blockOutput === undefined) {
continue
}
let tagValue: any
if (pathParts.length === 1) {
tagValue = blockOutput
} else {
tagValue = navigatePath(blockOutput, pathParts.slice(1))
}
if (tagValue === undefined) {
continue
}
// If the value is a stringified JSON, parse it back to object
if (
typeof tagValue === 'string' &&
tagValue.length > 100 &&
@@ -550,16 +515,13 @@ function resolveTagVariables(
) {
try {
tagValue = JSON.parse(tagValue)
} catch {
// Keep as-is
}
}
// Instead of injecting large JSON directly, create a variable reference
const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
contextVariables[safeVarName] = tagValue
// Replace the template with a variable reference
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
}
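The rewritten resolver walks name → ID → path instead of probing params and raw paths. Under the assumed inputs below, a <function1.response.result> tag resolves like this:
// Assumed inputs, for illustration only:
const blockNameMapping = { function1: 'blk_123' }
const blockData = { blk_123: { response: { result: 42 } } }
// '<function1.response.result>' → pathParts ['function1', 'response', 'result'],
// blockNameMapping.function1 → 'blk_123',
// navigatePath(blockData.blk_123, ['response', 'result']) → 42.
// The tag is replaced with __tag_function1_response_result and the value travels
// through contextVariables, so large objects are never inlined as JSON strings.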
@@ -584,35 +546,13 @@ function resolveCodeVariables(
let resolvedCode = code
const contextVariables: Record<string, any> = {}
// Resolve workflow variables with <variable.name> syntax first
resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)
// Resolve environment variables with {{var_name}} syntax
resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)
// Resolve tags with <tag_name> syntax (including nested paths like <block.response.data>)
resolvedCode = resolveTagVariables(resolvedCode, blockData, blockNameMapping, contextVariables)
return { resolvedCode, contextVariables }
}
/**
* Get nested value from object using dot notation path
*/
function getNestedValue(obj: any, path: string): any {
if (!obj || !path) return undefined
return path.split('.').reduce((current, key) => {
return current && typeof current === 'object' ? current[key] : undefined
}, obj)
}
/**
* Remove one trailing newline from stdout
* This handles the common case where print() or console.log() adds a trailing \n
@@ -650,7 +590,6 @@ export async function POST(req: NextRequest) {
isCustomTool = false,
} = body
// Extract internal parameters that shouldn't be passed to the execution context
const executionParams = { ...params }
executionParams._context = undefined
@@ -676,7 +615,6 @@ export async function POST(req: NextRequest) {
const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
// Extract imports once for JavaScript code (reuse later to avoid double extraction)
let jsImports = ''
let jsRemainingCode = resolvedCode
let hasImports = false
@@ -686,31 +624,22 @@ export async function POST(req: NextRequest) {
jsImports = extractionResult.imports
jsRemainingCode = extractionResult.remainingCode
// Check for ES6 imports or CommonJS require statements
// ES6 imports are extracted by the TypeScript parser
// Also check for require() calls which indicate external dependencies
const hasRequireStatements = /require\s*\(\s*['"`]/.test(resolvedCode)
hasImports = jsImports.trim().length > 0 || hasRequireStatements
}
// Python always requires E2B
if (lang === CodeLanguage.Python && !isE2bEnabled) {
throw new Error(
'Python execution requires E2B to be enabled. Please contact your administrator to enable E2B, or use JavaScript instead.'
)
}
// JavaScript with imports requires E2B
if (lang === CodeLanguage.JavaScript && hasImports && !isE2bEnabled) {
throw new Error(
'JavaScript code with import statements requires E2B to be enabled. Please remove the import statements, or contact your administrator to enable E2B.'
)
}
// Use E2B if:
// - E2B is enabled AND
// - Not a custom tool AND
// - (Python OR JavaScript with imports)
const useE2B =
isE2bEnabled &&
!isCustomTool &&
@@ -723,13 +652,10 @@ export async function POST(req: NextRequest) {
language: lang,
})
let prologue = ''
const epilogue = ''
if (lang === CodeLanguage.JavaScript) {
// Track prologue lines for error adjustment
let prologueLineCount = 0
// Reuse the imports we already extracted earlier
const imports = jsImports
const remainingCode = jsRemainingCode
@@ -761,7 +687,7 @@ export async function POST(req: NextRequest) {
' }',
'})();',
].join('\n')
const codeForE2B = importSection + prologue + wrapped
const execStart = Date.now()
const {
@@ -783,7 +709,6 @@ export async function POST(req: NextRequest) {
error: e2bError,
})
// If there was an execution error, format it properly
if (e2bError) {
const { formattedError, cleanedOutput } = formatE2BError(
e2bError,
@@ -807,7 +732,7 @@ export async function POST(req: NextRequest) {
output: { result: e2bResult ?? null, stdout: cleanStdout(stdout), executionTime },
})
}
// Track prologue lines for error adjustment
let prologueLineCount = 0
prologue += 'import json\n'
prologueLineCount++
@@ -825,7 +750,7 @@ export async function POST(req: NextRequest) {
'__sim_result__ = __sim_main__()',
"print('__SIM_RESULT__=' + json.dumps(__sim_result__))",
].join('\n')
const codeForE2B = prologue + wrapped
const execStart = Date.now()
const {
@@ -847,7 +772,6 @@ export async function POST(req: NextRequest) {
error: e2bError,
})
// If there was an execution error, format it properly
if (e2bError) {
const { formattedError, cleanedOutput } = formatE2BError(
e2bError,
@@ -876,7 +800,6 @@ export async function POST(req: NextRequest) {
const wrapperLines = ['(async () => {', ' try {']
if (isCustomTool) {
wrapperLines.push(' // For custom tools, make parameters directly accessible')
Object.keys(executionParams).forEach((key) => {
wrapperLines.push(` const ${key} = params.${key};`)
})
@@ -910,12 +833,10 @@ export async function POST(req: NextRequest) {
})
const ivmError = isolatedResult.error
// Adjust line number for prepended param destructuring in custom tools
let adjustedLine = ivmError.line
let adjustedLineContent = ivmError.lineContent
if (prependedLineCount > 0 && ivmError.line !== undefined) {
adjustedLine = Math.max(1, ivmError.line - prependedLineCount)
// Get line content from original user code, not the prepended code
const codeLines = resolvedCode.split('\n')
if (adjustedLine <= codeLines.length) {
adjustedLineContent = codeLines[adjustedLine - 1]?.trim()

View File

@@ -2,7 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/constants'
import { createTagDefinition, getTagDefinitions } from '@/lib/knowledge/tags/service'
import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils'
@@ -19,19 +19,32 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
try {
logger.info(`[${requestId}] Getting tag definitions for knowledge base ${knowledgeBaseId}`)
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
if (!auth.success) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
// Only allow session and internal JWT auth (not API key)
if (auth.authType === 'api_key') {
return NextResponse.json(
{ error: 'API key auth not supported for this endpoint' },
{ status: 401 }
)
}
// For session auth, verify KB access. Internal JWT is trusted.
if (auth.authType === 'session' && auth.userId) {
const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
if (!accessCheck.hasAccess) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
}
const tagDefinitions = await getTagDefinitions(knowledgeBaseId)
logger.info(
`[${requestId}] Retrieved ${tagDefinitions.length} tag definitions (${auth.authType})`
)
return NextResponse.json({
success: true,
@@ -51,14 +64,25 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
try {
logger.info(`[${requestId}] Creating tag definition for knowledge base ${knowledgeBaseId}`)
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
if (!auth.success) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
// Only allow session and internal JWT auth (not API key)
if (auth.authType === 'api_key') {
return NextResponse.json(
{ error: 'API key auth not supported for this endpoint' },
{ status: 401 }
)
}
// For session auth, verify KB access. Internal JWT is trusted.
if (auth.authType === 'session' && auth.userId) {
const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId)
if (!accessCheck.hasAccess) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
}
const body = await req.json()

View File

@@ -20,6 +20,7 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal'
import { getBaseUrl } from '@/lib/core/utils/urls'
const logger = createLogger('WorkflowMcpServeAPI')
@@ -52,6 +53,8 @@ async function getServer(serverId: string) {
id: workflowMcpServer.id,
name: workflowMcpServer.name,
workspaceId: workflowMcpServer.workspaceId,
isPublic: workflowMcpServer.isPublic,
createdBy: workflowMcpServer.createdBy,
})
.from(workflowMcpServer)
.where(eq(workflowMcpServer.id, serverId))
@@ -90,9 +93,11 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
}
if (!server.isPublic) {
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
}
const body = await request.json()
@@ -138,7 +143,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
id,
serverId,
rpcParams as { name: string; arguments?: Record<string, unknown> },
apiKey,
server.isPublic ? server.createdBy : undefined
)
default:
@@ -200,7 +206,8 @@ async function handleToolsCall(
id: RequestId,
serverId: string,
params: { name: string; arguments?: Record<string, unknown> } | undefined,
apiKey?: string | null,
publicServerOwnerId?: string
): Promise<NextResponse> {
try {
if (!params?.name) {
@@ -243,7 +250,13 @@ async function handleToolsCall(
const executeUrl = `${getBaseUrl()}/api/workflows/${tool.workflowId}/execute`
const headers: Record<string, string> = { 'Content-Type': 'application/json' }
if (publicServerOwnerId) {
const internalToken = await generateInternalToken(publicServerOwnerId)
headers.Authorization = `Bearer ${internalToken}`
} else if (apiKey) {
headers['X-API-Key'] = apiKey
}
logger.info(`Executing workflow ${tool.workflowId} via MCP tool ${params.name}`)
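A rough client-side sketch of what the public-server path enables: an unauthenticated JSON-RPC tools/call against a public server, executed under the owner's identity via the minted internal token. The route path and IDs here are placeholders, not taken from this diff:
// Hypothetical request; URL shape and tool name are assumptions.
const serverId = 'srv_123' // placeholder
const res = await fetch(`/api/mcp/workflow-servers/${serverId}/serve`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' }, // no API key needed when isPublic
  body: JSON.stringify({
    jsonrpc: '2.0',
    id: 1,
    method: 'tools/call',
    params: { name: 'my_workflow_tool', arguments: {} },
  }),
})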

View File

@@ -5,8 +5,7 @@ import { McpClient } from '@/lib/mcp/client'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import type { McpServerConfig, McpTransport } from '@/lib/mcp/types'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
const logger = createLogger('McpServerTestAPI')
@@ -24,22 +23,23 @@ function isUrlBasedTransport(transport: McpTransport): boolean {
* Resolve environment variables in strings
*/
function resolveEnvVars(value: string, envVars: Record<string, string>): string {
const missingVars: string[] = []
const resolvedValue = resolveEnvVarReferences(value, envVars, {
allowEmbedded: true,
resolveExactMatch: true,
trimKeys: true,
onMissing: 'keep',
deep: false,
missingKeys: missingVars,
}) as string
if (missingVars.length > 0) {
const uniqueMissing = Array.from(new Set(missingVars))
uniqueMissing.forEach((envKey) => {
logger.warn(`Environment variable "${envKey}" not found in MCP server test`)
})
}
return resolvedValue
}
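Assuming resolveEnvVarReferences behaves as configured here (onMissing: 'keep' leaves unresolved placeholders intact and records them in missingKeys), the refactored helper behaves like:
// Behavioral sketch, not a test:
// resolveEnvVars('{{HOST}}:{{PORT}}', { HOST: 'db.internal' })
//   → 'db.internal:{{PORT}}'
//   and logs one warning: Environment variable "PORT" not found in MCP server test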

View File

@@ -31,6 +31,7 @@ export const GET = withMcpAuth<RouteParams>('read')(
createdBy: workflowMcpServer.createdBy,
name: workflowMcpServer.name,
description: workflowMcpServer.description,
isPublic: workflowMcpServer.isPublic,
createdAt: workflowMcpServer.createdAt,
updatedAt: workflowMcpServer.updatedAt,
})
@@ -98,6 +99,9 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
if (body.description !== undefined) {
updateData.description = body.description?.trim() || null
}
if (body.isPublic !== undefined) {
updateData.isPublic = body.isPublic
}
const [updatedServer] = await db
.update(workflowMcpServer)

View File

@@ -26,7 +26,6 @@ export const GET = withMcpAuth<RouteParams>('read')(
logger.info(`[${requestId}] Getting tool ${toolId} from server ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -72,7 +71,6 @@ export const PATCH = withMcpAuth<RouteParams>('write')(
logger.info(`[${requestId}] Updating tool ${toolId} in server ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -139,7 +137,6 @@ export const DELETE = withMcpAuth<RouteParams>('write')(
logger.info(`[${requestId}] Deleting tool ${toolId} from server ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)

View File

@@ -6,24 +6,10 @@ import type { NextRequest } from 'next/server'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
const logger = createLogger('WorkflowMcpToolsAPI')
export const dynamic = 'force-dynamic'
interface RouteParams {
@@ -40,7 +26,6 @@ export const GET = withMcpAuth<RouteParams>('read')(
logger.info(`[${requestId}] Listing tools for workflow MCP server: ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -53,7 +38,6 @@ export const GET = withMcpAuth<RouteParams>('read')(
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
// Get tools with workflow details
const tools = await db
.select({
id: workflowMcpTool.id,
@@ -107,7 +91,6 @@ export const POST = withMcpAuth<RouteParams>('write')(
)
}
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
@@ -120,7 +103,6 @@ export const POST = withMcpAuth<RouteParams>('write')(
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
// Verify workflow exists and is deployed
const [workflowRecord] = await db
.select({
id: workflow.id,
@@ -137,7 +119,6 @@ export const POST = withMcpAuth<RouteParams>('write')(
return createMcpErrorResponse(new Error('Workflow not found'), 'Workflow not found', 404)
}
// Verify workflow belongs to the same workspace
if (workflowRecord.workspaceId !== workspaceId) {
return createMcpErrorResponse(
new Error('Workflow does not belong to this workspace'),
@@ -154,7 +135,6 @@ export const POST = withMcpAuth<RouteParams>('write')(
)
}
// Verify workflow has a valid start block
const hasStartBlock = await hasValidStartBlock(body.workflowId)
if (!hasStartBlock) {
return createMcpErrorResponse(
@@ -164,7 +144,6 @@ export const POST = withMcpAuth<RouteParams>('write')(
)
}
// Check if tool already exists for this workflow
const [existingTool] = await db
.select({ id: workflowMcpTool.id })
.from(workflowMcpTool)
@@ -190,7 +169,6 @@ export const POST = withMcpAuth<RouteParams>('write')(
workflowRecord.description ||
`Execute ${workflowRecord.name} workflow`
// Create the tool
const toolId = crypto.randomUUID()
const [tool] = await db
.insert(workflowMcpTool)

View File

@@ -1,10 +1,12 @@
import { db } from '@sim/db'
import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, inArray, sql } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
const logger = createLogger('WorkflowMcpServersAPI')
@@ -25,18 +27,18 @@ export const GET = withMcpAuth('read')(
createdBy: workflowMcpServer.createdBy,
name: workflowMcpServer.name,
description: workflowMcpServer.description,
isPublic: workflowMcpServer.isPublic,
createdAt: workflowMcpServer.createdAt,
updatedAt: workflowMcpServer.updatedAt,
toolCount: sql<number>`(
SELECT COUNT(*)::int
FROM "workflow_mcp_tool"
WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id"
)`.as('tool_count'),
})
.from(workflowMcpServer)
.where(eq(workflowMcpServer.workspaceId, workspaceId))
// Fetch all tools for these servers
const serverIds = servers.map((s) => s.id)
const tools =
serverIds.length > 0
@@ -49,7 +51,6 @@ export const GET = withMcpAuth('read')(
.where(inArray(workflowMcpTool.serverId, serverIds))
: []
// Group tool names by server
const toolNamesByServer: Record<string, string[]> = {}
for (const tool of tools) {
if (!toolNamesByServer[tool.serverId]) {
@@ -58,7 +59,6 @@ export const GET = withMcpAuth('read')(
toolNamesByServer[tool.serverId].push(tool.toolName)
}
// Attach tool names to servers
const serversWithToolNames = servers.map((server) => ({
...server,
toolNames: toolNamesByServer[server.id] || [],
@@ -90,6 +90,7 @@ export const POST = withMcpAuth('write')(
logger.info(`[${requestId}] Creating workflow MCP server:`, {
name: body.name,
workspaceId,
workflowIds: body.workflowIds,
})
if (!body.name) {
@@ -110,16 +111,76 @@ export const POST = withMcpAuth('write')(
createdBy: userId,
name: body.name.trim(),
description: body.description?.trim() || null,
isPublic: body.isPublic ?? false,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning()
const workflowIds: string[] = body.workflowIds || []
const addedTools: Array<{ workflowId: string; toolName: string }> = []
if (workflowIds.length > 0) {
const workflows = await db
.select({
id: workflow.id,
name: workflow.name,
description: workflow.description,
isDeployed: workflow.isDeployed,
workspaceId: workflow.workspaceId,
})
.from(workflow)
.where(inArray(workflow.id, workflowIds))
for (const workflowRecord of workflows) {
if (workflowRecord.workspaceId !== workspaceId) {
logger.warn(
`[${requestId}] Skipping workflow ${workflowRecord.id} - does not belong to workspace`
)
continue
}
if (!workflowRecord.isDeployed) {
logger.warn(`[${requestId}] Skipping workflow ${workflowRecord.id} - not deployed`)
continue
}
const hasStartBlock = await hasValidStartBlock(workflowRecord.id)
if (!hasStartBlock) {
logger.warn(`[${requestId}] Skipping workflow ${workflowRecord.id} - no start block`)
continue
}
const toolName = sanitizeToolName(workflowRecord.name)
const toolDescription =
workflowRecord.description || `Execute ${workflowRecord.name} workflow`
const toolId = crypto.randomUUID()
await db.insert(workflowMcpTool).values({
id: toolId,
serverId,
workflowId: workflowRecord.id,
toolName,
toolDescription,
parameterSchema: {},
createdAt: new Date(),
updatedAt: new Date(),
})
addedTools.push({ workflowId: workflowRecord.id, toolName })
}
logger.info(
`[${requestId}] Added ${addedTools.length} tools to server ${serverId}:`,
addedTools.map((t) => t.toolName)
)
}
logger.info(
`[${requestId}] Successfully created workflow MCP server: ${body.name} (ID: ${serverId})`
)
return createMcpSuccessResponse({ server, addedTools }, 201)
} catch (error) {
logger.error(`[${requestId}] Error creating workflow MCP server:`, error)
return createMcpErrorResponse(

View File

@@ -1,395 +0,0 @@
import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal'
import { isDev } from '@/lib/core/config/feature-flags'
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { executeTool } from '@/tools'
import { getTool, validateRequiredParametersAfterMerge } from '@/tools/utils'
const logger = createLogger('ProxyAPI')
const proxyPostSchema = z.object({
toolId: z.string().min(1, 'toolId is required'),
params: z.record(z.any()).optional().default({}),
executionContext: z
.object({
workflowId: z.string().optional(),
workspaceId: z.string().optional(),
executionId: z.string().optional(),
userId: z.string().optional(),
})
.optional(),
})
/**
* Creates a minimal set of default headers for proxy requests
* @returns Record of HTTP headers
*/
const getProxyHeaders = (): Record<string, string> => {
return {
'User-Agent':
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36',
Accept: '*/*',
'Accept-Encoding': 'gzip, deflate, br',
'Cache-Control': 'no-cache',
Connection: 'keep-alive',
}
}
/**
* Formats a response with CORS headers
* @param responseData Response data object
* @param status HTTP status code
* @returns NextResponse with CORS headers
*/
const formatResponse = (responseData: any, status = 200) => {
return NextResponse.json(responseData, {
status,
headers: {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
},
})
}
/**
* Creates an error response with consistent formatting
* @param error Error object or message
* @param status HTTP status code
* @param additionalData Additional data to include in the response
* @returns Formatted error response
*/
const createErrorResponse = (error: any, status = 500, additionalData = {}) => {
const errorMessage = error instanceof Error ? error.message : String(error)
const errorStack = error instanceof Error ? error.stack : undefined
logger.error('Creating error response', {
errorMessage,
status,
stack: isDev ? errorStack : undefined,
})
return formatResponse(
{
success: false,
error: errorMessage,
stack: isDev ? errorStack : undefined,
...additionalData,
},
status
)
}
/**
* GET handler for direct external URL proxying
* This allows for GET requests to external APIs
*/
export async function GET(request: Request) {
const url = new URL(request.url)
const targetUrl = url.searchParams.get('url')
const requestId = generateRequestId()
// Vault download proxy: /api/proxy?vaultDownload=1&bucket=...&object=...&credentialId=...
const vaultDownload = url.searchParams.get('vaultDownload')
if (vaultDownload === '1') {
try {
const bucket = url.searchParams.get('bucket')
const objectParam = url.searchParams.get('object')
const credentialId = url.searchParams.get('credentialId')
if (!bucket || !objectParam || !credentialId) {
return createErrorResponse('Missing bucket, object, or credentialId', 400)
}
// Fetch access token using existing token API
const baseUrl = new URL(getBaseUrl())
const tokenUrl = new URL('/api/auth/oauth/token', baseUrl)
// Build headers: forward session cookies if present; include internal auth for server-side
const tokenHeaders: Record<string, string> = { 'Content-Type': 'application/json' }
const incomingCookie = request.headers.get('cookie')
if (incomingCookie) tokenHeaders.Cookie = incomingCookie
try {
const internalToken = await generateInternalToken()
tokenHeaders.Authorization = `Bearer ${internalToken}`
} catch (_e) {
// best-effort internal auth
}
// Optional workflow context for collaboration auth
const workflowId = url.searchParams.get('workflowId') || undefined
const tokenRes = await fetch(tokenUrl.toString(), {
method: 'POST',
headers: tokenHeaders,
body: JSON.stringify({ credentialId, workflowId }),
})
if (!tokenRes.ok) {
const err = await tokenRes.text()
return createErrorResponse(`Failed to fetch access token: ${err}`, 401)
}
const tokenJson = await tokenRes.json()
const accessToken = tokenJson.accessToken
if (!accessToken) {
return createErrorResponse('No access token available', 401)
}
// Avoid double-encoding: incoming object may already be percent-encoded
const objectDecoded = decodeURIComponent(objectParam)
const gcsUrl = `https://storage.googleapis.com/storage/v1/b/${encodeURIComponent(
bucket
)}/o/${encodeURIComponent(objectDecoded)}?alt=media`
const fileRes = await fetch(gcsUrl, {
headers: { Authorization: `Bearer ${accessToken}` },
})
if (!fileRes.ok) {
const errText = await fileRes.text()
return createErrorResponse(errText || 'Failed to download file', fileRes.status)
}
const headers = new Headers()
fileRes.headers.forEach((v, k) => headers.set(k, v))
return new NextResponse(fileRes.body, { status: 200, headers })
} catch (error: any) {
logger.error(`[${requestId}] Vault download proxy failed`, {
error: error instanceof Error ? error.message : String(error),
})
return createErrorResponse('Vault download failed', 500)
}
}
if (!targetUrl) {
logger.error(`[${requestId}] Missing 'url' parameter`)
return createErrorResponse("Missing 'url' parameter", 400)
}
const urlValidation = await validateUrlWithDNS(targetUrl)
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] Blocked proxy request`, {
url: targetUrl.substring(0, 100),
error: urlValidation.error,
})
return createErrorResponse(urlValidation.error || 'Invalid URL', 403)
}
const method = url.searchParams.get('method') || 'GET'
const bodyParam = url.searchParams.get('body')
let body: string | undefined
if (bodyParam && ['POST', 'PUT', 'PATCH'].includes(method.toUpperCase())) {
try {
body = decodeURIComponent(bodyParam)
} catch (error) {
logger.warn(`[${requestId}] Failed to decode body parameter`, error)
}
}
const customHeaders: Record<string, string> = {}
for (const [key, value] of url.searchParams.entries()) {
if (key.startsWith('header.')) {
const headerName = key.substring(7)
customHeaders[headerName] = value
}
}
if (body && !customHeaders['Content-Type']) {
customHeaders['Content-Type'] = 'application/json'
}
logger.info(`[${requestId}] Proxying ${method} request to: ${targetUrl}`)
try {
const pinnedUrl = createPinnedUrl(targetUrl, urlValidation.resolvedIP!)
const response = await fetch(pinnedUrl, {
method: method,
headers: {
...getProxyHeaders(),
...customHeaders,
Host: urlValidation.originalHostname!,
},
body: body || undefined,
})
const contentType = response.headers.get('content-type') || ''
let data
if (contentType.includes('application/json')) {
data = await response.json()
} else {
data = await response.text()
}
const errorMessage = !response.ok
? data && typeof data === 'object' && data.error
? `${data.error.message || JSON.stringify(data.error)}`
: response.statusText || `HTTP error ${response.status}`
: undefined
if (!response.ok) {
logger.error(`[${requestId}] External API error: ${response.status} ${response.statusText}`)
}
return formatResponse({
success: response.ok,
status: response.status,
statusText: response.statusText,
headers: Object.fromEntries(response.headers.entries()),
data,
error: errorMessage,
})
} catch (error: any) {
logger.error(`[${requestId}] Proxy GET request failed`, {
url: targetUrl,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})
return createErrorResponse(error)
}
}
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
const startTime = new Date()
const startTimeISO = startTime.toISOString()
try {
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.error(`[${requestId}] Authentication failed for proxy:`, authResult.error)
return createErrorResponse('Unauthorized', 401)
}
let requestBody
try {
requestBody = await request.json()
} catch (parseError) {
logger.error(`[${requestId}] Failed to parse request body`, {
error: parseError instanceof Error ? parseError.message : String(parseError),
})
throw new Error('Invalid JSON in request body')
}
const validationResult = proxyPostSchema.safeParse(requestBody)
if (!validationResult.success) {
logger.error(`[${requestId}] Request validation failed`, {
errors: validationResult.error.errors,
})
const errorMessages = validationResult.error.errors
.map((err) => `${err.path.join('.')}: ${err.message}`)
.join(', ')
throw new Error(`Validation failed: ${errorMessages}`)
}
const { toolId, params } = validationResult.data
logger.info(`[${requestId}] Processing tool: ${toolId}`)
const tool = getTool(toolId)
if (!tool) {
logger.error(`[${requestId}] Tool not found: ${toolId}`)
throw new Error(`Tool not found: ${toolId}`)
}
try {
validateRequiredParametersAfterMerge(toolId, tool, params)
} catch (validationError) {
logger.warn(`[${requestId}] Tool validation failed for ${toolId}`, {
error: validationError instanceof Error ? validationError.message : String(validationError),
})
const endTime = new Date()
const endTimeISO = endTime.toISOString()
const duration = endTime.getTime() - startTime.getTime()
return createErrorResponse(validationError, 400, {
startTime: startTimeISO,
endTime: endTimeISO,
duration,
})
}
const hasFileOutputs =
tool.outputs &&
Object.values(tool.outputs).some(
(output) => output.type === 'file' || output.type === 'file[]'
)
const result = await executeTool(
toolId,
params,
true, // skipProxy (we're already in the proxy)
!hasFileOutputs, // skipPostProcess (don't skip if tool has file outputs)
undefined // execution context is not available in proxy context
)
if (!result.success) {
logger.warn(`[${requestId}] Tool execution failed for ${toolId}`, {
error: result.error || 'Unknown error',
})
throw new Error(result.error || 'Tool execution failed')
}
const endTime = new Date()
const endTimeISO = endTime.toISOString()
const duration = endTime.getTime() - startTime.getTime()
const responseWithTimingData = {
...result,
startTime: startTimeISO,
endTime: endTimeISO,
duration,
timing: {
startTime: startTimeISO,
endTime: endTimeISO,
duration,
},
}
logger.info(`[${requestId}] Tool executed successfully: ${toolId} (${duration}ms)`)
return formatResponse(responseWithTimingData)
} catch (error: any) {
logger.error(`[${requestId}] Proxy request failed`, {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
name: error instanceof Error ? error.name : undefined,
})
const endTime = new Date()
const endTimeISO = endTime.toISOString()
const duration = endTime.getTime() - startTime.getTime()
return createErrorResponse(error, 500, {
startTime: startTimeISO,
endTime: endTimeISO,
duration,
})
}
}
export async function OPTIONS() {
return new NextResponse(null, {
status: 204,
headers: {
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
'Access-Control-Max-Age': '86400',
},
})
}

View File

@@ -57,6 +57,7 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -92,6 +93,17 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
workflow: {
id: 'id',
userId: 'userId',
workspaceId: 'workspaceId',
},
}
})
@@ -134,6 +146,7 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -169,6 +182,17 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
workflow: {
id: 'id',
userId: 'userId',
workspaceId: 'workspaceId',
},
}
})
@@ -206,6 +230,7 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -228,6 +253,17 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
workflow: {
id: 'id',
userId: 'userId',
workspaceId: 'workspaceId',
},
}
})
@@ -265,6 +301,7 @@ describe('Scheduled Workflow Execution API Route', () => {
not: vi.fn((condition) => ({ type: 'not', condition })),
isNull: vi.fn((field) => ({ type: 'isNull', field })),
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
}))
vi.doMock('@sim/db', () => {
@@ -310,6 +347,17 @@ describe('Scheduled Workflow Execution API Route', () => {
status: 'status',
nextRunAt: 'nextRunAt',
lastQueuedAt: 'lastQueuedAt',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
workflow: {
id: 'id',
userId: 'userId',
workspaceId: 'workspaceId',
},
}
})

View File

@@ -1,7 +1,7 @@
import { db, workflowDeploymentVersion, workflowSchedule } from '@sim/db'
import { createLogger } from '@sim/logger'
import { tasks } from '@trigger.dev/sdk'
import { and, eq, isNull, lt, lte, not, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
@@ -37,7 +37,8 @@ export async function GET(request: NextRequest) {
or(
isNull(workflowSchedule.lastQueuedAt),
lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt)
),
sql`${workflowSchedule.deploymentVersionId} = (select ${workflowDeploymentVersion.id} from ${workflowDeploymentVersion} where ${workflowDeploymentVersion.workflowId} = ${workflowSchedule.workflowId} and ${workflowDeploymentVersion.isActive} = true)`
)
)
.returning({
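The correlated subquery gates dequeuing on the schedule pointing at the workflow's currently active deployment version. Rendered as raw SQL it reads roughly as below; the snake_case names are an assumption based on typical drizzle column naming:
// Approximate SQL emitted for the added condition (illustrative):
// "workflow_schedule"."deployment_version_id" = (
//   select "workflow_deployment_version"."id"
//   from "workflow_deployment_version"
//   where "workflow_deployment_version"."workflow_id" = "workflow_schedule"."workflow_id"
//     and "workflow_deployment_version"."is_active" = true
// )
// Schedules bound to a stale or missing deployment version are never claimed.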

View File

@@ -29,12 +29,23 @@ vi.mock('@sim/db', () => ({
vi.mock('@sim/db/schema', () => ({
workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
workflowSchedule: {
workflowId: 'workflowId',
blockId: 'blockId',
deploymentVersionId: 'deploymentVersionId',
},
workflowDeploymentVersion: {
id: 'id',
workflowId: 'workflowId',
isActive: 'isActive',
},
}))
vi.mock('drizzle-orm', () => ({
eq: vi.fn(),
and: vi.fn(),
or: vi.fn(),
isNull: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
@@ -56,6 +67,11 @@ function mockDbChain(results: any[]) {
where: () => ({
limit: () => results[callIndex++] || [],
}),
leftJoin: () => ({
where: () => ({
limit: () => results[callIndex++] || [],
}),
}),
}),
}))
}
@@ -74,7 +90,16 @@ describe('Schedule GET API', () => {
it('returns schedule data for authorized user', async () => {
mockDbChain([
[{ userId: 'user-1', workspaceId: null }],
[
{
schedule: {
id: 'sched-1',
cronExpression: '0 9 * * *',
status: 'active',
failedCount: 0,
},
},
],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
@@ -128,7 +153,7 @@ describe('Schedule GET API', () => {
it('allows workspace members to view', async () => {
mockDbChain([
[{ userId: 'other-user', workspaceId: 'ws-1' }],
[{ schedule: { id: 'sched-1', status: 'active', failedCount: 0 } }],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
@@ -139,7 +164,7 @@ describe('Schedule GET API', () => {
it('indicates disabled schedule with failures', async () => {
mockDbChain([
[{ userId: 'user-1', workspaceId: null }],
[{ schedule: { id: 'sched-1', status: 'disabled', failedCount: 100 } }],
])
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow, workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
@@ -62,9 +62,24 @@ export async function GET(req: NextRequest) {
}
const schedule = await db
.select({ schedule: workflowSchedule })
.from(workflowSchedule)
.leftJoin(
workflowDeploymentVersion,
and(
eq(workflowDeploymentVersion.workflowId, workflowSchedule.workflowId),
eq(workflowDeploymentVersion.isActive, true)
)
)
.where(
and(
...conditions,
or(
eq(workflowSchedule.deploymentVersionId, workflowDeploymentVersion.id),
and(isNull(workflowDeploymentVersion.id), isNull(workflowSchedule.deploymentVersionId))
)
)
)
.limit(1)
const headers = new Headers()
@@ -74,7 +89,7 @@ export async function GET(req: NextRequest) {
return NextResponse.json({ schedule: null }, { headers })
}
const scheduleData = schedule[0].schedule
const isDisabled = scheduleData.status === 'disabled'
const hasFailures = scheduleData.failedCount > 0

View File

@@ -0,0 +1,193 @@
import { db } from '@sim/db'
import { copilotChats, workflow, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
import { parseWorkflowJson } from '@/lib/workflows/operations/import-export'
import {
loadWorkflowFromNormalizedTables,
saveWorkflowToNormalizedTables,
} from '@/lib/workflows/persistence/utils'
import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer'
const logger = createLogger('SuperUserImportWorkflow')
interface ImportWorkflowRequest {
workflowId: string
targetWorkspaceId: string
}
/**
* POST /api/superuser/import-workflow
*
* Superuser endpoint to import a workflow by ID along with its copilot chats.
* This creates a copy of the workflow in the target workspace with new IDs.
* Only the workflow structure and copilot chats are copied - no deployments,
* webhooks, triggers, or other sensitive data.
*
* Requires both isSuperUser flag AND superUserModeEnabled setting.
*/
export async function POST(request: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { effectiveSuperUser, isSuperUser, superUserModeEnabled } =
await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn('Non-effective-superuser attempted to access import-workflow endpoint', {
userId: session.user.id,
isSuperUser,
superUserModeEnabled,
})
return NextResponse.json({ error: 'Forbidden: Superuser access required' }, { status: 403 })
}
const body: ImportWorkflowRequest = await request.json()
const { workflowId, targetWorkspaceId } = body
if (!workflowId) {
return NextResponse.json({ error: 'workflowId is required' }, { status: 400 })
}
if (!targetWorkspaceId) {
return NextResponse.json({ error: 'targetWorkspaceId is required' }, { status: 400 })
}
// Verify target workspace exists
const [targetWorkspace] = await db
.select({ id: workspace.id, ownerId: workspace.ownerId })
.from(workspace)
.where(eq(workspace.id, targetWorkspaceId))
.limit(1)
if (!targetWorkspace) {
return NextResponse.json({ error: 'Target workspace not found' }, { status: 404 })
}
// Get the source workflow
const [sourceWorkflow] = await db
.select()
.from(workflow)
.where(eq(workflow.id, workflowId))
.limit(1)
if (!sourceWorkflow) {
return NextResponse.json({ error: 'Source workflow not found' }, { status: 404 })
}
// Load the workflow state from normalized tables
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
if (!normalizedData) {
return NextResponse.json(
{ error: 'Workflow has no normalized data - cannot import' },
{ status: 400 }
)
}
// Use existing export logic to create export format
const workflowState = {
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
metadata: {
name: sourceWorkflow.name,
description: sourceWorkflow.description ?? undefined,
color: sourceWorkflow.color,
},
}
const exportData = sanitizeForExport(workflowState)
// Use existing import logic (parseWorkflowJson regenerates IDs automatically)
const { data: importedData, errors } = parseWorkflowJson(JSON.stringify(exportData))
if (!importedData || errors.length > 0) {
return NextResponse.json(
{ error: `Failed to parse workflow: ${errors.join(', ')}` },
{ status: 400 }
)
}
// Create new workflow record
const newWorkflowId = crypto.randomUUID()
const now = new Date()
await db.insert(workflow).values({
id: newWorkflowId,
userId: session.user.id,
workspaceId: targetWorkspaceId,
folderId: null, // Don't copy folder association
name: `[Debug Import] ${sourceWorkflow.name}`,
description: sourceWorkflow.description,
color: sourceWorkflow.color,
lastSynced: now,
createdAt: now,
updatedAt: now,
isDeployed: false, // Never copy deployment status
runCount: 0,
variables: sourceWorkflow.variables || {},
})
// Save using existing persistence logic
const saveResult = await saveWorkflowToNormalizedTables(newWorkflowId, importedData)
if (!saveResult.success) {
// Clean up the workflow record if save failed
await db.delete(workflow).where(eq(workflow.id, newWorkflowId))
return NextResponse.json(
{ error: `Failed to save workflow state: ${saveResult.error}` },
{ status: 500 }
)
}
// Copy copilot chats associated with the source workflow
const sourceCopilotChats = await db
.select()
.from(copilotChats)
.where(eq(copilotChats.workflowId, workflowId))
let copilotChatsImported = 0
for (const chat of sourceCopilotChats) {
await db.insert(copilotChats).values({
userId: session.user.id,
workflowId: newWorkflowId,
title: chat.title ? `[Import] ${chat.title}` : null,
messages: chat.messages,
model: chat.model,
conversationId: null, // Don't copy conversation ID
previewYaml: chat.previewYaml,
planArtifact: chat.planArtifact,
config: chat.config,
createdAt: new Date(),
updatedAt: new Date(),
})
copilotChatsImported++
}
logger.info('Superuser imported workflow', {
userId: session.user.id,
sourceWorkflowId: workflowId,
newWorkflowId,
targetWorkspaceId,
copilotChatsImported,
})
return NextResponse.json({
success: true,
newWorkflowId,
copilotChatsImported,
})
} catch (error) {
logger.error('Error importing workflow', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
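A minimal usage sketch for this endpoint; the path comes from the doc comment above and the response fields from the handler, while the IDs are placeholders:
const res = await fetch('/api/superuser/import-workflow', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    workflowId: 'wf_source', // placeholder IDs
    targetWorkspaceId: 'ws_target',
  }),
})
// On success: { success: true, newWorkflowId: '<uuid>', copilotChatsImported: <n> }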

View File

@@ -0,0 +1,138 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { deleteTable, type TableSchema } from '@/lib/table'
import { accessError, checkAccess, normalizeColumn, verifyTableWorkspace } from '../utils'
const logger = createLogger('TableDetailAPI')
const GetTableSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
})
interface TableRouteParams {
params: Promise<{ tableId: string }>
}
/** GET /api/table/[tableId] - Retrieves a single table's details. */
export async function GET(request: NextRequest, { params }: TableRouteParams) {
const requestId = generateRequestId()
const { tableId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized table access attempt`)
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const validated = GetTableSchema.parse({
workspaceId: searchParams.get('workspaceId'),
})
const result = await checkAccess(tableId, authResult.userId, 'read')
if (!result.ok) return accessError(result, requestId, tableId)
const { table } = result
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
if (!isValidWorkspace) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
logger.info(`[${requestId}] Retrieved table ${tableId} for user ${authResult.userId}`)
const schemaData = table.schema as TableSchema
return NextResponse.json({
success: true,
data: {
table: {
id: table.id,
name: table.name,
description: table.description,
schema: {
columns: schemaData.columns.map(normalizeColumn),
},
rowCount: table.rowCount,
maxRows: table.maxRows,
createdAt:
table.createdAt instanceof Date
? table.createdAt.toISOString()
: String(table.createdAt),
updatedAt:
table.updatedAt instanceof Date
? table.updatedAt.toISOString()
: String(table.updatedAt),
},
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error getting table:`, error)
return NextResponse.json({ error: 'Failed to get table' }, { status: 500 })
}
}
/** DELETE /api/table/[tableId] - Deletes a table and all its rows. */
export async function DELETE(request: NextRequest, { params }: TableRouteParams) {
const requestId = generateRequestId()
const { tableId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized table delete attempt`)
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const validated = GetTableSchema.parse({
workspaceId: searchParams.get('workspaceId'),
})
const result = await checkAccess(tableId, authResult.userId, 'write')
if (!result.ok) return accessError(result, requestId, tableId)
const { table } = result
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
if (!isValidWorkspace) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
await deleteTable(tableId, requestId)
return NextResponse.json({
success: true,
data: {
message: 'Table deleted successfully',
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error deleting table:`, error)
return NextResponse.json({ error: 'Failed to delete table' }, { status: 500 })
}
}
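
A minimal client-side sketch of calling the two handlers above; the tableId and workspaceId values are placeholders, and note that both GET and DELETE read workspaceId from the query string:

// Hypothetical usage of GET/DELETE /api/table/[tableId] (values are illustrative).
async function inspectThenDeleteTable(tableId: string, workspaceId: string) {
  const qs = `workspaceId=${encodeURIComponent(workspaceId)}`
  const res = await fetch(`/api/table/${tableId}?${qs}`)
  if (!res.ok) throw new Error(`GET failed with ${res.status}`)
  const { data } = await res.json()
  console.log(data.table.schema.columns) // normalized: { name, type, required, unique }
  await fetch(`/api/table/${tableId}?${qs}`, { method: 'DELETE' })
}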

View File

@@ -0,0 +1,276 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { RowData, TableSchema } from '@/lib/table'
import { validateRowData } from '@/lib/table'
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'
const logger = createLogger('TableRowAPI')
const GetRowSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
})
const UpdateRowSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})
const DeleteRowSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
})
interface RowRouteParams {
params: Promise<{ tableId: string; rowId: string }>
}
/** GET /api/table/[tableId]/rows/[rowId] - Retrieves a single row. */
export async function GET(request: NextRequest, { params }: RowRouteParams) {
const requestId = generateRequestId()
const { tableId, rowId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const validated = GetRowSchema.parse({
workspaceId: searchParams.get('workspaceId'),
})
const result = await checkAccess(tableId, authResult.userId, 'read')
if (!result.ok) return accessError(result, requestId, tableId)
const { table } = result
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
if (!isValidWorkspace) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
const [row] = await db
.select({
id: userTableRows.id,
data: userTableRows.data,
createdAt: userTableRows.createdAt,
updatedAt: userTableRows.updatedAt,
})
.from(userTableRows)
.where(
and(
eq(userTableRows.id, rowId),
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId)
)
)
.limit(1)
if (!row) {
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
}
logger.info(`[${requestId}] Retrieved row ${rowId} from table ${tableId}`)
return NextResponse.json({
success: true,
data: {
row: {
id: row.id,
data: row.data,
createdAt: row.createdAt.toISOString(),
updatedAt: row.updatedAt.toISOString(),
},
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error getting row:`, error)
return NextResponse.json({ error: 'Failed to get row' }, { status: 500 })
}
}
/** PATCH /api/table/[tableId]/rows/[rowId] - Updates a single row (supports partial updates). */
export async function PATCH(request: NextRequest, { params }: RowRouteParams) {
const requestId = generateRequestId()
const { tableId, rowId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const body: unknown = await request.json()
const validated = UpdateRowSchema.parse(body)
const result = await checkAccess(tableId, authResult.userId, 'write')
if (!result.ok) return accessError(result, requestId, tableId)
const { table } = result
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
if (!isValidWorkspace) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
// Fetch existing row to support partial updates
const [existingRow] = await db
.select({ data: userTableRows.data })
.from(userTableRows)
.where(
and(
eq(userTableRows.id, rowId),
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId)
)
)
.limit(1)
if (!existingRow) {
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
}
// Merge existing data with incoming partial data (incoming takes precedence)
const mergedData = {
...(existingRow.data as RowData),
...(validated.data as RowData),
}
const validation = await validateRowData({
rowData: mergedData,
schema: table.schema as TableSchema,
tableId,
excludeRowId: rowId,
})
if (!validation.valid) return validation.response
const now = new Date()
const [updatedRow] = await db
.update(userTableRows)
.set({
data: mergedData,
updatedAt: now,
})
.where(
and(
eq(userTableRows.id, rowId),
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId)
)
)
.returning()
if (!updatedRow) {
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
}
logger.info(`[${requestId}] Updated row ${rowId} in table ${tableId}`)
return NextResponse.json({
success: true,
data: {
row: {
id: updatedRow.id,
data: updatedRow.data,
createdAt: updatedRow.createdAt.toISOString(),
updatedAt: updatedRow.updatedAt.toISOString(),
},
message: 'Row updated successfully',
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error updating row:`, error)
return NextResponse.json({ error: 'Failed to update row' }, { status: 500 })
}
}
/** DELETE /api/table/[tableId]/rows/[rowId] - Deletes a single row. */
export async function DELETE(request: NextRequest, { params }: RowRouteParams) {
const requestId = generateRequestId()
const { tableId, rowId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const body: unknown = await request.json()
const validated = DeleteRowSchema.parse(body)
const result = await checkAccess(tableId, authResult.userId, 'write')
if (!result.ok) return accessError(result, requestId, tableId)
const { table } = result
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
if (!isValidWorkspace) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
const [deletedRow] = await db
.delete(userTableRows)
.where(
and(
eq(userTableRows.id, rowId),
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId)
)
)
.returning()
if (!deletedRow) {
return NextResponse.json({ error: 'Row not found' }, { status: 404 })
}
logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`)
return NextResponse.json({
success: true,
data: {
message: 'Row deleted successfully',
deletedCount: 1,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error deleting row:`, error)
return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 })
}
}
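
Note the asymmetry in this route: GET reads workspaceId from the query string, while PATCH and DELETE expect it in the JSON body. A sketch of a partial update, which the handler merges over the stored row so omitted columns keep their values (the column name and value are hypothetical):

// Hypothetical PATCH against /api/table/[tableId]/rows/[rowId].
async function archiveRow(tableId: string, rowId: string, workspaceId: string) {
  const res = await fetch(`/api/table/${tableId}/rows/${rowId}`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId, data: { status: 'archived' } }), // other columns untouched
  })
  return res.json() // { success, data: { row, message } }
}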

View File

@@ -0,0 +1,681 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { Filter, RowData, Sort, TableSchema } from '@/lib/table'
import {
checkUniqueConstraintsDb,
getUniqueColumns,
TABLE_LIMITS,
USER_TABLE_ROWS_SQL_NAME,
validateBatchRows,
validateRowAgainstSchema,
validateRowData,
validateRowSize,
} from '@/lib/table'
import { buildFilterClause, buildSortClause } from '@/lib/table/sql'
import { accessError, checkAccess } from '../../utils'
const logger = createLogger('TableRowsAPI')
const InsertRowSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})
const BatchInsertRowsSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
rows: z
.array(z.record(z.unknown()), { required_error: 'Rows array is required' })
.min(1, 'At least one row is required')
.max(1000, 'Cannot insert more than 1000 rows per batch'),
})
const QueryRowsSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
filter: z.record(z.unknown()).optional(),
sort: z.record(z.enum(['asc', 'desc'])).optional(),
limit: z.coerce
.number({ required_error: 'Limit must be a number' })
.int('Limit must be an integer')
.min(1, 'Limit must be at least 1')
.max(TABLE_LIMITS.MAX_QUERY_LIMIT, `Limit cannot exceed ${TABLE_LIMITS.MAX_QUERY_LIMIT}`)
.optional()
.default(100),
offset: z.coerce
.number({ required_error: 'Offset must be a number' })
.int('Offset must be an integer')
.min(0, 'Offset must be 0 or greater')
.optional()
.default(0),
})
const UpdateRowsByFilterSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
  filter: z.record(z.unknown(), { required_error: 'Filter criteria are required' }),
data: z.record(z.unknown(), { required_error: 'Update data is required' }),
limit: z.coerce
.number({ required_error: 'Limit must be a number' })
.int('Limit must be an integer')
.min(1, 'Limit must be at least 1')
.max(1000, 'Cannot update more than 1000 rows per operation')
.optional(),
})
const DeleteRowsByFilterSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
  filter: z.record(z.unknown(), { required_error: 'Filter criteria are required' }),
limit: z.coerce
.number({ required_error: 'Limit must be a number' })
.int('Limit must be an integer')
.min(1, 'Limit must be at least 1')
.max(1000, 'Cannot delete more than 1000 rows per operation')
.optional(),
})
interface TableRowsRouteParams {
params: Promise<{ tableId: string }>
}
async function handleBatchInsert(
requestId: string,
tableId: string,
body: z.infer<typeof BatchInsertRowsSchema>,
userId: string
): Promise<NextResponse> {
const validated = BatchInsertRowsSchema.parse(body)
const accessResult = await checkAccess(tableId, userId, 'write')
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
const { table } = accessResult
if (validated.workspaceId !== table.workspaceId) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
const workspaceId = validated.workspaceId
const remainingCapacity = table.maxRows - table.rowCount
if (remainingCapacity < validated.rows.length) {
return NextResponse.json(
{
error: `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`,
},
{ status: 400 }
)
}
const validation = await validateBatchRows({
rows: validated.rows as RowData[],
schema: table.schema as TableSchema,
tableId,
})
if (!validation.valid) return validation.response
const now = new Date()
const rowsToInsert = validated.rows.map((data) => ({
id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
tableId,
workspaceId,
data,
createdAt: now,
updatedAt: now,
createdBy: userId,
}))
const insertedRows = await db.insert(userTableRows).values(rowsToInsert).returning()
logger.info(`[${requestId}] Batch inserted ${insertedRows.length} rows into table ${tableId}`)
return NextResponse.json({
success: true,
data: {
rows: insertedRows.map((r) => ({
id: r.id,
data: r.data,
createdAt: r.createdAt.toISOString(),
updatedAt: r.updatedAt.toISOString(),
})),
insertedCount: insertedRows.length,
message: `Successfully inserted ${insertedRows.length} rows`,
},
})
}
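// Example batch payload (hypothetical values):
//   { "workspaceId": "ws_123", "rows": [{ "name": "a" }, { "name": "b" }] }
// The POST handler below dispatches here whenever the body contains a `rows` array.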
/** POST /api/table/[tableId]/rows - Inserts row(s). Supports single or batch insert. */
export async function POST(request: NextRequest, { params }: TableRowsRouteParams) {
const requestId = generateRequestId()
const { tableId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const body: unknown = await request.json()
if (
typeof body === 'object' &&
body !== null &&
'rows' in body &&
Array.isArray((body as Record<string, unknown>).rows)
) {
return handleBatchInsert(
requestId,
tableId,
body as z.infer<typeof BatchInsertRowsSchema>,
authResult.userId
)
}
const validated = InsertRowSchema.parse(body)
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
const { table } = accessResult
if (validated.workspaceId !== table.workspaceId) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
const workspaceId = validated.workspaceId
const rowData = validated.data as RowData
const validation = await validateRowData({
rowData,
schema: table.schema as TableSchema,
tableId,
})
if (!validation.valid) return validation.response
if (table.rowCount >= table.maxRows) {
return NextResponse.json(
{ error: `Table row limit reached (${table.maxRows} rows max)` },
{ status: 400 }
)
}
const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}`
const now = new Date()
const [row] = await db
.insert(userTableRows)
.values({
id: rowId,
tableId,
workspaceId,
data: validated.data,
createdAt: now,
updatedAt: now,
createdBy: authResult.userId,
})
.returning()
logger.info(`[${requestId}] Inserted row ${rowId} into table ${tableId}`)
return NextResponse.json({
success: true,
data: {
row: {
id: row.id,
data: row.data,
createdAt: row.createdAt.toISOString(),
updatedAt: row.updatedAt.toISOString(),
},
message: 'Row inserted successfully',
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error inserting row:`, error)
return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 })
}
}
/** GET /api/table/[tableId]/rows - Queries rows with filtering, sorting, and pagination. */
export async function GET(request: NextRequest, { params }: TableRowsRouteParams) {
const requestId = generateRequestId()
const { tableId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const workspaceId = searchParams.get('workspaceId')
const filterParam = searchParams.get('filter')
const sortParam = searchParams.get('sort')
const limit = searchParams.get('limit')
const offset = searchParams.get('offset')
let filter: Record<string, unknown> | undefined
let sort: Sort | undefined
try {
if (filterParam) {
filter = JSON.parse(filterParam) as Record<string, unknown>
}
if (sortParam) {
sort = JSON.parse(sortParam) as Sort
}
} catch {
return NextResponse.json({ error: 'Invalid filter or sort JSON' }, { status: 400 })
}
const validated = QueryRowsSchema.parse({
workspaceId,
filter,
sort,
limit,
offset,
})
const accessResult = await checkAccess(tableId, authResult.userId, 'read')
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
const { table } = accessResult
if (validated.workspaceId !== table.workspaceId) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
const baseConditions = [
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId),
]
if (validated.filter) {
const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
if (filterClause) {
baseConditions.push(filterClause)
}
}
let query = db
.select({
id: userTableRows.id,
data: userTableRows.data,
createdAt: userTableRows.createdAt,
updatedAt: userTableRows.updatedAt,
})
.from(userTableRows)
.where(and(...baseConditions))
if (validated.sort) {
const schema = table.schema as TableSchema
const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns)
if (sortClause) {
query = query.orderBy(sortClause) as typeof query
}
} else {
query = query.orderBy(userTableRows.createdAt) as typeof query
}
const countQuery = db
.select({ count: sql<number>`count(*)` })
.from(userTableRows)
.where(and(...baseConditions))
const [{ count: totalCount }] = await countQuery
const rows = await query.limit(validated.limit).offset(validated.offset)
logger.info(
`[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount})`
)
return NextResponse.json({
success: true,
data: {
rows: rows.map((r) => ({
id: r.id,
data: r.data,
createdAt: r.createdAt.toISOString(),
updatedAt: r.updatedAt.toISOString(),
})),
rowCount: rows.length,
totalCount: Number(totalCount),
limit: validated.limit,
offset: validated.offset,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error querying rows:`, error)
return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 })
}
}
/** PUT /api/table/[tableId]/rows - Updates rows matching filter criteria. */
export async function PUT(request: NextRequest, { params }: TableRowsRouteParams) {
const requestId = generateRequestId()
const { tableId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const body: unknown = await request.json()
const validated = UpdateRowsByFilterSchema.parse(body)
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
const { table } = accessResult
if (validated.workspaceId !== table.workspaceId) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
const updateData = validated.data as RowData
const sizeValidation = validateRowSize(updateData)
if (!sizeValidation.valid) {
return NextResponse.json(
{ error: 'Invalid row data', details: sizeValidation.errors },
{ status: 400 }
)
}
const baseConditions = [
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId),
]
const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
if (filterClause) {
baseConditions.push(filterClause)
}
let matchingRowsQuery = db
.select({
id: userTableRows.id,
data: userTableRows.data,
})
.from(userTableRows)
.where(and(...baseConditions))
if (validated.limit) {
matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
}
const matchingRows = await matchingRowsQuery
if (matchingRows.length === 0) {
return NextResponse.json(
{
success: true,
data: {
message: 'No rows matched the filter criteria',
updatedCount: 0,
},
},
{ status: 200 }
)
}
if (matchingRows.length > TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) {
logger.warn(`[${requestId}] Updating ${matchingRows.length} rows. This may take some time.`)
}
for (const row of matchingRows) {
const existingData = row.data as RowData
const mergedData = { ...existingData, ...updateData }
const rowValidation = validateRowAgainstSchema(mergedData, table.schema as TableSchema)
if (!rowValidation.valid) {
return NextResponse.json(
{
error: 'Updated data does not match schema',
details: rowValidation.errors,
affectedRowId: row.id,
},
{ status: 400 }
)
}
}
const uniqueColumns = getUniqueColumns(table.schema as TableSchema)
if (uniqueColumns.length > 0) {
// If updating multiple rows, check that updateData doesn't set any unique column
// (would cause all rows to have the same value, violating uniqueness)
if (matchingRows.length > 1) {
const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in updateData)
if (uniqueColumnsInUpdate.length > 0) {
return NextResponse.json(
{
error: 'Cannot set unique column values when updating multiple rows',
details: [
`Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. ` +
`Updating ${matchingRows.length} rows with the same value would violate uniqueness.`,
],
},
{ status: 400 }
)
}
}
// Check unique constraints against database for each row
for (const row of matchingRows) {
const existingData = row.data as RowData
const mergedData = { ...existingData, ...updateData }
const uniqueValidation = await checkUniqueConstraintsDb(
tableId,
mergedData,
table.schema as TableSchema,
row.id
)
if (!uniqueValidation.valid) {
return NextResponse.json(
{
error: 'Unique constraint violation',
details: uniqueValidation.errors,
affectedRowId: row.id,
},
{ status: 400 }
)
}
}
}
const now = new Date()
await db.transaction(async (trx) => {
let totalUpdated = 0
for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
const updatePromises = batch.map((row) => {
const existingData = row.data as RowData
return trx
.update(userTableRows)
.set({
data: { ...existingData, ...updateData },
updatedAt: now,
})
.where(eq(userTableRows.id, row.id))
})
await Promise.all(updatePromises)
totalUpdated += batch.length
logger.info(
`[${requestId}] Updated batch ${Math.floor(i / TABLE_LIMITS.UPDATE_BATCH_SIZE) + 1} (${totalUpdated}/${matchingRows.length} rows)`
)
}
})
logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${tableId}`)
return NextResponse.json({
success: true,
data: {
message: 'Rows updated successfully',
updatedCount: matchingRows.length,
updatedRowIds: matchingRows.map((r) => r.id),
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error updating rows by filter:`, error)
const errorMessage = error instanceof Error ? error.message : String(error)
const detailedError = `Failed to update rows: ${errorMessage}`
return NextResponse.json({ error: detailedError }, { status: 500 })
}
}
/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria. */
export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) {
const requestId = generateRequestId()
const { tableId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const body: unknown = await request.json()
const validated = DeleteRowsByFilterSchema.parse(body)
const accessResult = await checkAccess(tableId, authResult.userId, 'write')
if (!accessResult.ok) return accessError(accessResult, requestId, tableId)
const { table } = accessResult
if (validated.workspaceId !== table.workspaceId) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
const baseConditions = [
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId),
]
const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME)
if (filterClause) {
baseConditions.push(filterClause)
}
let matchingRowsQuery = db
.select({ id: userTableRows.id })
.from(userTableRows)
.where(and(...baseConditions))
if (validated.limit) {
matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery
}
const matchingRows = await matchingRowsQuery
if (matchingRows.length === 0) {
return NextResponse.json(
{
success: true,
data: {
message: 'No rows matched the filter criteria',
deletedCount: 0,
},
},
{ status: 200 }
)
}
if (matchingRows.length > TABLE_LIMITS.DELETE_BATCH_SIZE) {
logger.warn(`[${requestId}] Deleting ${matchingRows.length} rows. This may take some time.`)
}
const rowIds = matchingRows.map((r) => r.id)
await db.transaction(async (trx) => {
let totalDeleted = 0
for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) {
const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE)
await trx.delete(userTableRows).where(
and(
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId),
sql`${userTableRows.id} = ANY(ARRAY[${sql.join(
batch.map((id) => sql`${id}`),
sql`, `
)}])`
)
)
totalDeleted += batch.length
logger.info(
`[${requestId}] Deleted batch ${Math.floor(i / TABLE_LIMITS.DELETE_BATCH_SIZE) + 1} (${totalDeleted}/${rowIds.length} rows)`
)
}
})
logger.info(`[${requestId}] Deleted ${matchingRows.length} rows from table ${tableId}`)
return NextResponse.json({
success: true,
data: {
message: 'Rows deleted successfully',
deletedCount: matchingRows.length,
deletedRowIds: rowIds,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error deleting rows by filter:`, error)
const errorMessage = error instanceof Error ? error.message : String(error)
const detailedError = `Failed to delete rows: ${errorMessage}`
return NextResponse.json({ error: detailedError }, { status: 500 })
}
}
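
A sketch of querying this endpoint: filter and sort are passed as JSON-encoded query parameters. The filter shape shown is an assumption; the operators actually supported depend on buildFilterClause, and sort keys should be schema column names:

// Hypothetical GET /api/table/[tableId]/rows with filter, sort, and pagination.
async function queryRows(tableId: string, workspaceId: string) {
  const params = new URLSearchParams({
    workspaceId,
    filter: JSON.stringify({ status: 'active' }), // placeholder filter
    sort: JSON.stringify({ name: 'asc' }), // column -> 'asc' | 'desc'
    limit: '50',
    offset: '0',
  })
  const res = await fetch(`/api/table/${tableId}/rows?${params}`)
  const { data } = await res.json()
  return data // { rows, rowCount, totalCount, limit, offset }
}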

View File

@@ -0,0 +1,182 @@
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import type { RowData, TableSchema } from '@/lib/table'
import { getUniqueColumns, validateRowData } from '@/lib/table'
import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils'
const logger = createLogger('TableUpsertAPI')
const UpsertRowSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
data: z.record(z.unknown(), { required_error: 'Row data is required' }),
})
interface UpsertRouteParams {
params: Promise<{ tableId: string }>
}
/** POST /api/table/[tableId]/rows/upsert - Inserts or updates based on unique columns. */
export async function POST(request: NextRequest, { params }: UpsertRouteParams) {
const requestId = generateRequestId()
const { tableId } = await params
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const body: unknown = await request.json()
const validated = UpsertRowSchema.parse(body)
const result = await checkAccess(tableId, authResult.userId, 'write')
if (!result.ok) return accessError(result, requestId, tableId)
const { table } = result
const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId)
if (!isValidWorkspace) {
logger.warn(
`[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}`
)
return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 })
}
const schema = table.schema as TableSchema
const rowData = validated.data as RowData
const validation = await validateRowData({
rowData,
schema,
tableId,
checkUnique: false,
})
if (!validation.valid) return validation.response
const uniqueColumns = getUniqueColumns(schema)
if (uniqueColumns.length === 0) {
return NextResponse.json(
{
error:
'Upsert requires at least one unique column in the schema. Please add a unique constraint to a column or use insert instead.',
},
{ status: 400 }
)
}
const uniqueFilters = uniqueColumns.map((col) => {
const value = rowData[col.name]
if (value === undefined || value === null) {
return null
}
return sql`${userTableRows.data}->>${col.name} = ${String(value)}`
})
const validUniqueFilters = uniqueFilters.filter((f): f is Exclude<typeof f, null> => f !== null)
if (validUniqueFilters.length === 0) {
return NextResponse.json(
{
error: `Upsert requires values for at least one unique field: ${uniqueColumns.map((c) => c.name).join(', ')}`,
},
{ status: 400 }
)
}
const [existingRow] = await db
.select()
.from(userTableRows)
.where(
and(
eq(userTableRows.tableId, tableId),
eq(userTableRows.workspaceId, validated.workspaceId),
or(...validUniqueFilters)
)
)
.limit(1)
const now = new Date()
if (!existingRow && table.rowCount >= table.maxRows) {
return NextResponse.json(
{ error: `Table row limit reached (${table.maxRows} rows max)` },
{ status: 400 }
)
}
const upsertResult = await db.transaction(async (trx) => {
if (existingRow) {
const [updatedRow] = await trx
.update(userTableRows)
.set({
data: validated.data,
updatedAt: now,
})
.where(eq(userTableRows.id, existingRow.id))
.returning()
return {
row: updatedRow,
operation: 'update' as const,
}
}
const [insertedRow] = await trx
.insert(userTableRows)
.values({
id: `row_${crypto.randomUUID().replace(/-/g, '')}`,
tableId,
workspaceId: validated.workspaceId,
data: validated.data,
createdAt: now,
updatedAt: now,
createdBy: authResult.userId,
})
.returning()
return {
row: insertedRow,
operation: 'insert' as const,
}
})
logger.info(
`[${requestId}] Upserted (${upsertResult.operation}) row ${upsertResult.row.id} in table ${tableId}`
)
return NextResponse.json({
success: true,
data: {
row: {
id: upsertResult.row.id,
data: upsertResult.row.data,
createdAt: upsertResult.row.createdAt.toISOString(),
updatedAt: upsertResult.row.updatedAt.toISOString(),
},
operation: upsertResult.operation,
message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error upserting row:`, error)
const errorMessage = error instanceof Error ? error.message : String(error)
const detailedError = `Failed to upsert row: ${errorMessage}`
return NextResponse.json({ error: detailedError }, { status: 500 })
}
}
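
A sketch of the upsert call: it matches on whichever columns the table schema marks unique (the 'email' column here is hypothetical), and the response reports which branch ran:

// Hypothetical POST /api/table/[tableId]/rows/upsert.
async function upsertContact(tableId: string, workspaceId: string) {
  const res = await fetch(`/api/table/${tableId}/rows/upsert`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId, data: { email: 'a@example.com', plan: 'pro' } }),
  })
  const { data } = await res.json()
  return data.operation // 'insert' | 'update'
}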

View File

@@ -0,0 +1,293 @@
import { db } from '@sim/db'
import { permissions, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import {
canCreateTable,
createTable,
getWorkspaceTableLimits,
listTables,
TABLE_LIMITS,
type TableSchema,
} from '@/lib/table'
import { normalizeColumn } from './utils'
const logger = createLogger('TableAPI')
const ColumnSchema = z.object({
name: z
.string()
.min(1, 'Column name is required')
.max(
TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH,
`Column name must be ${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters or less`
)
.regex(
/^[a-z_][a-z0-9_]*$/i,
'Column name must start with a letter or underscore and contain only alphanumeric characters and underscores'
),
type: z.enum(['string', 'number', 'boolean', 'date', 'json'], {
errorMap: () => ({
message: 'Column type must be one of: string, number, boolean, date, json',
}),
}),
required: z.boolean().optional().default(false),
unique: z.boolean().optional().default(false),
})
const CreateTableSchema = z.object({
name: z
.string()
.min(1, 'Table name is required')
.max(
TABLE_LIMITS.MAX_TABLE_NAME_LENGTH,
`Table name must be ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters or less`
)
.regex(
/^[a-z_][a-z0-9_]*$/i,
'Table name must start with a letter or underscore and contain only alphanumeric characters and underscores'
),
description: z
.string()
.max(
TABLE_LIMITS.MAX_DESCRIPTION_LENGTH,
`Description must be ${TABLE_LIMITS.MAX_DESCRIPTION_LENGTH} characters or less`
)
.optional(),
schema: z.object({
columns: z
.array(ColumnSchema)
.min(1, 'Table must have at least one column')
.max(
TABLE_LIMITS.MAX_COLUMNS_PER_TABLE,
`Table cannot have more than ${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE} columns`
),
}),
workspaceId: z.string().min(1, 'Workspace ID is required'),
})
const ListTablesSchema = z.object({
workspaceId: z.string().min(1, 'Workspace ID is required'),
})
interface WorkspaceAccessResult {
hasAccess: boolean
canWrite: boolean
}
async function checkWorkspaceAccess(
workspaceId: string,
userId: string
): Promise<WorkspaceAccessResult> {
const [workspaceData] = await db
.select({
id: workspace.id,
ownerId: workspace.ownerId,
})
.from(workspace)
.where(eq(workspace.id, workspaceId))
.limit(1)
if (!workspaceData) {
return { hasAccess: false, canWrite: false }
}
if (workspaceData.ownerId === userId) {
return { hasAccess: true, canWrite: true }
}
const [permission] = await db
.select({
permissionType: permissions.permissionType,
})
.from(permissions)
.where(
and(
eq(permissions.userId, userId),
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workspaceId)
)
)
.limit(1)
if (!permission) {
return { hasAccess: false, canWrite: false }
}
const canWrite = permission.permissionType === 'admin' || permission.permissionType === 'write'
return {
hasAccess: true,
canWrite,
}
}
/** POST /api/table - Creates a new user-defined table. */
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const body: unknown = await request.json()
const params = CreateTableSchema.parse(body)
const { hasAccess, canWrite } = await checkWorkspaceAccess(
params.workspaceId,
authResult.userId
)
if (!hasAccess || !canWrite) {
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
// Check billing plan limits
const existingTables = await listTables(params.workspaceId)
const { canCreate, maxTables } = await canCreateTable(params.workspaceId, existingTables.length)
if (!canCreate) {
return NextResponse.json(
{
error: `Workspace has reached the maximum table limit (${maxTables}) for your plan. Please upgrade to create more tables.`,
},
{ status: 403 }
)
}
// Get plan-based row limits
const planLimits = await getWorkspaceTableLimits(params.workspaceId)
const maxRowsPerTable = planLimits.maxRowsPerTable
const normalizedSchema: TableSchema = {
columns: params.schema.columns.map(normalizeColumn),
}
const table = await createTable(
{
name: params.name,
description: params.description,
schema: normalizedSchema,
workspaceId: params.workspaceId,
userId: authResult.userId,
maxRows: maxRowsPerTable,
},
requestId
)
return NextResponse.json({
success: true,
data: {
table: {
id: table.id,
name: table.name,
description: table.description,
schema: table.schema,
rowCount: table.rowCount,
maxRows: table.maxRows,
createdAt:
table.createdAt instanceof Date
? table.createdAt.toISOString()
: String(table.createdAt),
updatedAt:
table.updatedAt instanceof Date
? table.updatedAt.toISOString()
: String(table.updatedAt),
},
message: 'Table created successfully',
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
if (error instanceof Error) {
if (
error.message.includes('Invalid table name') ||
error.message.includes('Invalid schema') ||
error.message.includes('already exists') ||
error.message.includes('maximum table limit')
) {
return NextResponse.json({ error: error.message }, { status: 400 })
}
}
logger.error(`[${requestId}] Error creating table:`, error)
return NextResponse.json({ error: 'Failed to create table' }, { status: 500 })
}
}
/** GET /api/table - Lists all tables in a workspace. */
export async function GET(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkHybridAuth(request)
if (!authResult.success || !authResult.userId) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const workspaceId = searchParams.get('workspaceId')
const validation = ListTablesSchema.safeParse({ workspaceId })
if (!validation.success) {
return NextResponse.json(
{ error: 'Validation error', details: validation.error.errors },
{ status: 400 }
)
}
const params = validation.data
const { hasAccess } = await checkWorkspaceAccess(params.workspaceId, authResult.userId)
if (!hasAccess) {
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
const tables = await listTables(params.workspaceId)
logger.info(`[${requestId}] Listed ${tables.length} tables in workspace ${params.workspaceId}`)
return NextResponse.json({
success: true,
data: {
tables: tables.map((t) => {
const schemaData = t.schema as TableSchema
return {
...t,
schema: {
columns: schemaData.columns.map(normalizeColumn),
},
createdAt:
t.createdAt instanceof Date ? t.createdAt.toISOString() : String(t.createdAt),
updatedAt:
t.updatedAt instanceof Date ? t.updatedAt.toISOString() : String(t.updatedAt),
}
}),
totalCount: tables.length,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Validation error', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error listing tables:`, error)
return NextResponse.json({ error: 'Failed to list tables' }, { status: 500 })
}
}
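
A sketch of creating a table through this endpoint; the names and columns are illustrative and must satisfy the /^[a-z_][a-z0-9_]*$/i pattern the schemas above enforce:

// Hypothetical POST /api/table.
async function createContactsTable(workspaceId: string) {
  const res = await fetch('/api/table', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      workspaceId,
      name: 'contacts',
      description: 'CRM contacts',
      schema: {
        columns: [
          { name: 'email', type: 'string', required: true, unique: true },
          { name: 'age', type: 'number' },
        ],
      },
    }),
  })
  return res.json() // { success, data: { table, message } }
}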

View File

@@ -0,0 +1,188 @@
import { createLogger } from '@sim/logger'
import { NextResponse } from 'next/server'
import type { ColumnDefinition, TableDefinition } from '@/lib/table'
import { getTableById } from '@/lib/table'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('TableUtils')
export interface TableAccessResult {
hasAccess: true
table: TableDefinition
}
export interface TableAccessDenied {
hasAccess: false
notFound?: boolean
reason?: string
}
export type TableAccessCheck = TableAccessResult | TableAccessDenied
export type AccessResult = { ok: true; table: TableDefinition } | { ok: false; status: 404 | 403 }
export interface ApiErrorResponse {
error: string
details?: unknown
}
/**
* Check if a user has read access to a table.
* Read access is granted if:
* 1. User created the table, OR
* 2. User has any permission on the table's workspace (read, write, or admin)
*
* Follows the same pattern as Knowledge Base access checks.
*/
export async function checkTableAccess(tableId: string, userId: string): Promise<TableAccessCheck> {
const table = await getTableById(tableId)
if (!table) {
return { hasAccess: false, notFound: true }
}
// Case 1: User created the table
if (table.createdBy === userId) {
return { hasAccess: true, table }
}
// Case 2: Table belongs to a workspace the user has permissions for
const userPermission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
if (userPermission !== null) {
return { hasAccess: true, table }
}
return { hasAccess: false, reason: 'User does not have access to this table' }
}
/**
* Check if a user has write access to a table.
* Write access is granted if:
* 1. User created the table, OR
* 2. User has write or admin permissions on the table's workspace
*
* Follows the same pattern as Knowledge Base write access checks.
*/
export async function checkTableWriteAccess(
tableId: string,
userId: string
): Promise<TableAccessCheck> {
const table = await getTableById(tableId)
if (!table) {
return { hasAccess: false, notFound: true }
}
// Case 1: User created the table
if (table.createdBy === userId) {
return { hasAccess: true, table }
}
// Case 2: Table belongs to a workspace and user has write/admin permissions
const userPermission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
if (userPermission === 'write' || userPermission === 'admin') {
return { hasAccess: true, table }
}
return { hasAccess: false, reason: 'User does not have write access to this table' }
}
/**
* @deprecated Use checkTableAccess or checkTableWriteAccess instead.
* Legacy access check function for backwards compatibility.
*/
export async function checkAccess(
tableId: string,
userId: string,
level: 'read' | 'write' | 'admin' = 'read'
): Promise<AccessResult> {
const table = await getTableById(tableId)
if (!table) {
return { ok: false, status: 404 }
}
if (table.createdBy === userId) {
return { ok: true, table }
}
const permission = await getUserEntityPermissions(userId, 'workspace', table.workspaceId)
const hasAccess =
permission !== null &&
(level === 'read' ||
(level === 'write' && (permission === 'write' || permission === 'admin')) ||
(level === 'admin' && permission === 'admin'))
return hasAccess ? { ok: true, table } : { ok: false, status: 403 }
}
export function accessError(
result: { ok: false; status: 404 | 403 },
requestId: string,
context?: string
): NextResponse {
const message = result.status === 404 ? 'Table not found' : 'Access denied'
logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`)
return NextResponse.json({ error: message }, { status: result.status })
}
/**
* Converts a TableAccessDenied result to an appropriate HTTP response.
* Use with checkTableAccess or checkTableWriteAccess.
*/
export function tableAccessError(
result: TableAccessDenied,
requestId: string,
context?: string
): NextResponse {
const status = result.notFound ? 404 : 403
const message = result.notFound ? 'Table not found' : (result.reason ?? 'Access denied')
logger.warn(`[${requestId}] ${message}${context ? `: ${context}` : ''}`)
return NextResponse.json({ error: message }, { status })
}
export async function verifyTableWorkspace(tableId: string, workspaceId: string): Promise<boolean> {
const table = await getTableById(tableId)
return table?.workspaceId === workspaceId
}
export function errorResponse(
message: string,
status: number,
details?: unknown
): NextResponse<ApiErrorResponse> {
const body: ApiErrorResponse = { error: message }
if (details !== undefined) {
body.details = details
}
return NextResponse.json(body, { status })
}
export function badRequestResponse(message: string, details?: unknown) {
return errorResponse(message, 400, details)
}
export function unauthorizedResponse(message = 'Authentication required') {
return errorResponse(message, 401)
}
export function forbiddenResponse(message = 'Access denied') {
return errorResponse(message, 403)
}
export function notFoundResponse(message = 'Resource not found') {
return errorResponse(message, 404)
}
export function serverErrorResponse(message = 'Internal server error') {
return errorResponse(message, 500)
}
export function normalizeColumn(col: ColumnDefinition): ColumnDefinition {
return {
name: col.name,
type: col.type,
required: col.required ?? false,
unique: col.unique ?? false,
}
}
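
A sketch of how a route handler is expected to compose checkAccess with accessError; the discriminated union narrows, so the table is only reachable on the ok branch (the handler shape is an assumption mirroring the routes above):

// Hypothetical handler body built from the helpers in this file.
async function handleRead(tableId: string, userId: string, requestId: string) {
  const result = await checkAccess(tableId, userId, 'read')
  if (!result.ok) return accessError(result, requestId, tableId)
  // result.ok is true here, so result.table is typed as TableDefinition
  return NextResponse.json({ success: true, data: { table: result.table } })
}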

View File

@@ -5,7 +5,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifySuperUser } from '@/lib/templates/permissions'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
const logger = createLogger('TemplateApprovalAPI')
@@ -25,8 +25,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
}
@@ -71,8 +71,8 @@ export async function DELETE(
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
}

View File

@@ -5,7 +5,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifySuperUser } from '@/lib/templates/permissions'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
const logger = createLogger('TemplateRejectionAPI')
@@ -25,8 +25,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
}

View File

@@ -3,7 +3,6 @@ import {
templateCreators,
templateStars,
templates,
user,
workflow,
workflowDeploymentVersion,
} from '@sim/db/schema'
@@ -14,6 +13,7 @@ import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
import {
extractRequiredCredentials,
sanitizeCredentials,
@@ -70,8 +70,8 @@ export async function GET(request: NextRequest) {
logger.debug(`[${requestId}] Fetching templates with params:`, params)
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
const isSuperUser = currentUser[0]?.isSuperUser || false
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
const isSuperUser = effectiveSuperUser
// Build query conditions
const conditions = []

View File

@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import {
extractStorageKey,
inferContextFromKey,
isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
export const dynamic = 'force-dynamic'
@@ -47,13 +51,13 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Mistral parse request`, {
filePath: validatedData.filePath,
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
userId,
})
let fileUrl = validatedData.filePath
if (validatedData.filePath?.includes('/api/files/serve/')) {
if (isInternalFileUrl(validatedData.filePath)) {
try {
const storageKey = extractStorageKey(validatedData.filePath)

View File

@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import {
extractStorageKey,
inferContextFromKey,
isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
export const dynamic = 'force-dynamic'
@@ -48,13 +52,13 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Pulse parse request`, {
filePath: validatedData.filePath,
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
userId,
})
let fileUrl = validatedData.filePath
if (validatedData.filePath?.includes('/api/files/serve/')) {
if (isInternalFileUrl(validatedData.filePath)) {
try {
const storageKey = extractStorageKey(validatedData.filePath)
const context = inferContextFromKey(storageKey)

View File

@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import {
extractStorageKey,
inferContextFromKey,
isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
export const dynamic = 'force-dynamic'
@@ -44,13 +48,13 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Reducto parse request`, {
filePath: validatedData.filePath,
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
userId,
})
let fileUrl = validatedData.filePath
if (validatedData.filePath?.includes('/api/files/serve/')) {
if (isInternalFileUrl(validatedData.filePath)) {
try {
const storageKey = extractStorageKey(validatedData.filePath)
const context = inferContextFromKey(storageKey)

View File

@@ -79,11 +79,13 @@ export async function POST(request: NextRequest) {
// Generate public URL for destination (properly encode the destination key)
const encodedDestKey = validatedData.destinationKey.split('/').map(encodeURIComponent).join('/')
const url = `https://${validatedData.destinationBucket}.s3.${validatedData.region}.amazonaws.com/${encodedDestKey}`
const uri = `s3://${validatedData.destinationBucket}/${validatedData.destinationKey}`
return NextResponse.json({
success: true,
output: {
url,
uri,
copySourceVersionId: result.CopySourceVersionId,
versionId: result.VersionId,
etag: result.CopyObjectResult?.ETag,

View File

@@ -117,11 +117,13 @@ export async function POST(request: NextRequest) {
const encodedKey = validatedData.objectKey.split('/').map(encodeURIComponent).join('/')
const url = `https://${validatedData.bucketName}.s3.${validatedData.region}.amazonaws.com/${encodedKey}`
const uri = `s3://${validatedData.bucketName}/${validatedData.objectKey}`
return NextResponse.json({
success: true,
output: {
url,
uri,
etag: result.ETag,
location: url,
key: validatedData.objectKey,

View File

@@ -0,0 +1,637 @@
import crypto from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import {
validateAwsRegion,
validateExternalUrl,
validateS3BucketName,
} from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { StorageService } from '@/lib/uploads'
import {
extractStorageKey,
inferContextFromKey,
isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
export const dynamic = 'force-dynamic'
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
const logger = createLogger('TextractParseAPI')
const QuerySchema = z.object({
Text: z.string().min(1),
Alias: z.string().optional(),
Pages: z.array(z.string()).optional(),
})
const TextractParseSchema = z
.object({
accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
region: z.string().min(1, 'AWS region is required'),
processingMode: z.enum(['sync', 'async']).optional().default('sync'),
filePath: z.string().optional(),
s3Uri: z.string().optional(),
featureTypes: z
.array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
.optional(),
queries: z.array(QuerySchema).optional(),
})
.superRefine((data, ctx) => {
const regionValidation = validateAwsRegion(data.region, 'AWS region')
if (!regionValidation.isValid) {
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: regionValidation.error,
path: ['region'],
})
}
})
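/**
 * Derives the AWS Signature V4 signing key: a chain of HMAC-SHA256 operations
 * over the date stamp, region, service name, and the literal 'aws4_request'.
 */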
function getSignatureKey(
key: string,
dateStamp: string,
regionName: string,
serviceName: string
): Buffer {
const kDate = crypto.createHmac('sha256', `AWS4${key}`).update(dateStamp).digest()
const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest()
const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest()
const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest()
return kSigning
}
function signAwsRequest(
method: string,
host: string,
uri: string,
body: string,
accessKeyId: string,
secretAccessKey: string,
region: string,
service: string,
amzTarget: string
): Record<string, string> {
const date = new Date()
const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '')
const dateStamp = amzDate.slice(0, 8)
const payloadHash = crypto.createHash('sha256').update(body).digest('hex')
const canonicalHeaders =
`content-type:application/x-amz-json-1.1\n` +
`host:${host}\n` +
`x-amz-date:${amzDate}\n` +
`x-amz-target:${amzTarget}\n`
const signedHeaders = 'content-type;host;x-amz-date;x-amz-target'
const canonicalRequest = `${method}\n${uri}\n\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`
const algorithm = 'AWS4-HMAC-SHA256'
const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`
const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${crypto.createHash('sha256').update(canonicalRequest).digest('hex')}`
const signingKey = getSignatureKey(secretAccessKey, dateStamp, region, service)
const signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex')
const authorizationHeader = `${algorithm} Credential=${accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`
return {
'Content-Type': 'application/x-amz-json-1.1',
Host: host,
'X-Amz-Date': amzDate,
'X-Amz-Target': amzTarget,
Authorization: authorizationHeader,
}
}
async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
const response = await fetch(url)
if (!response.ok) {
throw new Error(`Failed to fetch document: ${response.statusText}`)
}
const arrayBuffer = await response.arrayBuffer()
const bytes = Buffer.from(arrayBuffer).toString('base64')
const contentType = response.headers.get('content-type') || 'application/octet-stream'
return { bytes, contentType }
}
function parseS3Uri(s3Uri: string): { bucket: string; key: string } {
const match = s3Uri.match(/^s3:\/\/([^/]+)\/(.+)$/)
if (!match) {
throw new Error(
`Invalid S3 URI format: ${s3Uri}. Expected format: s3://bucket-name/path/to/object`
)
}
const bucket = match[1]
const key = match[2]
const bucketValidation = validateS3BucketName(bucket, 'S3 bucket name')
if (!bucketValidation.isValid) {
throw new Error(bucketValidation.error)
}
if (key.includes('..') || key.startsWith('/')) {
throw new Error('S3 key contains invalid path traversal sequences')
}
return { bucket, key }
}
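// Example: parseS3Uri('s3://my-bucket/reports/q1.pdf')
//   => { bucket: 'my-bucket', key: 'reports/q1.pdf' }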
function sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms))
}
async function callTextractAsync(
host: string,
amzTarget: string,
body: Record<string, unknown>,
accessKeyId: string,
secretAccessKey: string,
region: string
): Promise<Record<string, unknown>> {
const bodyString = JSON.stringify(body)
const headers = signAwsRequest(
'POST',
host,
'/',
bodyString,
accessKeyId,
secretAccessKey,
region,
'textract',
amzTarget
)
const response = await fetch(`https://${host}/`, {
method: 'POST',
headers,
body: bodyString,
})
if (!response.ok) {
const errorText = await response.text()
let errorMessage = `Textract API error: ${response.statusText}`
try {
const errorJson = JSON.parse(errorText)
if (errorJson.Message) {
errorMessage = errorJson.Message
} else if (errorJson.__type) {
errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}`
}
} catch {
// Use default error message
}
throw new Error(errorMessage)
}
return response.json()
}
async function pollForJobCompletion(
host: string,
jobId: string,
accessKeyId: string,
secretAccessKey: string,
region: string,
useAnalyzeDocument: boolean,
requestId: string
): Promise<Record<string, unknown>> {
const pollIntervalMs = 5000 // 5 seconds between polls
const maxPollTimeMs = 180000 // 3 minutes maximum polling time
const maxAttempts = Math.ceil(maxPollTimeMs / pollIntervalMs)
const getTarget = useAnalyzeDocument
? 'Textract.GetDocumentAnalysis'
: 'Textract.GetDocumentTextDetection'
for (let attempt = 0; attempt < maxAttempts; attempt++) {
const result = await callTextractAsync(
host,
getTarget,
{ JobId: jobId },
accessKeyId,
secretAccessKey,
region
)
const jobStatus = result.JobStatus as string
if (jobStatus === 'SUCCEEDED') {
logger.info(`[${requestId}] Async job completed successfully after ${attempt + 1} polls`)
let allBlocks = (result.Blocks as unknown[]) || []
let nextToken = result.NextToken as string | undefined
while (nextToken) {
const nextResult = await callTextractAsync(
host,
getTarget,
{ JobId: jobId, NextToken: nextToken },
accessKeyId,
secretAccessKey,
region
)
allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || [])
nextToken = nextResult.NextToken as string | undefined
}
return {
...result,
Blocks: allBlocks,
}
}
if (jobStatus === 'FAILED') {
throw new Error(`Textract job failed: ${result.StatusMessage || 'Unknown error'}`)
}
if (jobStatus === 'PARTIAL_SUCCESS') {
logger.warn(`[${requestId}] Job completed with partial success: ${result.StatusMessage}`)
let allBlocks = (result.Blocks as unknown[]) || []
let nextToken = result.NextToken as string | undefined
while (nextToken) {
const nextResult = await callTextractAsync(
host,
getTarget,
{ JobId: jobId, NextToken: nextToken },
accessKeyId,
secretAccessKey,
region
)
allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || [])
nextToken = nextResult.NextToken as string | undefined
}
return {
...result,
Blocks: allBlocks,
}
}
logger.info(`[${requestId}] Job status: ${jobStatus}, attempt ${attempt + 1}/${maxAttempts}`)
await sleep(pollIntervalMs)
}
throw new Error(
`Timeout waiting for Textract job to complete (max ${maxPollTimeMs / 1000} seconds)`
)
}
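// With the constants above, a job is polled at most 36 times
// (180000 ms / 5000 ms), sleeping 5 s between GetDocument* calls; each
// terminal status also walks NextToken pages so callers always receive the
// full Blocks array.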
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized Textract parse attempt`, {
error: authResult.error || 'Missing userId',
})
return NextResponse.json(
{
success: false,
error: authResult.error || 'Unauthorized',
},
{ status: 401 }
)
}
const userId = authResult.userId
const body = await request.json()
const validatedData = TextractParseSchema.parse(body)
const processingMode = validatedData.processingMode || 'sync'
const featureTypes = validatedData.featureTypes ?? []
const useAnalyzeDocument = featureTypes.length > 0
const host = `textract.${validatedData.region}.amazonaws.com`
logger.info(`[${requestId}] Textract parse request`, {
processingMode,
filePath: validatedData.filePath?.substring(0, 50),
s3Uri: validatedData.s3Uri?.substring(0, 50),
featureTypes,
userId,
})
if (processingMode === 'async') {
if (!validatedData.s3Uri) {
return NextResponse.json(
{
success: false,
error: 'S3 URI is required for multi-page processing (s3://bucket/key)',
},
{ status: 400 }
)
}
const { bucket: s3Bucket, key: s3Key } = parseS3Uri(validatedData.s3Uri)
logger.info(`[${requestId}] Starting async Textract job`, { s3Bucket, s3Key })
const startTarget = useAnalyzeDocument
? 'Textract.StartDocumentAnalysis'
: 'Textract.StartDocumentTextDetection'
const startBody: Record<string, unknown> = {
DocumentLocation: {
S3Object: {
Bucket: s3Bucket,
Name: s3Key,
},
},
}
if (useAnalyzeDocument) {
startBody.FeatureTypes = featureTypes
if (
validatedData.queries &&
validatedData.queries.length > 0 &&
featureTypes.includes('QUERIES')
) {
startBody.QueriesConfig = {
Queries: validatedData.queries.map((q) => ({
Text: q.Text,
Alias: q.Alias,
Pages: q.Pages,
})),
}
}
}
const startResult = await callTextractAsync(
host,
startTarget,
startBody,
validatedData.accessKeyId,
validatedData.secretAccessKey,
validatedData.region
)
const jobId = startResult.JobId as string
if (!jobId) {
throw new Error('Failed to start Textract job: No JobId returned')
}
logger.info(`[${requestId}] Async job started`, { jobId })
const textractData = await pollForJobCompletion(
host,
jobId,
validatedData.accessKeyId,
validatedData.secretAccessKey,
validatedData.region,
useAnalyzeDocument,
requestId
)
logger.info(`[${requestId}] Textract async parse successful`, {
pageCount: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
blockCount: (textractData.Blocks as unknown[])?.length ?? 0,
})
return NextResponse.json({
success: true,
output: {
blocks: textractData.Blocks ?? [],
documentMetadata: {
pages: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
},
modelVersion: (textractData.AnalyzeDocumentModelVersion ??
textractData.DetectDocumentTextModelVersion) as string | undefined,
},
})
}
if (!validatedData.filePath) {
return NextResponse.json(
{
success: false,
error: 'File path is required for single-page processing',
},
{ status: 400 }
)
}
let fileUrl = validatedData.filePath
const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)
if (isInternalFilePath) {
try {
const storageKey = extractStorageKey(validatedData.filePath)
const context = inferContextFromKey(storageKey)
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
if (!hasAccess) {
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
userId,
key: storageKey,
context,
})
return NextResponse.json(
{
success: false,
error: 'File not found',
},
{ status: 404 }
)
}
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
} catch (error) {
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
return NextResponse.json(
{
success: false,
error: 'Failed to generate file access URL',
},
{ status: 500 }
)
}
} else if (validatedData.filePath?.startsWith('/')) {
// Reject arbitrary absolute paths that don't contain /api/files/serve/
logger.warn(`[${requestId}] Invalid internal path`, {
userId,
path: validatedData.filePath.substring(0, 50),
})
return NextResponse.json(
{
success: false,
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
},
{ status: 400 }
)
} else {
const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] SSRF attempt blocked`, {
userId,
url: fileUrl.substring(0, 100),
error: urlValidation.error,
})
return NextResponse.json(
{
success: false,
error: urlValidation.error,
},
{ status: 400 }
)
}
}
const { bytes, contentType } = await fetchDocumentBytes(fileUrl)
// Track if this is a PDF for better error messaging
const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
const uri = '/'
let textractBody: Record<string, unknown>
let amzTarget: string
if (useAnalyzeDocument) {
amzTarget = 'Textract.AnalyzeDocument'
textractBody = {
Document: {
Bytes: bytes,
},
FeatureTypes: featureTypes,
}
if (
validatedData.queries &&
validatedData.queries.length > 0 &&
featureTypes.includes('QUERIES')
) {
textractBody.QueriesConfig = {
Queries: validatedData.queries.map((q) => ({
Text: q.Text,
Alias: q.Alias,
Pages: q.Pages,
})),
}
}
} else {
amzTarget = 'Textract.DetectDocumentText'
textractBody = {
Document: {
Bytes: bytes,
},
}
}
const bodyString = JSON.stringify(textractBody)
const headers = signAwsRequest(
'POST',
host,
uri,
bodyString,
validatedData.accessKeyId,
validatedData.secretAccessKey,
validatedData.region,
'textract',
amzTarget
)
const textractResponse = await fetch(`https://${host}${uri}`, {
method: 'POST',
headers,
body: bodyString,
})
if (!textractResponse.ok) {
const errorText = await textractResponse.text()
logger.error(`[${requestId}] Textract API error:`, errorText)
let errorMessage = `Textract API error: ${textractResponse.statusText}`
let isUnsupportedFormat = false
try {
const errorJson = JSON.parse(errorText)
if (errorJson.Message) {
errorMessage = errorJson.Message
} else if (errorJson.__type) {
errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}`
}
// Check for unsupported document format error
isUnsupportedFormat =
errorJson.__type === 'UnsupportedDocumentException' ||
errorJson.Message?.toLowerCase().includes('unsupported document') ||
errorText.toLowerCase().includes('unsupported document')
} catch {
isUnsupportedFormat = errorText.toLowerCase().includes('unsupported document')
}
// Provide helpful message for unsupported format (likely multi-page PDF)
if (isUnsupportedFormat && isPdf) {
errorMessage =
'This document format is not supported in Single Page mode. If this is a multi-page PDF, please use "Multi-Page (PDF, TIFF via S3)" mode instead, which requires uploading your document to S3 first. Single Page mode only supports JPEG, PNG, and single-page PDF files.'
}
return NextResponse.json(
{
success: false,
error: errorMessage,
},
{ status: textractResponse.status }
)
}
const textractData = await textractResponse.json()
logger.info(`[${requestId}] Textract parse successful`, {
pageCount: textractData.DocumentMetadata?.Pages ?? 0,
blockCount: textractData.Blocks?.length ?? 0,
})
return NextResponse.json({
success: true,
output: {
blocks: textractData.Blocks ?? [],
documentMetadata: {
pages: textractData.DocumentMetadata?.Pages ?? 0,
},
modelVersion:
textractData.AnalyzeDocumentModelVersion ??
textractData.DetectDocumentTextModelVersion ??
undefined,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{
success: false,
error: 'Invalid request data',
details: error.errors,
},
{ status: 400 }
)
}
logger.error(`[${requestId}] Error in Textract parse:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Internal server error',
},
{ status: 500 }
)
}
}
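// A minimal sketch of invoking this handler from a client. The route path is
// an assumption (it is not shown in this diff); field names follow
// TextractParseSchema above, and the credentials are hypothetical.
//
//   const res = await fetch('/api/tools/textract/parse', {
//     method: 'POST',
//     headers: { 'Content-Type': 'application/json' },
//     body: JSON.stringify({
//       accessKeyId: 'AKIA...',             // hypothetical
//       secretAccessKey: '...',             // hypothetical
//       region: 'us-east-1',
//       processingMode: 'async',
//       s3Uri: 's3://my-bucket/report.pdf',
//       featureTypes: ['TABLES', 'QUERIES'],
//       queries: [{ Text: 'What is the invoice total?' }],
//     }),
//   })
//   const { success, output } = await res.json()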

View File

@@ -550,6 +550,8 @@ export interface AdminUserBilling {
totalWebhookTriggers: number
totalScheduledExecutions: number
totalChatExecutions: number
+ totalMcpExecutions: number
+ totalA2aExecutions: number
totalTokensUsed: number
totalCost: string
currentUsageLimit: string | null

View File

@@ -97,6 +97,8 @@ export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
totalWebhookTriggers: stats?.totalWebhookTriggers ?? 0,
totalScheduledExecutions: stats?.totalScheduledExecutions ?? 0,
totalChatExecutions: stats?.totalChatExecutions ?? 0,
+ totalMcpExecutions: stats?.totalMcpExecutions ?? 0,
+ totalA2aExecutions: stats?.totalA2aExecutions ?? 0,
totalTokensUsed: stats?.totalTokensUsed ?? 0,
totalCost: stats?.totalCost ?? '0',
currentUsageLimit: stats?.currentUsageLimit ?? null,

View File

@@ -60,7 +60,17 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
return internalErrorResponse(deployResult.error || 'Failed to deploy workflow')
}
- const scheduleResult = await createSchedulesForDeploy(workflowId, normalizedData.blocks, db)
+ if (!deployResult.deploymentVersionId) {
+ await undeployWorkflow({ workflowId })
+ return internalErrorResponse('Failed to resolve deployment version')
+ }
+ const scheduleResult = await createSchedulesForDeploy(
+ workflowId,
+ normalizedData.blocks,
+ db,
+ deployResult.deploymentVersionId
+ )
if (!scheduleResult.success) {
logger.warn(`Schedule creation failed for workflow ${workflowId}: ${scheduleResult.error}`)
}

View File

@@ -19,7 +19,7 @@ export interface RateLimitResult {
export async function checkRateLimit(
request: NextRequest,
- endpoint: 'logs' | 'logs-detail' = 'logs'
+ endpoint: 'logs' | 'logs-detail' | 'workflows' | 'workflow-detail' = 'logs'
): Promise<RateLimitResult> {
try {
const auth = await authenticateV1Request(request)

View File

@@ -0,0 +1,102 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowBlocks } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
const logger = createLogger('V1WorkflowDetailsAPI')
export const revalidate = 0
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const rateLimit = await checkRateLimit(request, 'workflow-detail')
if (!rateLimit.allowed) {
return createRateLimitResponse(rateLimit)
}
const userId = rateLimit.userId!
const { id } = await params
logger.info(`[${requestId}] Fetching workflow details for ${id}`, { userId })
const rows = await db
.select({
id: workflow.id,
name: workflow.name,
description: workflow.description,
color: workflow.color,
folderId: workflow.folderId,
workspaceId: workflow.workspaceId,
isDeployed: workflow.isDeployed,
deployedAt: workflow.deployedAt,
runCount: workflow.runCount,
lastRunAt: workflow.lastRunAt,
variables: workflow.variables,
createdAt: workflow.createdAt,
updatedAt: workflow.updatedAt,
})
.from(workflow)
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflow.workspaceId),
eq(permissions.userId, userId)
)
)
.where(eq(workflow.id, id))
.limit(1)
const workflowData = rows[0]
if (!workflowData) {
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
}
const blockRows = await db
.select({
id: workflowBlocks.id,
type: workflowBlocks.type,
subBlocks: workflowBlocks.subBlocks,
})
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, id))
const blocksRecord = Object.fromEntries(
blockRows.map((block) => [block.id, { type: block.type, subBlocks: block.subBlocks }])
)
const inputs = extractInputFieldsFromBlocks(blocksRecord)
const response = {
id: workflowData.id,
name: workflowData.name,
description: workflowData.description,
color: workflowData.color,
folderId: workflowData.folderId,
workspaceId: workflowData.workspaceId,
isDeployed: workflowData.isDeployed,
deployedAt: workflowData.deployedAt?.toISOString() || null,
runCount: workflowData.runCount,
lastRunAt: workflowData.lastRunAt?.toISOString() || null,
variables: workflowData.variables || {},
inputs,
createdAt: workflowData.createdAt.toISOString(),
updatedAt: workflowData.updatedAt.toISOString(),
}
const limits = await getUserLimits(userId)
const apiResponse = createApiResponse({ data: response }, limits, rateLimit)
return NextResponse.json(apiResponse.body, { headers: apiResponse.headers })
} catch (error: unknown) {
const message = error instanceof Error ? error.message : 'Unknown error'
logger.error(`[${requestId}] Workflow details fetch error`, { error: message })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
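// Example (hypothetical call; the concrete path comes from this file's
// location, which is not shown in the diff):
//   GET /api/v1/workflows/{id}, authenticated via the v1 API-key middleware
//   (authenticateV1Request, per the middleware hunk above). Returns 404
//   unless the caller holds a workspace permission row for the workflow's
//   workspace, since the permissions inner join filters by userId.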

View File

@@ -0,0 +1,184 @@
import { db } from '@sim/db'
import { permissions, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, gt, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
const logger = createLogger('V1WorkflowsAPI')
export const dynamic = 'force-dynamic'
export const revalidate = 0
const QueryParamsSchema = z.object({
workspaceId: z.string(),
folderId: z.string().optional(),
deployedOnly: z.coerce.boolean().optional().default(false),
limit: z.coerce.number().min(1).max(100).optional().default(50),
cursor: z.string().optional(),
})
interface CursorData {
sortOrder: number
createdAt: string
id: string
}
function encodeCursor(data: CursorData): string {
return Buffer.from(JSON.stringify(data)).toString('base64')
}
function decodeCursor(cursor: string): CursorData | null {
try {
return JSON.parse(Buffer.from(cursor, 'base64').toString())
} catch {
return null
}
}
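// Round-trip sketch with hypothetical values:
//   const cursor = encodeCursor({
//     sortOrder: 3,
//     createdAt: '2026-01-21T00:00:00.000Z',
//     id: 'wf_123',
//   })
//   decodeCursor(cursor)
//   // => { sortOrder: 3, createdAt: '2026-01-21T00:00:00.000Z', id: 'wf_123' }
// Malformed cursors decode to null rather than throwing, so a bad ?cursor=
// simply restarts pagination from the first page.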
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const rateLimit = await checkRateLimit(request, 'workflows')
if (!rateLimit.allowed) {
return createRateLimitResponse(rateLimit)
}
const userId = rateLimit.userId!
const { searchParams } = new URL(request.url)
const rawParams = Object.fromEntries(searchParams.entries())
const validationResult = QueryParamsSchema.safeParse(rawParams)
if (!validationResult.success) {
return NextResponse.json(
{ error: 'Invalid parameters', details: validationResult.error.errors },
{ status: 400 }
)
}
const params = validationResult.data
logger.info(`[${requestId}] Fetching workflows for workspace ${params.workspaceId}`, {
userId,
filters: {
folderId: params.folderId,
deployedOnly: params.deployedOnly,
},
})
const conditions = [
eq(workflow.workspaceId, params.workspaceId),
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, params.workspaceId),
eq(permissions.userId, userId),
]
if (params.folderId) {
conditions.push(eq(workflow.folderId, params.folderId))
}
if (params.deployedOnly) {
conditions.push(eq(workflow.isDeployed, true))
}
if (params.cursor) {
const cursorData = decodeCursor(params.cursor)
if (cursorData) {
const cursorCondition = or(
gt(workflow.sortOrder, cursorData.sortOrder),
and(
eq(workflow.sortOrder, cursorData.sortOrder),
gt(workflow.createdAt, new Date(cursorData.createdAt))
),
and(
eq(workflow.sortOrder, cursorData.sortOrder),
eq(workflow.createdAt, new Date(cursorData.createdAt)),
gt(workflow.id, cursorData.id)
)
)
if (cursorCondition) {
conditions.push(cursorCondition)
}
}
}
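// The three-way OR above is keyset pagination over the compound sort key
// (sortOrder, createdAt, id): a row qualifies if it is strictly after the
// cursor on the first key, or tied on earlier keys and strictly after on
// the next one, matching the ORDER BY below.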
const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)]
const rows = await db
.select({
id: workflow.id,
name: workflow.name,
description: workflow.description,
color: workflow.color,
folderId: workflow.folderId,
workspaceId: workflow.workspaceId,
isDeployed: workflow.isDeployed,
deployedAt: workflow.deployedAt,
runCount: workflow.runCount,
lastRunAt: workflow.lastRunAt,
sortOrder: workflow.sortOrder,
createdAt: workflow.createdAt,
updatedAt: workflow.updatedAt,
})
.from(workflow)
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, params.workspaceId),
eq(permissions.userId, userId)
)
)
.where(and(...conditions))
.orderBy(...orderByClause)
.limit(params.limit + 1)
const hasMore = rows.length > params.limit
const data = rows.slice(0, params.limit)
let nextCursor: string | undefined
if (hasMore && data.length > 0) {
const lastWorkflow = data[data.length - 1]
nextCursor = encodeCursor({
sortOrder: lastWorkflow.sortOrder,
createdAt: lastWorkflow.createdAt.toISOString(),
id: lastWorkflow.id,
})
}
const formattedWorkflows = data.map((w) => ({
id: w.id,
name: w.name,
description: w.description,
color: w.color,
folderId: w.folderId,
workspaceId: w.workspaceId,
isDeployed: w.isDeployed,
deployedAt: w.deployedAt?.toISOString() || null,
runCount: w.runCount,
lastRunAt: w.lastRunAt?.toISOString() || null,
createdAt: w.createdAt.toISOString(),
updatedAt: w.updatedAt.toISOString(),
}))
const limits = await getUserLimits(userId)
const response = createApiResponse(
{
data: formattedWorkflows,
nextCursor,
},
limits,
rateLimit
)
return NextResponse.json(response.body, { headers: response.headers })
} catch (error: unknown) {
const message = error instanceof Error ? error.message : 'Unknown error'
logger.error(`[${requestId}] Workflows fetch error`, { error: message })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
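// Example (hypothetical workspace id):
//   GET /api/v1/workflows?workspaceId=ws_123&deployedOnly=true&limit=50
// then pass the returned nextCursor back as ?cursor=... until it is absent.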

View File

@@ -11,6 +11,7 @@ import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
import { env } from '@/lib/core/config/env'
import { getCostMultiplier, isBillingEnabled } from '@/lib/core/config/feature-flags'
import { generateRequestId } from '@/lib/core/utils/request'
+ import { enrichTableSchema } from '@/lib/table/llm/wand'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
import { getModelPricing } from '@/providers/utils'
@@ -60,6 +61,7 @@ interface RequestBody {
history?: ChatMessage[]
workflowId?: string
generationType?: string
+ wandContext?: Record<string, unknown>
}
function safeStringify(value: unknown): string {
@@ -70,6 +72,38 @@ function safeStringify(value: unknown): string {
}
}
+ /**
+ * Wand enricher function type.
+ * Enrichers add context to the system prompt based on generationType.
+ */
+ type WandEnricher = (
+ workspaceId: string | null,
+ context: Record<string, unknown>
+ ) => Promise<string | null>
+ /**
+ * Registry of wand enrichers by generationType.
+ * Each enricher returns additional context to append to the system prompt.
+ */
+ const wandEnrichers: Partial<Record<string, WandEnricher>> = {
+ timestamp: async () => {
+ const now = new Date()
+ return `Current date and time context for reference:
+ - Current UTC timestamp: ${now.toISOString()}
+ - Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)}
+ - Current Unix timestamp (milliseconds): ${now.getTime()}
+ - Current date (UTC): ${now.toISOString().split('T')[0]}
+ - Current year: ${now.getUTCFullYear()}
+ - Current month: ${now.getUTCMonth() + 1}
+ - Current day of month: ${now.getUTCDate()}
+ - Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]}
+ Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.`
+ },
+ 'table-schema': enrichTableSchema,
+ }
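// A hypothetical enricher, shown only to illustrate the registry contract
// (it is not part of this diff): enrichers receive the workspace id and the
// request's wandContext and return extra system-prompt text, or null to
// contribute nothing.
//
//   wandEnrichers['echo-note'] = async (_workspaceId, context) =>
//     typeof context.note === 'string' ? `Operator note: ${context.note}` : null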
async function updateUserStatsForWand(
userId: string,
usage: {
@@ -159,7 +193,15 @@ export async function POST(req: NextRequest) {
try {
const body = (await req.json()) as RequestBody
- const { prompt, systemPrompt, stream = false, history = [], workflowId, generationType } = body
+ const {
+ prompt,
+ systemPrompt,
+ stream = false,
+ history = [],
+ workflowId,
+ generationType,
+ wandContext = {},
+ } = body
if (!prompt) {
logger.warn(`[${requestId}] Invalid request: Missing prompt.`)
@@ -227,20 +269,15 @@ export async function POST(req: NextRequest) {
systemPrompt ||
'You are a helpful AI assistant. Generate content exactly as requested by the user.'
- if (generationType === 'timestamp') {
- const now = new Date()
- const currentTimeContext = `\n\nCurrent date and time context for reference:
- - Current UTC timestamp: ${now.toISOString()}
- - Current Unix timestamp (seconds): ${Math.floor(now.getTime() / 1000)}
- - Current Unix timestamp (milliseconds): ${now.getTime()}
- - Current date (UTC): ${now.toISOString().split('T')[0]}
- - Current year: ${now.getUTCFullYear()}
- - Current month: ${now.getUTCMonth() + 1}
- - Current day of month: ${now.getUTCDate()}
- - Current day of week: ${['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][now.getUTCDay()]}
- Use this context to calculate relative dates like "yesterday", "last week", "beginning of this month", etc.`
- finalSystemPrompt += currentTimeContext
+ // Apply enricher if one exists for this generationType
+ if (generationType) {
+ const enricher = wandEnrichers[generationType]
+ if (enricher) {
+ const enrichment = await enricher(workspaceId, wandContext)
+ if (enrichment) {
+ finalSystemPrompt += `\n\n${enrichment}`
+ }
+ }
}
if (generationType === 'json-object') {

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
- import { webhook, workflow } from '@sim/db/schema'
+ import { webhook, workflow, workflowDeploymentVersion } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
- import { and, desc, eq } from 'drizzle-orm'
+ import { and, desc, eq, isNull, or } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
@@ -71,7 +71,23 @@ export async function GET(request: NextRequest) {
})
.from(webhook)
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
- .where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
+ .leftJoin(
+ workflowDeploymentVersion,
+ and(
+ eq(workflowDeploymentVersion.workflowId, workflow.id),
+ eq(workflowDeploymentVersion.isActive, true)
+ )
+ )
+ .where(
+ and(
+ eq(webhook.workflowId, workflowId),
+ eq(webhook.blockId, blockId),
+ or(
+ eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
+ and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
+ )
+ )
+ )
.orderBy(desc(webhook.updatedAt))
logger.info(
@@ -149,7 +165,23 @@ export async function POST(request: NextRequest) {
const existingForBlock = await db
.select({ id: webhook.id, path: webhook.path })
.from(webhook)
- .where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
+ .leftJoin(
+ workflowDeploymentVersion,
+ and(
+ eq(workflowDeploymentVersion.workflowId, workflowId),
+ eq(workflowDeploymentVersion.isActive, true)
+ )
+ )
+ .where(
+ and(
+ eq(webhook.workflowId, workflowId),
+ eq(webhook.blockId, blockId),
+ or(
+ eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
+ and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
+ )
+ )
+ )
.limit(1)
if (existingForBlock.length > 0) {
@@ -225,7 +257,23 @@ export async function POST(request: NextRequest) {
const existingForBlock = await db
.select({ id: webhook.id })
.from(webhook)
- .where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
+ .leftJoin(
+ workflowDeploymentVersion,
+ and(
+ eq(workflowDeploymentVersion.workflowId, workflowId),
+ eq(workflowDeploymentVersion.isActive, true)
+ )
+ )
+ .where(
+ and(
+ eq(webhook.workflowId, workflowId),
+ eq(webhook.blockId, blockId),
+ or(
+ eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
+ and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
+ )
+ )
+ )
.limit(1)
if (existingForBlock.length > 0) {
targetWebhookId = existingForBlock[0].id

View File

@@ -152,7 +152,6 @@ export async function POST(
const response = await queueWebhookExecution(foundWebhook, foundWorkflow, body, request, {
requestId,
path,
- executionTarget: 'deployed',
})
responses.push(response)
}

View File

@@ -22,6 +22,13 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
.select({
id: chat.id,
identifier: chat.identifier,
+ title: chat.title,
+ description: chat.description,
+ customizations: chat.customizations,
+ authType: chat.authType,
+ allowedEmails: chat.allowedEmails,
+ outputConfigs: chat.outputConfigs,
+ password: chat.password,
isActive: chat.isActive,
})
.from(chat)
@@ -34,6 +41,13 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
? {
id: deploymentResults[0].id,
identifier: deploymentResults[0].identifier,
+ title: deploymentResults[0].title,
+ description: deploymentResults[0].description,
+ customizations: deploymentResults[0].customizations,
+ authType: deploymentResults[0].authType,
+ allowedEmails: deploymentResults[0].allowedEmails,
+ outputConfigs: deploymentResults[0].outputConfigs,
+ hasPassword: Boolean(deploymentResults[0].password),
}
: null

View File

@@ -10,7 +10,11 @@ import {
loadWorkflowFromNormalizedTables,
undeployWorkflow,
} from '@/lib/workflows/persistence/utils'
- import { createSchedulesForDeploy, validateWorkflowSchedules } from '@/lib/workflows/schedules'
+ import {
+ cleanupDeploymentVersion,
+ createSchedulesForDeploy,
+ validateWorkflowSchedules,
+ } from '@/lib/workflows/schedules'
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -131,22 +135,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
}
- const triggerSaveResult = await saveTriggerWebhooksForDeploy({
- request,
- workflowId: id,
- workflow: workflowData,
- userId: actorUserId,
- blocks: normalizedData.blocks,
- requestId,
- })
- if (!triggerSaveResult.success) {
- return createErrorResponse(
- triggerSaveResult.error?.message || 'Failed to save trigger configuration',
- triggerSaveResult.error?.status || 500
- )
- }
const deployResult = await deployWorkflow({
workflowId: id,
deployedBy: actorUserId,
@@ -158,14 +146,58 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
const deployedAt = deployResult.deployedAt!
const deploymentVersionId = deployResult.deploymentVersionId
+ if (!deploymentVersionId) {
+ await undeployWorkflow({ workflowId: id })
+ return createErrorResponse('Failed to resolve deployment version', 500)
+ }
+ const triggerSaveResult = await saveTriggerWebhooksForDeploy({
+ request,
+ workflowId: id,
+ workflow: workflowData,
+ userId: actorUserId,
+ blocks: normalizedData.blocks,
+ requestId,
+ deploymentVersionId,
+ })
+ if (!triggerSaveResult.success) {
+ await cleanupDeploymentVersion({
+ workflowId: id,
+ workflow: workflowData as Record<string, unknown>,
+ requestId,
+ deploymentVersionId,
+ })
+ await undeployWorkflow({ workflowId: id })
+ return createErrorResponse(
+ triggerSaveResult.error?.message || 'Failed to save trigger configuration',
+ triggerSaveResult.error?.status || 500
+ )
+ }
let scheduleInfo: { scheduleId?: string; cronExpression?: string; nextRunAt?: Date } = {}
- const scheduleResult = await createSchedulesForDeploy(id, normalizedData.blocks, db)
+ const scheduleResult = await createSchedulesForDeploy(
+ id,
+ normalizedData.blocks,
+ db,
+ deploymentVersionId
+ )
if (!scheduleResult.success) {
logger.error(
`[${requestId}] Failed to create schedule for workflow ${id}: ${scheduleResult.error}`
)
- } else if (scheduleResult.scheduleId) {
+ await cleanupDeploymentVersion({
+ workflowId: id,
+ workflow: workflowData as Record<string, unknown>,
+ requestId,
+ deploymentVersionId,
+ })
+ await undeployWorkflow({ workflowId: id })
+ return createErrorResponse(scheduleResult.error || 'Failed to create schedule', 500)
+ }
+ if (scheduleResult.scheduleId) {
scheduleInfo = {
scheduleId: scheduleResult.scheduleId,
cronExpression: scheduleResult.cronExpression,

Some files were not shown because too many files have changed in this diff Show More