Compare commits

...

101 Commits

Author SHA1 Message Date
Waleed Latif
991f0442e9 v0.3.36: workflow block logs, whitelabeling configurability, session provider 2025-08-21 21:44:28 -07:00
Waleed Latif
2ebfb576ae fix(day-picker): remove unused react-day-picker (#1094) 2025-08-21 21:29:20 -07:00
Vikhyath Mondreti
11a7be54f2 fix circular dependsOn for Jira manualIssueKey 2025-08-21 21:21:19 -07:00
Vikhyath Mondreti
f5219d03c3 fix(ms-oauth): oauth edge cases (#1093) 2025-08-21 21:19:11 -07:00
Waleed Latif
f0643e01b4 fix(logs): make child workflow span errors the same as root level workflow errors (#1092) 2025-08-21 21:17:09 -07:00
Adam Gough
77b0c5b9ed Fix(excel-range): fixed excel range (#1088)
* added auto range

* lint

* removed any

* utils file

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
2025-08-21 20:04:20 -07:00
Adam Gough
9dbd44e555 fix(webhook-payloads): fixed the variable resolution in webhooks (#1019)
* telegram webhook fix

* changed payloads

* test

* test

* test

* test

* fix github dropdown

* test

* reverted github changes

* fixed github var

* test

* bun run lint

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test

* test push

* test

* bun run lint

* edited airtable payload and webhook deletion

* Revert bun.lock and package.json to upstream/staging

* cleaned up

* test

* test

* resolving more comments

* resolved comments, updated trigger

* cleaned up, resolved comments

* test

* test

* lint

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
2025-08-21 20:03:04 -07:00
Waleed Latif
9ea9f2d52e improvement(log-level): make log level configurable via envvar (#1091) 2025-08-21 19:40:47 -07:00
Waleed Latif
4cd707fadb improvement(emails): fixed email subjects to use provided brand name (#1090)
* improvement(emails): fixed email subjects to use provided brand name

* update manifest to use dynamic background & theme color
2025-08-21 19:34:05 -07:00
Waleed Latif
f0b07428bc feat(theme): added custom envvars for themes (#1089)
* feat(theme): added custom envvars for themes

* add regex
2025-08-21 19:27:56 -07:00
Vikhyath Mondreti
8c9e182e10 fix(infinite-get-session): pass session once per tree using session provider + multiple fixes (#1085)
* fix(infinite-get-session): pass session using session provider

* prevent auto refetch

* fix typing:

* fix types

* fix

* fix oauth token for microsoft file selector

* fix start block required error
2025-08-21 18:45:15 -07:00
Waleed Latif
33dd59f7a7 fix(db-consts): make the migrations image fully standalone by adding db consts (#1087) 2025-08-21 17:25:35 -07:00
Waleed Latif
53ee9f99db fix(templates): added option to delete/keep templates when deleting workspace, updated template modal, sidebar code cleanup (#1086)
* feat(templates): added in the ability to keep/remove templates when deleting workspace

* code cleanup in sidebar

* add the ability to edit existing templates

* updated template modal

* fix build

* revert bun.lock

* add template logic to workflow deletion as well

* add ability to delete templates

* add owner/admin enforcement to modify or delete templates
2025-08-21 17:11:22 -07:00
Vikhyath Mondreti
0f2a125eae improvement(block-error-logs): workflow in workflow (#1084)
* improvement(add-block-logs): workflow in workflow

* fix lint
2025-08-21 15:01:30 -07:00
Waleed Latif
e107363ea7 v0.3.35: migrations, custom email address support 2025-08-21 12:36:51 -07:00
Waleed Latif
7e364a7977 fix(emails): remove unused useCustomFromFormat param (#1082)
* fix(mailer): remove unused useCustomFormat

* bun.lock changes
2025-08-21 12:09:03 -07:00
Waleed Latif
35a37d8b45 fix(acs): added FROM_EMAIL_ADDRESS envvar for ACS (#1081)
* fix: clear Docker build cache to use correct Next.js version

* fix(mailer): add FROM_EMAIL_ADDRESS envvar for ACS

* bun.lock

* added tests
2025-08-21 11:57:44 -07:00
Vikhyath Mondreti
2b52d88cee fix(migrations): add missing migration for document table (#1080)
* fix(migrations): add missing migration for document table

* add newline at end of file
2025-08-21 11:48:54 -07:00
Waleed Latif
abad3620a3 fix(build): clear docker build cache to use correct Next.js version 2025-08-21 01:43:45 -07:00
Waleed Latif
a37c6bc812 fix(build): clear docker build cache to use correct Next.js version (#1075)
* fix: clear Docker build cache to use correct Next.js version

- Changed GitHub Actions cache scope from build-v2 to build-v3
- This should force a fresh build without cached Next.js 15.5.0 layers
- Reverted to ^15.3.2 version format that worked on main branch

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>

* run install

---------

Co-authored-by: Claude <noreply@anthropic.com>
2025-08-21 01:38:47 -07:00
Waleed Latif
cd1bd95952 fix(nextjs): downgrade nextjs due to known issue with bun commonjs module bundling (#1073) 2025-08-21 01:24:06 -07:00
Waleed Latif
4c9fdbe7fb fix(nextjs): downgrade nextjs due to known issue with bun commonjs module bundling (#1073) 2025-08-21 01:23:10 -07:00
Waleed Latif
2c47cf4161 v0.3.34: azure-openai options, billing fixes, mistral OCR via Azure, start block input format changes 2025-08-20 21:05:48 -07:00
Waleed Latif
db1cf8a6db fix(placeholder): fix starter block placeholder (#1071) 2025-08-20 21:01:37 -07:00
Vikhyath Mondreti
c6912095f7 fix placeholder text 2025-08-20 20:38:15 -07:00
Waleed Latif
154d9eef6a fix(gpt-5): fix chat-completions api (#1070) 2025-08-20 20:36:12 -07:00
Emir Karabeg
c2ded1f3e1 fix(theme-provider): preventing flash on page load (#1067)
* fix(theme-provider): preventing flash on page load

* consolidated themes to use NextJS theme logic

* improvement: optimized latency
2025-08-20 20:20:23 -07:00
Waleed Latif
ff43528d35 fix(gpt-5): fixed verbosity and reasoning params (#1069)
* fix(gpt-5): fixed verbosity and reasoning params

* fixed dropdown

* default values for verbosity and reasoning effort

* cleanup

* use default value in dropdown
2025-08-20 20:18:02 -07:00
Vikhyath Mondreti
692ba69864 fix type 2025-08-20 20:00:41 -07:00
Adam Gough
cb7ce8659b fix(msverify): changed consent for microsoft (#1057)
* changed consent

* changed excel error message and default sheets

* changed variable res for excel

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
2025-08-20 19:54:51 -07:00
Vikhyath Mondreti
5caef3a37d fix(input-format): first time execution bug (#1068) 2025-08-20 19:52:04 -07:00
Waleed Latif
a6888da124 fix(semantics): fix incorrect imports (#1066)
* fix(semantics): fix incorrect import

* fixed all incorrect imports
2025-08-20 19:02:52 -07:00
Vikhyath Mondreti
07b0597f4f improvement(trigger): upgrade import path for trigger (#1065) 2025-08-20 18:41:13 -07:00
Vikhyath Mondreti
71e2994f9d improvement(trigger): upgrade trigger (#1063) 2025-08-20 18:33:01 -07:00
Vikhyath Mondreti
9973b2c165 Merge branch 'staging' of github.com:simstudioai/sim into staging 2025-08-20 18:26:08 -07:00
Vikhyath Mondreti
d9e5777538 use personal access token 2025-08-20 18:24:17 -07:00
Waleed Latif
dd74267313 feat(nextjs): upgrade nextjs to 15.5 (#1062) 2025-08-20 18:22:35 -07:00
Vikhyath Mondreti
1db72dc823 pin version 2025-08-20 18:13:15 -07:00
Vikhyath Mondreti
da707fa491 improvement(gh-action): add gh action to deploy to correct environment for trigger.dev (#1060)
* improvement(gh-action): add gh action to deploy to correct environment for trigger.dev

* add dep installation

* change away from pull request target
2025-08-20 18:10:43 -07:00
Vikhyath Mondreti
9ffaf305bd feat(input-format): add value field to test input formats (#1059)
* feat(input-format): add value field to test input formats

* fix lint

* fix typing issue

* change to dropdown for boolean
2025-08-20 18:03:47 -07:00
Waleed Latif
26e6286fda fix(billing): fix team plan upgrade (#1053) 2025-08-20 17:05:35 -07:00
Waleed Latif
c795fc83aa feat(azure-openai): allow usage of azure-openai for knowledgebase uploads and wand generation (#1056)
* feat(azure-openai): allow usage of azure-openai for knowledgebase uploads

* feat(azure-openai): added azure-openai for kb and wand

* added embeddings utils, added the ability to use mistral through Azure

* fix(oauth): gdrive picker race condition, token route cleanup

* fix test

* feat(mailer): consolidated all emailing to mailer service, added support for Azure ACS (#1054)

* feat(mailer): consolidated all emailing to mailer service, added support for Azure ACS

* fix batch invitation email template

* cleanup

* improvement(emails): add help template instead of doing it inline

* remove fallback version

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
2025-08-20 17:04:52 -07:00
Waleed Latif
cea42f5135 improvement(gpt-5): added reasoning level and verbosity to gpt-5 models (#1058) 2025-08-20 17:04:39 -07:00
Waleed Latif
6fd6f921dc feat(mailer): consolidated all emailing to mailer service, added support for Azure ACS (#1054)
* feat(mailer): consolidated all emailing to mailer service, added support for Azure ACS

* fix batch invitation email template

* cleanup

* improvement(emails): add help template instead of doing it inline
2025-08-20 16:02:49 -07:00
Vikhyath Mondreti
7530fb9a4e Merge pull request #1055 from simstudioai/fix/picker-race-cond
fix(oauth): gdrive picker race condition, token route cleanup
2025-08-20 15:03:57 -07:00
Vikhyath Mondreti
9a5b035822 fix test 2025-08-20 13:55:54 -07:00
Vikhyath Mondreti
0c0b6bf967 fix(oauth): gdrive picker race condition, token route cleanup 2025-08-20 12:33:46 -07:00
Vikhyath Mondreti
5d74db53ff v0.3.33: update copilot docs 2025-08-20 09:56:09 -07:00
Siddharth Ganesan
b39bdfd55e feat(copilot-docs): update readme and docs with local hosting instructions (#1043)
* Docs

* Lint
2025-08-20 09:47:50 -07:00
Waleed Latif
6b185be9a4 v0.3.32: loop block max increase, url-encoded API calls, subflow logs, new supabase tools 2025-08-20 00:36:46 -07:00
Waleed Latif
214a0358b6 fix(billing): fix upgrade to team plan (#1045) 2025-08-20 00:28:07 -07:00
Waleed Latif
bbb5e53e43 improvement(supabase): add supabase upsert tool, insert/replace on PK conflict (#1038) 2025-08-19 21:21:09 -07:00
Waleed Latif
79e932fed9 feat(logs): added sub-workflow logs, updated trace spans UI, fix scroll behavior in workflow registry sidebar (#1037)
* added sub-workflow logs

* indent input/output in trace spans display

* better color scheme for workflow logs

* scroll behavior in sidebar updated

* cleanup

* fixed failing tests
2025-08-19 21:21:09 -07:00
Vikhyath Mondreti
9ad36c0e34 fix(oauth-block): race condition for rendering credential selectors and other subblocks + gdrive fixes (#1029)
* fix(oauth-block): race condition for rendering credential selectors and other subblocks

* fix import

* add dependsOn field to track cross-subblock deps

* remove redundant check

* remove redundant checks

* remove misleading comment

* fix

* fix jira

* fix

* fix

* confluence

* fix triggers

* fix

* fix

* make trigger creds collab supported

* fix for backwards compat

* fix trigger modal
2025-08-19 21:21:09 -07:00
Waleed Latif
2771c688ff improvement(supabase): added more verbose error logging for supabase operations (#1035)
* improvement(supabase): added more verbose error logging for supabase operations

* updated docs
2025-08-19 21:21:09 -07:00
Waleed Latif
d58ceb4bce improvement(api): add native support for form-urlencoded inputs into API block (#1033) 2025-08-19 21:21:09 -07:00
Waleed Latif
69773c3174 improvement(console): increase console max entries for larger workflows (#1032)
* improvement(console): increase console max entries for larger workflows

* increase safety limit for infinite loops
2025-08-19 21:21:09 -07:00
Waleed Latif
1619d63f2a v0.3.31: webhook fixes, advanced mode parameter filtering, credentials fixes, UI/UX improvements 2025-08-19 01:01:45 -07:00
Waleed Latif
9aa1fe8037 fix(logger): fixed logger to show prod server-side logs (#1027) 2025-08-19 00:44:24 -07:00
Emir Karabeg
1b7c111c46 Update README.md (#1026)
* Update README.md

* Update README.md
2025-08-18 23:10:18 -07:00
Siddharth Ganesan
bdfb56b262 fix(copilot): streaming (#1023)
* Fix 1

* Fix

* Bugfix

* Make thinking streaming smoother

* Better autoscroll, still not great

* Updates

* Updates

* Updates

* Restore checkpoint logic

* Fix aborts

* Checkpoint UI

* Lint

* Fix empty file
2025-08-18 22:48:56 -07:00
Emir Karabeg
4a7de31eee uploaded brandbook (#1024) 2025-08-18 22:04:55 -07:00
Waleed Latif
adfe56c720 improvement(logger): restore server-side logs in prod (#1022) 2025-08-18 21:01:38 -07:00
Emir Karabeg
72e3efa875 improvement(settings): ui/ux (#1021)
* completed general

* completed environment

* completed account; updated general and environment

* fixed skeleton

* finished credentials

* finished privacy; adjusted all colors and styling

* added reset password

* refactor: team and subscription

* finalized subscription settings

* fixed copilot key UI
2025-08-18 20:57:29 -07:00
Vikhyath Mondreti
b40fa3aa6e fix(picker-ui): picker UI confusing when credential not set + Microsoft OAuth Fixes (#1016)
* fix(picker-ui): picker UI confusing when credential not set

* remove comments

* remove chevron down

* fix collaboration oauth

* fix jira

* fix

* fix ms excel selector

* fix selectors for MS blocks

* fix ms selectors

* fix

* fix ms onedrive and sharepoint

* fix to grey out dropdowns

* fix background fetches

* fix planner

* fix confluence

* fix

* fix confluence realtime sharing

* fix outlook folder selector

* check outlook folder

* make shared hook

---------

Co-authored-by: waleedlatif1 <walif6@gmail.com>
2025-08-18 20:21:23 -07:00
Waleed Latif
f924edde3a improvement(console): redact api keys from console store (#1020) 2025-08-18 16:36:33 -07:00
Waleed Latif
073030bfaa improvement(serializer): filter out advanced mode fields when executing in basic mode, persist the values but don't include them in serialized block for execution (#1018)
* improvement(serializer): filter out advanced mode fields when executing in basic mode, persist the values but don't include them in serialized block for execution

* fix serializer exclusion logic
2025-08-18 16:34:53 -07:00
Siddharth Ganesan
871f4e8e18 fix(copilot): env key validation (#1017)
* Fix v1

* Use env var

* Lint

* Fix env key validation

* Remove logger

* Fix agent url

* Fix tests
2025-08-18 16:00:56 -07:00
Siddharth Ganesan
091343a132 fix(copilot): fix origin (#1015)
* Fix v1

* Use env var

* Lint
2025-08-18 13:57:31 -07:00
Waleed Latif
63c66bfc31 fix(webhook): pin webhook URL when creating/saving generic webhook trigger (#1014)
* fix(webhook): pin webhook URL when creating a new generic webhook trigger

* change instructions copy

* remove unrelated scripts

* added optional API key for webhooks, validation tests

* remove extraneous logs
2025-08-18 13:39:49 -07:00
Waleed Latif
445ca78395 fix(export): swap upload & download icons (#1013) 2025-08-18 10:22:55 -07:00
Waleed Latif
d75cc1ed84 v0.3.30: duplication, control bar fixes 2025-08-18 08:57:26 -07:00
Waleed Latif
5a8a703ecb fix(duplicate): fixed detached state on duplication (#1011) 2025-08-18 08:51:18 -07:00
Waleed Latif
6f64188b8d fix(control-bar): fix icons styling in disabled state (#1010) 2025-08-18 08:22:06 -07:00
Vikhyath Mondreti
60a9a25553 Merge pull request #1009 from simstudioai/staging
update migration file for note-keeping purposes
2025-08-18 01:59:02 -07:00
Vikhyath Mondreti
52fa388f81 update migration file for note-keeping purposes 2025-08-18 01:56:34 -07:00
Vikhyath Mondreti
5c56cbd558 Merge pull request #1008 from simstudioai/staging
reduce batch size to prevent timeouts
2025-08-18 01:11:49 -07:00
Vikhyath Mondreti
dc19525a6f reduce batch size to prevent timeouts 2025-08-18 01:10:47 -07:00
Vikhyath Mondreti
3873f44875 Merge pull request #1007 from simstudioai/staging
syntax issue in migration
2025-08-18 00:59:53 -07:00
Vikhyath Mondreti
09b95f41ea syntax issue in migration 2025-08-18 00:58:09 -07:00
Vikhyath Mondreti
af60ccd188 fix: migration mem issues bypass
fix: migration mem issues bypass
2025-08-18 00:50:20 -07:00
Vikhyath Mondreti
eb75afd115 make logs migration batched to prevent mem issues (#1005) 2025-08-18 00:42:38 -07:00
Waleed Latif
fdb8256468 fix(subflow): remove all edges when removing a block from a subflow (#1003) 2025-08-18 00:21:26 -07:00
Vikhyath Mondreti
570c07bf2a Merge pull request #1004 from simstudioai/staging
v0.3.29: copilot fixes, remove block from subflow, code cleanups
2025-08-18 00:18:44 -07:00
Adam Gough
5c16e7d390 fix(subflow): add ability to remove block from subflow and refactor to consolidate subflow code (#983)
* added logic to remove blocks from subflows

* refactored logic into just subflow-node

* bun run lint

* added subflow test

* added a safety check for data.parentId

* added state update logic

* bun run lint

* removed old logic

* removed any

* added tests

* added type safety

* removed test script

* type safety

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
Co-authored-by: waleedlatif1 <walif6@gmail.com>
2025-08-17 22:25:31 -07:00
Waleed Latif
bd38062705 fix(workflow-error): allow users to delete workflows with invalid configs/state (#1000)
* fix(workflow-error): allow users to delete workflows with invalid configs/state

* cleanup
2025-08-17 22:23:41 -07:00
Siddharth Ganesan
d7fd4a9618 feat(copilot): diff improvements (#1002)
* Fix abort

* Cred updates

* Updates

* Fix sheet id showing up in diff view

* Update diff view

* Text overflow

* Optimistic accept

* Serialization catching

* Depth 0 fix

* Fix icons

* Updates

* Lint
2025-08-16 15:09:48 -07:00
Vikhyath Mondreti
d972bab206 fix(logs-sidebar): remove message and fix race condition for quickly switching b/w logs (#1001) 2025-08-16 15:05:39 -07:00
Vikhyath Mondreti
f254d70624 improvement(logs): cleanup code (#999) 2025-08-16 13:44:00 -07:00
Waleed Latif
8748e1d5f9 improvement(db): remove deprecated 'state' column from workflow table (#994)
* improvement(db): remove deprecated 'state' column from workflow table

* removed extraneous logs

* update sockets envvar
2025-08-16 13:04:49 -07:00
Siddharth Ganesan
133a32e6d3 Fix abort (#998) 2025-08-16 11:10:09 -07:00
Waleed Latif
97b6bcc43d v0.3.28: autolayout, export, copilot, kb ui improvements 2025-08-16 09:12:17 -07:00
Waleed Latif
42917ce641 fix(agent): stringify input into user prompt for agent (#984) 2025-08-15 19:36:49 -07:00
Waleed Latif
5f6d219223 fix(kb-ui): fixed upload files modal ui, processing ui to match the rest of the kb (#991)
* fix(kb-ui): fixed upload files modal, processing ui to match the rest of the kb

* more ui fixes

* ack PR comments

* fix help modal
2025-08-15 19:35:50 -07:00
Siddharth Ganesan
bab74307f4 fix(ishosted): make ishosted true on staging (#993)
* Add staging to ishosted

* www
2025-08-15 18:36:32 -07:00
Siddharth Ganesan
16aaa37dad improvement(agent): enable autolayout, export, copilot (#992)
* Enable autolayout, export, and copilot in dev

* Updates
2025-08-15 18:29:34 -07:00
Siddharth Ganesan
c6166a9483 feat(copilot): generate agent api key (#989)
* Add skeleton copilot to settings modal and add migration for copilot api keys

* Add hash index on encrypted key

* Security 1

* Remove sim agent api key

* Fix api key stuff

* Auth

* Status code handling

* Update env key

* Copilot api key ui

* Update copilot costs

* Add copilot stats

* Lint

* Remove logs

* Remove migrations

* Remove another migration

* Updates

* Hide if hosted

* Fix test

* Lint

* Lint

* Fixes

* Lint

---------

Co-authored-by: Waleed Latif <walif6@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Co-authored-by: Adam Gough <77861281+aadamgough@users.noreply.github.com>
Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
2025-08-15 18:05:54 -07:00
Waleed Latif
0258a1b4ce fix(loading): fix workflow detached on first load (#987) 2025-08-15 17:26:47 -07:00
Vikhyath Mondreti
4d4aefa346 fix(envvar): clear separation between server-side and client-side billing envvar (#988) 2025-08-15 16:41:02 -07:00
Vikhyath Mondreti
a0cf003abf Merge pull request #986 from simstudioai/staging
attempt to fix build issues (#985)
2025-08-15 15:22:26 -07:00
Vikhyath Mondreti
2e027dd77d attempt to fix build issues (#985) 2025-08-15 15:21:34 -07:00
386 changed files with 39164 additions and 9566 deletions

View File

@@ -85,8 +85,8 @@ jobs:
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha,scope=build-v2
cache-to: type=gha,mode=max,scope=build-v2
cache-from: type=gha,scope=build-v3
cache-to: type=gha,mode=max,scope=build-v3
provenance: false
sbom: false

.github/workflows/trigger-deploy.yml (new file, 44 lines)
View File

@@ -0,0 +1,44 @@
name: Trigger.dev Deploy
on:
push:
branches:
- main
- staging
jobs:
deploy:
name: Trigger.dev Deploy
runs-on: ubuntu-latest
concurrency:
group: trigger-deploy-${{ github.ref }}
cancel-in-progress: false
env:
TRIGGER_ACCESS_TOKEN: ${{ secrets.TRIGGER_ACCESS_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 'lts/*'
- name: Setup Bun
uses: oven-sh/setup-bun@v2
with:
bun-version: latest
- name: Install dependencies
run: bun install
- name: Deploy to Staging
if: github.ref == 'refs/heads/staging'
working-directory: ./apps/sim
run: npx --yes trigger.dev@4.0.0 deploy -e staging
- name: Deploy to Production
if: github.ref == 'refs/heads/main'
working-directory: ./apps/sim
run: npx --yes trigger.dev@4.0.0 deploy

View File

@@ -1,50 +1,46 @@
<p align="center">
<img src="apps/sim/public/static/sim.png" alt="Sim Logo" width="500"/>
<a href="https://sim.ai" target="_blank" rel="noopener noreferrer">
<img src="apps/sim/public/logo/reverse/text/large.png" alt="Sim Logo" width="500"/>
</a>
</p>
<p align="center">
<a href="https://www.apache.org/licenses/LICENSE-2.0"><img src="https://img.shields.io/badge/License-Apache%202.0-blue.svg" alt="License: Apache-2.0"></a>
<a href="https://discord.gg/Hr4UWYEcTT"><img src="https://img.shields.io/badge/Discord-Join%20Server-7289DA?logo=discord&logoColor=white" alt="Discord"></a>
<a href="https://x.com/simdotai"><img src="https://img.shields.io/twitter/follow/simstudioai?style=social" alt="Twitter"></a>
<a href="https://github.com/simstudioai/sim/pulls"><img src="https://img.shields.io/badge/PRs-welcome-brightgreen.svg" alt="PRs welcome"></a>
<a href="https://docs.sim.ai"><img src="https://img.shields.io/badge/Docs-visit%20documentation-blue.svg" alt="Documentation"></a>
</p>
<p align="center">Build and deploy AI agent workflows in minutes.</p>
<p align="center">
<strong>Sim</strong> is a lightweight, user-friendly platform for building AI agent workflows.
<a href="https://sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/sim.ai-6F3DFA" alt="Sim.ai"></a>
<a href="https://discord.gg/Hr4UWYEcTT" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Discord-Join%20Server-5865F2?logo=discord&logoColor=white" alt="Discord"></a>
<a href="https://x.com/simdotai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/twitter/follow/simstudioai?style=social" alt="Twitter"></a>
<a href="https://docs.sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Docs-6F3DFA.svg" alt="Documentation"></a>
</p>
<p align="center">
<img src="apps/sim/public/static/demo.gif" alt="Sim Demo" width="800"/>
</p>
## Getting Started
## Quickstart
1. Use our [cloud-hosted version](https://sim.ai)
2. Self-host using one of the methods below
### Cloud-hosted: [sim.ai](https://sim.ai)
## Self-Hosting Options
<a href="https://sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/sim.ai-6F3DFA?logo=data:image/svg%2bxml;base64,PHN2ZyB3aWR0aD0iNjE2IiBoZWlnaHQ9IjYxNiIgdmlld0JveD0iMCAwIDYxNiA2MTYiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CjxnIGNsaXAtcGF0aD0idXJsKCNjbGlwMF8xMTU5XzMxMykiPgo8cGF0aCBkPSJNNjE2IDBIMFY2MTZINjE2VjBaIiBmaWxsPSIjNkYzREZBIi8+CjxwYXRoIGQ9Ik04MyAzNjUuNTY3SDExM0MxMTMgMzczLjgwNSAxMTYgMzgwLjM3MyAxMjIgMzg1LjI3MkMxMjggMzg5Ljk0OCAxMzYuMTExIDM5Mi4yODUgMTQ2LjMzMyAzOTIuMjg1QzE1Ny40NDQgMzkyLjI4NSAxNjYgMzkwLjE3MSAxNzIgMzg1LjkzOUMxNzcuOTk5IDM4MS40ODcgMTgxIDM3NS41ODYgMTgxIDM2OC4yMzlDMTgxIDM2Mi44OTUgMTc5LjMzMyAzNTguNDQyIDE3NiAzNTQuODhDMTcyLjg4OSAzNTEuMzE4IDE2Ny4xMTEgMzQ4LjQyMiAxNTguNjY3IDM0Ni4xOTZMMTMwIDMzOS41MTdDMTE1LjU1NSAzMzUuOTU1IDEwNC43NzggMzMwLjQ5OSA5Ny42NjY1IDMyMy4xNTFDOTAuNzc3NSAzMTUuODA0IDg3LjMzMzQgMzA2LjExOSA4Ny4zMzM0IDI5NC4wOTZDODcuMzMzNCAyODQuMDc2IDg5Ljg4OSAyNzUuMzkyIDk0Ljk5OTYgMjY4LjA0NUMxMDAuMzMzIDI2MC42OTcgMTA3LjU1NSAyNTUuMDIgMTE2LjY2NiAyNTEuMDEyQzEyNiAyNDcuMDA0IDEzNi42NjcgMjQ1IDE0OC42NjYgMjQ1QzE2MC42NjcgMjQ1IDE3MSAyNDcuMTE2IDE3OS42NjcgMjUxLjM0NkMxODguNTU1IDI1NS41NzYgMTk1LjQ0NCAyNjEuNDc3IDIwMC4zMzMgMjY5LjA0N0MyMDUuNDQ0IDI3Ni42MTcgMjA4LjExMSAyODUuNjM0IDIwOC4zMzMgMjk2LjA5OUgxNzguMzMzQzE3OC4xMTEgMjg3LjYzOCAxNzUuMzMzIDI4MS4wNyAxNjkuOTk5IDI3Ni4zOTRDMTY0LjY2NiAyNzEuNzE5IDE1Ny4yMjIgMjY5LjM4MSAxNDcuNjY3IDI2OS4zODFDMTM3Ljg4OSAyNjkuMzgxIDEzMC4zMzMgMjcxLjQ5NiAxMjUgMjc1LjcyNkMxMTkuNjY2IDI3OS45NTcgMTE3IDI4NS43NDYgMTE3IDI5My4wOTNDMTE3IDMwNC4wMDMgMTI1IDMxMS40NjIgMTQxIDMxNS40N0wxNjkuNjY3IDMyMi40ODNDMTgzLjQ0NSAzMjUuNiAxOTMuNzc4IDMzMC43MjIgMjAwLjY2NyAzMzcuODQ3QzIwNy41NTUgMzQ0Ljc0OSAyMTEgMzU0LjIxMiAyMTEgMzY2LjIzNUMyMTEgMzc2LjQ3NyAyMDguMjIyIDM4NS40OTQgMjAyLjY2NiAzOTMuMjg3QzE5Ny4xMTEgNDAwLjg1NyAxODkuNDQ0IDQwNi43NTggMTc5LjY2NyA0MTAuOTg5QzE3MC4xMTEgNDE0Ljk5NiAxNTguNzc4IDQxNyAxNDUuNjY3IDQxN0MxMjYuNTU1IDQxNyAxMTEuMzMzIDQxMi4zMjUgOTkuOTk5NyA0MDIuOTczQzg4LjY2NjggMzkzLjYyMSA4MyAzODEuMTUzIDgzIDM2NS41NjdaIiBmaWxsPSJ3aGl0ZSIvPgo8cGF0aCBkPSJNMjMyLjI5MSA0MTNWMjUwLjA4MkMyNDQuNjg0IDI1NC42MTQgMjUwLjE0OCAyNTQuNjE0IDI2My4zNzEgMjUwLjA4MlY0MTNIMjMyLjI5MVpNMjQ3LjUgMjM5LjMxM0MyNDEuOTkgMjM5LjMxMyAyMzcuMTQgMjM3LjMxMyAyMzIuOTUyIDIzMy4zMTZDMjI4Ljk4NCAyMjkuMDk1IDIyNyAyMjQuMjA5IDIyNyAyMTguNjU2QzIyNyAyMTIuODgyIDIyOC45ODQgMjA3Ljk5NSAyMzIuOTUyIDIwMy45OTdDMjM3LjE0IDE5OS45OTkgMjQxLjk5IDE5OCAyNDcuNSAxOThDMjUzLjIzMSAxOTggMjU4LjA4IDE5OS45OTkgMjYyLjA0OSAyMDMuOTk3QzI2Ni4wMTYgMjA3Ljk5NSAyNjggMjEyLjg4MiAyNjggMjE4LjY1NkMyNjggMjI0LjIwOSAyNjYuMDE2IDIyOS4wOTUgMjYyLjA0OSAyMzMuMzE2QzI1OC4wOCAyMzcuMzEzIDI1My4yMzEgMjM5LjMxMyAyNDcuNSAyMzkuMzEzWiIgZmlsbD0id2hpdGUiLz4KPHBhdGggZD0iTTMxOS4zMzMgNDEzSDI4OFYyNDkuNjc2SDMxNlYyNzcuMjMzQzMxOS4zMzMgMjY4LjEwNCAzMjUuNzc4IDI2MC4zNjQgMzM0LjY2NyAyNTQuMzUyQzM0My43NzggMjQ4LjExNyAzNTQuNzc4IDI0NSAzNjcuNjY3IDI0NUMzODIuMTExIDI0NSAzOTQuMTEyIDI0OC44OTcgNDAzLjY2NyAyNTYuNjlDNDEzLjIyMiAyNjQuNDg0IDQxOS40NDQgMjc0LjgzNyA0MjIuMzM0IDI4Ny43NTJINDE2LjY2N0M0MTguODg5IDI3NC44MzcgNDI1IDI2NC40ODQgNDM1IDI1Ni42OUM0NDUgMjQ4Ljg5NyA0NTcuMzM0IDI0NSA0NzIgMjQ1QzQ5MC42NjYgMjQ1IDUwNS4zMzQgMjUwLjQ1NSA1MTYgMjYxLjM2NkM1MjYuNjY3IDI3Mi4yNzYgNTMyIDI4Ny4xOTUgNTMyIDMwNi4xMjFWNDEzSDUwMS4zMzNWMzEzLjgwNEM1MDEuMzMzIDMwMC44ODkgNDk4IDI5MC45ODEgNDkxLjMzMyAyODQuMDc4QzQ4NC44ODkgMjc2Ljk1MiA0NzYuMTExIDI3My4zOSA0NjUgMjczLjM5QzQ1Ny4yMjIgMjczLjM5IDQ1MC4zMzMgMjc1LjE3MSA0NDQuMzM0IDI3OC43MzRDNDM4LjU1NiAyODIuMDc0IDQzNCAyODYuOTcyIDQzMC42NjcgMjkzLjQzQzQyNy4zMzMgMjk5Ljg4NyA0MjUuNjY3IDMwNy40NTcgNDI1LjY2NyAzMTYuMTQxVjQxM0gzOTQuNjY3VjMxMy40NjlDMzk0LjY2NyAzMDAuNTU1IDM5MS40NDUgMjkwLjc1OCAzODUgMjg0LjA3OEMzNzguNTU2IDI3Ny4xNz
UgMzY5Ljc3OCAyNzMuNzI0IDM1OC42NjcgMjczLjcyNEMzNTAuODg5IDI3My43MjQgMzQ0IDI3NS41MDUgMzM4IDI3OS4wNjhDMzMyLjIyMiAyODIuNDA4IDMyNy42NjcgMjg3LjMwNyAzMjQuMzMzIDI5My43NjNDMzIxIDI5OS45OTggMzE5LjMzMyAzMDcuNDU3IDMxOS4zMzMgMzE2LjE0MVY0MTNaIiBmaWxsPSJ3aGl0ZSIvPgo8L2c+CjxkZWZzPgo8Y2xpcFBhdGggaWQ9ImNsaXAwXzExNTlfMzEzIj4KPHJlY3Qgd2lkdGg9IjYxNiIgaGVpZ2h0PSI2MTYiIGZpbGw9IndoaXRlIi8+CjwvY2xpcFBhdGg+CjwvZGVmcz4KPC9zdmc+Cg==&logoColor=white" alt="Sim.ai"></a>
### Option 1: NPM Package (Simplest)
The easiest way to run Sim locally is using our [NPM package](https://www.npmjs.com/package/simstudio?activeTab=readme):
### Self-hosted: NPM Package
```bash
npx simstudio
```
→ http://localhost:3000
After running these commands, open [http://localhost:3000/](http://localhost:3000/) in your browser.
#### Note
Docker must be installed and running on your machine.
#### Options
- `-p, --port <port>`: Specify the port to run Sim on (default: 3000)
- `--no-pull`: Skip pulling the latest Docker images
| Flag | Description |
|------|-------------|
| `-p, --port <port>` | Port to run Sim on (default `3000`) |
| `--no-pull` | Skip pulling latest Docker images |
#### Requirements
- Docker must be installed and running on your machine
### Option 2: Docker Compose
### Self-hosted: Docker Compose
```bash
# Clone the repository
@@ -76,14 +72,14 @@ Wait for the model to download, then visit [http://localhost:3000](http://localh
docker compose -f docker-compose.ollama.yml exec ollama ollama pull llama3.1:8b
```
### Option 3: Dev Containers
### Self-hosted: Dev Containers
1. Open VS Code with the [Remote - Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers)
2. Open the project and click "Reopen in Container" when prompted
3. Run `bun run dev:full` in the terminal or use the `sim-start` alias
- This starts both the main application and the realtime socket server
### Option 4: Manual Setup
### Self-hosted: Manual Setup
**Requirements:**
- [Bun](https://bun.sh/) runtime
@@ -158,6 +154,14 @@ cd apps/sim
bun run dev:sockets
```
## Copilot API Keys
Copilot is a Sim-managed service. To use Copilot on a self-hosted instance:
- Go to https://sim.ai → Settings → Copilot and generate a Copilot API key
- Set `COPILOT_API_KEY` in your self-hosted environment to that value
- Host Sim at a publicly reachable URL and set `NEXT_PUBLIC_APP_URL` and `BETTER_AUTH_URL` to that URL (e.g., using [ngrok](https://ngrok.com/))
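As a minimal illustration (not part of the product), a self-hosted instance can assert these variables at startup; the Copilot chat route later in this diff rejects requests when `BETTER_AUTH_URL` is unset:
```ts
// Illustrative startup check for the variables described above.
for (const name of ['COPILOT_API_KEY', 'NEXT_PUBLIC_APP_URL', 'BETTER_AUTH_URL']) {
  if (!process.env[name]) {
    throw new Error(`Missing required environment variable: ${name}`)
  }
}
```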
## Tech Stack
- **Framework**: [Next.js](https://nextjs.org/) (App Router)
@@ -180,4 +184,4 @@ We welcome contributions! Please see our [Contributing Guide](.github/CONTRIBUTI
This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENSE) file for details.
<p align="center">Made with ❤️ by the Sim Team</p>
<p align="center">Made with ❤️ by the Sim Team</p>

View File

@@ -0,0 +1,97 @@
---
title: Copilot
description: Build and edit workflows with Sim Copilot
---
import { Callout } from 'fumadocs-ui/components/callout'
import { Card, Cards } from 'fumadocs-ui/components/card'
import { MessageCircle, Package, Zap, Infinity as InfinityIcon, Brain, BrainCircuit } from 'lucide-react'
## What is Copilot?
Copilot is your in-editor assistant that helps you build, understand, and improve workflows. It can:
- **Explain**: Answer questions about Sim and your current workflow
- **Guide**: Suggest edits and best practices
- **Edit**: Make changes to blocks, connections, and settings when you approve
<Callout type="info">
Copilot is a Sim-managed service. To use it on a self-hosted deployment:
1. Go to [sim.ai](https://sim.ai) → Settings → Copilot and generate a Copilot API key
2. Set `COPILOT_API_KEY` in your self-hosted environment to that value
3. Host Sim at a publicly reachable URL and set `NEXT_PUBLIC_APP_URL` and `BETTER_AUTH_URL` to that URL (e.g., using ngrok)
</Callout>
## Modes
<Cards>
<Card title="Ask">
<div className="flex items-start gap-3">
<span className="mt-0.5 inline-flex h-8 w-8 items-center justify-center rounded-md border border-border/50 bg-muted/60">
<MessageCircle className="h-4 w-4 text-muted-foreground" />
</span>
<div>
<p className="m-0 text-sm">
Q&A mode for explanations, guidance, and suggestions without making changes to your workflow.
</p>
</div>
</div>
</Card>
<Card title="Agent">
<div className="flex items-start gap-3">
<span className="mt-0.5 inline-flex h-8 w-8 items-center justify-center rounded-md border border-border/50 bg-muted/60">
<Package className="h-4 w-4 text-muted-foreground" />
</span>
<div>
<p className="m-0 text-sm">
Build-and-edit mode. Copilot proposes specific edits (add blocks, wire variables, tweak settings) and applies them when you approve.
</p>
</div>
</div>
</Card>
</Cards>
## Depth Levels
<Cards>
<Card title="Fast">
<div className="flex items-start gap-3">
<span className="mt-0.5 inline-flex h-8 w-8 items-center justify-center rounded-md border border-border/50 bg-muted/60">
<Zap className="h-4 w-4 text-muted-foreground" />
</span>
<div>
<p className="m-0 text-sm">Quickest and cheapest. Best for small edits, simple workflows, and minor tweaks.</p>
</div>
</div>
</Card>
<Card title="Auto">
<div className="flex items-start gap-3">
<span className="mt-0.5 inline-flex h-8 w-8 items-center justify-center rounded-md border border-border/50 bg-muted/60">
<InfinityIcon className="h-4 w-4 text-muted-foreground" />
</span>
<div>
<p className="m-0 text-sm">Balanced speed and reasoning. Recommended default for most tasks.</p>
</div>
</div>
</Card>
<Card title="Pro">
<div className="flex items-start gap-3">
<span className="mt-0.5 inline-flex h-8 w-8 items-center justify-center rounded-md border border-border/50 bg-muted/60">
<Brain className="h-4 w-4 text-muted-foreground" />
</span>
<div>
<p className="m-0 text-sm">More reasoning for larger workflows and complex edits while staying performant.</p>
</div>
</div>
</Card>
<Card title="Max">
<div className="flex items-start gap-3">
<span className="mt-0.5 inline-flex h-8 w-8 items-center justify-center rounded-md border border-border/50 bg-muted/60">
<BrainCircuit className="h-4 w-4 text-muted-foreground" />
</span>
<div>
<p className="m-0 text-sm">Maximum reasoning for deep planning, debugging, and complex architectural changes.</p>
</div>
</div>
</Card>
</Cards>

View File

@@ -0,0 +1,4 @@
{
"title": "Copilot",
"pages": ["index"]
}

View File

@@ -12,6 +12,8 @@
"connections",
"---Execution---",
"execution",
"---Copilot---",
"copilot",
"---Advanced---",
"./variables/index",
"yaml",

View File

@@ -115,8 +115,7 @@ Read data from a Microsoft Excel spreadsheet
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Excel spreadsheet data and metadata |
| `data` | object | Range data from the spreadsheet |
### `microsoft_excel_write`
@@ -136,8 +135,11 @@ Write data to a Microsoft Excel spreadsheet
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Write operation results and metadata |
| `updatedRange` | string | The range that was updated |
| `updatedRows` | number | Number of rows that were updated |
| `updatedColumns` | number | Number of columns that were updated |
| `updatedCells` | number | Number of cells that were updated |
| `metadata` | object | Spreadsheet metadata |
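For illustration only, a `microsoft_excel_write` result shaped after the fields above might look like this (the concrete values and the exact `metadata` contents are hypothetical):
```ts
// Hypothetical result object following the documented microsoft_excel_write output fields.
const exampleWriteResult = {
  success: true,
  updatedRange: 'Sheet1!A1:C3', // the range that was written
  updatedRows: 3,
  updatedColumns: 3,
  updatedCells: 9,
  metadata: { spreadsheetId: 'example-spreadsheet-id', sheetName: 'Sheet1' }, // shape assumed
}
```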
### `microsoft_excel_table_add`
@@ -155,8 +157,9 @@ Add new rows to a Microsoft Excel table
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Operation success status |
| `output` | object | Table add operation results and metadata |
| `index` | number | Index of the first row that was added |
| `values` | array | Array of rows that were added to the table |
| `metadata` | object | Spreadsheet metadata |

View File

@@ -142,7 +142,7 @@ Get a single row from a Supabase table based on filter criteria
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `results` | object | The row data if found, null if not found |
| `results` | array | Array containing the row data if found, empty array if not found |
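Because `results` is now an array (empty when nothing matches) rather than a nullable object, callers check length instead of null; a minimal consumer sketch with illustrative names:
```ts
// Illustrative handling of the updated single-row output shape.
interface SingleRowResult {
  message: string
  results: Array<Record<string, unknown>> // empty array when no row matched
}

function firstRowOrNull(result: SingleRowResult): Record<string, unknown> | null {
  return result.results.length > 0 ? result.results[0] : null
}
```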
### `supabase_update`
@@ -185,6 +185,26 @@ Delete rows from a Supabase table based on filter criteria
| `message` | string | Operation status message |
| `results` | array | Array of deleted records |
### `supabase_upsert`
Insert or update data in a Supabase table (upsert operation)
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) |
| `table` | string | Yes | The name of the Supabase table to upsert data into |
| `data` | any | Yes | The data to upsert \(insert or update\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `results` | array | Array of upserted records |
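A hypothetical `supabase_upsert` input shaped after the parameter table above (the table name and row values are placeholders; the project ID reuses the docs' own example):
```ts
// Placeholder input matching the documented supabase_upsert parameters.
const upsertInput = {
  projectId: 'jdrkgepadsdopsntdlom', // example project ID from the docs above
  table: 'users', // hypothetical table name
  data: { id: 1, email: 'user@example.com', plan: 'team' }, // inserted, or updated on primary-key conflict
  apiKey: process.env.SUPABASE_SERVICE_ROLE_KEY, // service role secret key
}
```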
## Notes

View File

@@ -84,14 +84,12 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
// Check if the access token is valid
if (!credential.accessToken) {
logger.warn(`[${requestId}] No access token available for credential`)
return NextResponse.json({ error: 'No access token available' }, { status: 400 })
}
try {
// Refresh the token if needed
const { accessToken } = await refreshTokenIfNeeded(requestId, credential, credentialId)
return NextResponse.json({ accessToken }, { status: 200 })
} catch (_error) {

View File

@@ -1,4 +1,4 @@
import { and, eq } from 'drizzle-orm'
import { and, desc, eq } from 'drizzle-orm'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshOAuthToken } from '@/lib/oauth/oauth'
@@ -70,7 +70,8 @@ export async function getOAuthToken(userId: string, providerId: string): Promise
})
.from(account)
.where(and(eq(account.userId, userId), eq(account.providerId, providerId)))
.orderBy(account.createdAt)
// Always use the most recently updated credential for this provider
.orderBy(desc(account.updatedAt))
.limit(1)
if (connections.length === 0) {
@@ -80,19 +81,13 @@ export async function getOAuthToken(userId: string, providerId: string): Promise
const credential = connections[0]
// Check if we have a valid access token
if (!credential.accessToken) {
logger.warn(`Access token is null for user ${userId}, provider ${providerId}`)
return null
}
// Check if the token is expired and needs refreshing
// Determine whether we should refresh: missing token OR expired token
const now = new Date()
const tokenExpiry = credential.accessTokenExpiresAt
// Only refresh if we have an expiration time AND it's expired AND we have a refresh token
const needsRefresh = tokenExpiry && tokenExpiry < now && !!credential.refreshToken
const shouldAttemptRefresh =
!!credential.refreshToken && (!credential.accessToken || (tokenExpiry && tokenExpiry < now))
if (needsRefresh) {
if (shouldAttemptRefresh) {
logger.info(
`Access token expired for user ${userId}, provider ${providerId}. Attempting to refresh.`
)
@@ -141,6 +136,13 @@ export async function getOAuthToken(userId: string, providerId: string): Promise
}
}
if (!credential.accessToken) {
logger.warn(
`Access token is null and no refresh attempted or available for user ${userId}, provider ${providerId}`
)
return null
}
logger.info(`Found valid OAuth token for user ${userId}, provider ${providerId}`)
return credential.accessToken
}
@@ -164,19 +166,21 @@ export async function refreshAccessTokenIfNeeded(
return null
}
// Check if we need to refresh the token
// Decide if we should refresh: token missing OR expired
const expiresAt = credential.accessTokenExpiresAt
const now = new Date()
// Only refresh if we have an expiration time AND it's expired
// If no expiration time is set (newly created credentials), assume token is valid
const needsRefresh = expiresAt && expiresAt <= now
const shouldRefresh =
!!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))
const accessToken = credential.accessToken
if (needsRefresh && credential.refreshToken) {
if (shouldRefresh) {
logger.info(`[${requestId}] Token expired, attempting to refresh for credential`)
try {
const refreshedToken = await refreshOAuthToken(credential.providerId, credential.refreshToken)
const refreshedToken = await refreshOAuthToken(
credential.providerId,
credential.refreshToken!
)
if (!refreshedToken) {
logger.error(`[${requestId}] Failed to refresh token for credential: ${credentialId}`, {
@@ -217,6 +221,7 @@ export async function refreshAccessTokenIfNeeded(
return null
}
} else if (!accessToken) {
// We have no access token and either no refresh token or not eligible to refresh
logger.error(`[${requestId}] Missing access token for credential`)
return null
}
@@ -233,21 +238,20 @@ export async function refreshTokenIfNeeded(
credential: any,
credentialId: string
): Promise<{ accessToken: string; refreshed: boolean }> {
// Check if we need to refresh the token
// Decide if we should refresh: token missing OR expired
const expiresAt = credential.accessTokenExpiresAt
const now = new Date()
// Only refresh if we have an expiration time AND it's expired
// If no expiration time is set (newly created credentials), assume token is valid
const needsRefresh = expiresAt && expiresAt <= now
const shouldRefresh =
!!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))
// If token is still valid, return it directly
if (!needsRefresh || !credential.refreshToken) {
// If token appears valid and present, return it directly
if (!shouldRefresh) {
logger.info(`[${requestId}] Access token is valid`)
return { accessToken: credential.accessToken, refreshed: false }
}
try {
const refreshResult = await refreshOAuthToken(credential.providerId, credential.refreshToken)
const refreshResult = await refreshOAuthToken(credential.providerId, credential.refreshToken!)
if (!refreshResult) {
logger.error(`[${requestId}] Failed to refresh token for credential`)

View File

@@ -4,8 +4,9 @@ import { auth } from '@/lib/auth'
export async function POST() {
try {
const hdrs = await headers()
const response = await auth.api.generateOneTimeToken({
headers: await headers(),
headers: hdrs,
})
if (!response) {
@@ -14,7 +15,6 @@ export async function POST() {
return NextResponse.json({ token: response.token })
} catch (error) {
console.error('Error generating one-time token:', error)
return NextResponse.json({ error: 'Failed to generate token' }, { status: 500 })
}
}

View File

@@ -3,8 +3,7 @@ import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalApiKey } from '@/lib/copilot/utils'
import { env } from '@/lib/env'
import { isBillingEnabled, isProd } from '@/lib/environment'
import { isBillingEnabled } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { userStats } from '@/db/schema'
@@ -17,6 +16,7 @@ const UpdateCostSchema = z.object({
input: z.number().min(0, 'Input tokens must be a non-negative number'),
output: z.number().min(0, 'Output tokens must be a non-negative number'),
model: z.string().min(1, 'Model is required'),
multiplier: z.number().min(0),
})
/**
@@ -75,27 +75,27 @@ export async function POST(req: NextRequest) {
)
}
const { userId, input, output, model } = validation.data
const { userId, input, output, model, multiplier } = validation.data
logger.info(`[${requestId}] Processing cost update`, {
userId,
input,
output,
model,
multiplier,
})
const finalPromptTokens = input
const finalCompletionTokens = output
const totalTokens = input + output
// Calculate cost using COPILOT_COST_MULTIPLIER (only in production, like normal executions)
const copilotMultiplier = isProd ? env.COPILOT_COST_MULTIPLIER || 1 : 1
// Calculate cost using provided multiplier (required)
const costResult = calculateCost(
model,
finalPromptTokens,
finalCompletionTokens,
false,
copilotMultiplier
multiplier
)
logger.info(`[${requestId}] Cost calculation result`, {
@@ -104,7 +104,7 @@ export async function POST(req: NextRequest) {
promptTokens: finalPromptTokens,
completionTokens: finalCompletionTokens,
totalTokens: totalTokens,
copilotMultiplier,
multiplier,
costResult,
})
@@ -127,6 +127,10 @@ export async function POST(req: NextRequest) {
totalTokensUsed: totalTokens,
totalCost: costToStore.toString(),
currentPeriodCost: costToStore.toString(),
// Copilot usage tracking
totalCopilotCost: costToStore.toString(),
totalCopilotTokens: totalTokens,
totalCopilotCalls: 1,
lastActive: new Date(),
})
@@ -141,6 +145,10 @@ export async function POST(req: NextRequest) {
totalTokensUsed: sql`total_tokens_used + ${totalTokens}`,
totalCost: sql`total_cost + ${costToStore}`,
currentPeriodCost: sql`current_period_cost + ${costToStore}`,
// Copilot usage tracking increments
totalCopilotCost: sql`total_copilot_cost + ${costToStore}`,
totalCopilotTokens: sql`total_copilot_tokens + ${totalTokens}`,
totalCopilotCalls: sql`total_copilot_calls + 1`,
totalApiCalls: sql`total_api_calls`,
lastActive: new Date(),
}

View File

@@ -0,0 +1,70 @@
import { createCipheriv, createHash, createHmac, randomBytes } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { generateApiKey } from '@/lib/utils'
import { db } from '@/db'
import { copilotApiKeys } from '@/db/schema'
const logger = createLogger('CopilotApiKeysGenerate')
function deriveKey(keyString: string): Buffer {
return createHash('sha256').update(keyString, 'utf8').digest()
}
function encryptRandomIv(plaintext: string, keyString: string): string {
const key = deriveKey(keyString)
const iv = randomBytes(16)
const cipher = createCipheriv('aes-256-gcm', key, iv)
let encrypted = cipher.update(plaintext, 'utf8', 'hex')
encrypted += cipher.final('hex')
const authTag = cipher.getAuthTag().toString('hex')
return `${iv.toString('hex')}:${encrypted}:${authTag}`
}
function computeLookup(plaintext: string, keyString: string): string {
// Deterministic, constant-time comparable MAC: HMAC-SHA256(DB_KEY, plaintext)
return createHmac('sha256', Buffer.from(keyString, 'utf8'))
.update(plaintext, 'utf8')
.digest('hex')
}
export async function POST(req: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
if (!env.AGENT_API_DB_ENCRYPTION_KEY) {
logger.error('AGENT_API_DB_ENCRYPTION_KEY is not set')
return NextResponse.json({ error: 'Server not configured' }, { status: 500 })
}
const userId = session.user.id
// Generate and prefix the key (strip the generic sim_ prefix from the random part)
const rawKey = generateApiKey().replace(/^sim_/, '')
const plaintextKey = `sk-sim-copilot-${rawKey}`
// Encrypt with random IV for confidentiality
const dbEncrypted = encryptRandomIv(plaintextKey, env.AGENT_API_DB_ENCRYPTION_KEY)
// Compute deterministic lookup value for O(1) search
const lookup = computeLookup(plaintextKey, env.AGENT_API_DB_ENCRYPTION_KEY)
const [inserted] = await db
.insert(copilotApiKeys)
.values({ userId, apiKeyEncrypted: dbEncrypted, apiKeyLookup: lookup })
.returning({ id: copilotApiKeys.id })
return NextResponse.json(
{ success: true, key: { id: inserted.id, apiKey: plaintextKey } },
{ status: 201 }
)
} catch (error) {
logger.error('Failed to generate copilot API key', { error })
return NextResponse.json({ error: 'Failed to generate copilot API key' }, { status: 500 })
}
}

View File

@@ -0,0 +1,85 @@
import { createDecipheriv, createHash } from 'crypto'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { copilotApiKeys } from '@/db/schema'
const logger = createLogger('CopilotApiKeys')
function deriveKey(keyString: string): Buffer {
return createHash('sha256').update(keyString, 'utf8').digest()
}
function decryptWithKey(encryptedValue: string, keyString: string): string {
const parts = encryptedValue.split(':')
if (parts.length !== 3) {
throw new Error('Invalid encrypted value format')
}
const [ivHex, encryptedHex, authTagHex] = parts
const key = deriveKey(keyString)
const iv = Buffer.from(ivHex, 'hex')
const decipher = createDecipheriv('aes-256-gcm', key, iv)
decipher.setAuthTag(Buffer.from(authTagHex, 'hex'))
let decrypted = decipher.update(encryptedHex, 'hex', 'utf8')
decrypted += decipher.final('utf8')
return decrypted
}
export async function GET(request: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
if (!env.AGENT_API_DB_ENCRYPTION_KEY) {
logger.error('AGENT_API_DB_ENCRYPTION_KEY is not set')
return NextResponse.json({ error: 'Server not configured' }, { status: 500 })
}
const userId = session.user.id
const rows = await db
.select({ id: copilotApiKeys.id, apiKeyEncrypted: copilotApiKeys.apiKeyEncrypted })
.from(copilotApiKeys)
.where(eq(copilotApiKeys.userId, userId))
const keys = rows.map((row) => ({
id: row.id,
apiKey: decryptWithKey(row.apiKeyEncrypted, env.AGENT_API_DB_ENCRYPTION_KEY as string),
}))
return NextResponse.json({ keys }, { status: 200 })
} catch (error) {
logger.error('Failed to get copilot API keys', { error })
return NextResponse.json({ error: 'Failed to get keys' }, { status: 500 })
}
}
export async function DELETE(request: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
const url = new URL(request.url)
const id = url.searchParams.get('id')
if (!id) {
return NextResponse.json({ error: 'id is required' }, { status: 400 })
}
await db
.delete(copilotApiKeys)
.where(and(eq(copilotApiKeys.userId, userId), eq(copilotApiKeys.id, id)))
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error('Failed to delete copilot API key', { error })
return NextResponse.json({ error: 'Failed to delete key' }, { status: 500 })
}
}

View File

@@ -0,0 +1,79 @@
import { createHmac } from 'crypto'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { copilotApiKeys, userStats } from '@/db/schema'
const logger = createLogger('CopilotApiKeysValidate')
function computeLookup(plaintext: string, keyString: string): string {
// Deterministic MAC: HMAC-SHA256(DB_KEY, plaintext)
return createHmac('sha256', Buffer.from(keyString, 'utf8'))
.update(plaintext, 'utf8')
.digest('hex')
}
export async function POST(req: NextRequest) {
try {
if (!env.AGENT_API_DB_ENCRYPTION_KEY) {
logger.error('AGENT_API_DB_ENCRYPTION_KEY is not set')
return NextResponse.json({ error: 'Server not configured' }, { status: 500 })
}
const body = await req.json().catch(() => null)
const apiKey = typeof body?.apiKey === 'string' ? body.apiKey : undefined
if (!apiKey) {
return new NextResponse(null, { status: 401 })
}
const lookup = computeLookup(apiKey, env.AGENT_API_DB_ENCRYPTION_KEY)
// Find matching API key and its user
const rows = await db
.select({ id: copilotApiKeys.id, userId: copilotApiKeys.userId })
.from(copilotApiKeys)
.where(eq(copilotApiKeys.apiKeyLookup, lookup))
.limit(1)
if (rows.length === 0) {
return new NextResponse(null, { status: 401 })
}
const { userId } = rows[0]
// Check usage for the associated user
const usage = await db
.select({
currentPeriodCost: userStats.currentPeriodCost,
totalCost: userStats.totalCost,
currentUsageLimit: userStats.currentUsageLimit,
})
.from(userStats)
.where(eq(userStats.userId, userId))
.limit(1)
if (usage.length > 0) {
const currentUsage = Number.parseFloat(
(usage[0].currentPeriodCost?.toString() as string) ||
(usage[0].totalCost as unknown as string) ||
'0'
)
const limit = Number.parseFloat((usage[0].currentUsageLimit as unknown as string) || '0')
if (!Number.isNaN(limit) && limit > 0 && currentUsage >= limit) {
// Usage exceeded
logger.info('[API VALIDATION] Usage exceeded', { userId, currentUsage, limit })
return new NextResponse(null, { status: 402 })
}
}
// Valid and within usage limits
return new NextResponse(null, { status: 200 })
} catch (error) {
logger.error('Error validating copilot API key', { error })
return NextResponse.json({ error: 'Failed to validate key' }, { status: 500 })
}
}
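A caller, such as the managed Copilot service, could exercise this handler roughly as follows; the route path is not shown in this diff, so the URL below is a placeholder, and the status codes mirror the handler above (200 valid, 402 usage exceeded, 401 otherwise):
```ts
// Hypothetical caller of the validation handler above (placeholder path).
async function validateCopilotKey(
  baseUrl: string,
  apiKey: string
): Promise<'valid' | 'usage_exceeded' | 'invalid'> {
  const res = await fetch(`${baseUrl}/api/copilot/api-keys/validate`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ apiKey }),
  })
  if (res.status === 200) return 'valid'
  if (res.status === 402) return 'usage_exceeded'
  return 'invalid'
}
```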

View File

@@ -104,7 +104,8 @@ describe('Copilot Chat API Route', () => {
vi.doMock('@/lib/env', () => ({
env: {
SIM_AGENT_API_URL: 'http://localhost:8000',
SIM_AGENT_API_KEY: 'test-sim-agent-key',
COPILOT_API_KEY: 'test-sim-agent-key',
BETTER_AUTH_URL: 'http://localhost:3000',
},
}))
@@ -225,6 +226,7 @@ describe('Copilot Chat API Route', () => {
mode: 'agent',
provider: 'openai',
depth: 0,
origin: 'http://localhost:3000',
}),
})
)
@@ -288,6 +290,7 @@ describe('Copilot Chat API Route', () => {
mode: 'agent',
provider: 'openai',
depth: 0,
origin: 'http://localhost:3000',
}),
})
)
@@ -343,6 +346,7 @@ describe('Copilot Chat API Route', () => {
mode: 'agent',
provider: 'openai',
depth: 0,
origin: 'http://localhost:3000',
}),
})
)
@@ -438,6 +442,7 @@ describe('Copilot Chat API Route', () => {
mode: 'ask',
provider: 'openai',
depth: 0,
origin: 'http://localhost:3000',
}),
})
)

View File

@@ -1,3 +1,4 @@
import { createCipheriv, createDecipheriv, createHash, randomBytes } from 'crypto'
import { and, desc, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
@@ -13,6 +14,7 @@ import { getCopilotModel } from '@/lib/copilot/config'
import { TITLE_GENERATION_SYSTEM_PROMPT, TITLE_GENERATION_USER_PROMPT } from '@/lib/copilot/prompts'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { downloadFile } from '@/lib/uploads'
import { downloadFromS3WithConfig } from '@/lib/uploads/s3/s3-client'
import { S3_COPILOT_CONFIG, USE_S3_STORAGE } from '@/lib/uploads/setup'
@@ -23,6 +25,46 @@ import { createAnthropicFileContent, isSupportedFileType } from './file-utils'
const logger = createLogger('CopilotChatAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
function getRequestOrigin(_req: NextRequest): string {
try {
// Strictly use configured Better Auth URL
return env.BETTER_AUTH_URL || ''
} catch (_) {
return ''
}
}
function deriveKey(keyString: string): Buffer {
return createHash('sha256').update(keyString, 'utf8').digest()
}
function decryptWithKey(encryptedValue: string, keyString: string): string {
const [ivHex, encryptedHex, authTagHex] = encryptedValue.split(':')
if (!ivHex || !encryptedHex || !authTagHex) {
throw new Error('Invalid encrypted format')
}
const key = deriveKey(keyString)
const iv = Buffer.from(ivHex, 'hex')
const decipher = createDecipheriv('aes-256-gcm', key, iv)
decipher.setAuthTag(Buffer.from(authTagHex, 'hex'))
let decrypted = decipher.update(encryptedHex, 'hex', 'utf8')
decrypted += decipher.final('utf8')
return decrypted
}
function encryptWithKey(plaintext: string, keyString: string): string {
const key = deriveKey(keyString)
const iv = randomBytes(16)
const cipher = createCipheriv('aes-256-gcm', key, iv)
let encrypted = cipher.update(plaintext, 'utf8', 'hex')
encrypted += cipher.final('hex')
const authTag = cipher.getAuthTag().toString('hex')
return `${iv.toString('hex')}:${encrypted}:${authTag}`
}
// Schema for file attachments
const FileAttachmentSchema = z.object({
id: z.string(),
@@ -39,7 +81,8 @@ const ChatMessageSchema = z.object({
chatId: z.string().optional(),
workflowId: z.string().min(1, 'Workflow ID is required'),
mode: z.enum(['ask', 'agent']).optional().default('agent'),
depth: z.number().int().min(0).max(3).optional().default(0),
depth: z.number().int().min(-2).max(3).optional().default(0),
prefetch: z.boolean().optional(),
createNewChat: z.boolean().optional().default(false),
stream: z.boolean().optional().default(true),
implicitFeedback: z.string().optional(),
@@ -48,10 +91,6 @@ const ChatMessageSchema = z.object({
conversationId: z.string().optional(),
})
// Sim Agent API configuration
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = env.SIM_AGENT_API_KEY
/**
* Generate a chat title using LLM
*/
@@ -160,6 +199,7 @@ export async function POST(req: NextRequest) {
workflowId,
mode,
depth,
prefetch,
createNewChat,
stream,
implicitFeedback,
@@ -168,6 +208,27 @@ export async function POST(req: NextRequest) {
conversationId,
} = ChatMessageSchema.parse(body)
// Derive request origin for downstream service
const requestOrigin = getRequestOrigin(req)
if (!requestOrigin) {
logger.error(`[${tracker.requestId}] Missing required configuration: BETTER_AUTH_URL`)
return createInternalServerErrorResponse('Missing required configuration: BETTER_AUTH_URL')
}
// Consolidation mapping: map negative depths to base depth with prefetch=true
let effectiveDepth: number | undefined = typeof depth === 'number' ? depth : undefined
let effectivePrefetch: boolean | undefined = prefetch
if (typeof effectiveDepth === 'number') {
if (effectiveDepth === -2) {
effectiveDepth = 1
effectivePrefetch = true
} else if (effectiveDepth === -1) {
effectiveDepth = 0
effectivePrefetch = true
}
}
logger.info(`[${tracker.requestId}] Processing copilot chat request`, {
userId: authenticatedUserId,
workflowId,
@@ -179,6 +240,9 @@ export async function POST(req: NextRequest) {
hasImplicitFeedback: !!implicitFeedback,
provider: provider || 'openai',
hasConversationId: !!conversationId,
depth,
prefetch,
origin: requestOrigin,
})
// Handle chat context
@@ -341,34 +405,68 @@ export async function POST(req: NextRequest) {
(currentChat?.conversationId as string | undefined) || conversationId
// If we have a conversationId, only send the most recent user message; else send full history
const messagesForAgent = effectiveConversationId ? [messages[messages.length - 1]] : messages
const latestUserMessage =
[...messages].reverse().find((m) => m?.role === 'user') || messages[messages.length - 1]
const messagesForAgent = effectiveConversationId ? [latestUserMessage] : messages
const requestPayload = {
messages: messagesForAgent,
workflowId,
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
mode: mode,
provider: providerToUse,
...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}),
...(typeof effectiveDepth === 'number' ? { depth: effectiveDepth } : {}),
...(typeof effectivePrefetch === 'boolean' ? { prefetch: effectivePrefetch } : {}),
...(session?.user?.name && { userName: session.user.name }),
...(requestOrigin ? { origin: requestOrigin } : {}),
}
// Log the payload being sent to the streaming endpoint
try {
logger.info(`[${tracker.requestId}] Sending payload to sim agent streaming endpoint`, {
url: `${SIM_AGENT_API_URL}/api/chat-completion-streaming`,
provider: providerToUse,
mode,
stream,
workflowId,
hasConversationId: !!effectiveConversationId,
depth: typeof effectiveDepth === 'number' ? effectiveDepth : undefined,
prefetch: typeof effectivePrefetch === 'boolean' ? effectivePrefetch : undefined,
messagesCount: requestPayload.messages.length,
...(requestOrigin ? { origin: requestOrigin } : {}),
})
// Full payload as JSON string
logger.info(
`[${tracker.requestId}] Full streaming payload: ${JSON.stringify(requestPayload)}`
)
} catch (e) {
logger.warn(`[${tracker.requestId}] Failed to log payload preview for streaming endpoint`, e)
}
const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/chat-completion-streaming`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
},
body: JSON.stringify({
messages: messagesForAgent,
workflowId,
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
mode: mode,
provider: providerToUse,
...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}),
...(typeof depth === 'number' ? { depth } : {}),
...(session?.user?.name && { userName: session.user.name }),
}),
body: JSON.stringify(requestPayload),
})
if (!simAgentResponse.ok) {
const errorText = await simAgentResponse.text()
if (simAgentResponse.status === 401 || simAgentResponse.status === 402) {
// Forward the status only; the client will render an appropriate assistant message
return new NextResponse(null, { status: simAgentResponse.status })
}
const errorText = await simAgentResponse.text().catch(() => '')
logger.error(`[${tracker.requestId}] Sim agent API error:`, {
status: simAgentResponse.status,
error: errorText,
})
return NextResponse.json(
{ error: `Sim agent API error: ${simAgentResponse.statusText}` },
{ status: simAgentResponse.status }
@@ -398,6 +496,12 @@ export async function POST(req: NextRequest) {
let isFirstDone = true
let responseIdFromStart: string | undefined
let responseIdFromDone: string | undefined
// Track tool call progress to identify a safe done event
const announcedToolCallIds = new Set<string>()
const startedToolExecutionIds = new Set<string>()
const completedToolExecutionIds = new Set<string>()
let lastDoneResponseId: string | undefined
let lastSafeDoneResponseId: string | undefined
// Send chatId as first event
if (actualChatId) {
@@ -515,6 +619,9 @@ export async function POST(req: NextRequest) {
)
if (!event.data?.partial) {
toolCalls.push(event.data)
if (event.data?.id) {
announcedToolCallIds.add(event.data.id)
}
}
break
@@ -524,6 +631,14 @@ export async function POST(req: NextRequest) {
toolName: event.toolName,
status: event.status,
})
if (event.toolCallId) {
if (event.status === 'completed') {
startedToolExecutionIds.add(event.toolCallId)
completedToolExecutionIds.add(event.toolCallId)
} else {
startedToolExecutionIds.add(event.toolCallId)
}
}
break
case 'tool_result':
@@ -534,6 +649,9 @@ export async function POST(req: NextRequest) {
result: `${JSON.stringify(event.result).substring(0, 200)}...`,
resultSize: JSON.stringify(event.result).length,
})
if (event.toolCallId) {
completedToolExecutionIds.add(event.toolCallId)
}
break
case 'tool_error':
@@ -543,6 +661,9 @@ export async function POST(req: NextRequest) {
error: event.error,
success: event.success,
})
if (event.toolCallId) {
completedToolExecutionIds.add(event.toolCallId)
}
break
case 'start':
@@ -557,9 +678,25 @@ export async function POST(req: NextRequest) {
case 'done':
if (event.data?.responseId) {
responseIdFromDone = event.data.responseId
lastDoneResponseId = responseIdFromDone
logger.info(
`[${tracker.requestId}] Received done event with responseId: ${responseIdFromDone}`
)
// Mark this done as safe only if no tool call is currently in progress or pending
const announced = announcedToolCallIds.size
const completed = completedToolExecutionIds.size
const started = startedToolExecutionIds.size
const hasToolInProgress = announced > completed || started > completed
if (!hasToolInProgress) {
lastSafeDoneResponseId = responseIdFromDone
logger.info(
`[${tracker.requestId}] Marked done as SAFE (no tools in progress)`
)
} else {
logger.info(
`[${tracker.requestId}] Done received but tools are in progress (announced=${announced}, started=${started}, completed=${completed})`
)
}
}
if (isFirstDone) {
logger.info(
@@ -654,7 +791,9 @@ export async function POST(req: NextRequest) {
)
}
const responseId = responseIdFromDone || responseIdFromStart
// Persist only a safe conversationId to avoid continuing from a state that expects tool outputs
const previousConversationId = currentChat?.conversationId as string | undefined
const responseId = lastSafeDoneResponseId || previousConversationId || undefined
// Update chat in database immediately (without title)
await db

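To summarize the new bookkeeping, a condensed sketch of how the stream handler decides that a done event is safe to persist as the conversationId; the tool_call and tool_execution case labels are assumptions here, only tool_result, tool_error, and done appear verbatim in the diff above.

// Track tool lifecycle by id; a done event is safe only when nothing is pending
const announcedToolCallIds = new Set<string>()
const startedToolExecutionIds = new Set<string>()
const completedToolExecutionIds = new Set<string>()
let lastSafeDoneResponseId: string | undefined

function onStreamEvent(event: { type: string; toolCallId?: string; status?: string; data?: any }) {
  switch (event.type) {
    case 'tool_call': // assumed label
      if (!event.data?.partial && event.data?.id) announcedToolCallIds.add(event.data.id)
      break
    case 'tool_execution': // assumed label
      if (event.toolCallId) {
        startedToolExecutionIds.add(event.toolCallId)
        if (event.status === 'completed') completedToolExecutionIds.add(event.toolCallId)
      }
      break
    case 'tool_result':
    case 'tool_error':
      if (event.toolCallId) completedToolExecutionIds.add(event.toolCallId)
      break
    case 'done': {
      const announced = announcedToolCallIds.size
      const started = startedToolExecutionIds.size
      const completed = completedToolExecutionIds.size
      const hasToolInProgress = announced > completed || started > completed
      if (!hasToolInProgress && event.data?.responseId) {
        lastSafeDoneResponseId = event.data.responseId
      }
      break
    }
  }
}

// After the stream ends, persist lastSafeDoneResponseId (or keep the previous
// conversationId) rather than trusting whichever done event arrived last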
View File

@@ -48,11 +48,6 @@ async function updateToolCallStatus(
while (Date.now() - startTime < timeout) {
const exists = await redis.exists(key)
if (exists) {
logger.info('Tool call found in Redis, updating status', {
toolCallId,
key,
pollDuration: Date.now() - startTime,
})
break
}
@@ -79,27 +74,8 @@ async function updateToolCallStatus(
timestamp: new Date().toISOString(),
}
// Log what we're about to update in Redis
logger.info('About to update Redis with tool call data', {
toolCallId,
key,
toolCallData,
serializedData: JSON.stringify(toolCallData),
providedStatus: status,
providedMessage: message,
messageIsUndefined: message === undefined,
messageIsNull: message === null,
})
await redis.set(key, JSON.stringify(toolCallData), 'EX', 86400) // Keep 24 hour expiry
logger.info('Tool call status updated in Redis', {
toolCallId,
key,
status,
message,
pollDuration: Date.now() - startTime,
})
return true
} catch (error) {
logger.error('Failed to update tool call status in Redis', {
@@ -131,13 +107,6 @@ export async function POST(req: NextRequest) {
const body = await req.json()
const { toolCallId, status, message } = ConfirmationSchema.parse(body)
logger.info(`[${tracker.requestId}] Tool call confirmation request`, {
userId: authenticatedUserId,
toolCallId,
status,
message,
})
// Update the tool call status in Redis
const updated = await updateToolCallStatus(toolCallId, status, message)
@@ -153,13 +122,6 @@ export async function POST(req: NextRequest) {
}
const duration = tracker.getDuration()
logger.info(`[${tracker.requestId}] Tool call confirmation completed`, {
userId: authenticatedUserId,
toolCallId,
status,
internalStatus: status,
duration,
})
return NextResponse.json({
success: true,

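For reference, a trimmed sketch of the update flow that remains once the logging is stripped; the key prefix and the 30 second poll budget are assumptions, while the exists/set calls and the 24 hour expiry mirror the route above.

import { getRedisClient } from '@/lib/redis'

async function updateToolCallStatus(toolCallId: string, status: string, message?: string) {
  const redis = getRedisClient()
  if (!redis) return false
  const key = `tool-call:${toolCallId}` // assumed prefix; the real route builds its own key
  const timeout = 30_000
  const startTime = Date.now()

  // Wait briefly for the streaming side to create the key before overwriting it
  while (Date.now() - startTime < timeout) {
    if (await redis.exists(key)) break
    await new Promise((resolve) => setTimeout(resolve, 1000))
  }

  const toolCallData = { status, message, timestamp: new Date().toISOString() }
  await redis.set(key, JSON.stringify(toolCallData), 'EX', 86400) // keep 24 hour expiry
  return true
}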
View File

@@ -60,6 +60,7 @@ describe('Copilot Methods API Route', () => {
vi.doMock('@/lib/env', () => ({
env: {
INTERNAL_API_SECRET: 'test-secret-key',
COPILOT_API_KEY: 'test-copilot-key',
},
}))
@@ -123,10 +124,8 @@ describe('Copilot Methods API Route', () => {
expect(response.status).toBe(401)
const responseData = await response.json()
expect(responseData).toEqual({
success: false,
error: 'Invalid API key',
})
expect(responseData.success).toBe(false)
expect(typeof responseData.error).toBe('string')
})
it('should return 401 when internal API key is not configured', async () => {
@@ -134,6 +133,7 @@ describe('Copilot Methods API Route', () => {
vi.doMock('@/lib/env', () => ({
env: {
INTERNAL_API_SECRET: undefined,
COPILOT_API_KEY: 'test-copilot-key',
},
}))
@@ -154,10 +154,9 @@ describe('Copilot Methods API Route', () => {
expect(response.status).toBe(401)
const responseData = await response.json()
expect(responseData).toEqual({
success: false,
error: 'Internal API key not configured',
})
expect(responseData.status).toBeUndefined()
expect(responseData.success).toBe(false)
expect(typeof responseData.error).toBe('string')
})
it('should return 400 for invalid request body - missing methodId', async () => {

View File

@@ -2,7 +2,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { copilotToolRegistry } from '@/lib/copilot/tools/server-tools/registry'
import type { NotificationStatus } from '@/lib/copilot/types'
import { checkInternalApiKey } from '@/lib/copilot/utils'
import { checkCopilotApiKey, checkInternalApiKey } from '@/lib/copilot/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { getRedisClient } from '@/lib/redis'
import { createErrorResponse } from '@/app/api/copilot/methods/utils'
@@ -69,12 +69,6 @@ async function pollRedisForTool(
const pollInterval = 1000 // 1 second
const startTime = Date.now()
logger.info('Starting to poll Redis for tool call status', {
toolCallId,
timeout,
pollInterval,
})
while (Date.now() - startTime < timeout) {
try {
const redisValue = await redis.get(key)
@@ -112,23 +106,6 @@ async function pollRedisForTool(
rawRedisValue: redisValue,
})
logger.info('Tool call status resolved', {
toolCallId,
status,
message,
duration: Date.now() - startTime,
rawRedisValue: redisValue,
parsedAsJSON: redisValue
? (() => {
try {
return JSON.parse(redisValue)
} catch {
return 'failed-to-parse'
}
})()
: null,
})
// Special logging for set environment variables tool when Redis status is found
if (toolCallId && (status === 'accepted' || status === 'rejected')) {
logger.info('SET_ENV_VARS: Redis polling found status update', {
@@ -255,10 +232,13 @@ export async function POST(req: NextRequest) {
const startTime = Date.now()
try {
// Check authentication (internal API key)
const authResult = checkInternalApiKey(req)
if (!authResult.success) {
return NextResponse.json(createErrorResponse(authResult.error || 'Authentication failed'), {
// Evaluate both auth schemes; pass if either is valid
const internalAuth = checkInternalApiKey(req)
const copilotAuth = checkCopilotApiKey(req)
const isAuthenticated = !!(internalAuth?.success || copilotAuth?.success)
if (!isAuthenticated) {
const errorMessage = copilotAuth.error || internalAuth.error || 'Authentication failed'
return NextResponse.json(createErrorResponse(errorMessage), {
status: 401,
})
}
@@ -266,7 +246,7 @@ export async function POST(req: NextRequest) {
const body = await req.json()
const { methodId, params, toolCallId } = MethodExecutionSchema.parse(body)
logger.info(`[${requestId}] Method execution request: ${methodId}`, {
logger.info(`[${requestId}] Method execution request`, {
methodId,
toolCallId,
hasParams: !!params && Object.keys(params).length > 0,

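The diff only shows the call sites, so here is a hedged sketch of what checkCopilotApiKey could look like; the x-api-key header name matches what the chat route sends above, but the function body and result shape are assumptions.

import type { NextRequest } from 'next/server'
import { env } from '@/lib/env'

// Hypothetical implementation; the real helper lives in '@/lib/copilot/utils'
export function checkCopilotApiKey(req: NextRequest): { success: boolean; error?: string } {
  const provided = req.headers.get('x-api-key')
  if (!env.COPILOT_API_KEY) {
    return { success: false, error: 'Copilot API key not configured' }
  }
  if (!provided || provided !== env.COPILOT_API_KEY) {
    return { success: false, error: 'Invalid API key' }
  }
  return { success: true }
}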
View File

@@ -178,7 +178,7 @@ export function findLocalFile(filename: string): string | null {
* Create a file response with appropriate headers
*/
export function createFileResponse(file: FileResponse): NextResponse {
return new NextResponse(file.buffer, {
return new NextResponse(file.buffer as BodyInit, {
status: 200,
headers: {
'Content-Type': file.contentType,

View File

@@ -1,15 +1,16 @@
import { type NextRequest, NextResponse } from 'next/server'
import { Resend } from 'resend'
import { z } from 'zod'
import { renderHelpConfirmationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
import { sendEmail } from '@/lib/email/mailer'
import { getFromEmailAddress } from '@/lib/email/utils'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getEmailDomain } from '@/lib/urls/utils'
const resend = env.RESEND_API_KEY ? new Resend(env.RESEND_API_KEY) : null
const logger = createLogger('HelpAPI')
const helpFormSchema = z.object({
email: z.string().email('Invalid email address'),
subject: z.string().min(1, 'Subject is required'),
message: z.string().min(1, 'Message is required'),
type: z.enum(['bug', 'feedback', 'feature_request', 'other']),
@@ -19,23 +20,19 @@ export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
// Check if Resend API key is configured
if (!resend) {
logger.error(`[${requestId}] RESEND_API_KEY not configured`)
return NextResponse.json(
{
error:
'Email service not configured. Please set RESEND_API_KEY in environment variables.',
},
{ status: 500 }
)
// Get user session
const session = await getSession()
if (!session?.user?.email) {
logger.warn(`[${requestId}] Unauthorized help request attempt`)
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const email = session.user.email
// Handle multipart form data
const formData = await req.formData()
// Extract form fields
const email = formData.get('email') as string
const subject = formData.get('subject') as string
const message = formData.get('message') as string
const type = formData.get('type') as string
@@ -46,19 +43,18 @@ export async function POST(req: NextRequest) {
})
// Validate the form data
const result = helpFormSchema.safeParse({
email,
const validationResult = helpFormSchema.safeParse({
subject,
message,
type,
})
if (!result.success) {
if (!validationResult.success) {
logger.warn(`[${requestId}] Invalid help request data`, {
errors: result.error.format(),
errors: validationResult.error.format(),
})
return NextResponse.json(
{ error: 'Invalid request data', details: result.error.format() },
{ error: 'Invalid request data', details: validationResult.error.format() },
{ status: 400 }
)
}
@@ -96,63 +92,60 @@ ${message}
emailText += `\n\n${images.length} image(s) attached.`
}
// Send email using Resend
const { data, error } = await resend.emails.send({
from: `Sim <noreply@${getEmailDomain()}>`,
to: [`help@${getEmailDomain()}`],
const emailResult = await sendEmail({
to: [`help@${env.EMAIL_DOMAIN || getEmailDomain()}`],
subject: `[${type.toUpperCase()}] ${subject}`,
replyTo: email,
text: emailText,
from: getFromEmailAddress(),
replyTo: email,
emailType: 'transactional',
attachments: images.map((image) => ({
filename: image.filename,
content: image.content.toString('base64'),
contentType: image.contentType,
disposition: 'attachment', // Explicitly set as attachment
disposition: 'attachment',
})),
})
if (error) {
logger.error(`[${requestId}] Error sending help request email`, error)
if (!emailResult.success) {
logger.error(`[${requestId}] Error sending help request email`, emailResult.message)
return NextResponse.json({ error: 'Failed to send email' }, { status: 500 })
}
logger.info(`[${requestId}] Help request email sent successfully`)
// Send confirmation email to the user
await resend.emails
.send({
from: `Sim <noreply@${getEmailDomain()}>`,
try {
const confirmationHtml = await renderHelpConfirmationEmail(
email,
type as 'bug' | 'feedback' | 'feature_request' | 'other',
images.length
)
await sendEmail({
to: [email],
subject: `Your ${type} request has been received: ${subject}`,
text: `
Hello,
Thank you for your ${type} submission. We've received your request and will get back to you as soon as possible.
Your message:
${message}
${images.length > 0 ? `You attached ${images.length} image(s).` : ''}
Best regards,
The Sim Team
`,
replyTo: `help@${getEmailDomain()}`,
})
.catch((err) => {
logger.warn(`[${requestId}] Failed to send confirmation email`, err)
html: confirmationHtml,
from: getFromEmailAddress(),
replyTo: `help@${env.EMAIL_DOMAIN || getEmailDomain()}`,
emailType: 'transactional',
})
} catch (err) {
logger.warn(`[${requestId}] Failed to send confirmation email`, err)
}
return NextResponse.json(
{ success: true, message: 'Help request submitted successfully' },
{ status: 200 }
)
} catch (error) {
// Check if error is related to missing API key
if (error instanceof Error && error.message.includes('API key')) {
logger.error(`[${requestId}] API key configuration error`, error)
if (error instanceof Error && error.message.includes('not configured')) {
logger.error(`[${requestId}] Email service configuration error`, error)
return NextResponse.json(
{ error: 'Email service configuration error. Please check your RESEND_API_KEY.' },
{
error:
'Email service configuration error. Please check your email service configuration.',
},
{ status: 500 }
)
}

View File

@@ -1,4 +1,4 @@
import { runs } from '@trigger.dev/sdk/v3'
import { runs } from '@trigger.dev/sdk'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'

View File

@@ -4,15 +4,50 @@
*
* @vitest-environment node
*/
import { describe, expect, it, vi } from 'vitest'
import { beforeEach, describe, expect, it, vi } from 'vitest'
vi.mock('drizzle-orm')
vi.mock('@/lib/logs/console/logger')
vi.mock('@/lib/logs/console/logger', () => ({
createLogger: vi.fn(() => ({
info: vi.fn(),
debug: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
})),
}))
vi.mock('@/db')
vi.mock('@/lib/documents/utils', () => ({
retryWithExponentialBackoff: (fn: any) => fn(),
}))
import { handleTagAndVectorSearch, handleTagOnlySearch, handleVectorOnlySearch } from './utils'
vi.stubGlobal(
'fetch',
vi.fn().mockResolvedValue({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
})
)
vi.mock('@/lib/env', () => ({
env: {},
isTruthy: (value: string | boolean | number | undefined) =>
typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
}))
import {
generateSearchEmbedding,
handleTagAndVectorSearch,
handleTagOnlySearch,
handleVectorOnlySearch,
} from './utils'
describe('Knowledge Search Utils', () => {
beforeEach(() => {
vi.clearAllMocks()
})
describe('handleTagOnlySearch', () => {
it('should throw error when no filters provided', async () => {
const params = {
@@ -140,4 +175,251 @@ describe('Knowledge Search Utils', () => {
expect(params.distanceThreshold).toBe(0.8)
})
})
describe('generateSearchEmbedding', () => {
it('should use Azure OpenAI when KB-specific config is provided', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
AZURE_OPENAI_API_KEY: 'test-azure-key',
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
const result = await generateSearchEmbedding('test query')
expect(fetchSpy).toHaveBeenCalledWith(
'https://test.openai.azure.com/openai/deployments/text-embedding-ada-002/embeddings?api-version=2024-12-01-preview',
expect.objectContaining({
headers: expect.objectContaining({
'api-key': 'test-azure-key',
}),
})
)
expect(result).toEqual([0.1, 0.2, 0.3])
// Clean up
Object.keys(env).forEach((key) => delete (env as any)[key])
})
it('should fallback to OpenAI when no KB Azure config provided', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
const result = await generateSearchEmbedding('test query')
expect(fetchSpy).toHaveBeenCalledWith(
'https://api.openai.com/v1/embeddings',
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer test-openai-key',
}),
})
)
expect(result).toEqual([0.1, 0.2, 0.3])
// Clean up
Object.keys(env).forEach((key) => delete (env as any)[key])
})
it('should use default API version when not provided in Azure config', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
AZURE_OPENAI_API_KEY: 'test-azure-key',
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
KB_OPENAI_MODEL_NAME: 'custom-embedding-model',
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
await generateSearchEmbedding('test query')
expect(fetchSpy).toHaveBeenCalledWith(
expect.stringContaining('api-version='),
expect.any(Object)
)
// Clean up
Object.keys(env).forEach((key) => delete (env as any)[key])
})
it('should use custom model name when provided in Azure config', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
AZURE_OPENAI_API_KEY: 'test-azure-key',
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
KB_OPENAI_MODEL_NAME: 'custom-embedding-model',
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
await generateSearchEmbedding('test query', 'text-embedding-3-small')
expect(fetchSpy).toHaveBeenCalledWith(
'https://test.openai.azure.com/openai/deployments/custom-embedding-model/embeddings?api-version=2024-12-01-preview',
expect.any(Object)
)
// Clean up
Object.keys(env).forEach((key) => delete (env as any)[key])
})
it('should throw error when no API configuration provided', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
await expect(generateSearchEmbedding('test query')).rejects.toThrow(
'Either OPENAI_API_KEY or Azure OpenAI configuration (AZURE_OPENAI_API_KEY + AZURE_OPENAI_ENDPOINT) must be configured'
)
})
it('should handle Azure OpenAI API errors properly', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
AZURE_OPENAI_API_KEY: 'test-azure-key',
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: false,
status: 404,
statusText: 'Not Found',
text: async () => 'Deployment not found',
} as any)
await expect(generateSearchEmbedding('test query')).rejects.toThrow('Embedding API failed')
// Clean up
Object.keys(env).forEach((key) => delete (env as any)[key])
})
it('should handle OpenAI API errors properly', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: false,
status: 429,
statusText: 'Too Many Requests',
text: async () => 'Rate limit exceeded',
} as any)
await expect(generateSearchEmbedding('test query')).rejects.toThrow('Embedding API failed')
// Clean up
Object.keys(env).forEach((key) => delete (env as any)[key])
})
it('should include correct request body for Azure OpenAI', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
AZURE_OPENAI_API_KEY: 'test-azure-key',
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
await generateSearchEmbedding('test query')
expect(fetchSpy).toHaveBeenCalledWith(
expect.any(String),
expect.objectContaining({
body: JSON.stringify({
input: ['test query'],
encoding_format: 'float',
}),
})
)
// Clean up
Object.keys(env).forEach((key) => delete (env as any)[key])
})
it('should include correct request body for OpenAI', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2, 0.3] }],
}),
} as any)
await generateSearchEmbedding('test query', 'text-embedding-3-small')
expect(fetchSpy).toHaveBeenCalledWith(
expect.any(String),
expect.objectContaining({
body: JSON.stringify({
input: ['test query'],
model: 'text-embedding-3-small',
encoding_format: 'float',
}),
})
)
// Clean up
Object.keys(env).forEach((key) => delete (env as any)[key])
})
})
})
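A minimal sketch, consistent with what these tests assert, of how the shared embedding helper picks between Azure OpenAI and OpenAI; the real implementation lives in @/lib/embeddings/utils, and the default api-version value is an assumption.

import { env } from '@/lib/env'

async function embedQuery(query: string, modelName = 'text-embedding-3-small'): Promise<number[]> {
  const useAzure =
    env.AZURE_OPENAI_API_KEY && env.AZURE_OPENAI_ENDPOINT && env.KB_OPENAI_MODEL_NAME

  let url: string
  let headers: Record<string, string>
  let body: Record<string, unknown>

  if (useAzure) {
    // Azure addresses a deployment by name and requires an api-version query parameter
    const apiVersion = env.AZURE_OPENAI_API_VERSION || '2024-12-01-preview' // default assumed
    url = `${env.AZURE_OPENAI_ENDPOINT}/openai/deployments/${env.KB_OPENAI_MODEL_NAME}/embeddings?api-version=${apiVersion}`
    headers = { 'api-key': env.AZURE_OPENAI_API_KEY as string, 'Content-Type': 'application/json' }
    body = { input: [query], encoding_format: 'float' } // model is implied by the deployment
  } else if (env.OPENAI_API_KEY) {
    url = 'https://api.openai.com/v1/embeddings'
    headers = {
      Authorization: `Bearer ${env.OPENAI_API_KEY}`,
      'Content-Type': 'application/json',
    }
    body = { input: [query], model: modelName, encoding_format: 'float' }
  } else {
    throw new Error(
      'Either OPENAI_API_KEY or Azure OpenAI configuration (AZURE_OPENAI_API_KEY + AZURE_OPENAI_ENDPOINT) must be configured'
    )
  }

  const response = await fetch(url, { method: 'POST', headers, body: JSON.stringify(body) })
  if (!response.ok) {
    throw new Error(`Embedding API failed: ${response.status} ${response.statusText}`)
  }
  const data = await response.json()
  return data.data[0].embedding
}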

View File

@@ -1,22 +1,10 @@
import { and, eq, inArray, sql } from 'drizzle-orm'
import { retryWithExponentialBackoff } from '@/lib/documents/utils'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { embedding } from '@/db/schema'
const logger = createLogger('KnowledgeSearchUtils')
export class APIError extends Error {
public status: number
constructor(message: string, status: number) {
super(message)
this.name = 'APIError'
this.status = status
}
}
export interface SearchResult {
id: string
content: string
@@ -41,61 +29,8 @@ export interface SearchParams {
distanceThreshold?: number
}
export async function generateSearchEmbedding(query: string): Promise<number[]> {
const openaiApiKey = env.OPENAI_API_KEY
if (!openaiApiKey) {
throw new Error('OPENAI_API_KEY not configured')
}
try {
const embedding = await retryWithExponentialBackoff(
async () => {
const response = await fetch('https://api.openai.com/v1/embeddings', {
method: 'POST',
headers: {
Authorization: `Bearer ${openaiApiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
input: query,
model: 'text-embedding-3-small',
encoding_format: 'float',
}),
})
if (!response.ok) {
const errorText = await response.text()
const error = new APIError(
`OpenAI API error: ${response.status} ${response.statusText} - ${errorText}`,
response.status
)
throw error
}
const data = await response.json()
if (!data.data || !Array.isArray(data.data) || data.data.length === 0) {
throw new Error('Invalid response format from OpenAI embeddings API')
}
return data.data[0].embedding
},
{
maxRetries: 5,
initialDelayMs: 1000,
maxDelayMs: 30000,
backoffMultiplier: 2,
}
)
return embedding
} catch (error) {
logger.error('Failed to generate search embedding:', error)
throw new Error(
`Embedding generation failed: ${error instanceof Error ? error.message : 'Unknown error'}`
)
}
}
// Use shared embedding utility
export { generateSearchEmbedding } from '@/lib/embeddings/utils'
function getTagFilters(filters: Record<string, string>, embedding: any) {
return Object.entries(filters).map(([key, value]) => {

View File

@@ -252,5 +252,76 @@ describe('Knowledge Utils', () => {
expect(result.length).toBe(2)
})
it('should use Azure OpenAI when Azure config is provided', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
AZURE_OPENAI_API_KEY: 'test-azure-key',
AZURE_OPENAI_ENDPOINT: 'https://test.openai.azure.com',
AZURE_OPENAI_API_VERSION: '2024-12-01-preview',
KB_OPENAI_MODEL_NAME: 'text-embedding-ada-002',
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2], index: 0 }],
}),
} as any)
await generateEmbeddings(['test text'])
expect(fetchSpy).toHaveBeenCalledWith(
'https://test.openai.azure.com/openai/deployments/text-embedding-ada-002/embeddings?api-version=2024-12-01-preview',
expect.objectContaining({
headers: expect.objectContaining({
'api-key': 'test-azure-key',
}),
})
)
Object.keys(env).forEach((key) => delete (env as any)[key])
})
it('should fallback to OpenAI when no Azure config provided', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
Object.assign(env, {
OPENAI_API_KEY: 'test-openai-key',
})
const fetchSpy = vi.mocked(fetch)
fetchSpy.mockResolvedValueOnce({
ok: true,
json: async () => ({
data: [{ embedding: [0.1, 0.2], index: 0 }],
}),
} as any)
await generateEmbeddings(['test text'])
expect(fetchSpy).toHaveBeenCalledWith(
'https://api.openai.com/v1/embeddings',
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer test-openai-key',
}),
})
)
Object.keys(env).forEach((key) => delete (env as any)[key])
})
it('should throw error when no API configuration provided', async () => {
const { env } = await import('@/lib/env')
Object.keys(env).forEach((key) => delete (env as any)[key])
await expect(generateEmbeddings(['test text'])).rejects.toThrow(
'Either OPENAI_API_KEY or Azure OpenAI configuration (AZURE_OPENAI_API_KEY + AZURE_OPENAI_ENDPOINT) must be configured'
)
})
})
})

View File

@@ -1,8 +1,7 @@
import crypto from 'crypto'
import { and, eq, isNull } from 'drizzle-orm'
import { processDocument } from '@/lib/documents/document-processor'
import { retryWithExponentialBackoff } from '@/lib/documents/utils'
import { env } from '@/lib/env'
import { generateEmbeddings } from '@/lib/embeddings/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
@@ -10,22 +9,11 @@ import { document, embedding, knowledgeBase } from '@/db/schema'
const logger = createLogger('KnowledgeUtils')
// Timeout constants (in milliseconds)
const TIMEOUTS = {
OVERALL_PROCESSING: 150000, // 150 seconds (2.5 minutes)
EMBEDDINGS_API: 60000, // 60 seconds per batch
} as const
class APIError extends Error {
public status: number
constructor(message: string, status: number) {
super(message)
this.name = 'APIError'
this.status = status
}
}
/**
* Create a timeout wrapper for async operations
*/
@@ -110,18 +98,6 @@ export interface EmbeddingData {
updatedAt: Date
}
interface OpenAIEmbeddingResponse {
data: Array<{
embedding: number[]
index: number
}>
model: string
usage: {
prompt_tokens: number
total_tokens: number
}
}
export interface KnowledgeBaseAccessResult {
hasAccess: true
knowledgeBase: Pick<KnowledgeBaseData, 'id' | 'userId'>
@@ -405,87 +381,8 @@ export async function checkChunkAccess(
}
}
/**
* Generate embeddings using OpenAI API with retry logic for rate limiting
*/
export async function generateEmbeddings(
texts: string[],
embeddingModel = 'text-embedding-3-small'
): Promise<number[][]> {
const openaiApiKey = env.OPENAI_API_KEY
if (!openaiApiKey) {
throw new Error('OPENAI_API_KEY not configured')
}
try {
const batchSize = 100
const allEmbeddings: number[][] = []
for (let i = 0; i < texts.length; i += batchSize) {
const batch = texts.slice(i, i + batchSize)
logger.info(
`Generating embeddings for batch ${Math.floor(i / batchSize) + 1} (${batch.length} texts)`
)
const batchEmbeddings = await retryWithExponentialBackoff(
async () => {
const controller = new AbortController()
const timeoutId = setTimeout(() => controller.abort(), TIMEOUTS.EMBEDDINGS_API)
try {
const response = await fetch('https://api.openai.com/v1/embeddings', {
method: 'POST',
headers: {
Authorization: `Bearer ${openaiApiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
input: batch,
model: embeddingModel,
encoding_format: 'float',
}),
signal: controller.signal,
})
clearTimeout(timeoutId)
if (!response.ok) {
const errorText = await response.text()
const error = new APIError(
`OpenAI API error: ${response.status} ${response.statusText} - ${errorText}`,
response.status
)
throw error
}
const data: OpenAIEmbeddingResponse = await response.json()
return data.data.map((item) => item.embedding)
} catch (error) {
clearTimeout(timeoutId)
if (error instanceof Error && error.name === 'AbortError') {
throw new Error('OpenAI API request timed out')
}
throw error
}
},
{
maxRetries: 5,
initialDelayMs: 1000,
maxDelayMs: 60000, // Max 1 minute delay for embeddings
backoffMultiplier: 2,
}
)
allEmbeddings.push(...batchEmbeddings)
}
return allEmbeddings
} catch (error) {
logger.error('Failed to generate embeddings:', error)
throw error
}
}
// Export for external use
export { generateEmbeddings }
/**
* Process a document asynchronously with full error handling

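Related, the removed batch code guarded each embeddings request with an AbortController timeout; a standalone sketch of that pattern, with the 60 second default taken from the removed TIMEOUTS.EMBEDDINGS_API constant.

// Abort a fetch that exceeds timeoutMs, otherwise resolve with the parsed JSON
async function fetchJsonWithTimeout(url: string, init: RequestInit, timeoutMs = 60_000) {
  const controller = new AbortController()
  const timeoutId = setTimeout(() => controller.abort(), timeoutMs)
  try {
    const response = await fetch(url, { ...init, signal: controller.signal })
    if (!response.ok) {
      throw new Error(`Request failed: ${response.status} ${response.statusText}`)
    }
    return await response.json()
  } catch (error) {
    if (error instanceof Error && error.name === 'AbortError') {
      throw new Error('Request timed out')
    }
    throw error
  } finally {
    clearTimeout(timeoutId)
  }
}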
View File

@@ -46,20 +46,7 @@ export async function GET(
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString(),
totalDurationMs: workflowLog.totalDurationMs,
blockStats: {
total: workflowLog.blockCount,
success: workflowLog.successCount,
error: workflowLog.errorCount,
skipped: workflowLog.skippedCount,
},
cost: {
total: workflowLog.totalCost ? Number.parseFloat(workflowLog.totalCost) : null,
input: workflowLog.totalInputCost ? Number.parseFloat(workflowLog.totalInputCost) : null,
output: workflowLog.totalOutputCost
? Number.parseFloat(workflowLog.totalOutputCost)
: null,
},
totalTokens: workflowLog.totalTokens,
cost: workflowLog.cost || null,
},
}

View File

@@ -0,0 +1,102 @@
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { permissions, workflow, workflowExecutionLogs } from '@/db/schema'
const logger = createLogger('LogDetailsByIdAPI')
export const revalidate = 0
export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized log details access attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
const { id } = await params
const rows = await db
.select({
id: workflowExecutionLogs.id,
workflowId: workflowExecutionLogs.workflowId,
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
createdAt: workflowExecutionLogs.createdAt,
workflowName: workflow.name,
workflowDescription: workflow.description,
workflowColor: workflow.color,
workflowFolderId: workflow.folderId,
workflowUserId: workflow.userId,
workflowWorkspaceId: workflow.workspaceId,
workflowCreatedAt: workflow.createdAt,
workflowUpdatedAt: workflow.updatedAt,
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflow.workspaceId),
eq(permissions.userId, userId)
)
)
.where(eq(workflowExecutionLogs.id, id))
.limit(1)
const log = rows[0]
if (!log) {
return NextResponse.json({ error: 'Not found' }, { status: 404 })
}
const workflowSummary = {
id: log.workflowId,
name: log.workflowName,
description: log.workflowDescription,
color: log.workflowColor,
folderId: log.workflowFolderId,
userId: log.workflowUserId,
workspaceId: log.workflowWorkspaceId,
createdAt: log.workflowCreatedAt,
updatedAt: log.workflowUpdatedAt,
}
const response = {
id: log.id,
workflowId: log.workflowId,
executionId: log.executionId,
level: log.level,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger,
createdAt: log.startedAt.toISOString(),
files: log.files || undefined,
workflow: workflowSummary,
executionData: {
totalDuration: log.totalDurationMs,
...(log.executionData as any),
enhanced: true,
},
cost: log.cost as any,
}
return NextResponse.json({ data: response })
} catch (error: any) {
logger.error(`[${requestId}] log details fetch error`, error)
return NextResponse.json({ error: error.message }, { status: 500 })
}
}
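For consumers of this new endpoint, a sketch of the response envelope the handler returns; types are approximated from the select and mapping above, and Date fields serialize to ISO strings over the wire.

interface LogDetailsResponse {
  data: {
    id: string
    workflowId: string
    executionId: string
    level: string
    duration: string | null // e.g. '1234ms', derived from totalDurationMs
    trigger: string
    createdAt: string // startedAt as an ISO string
    files?: unknown
    workflow: {
      id: string
      name: string
      description: string | null
      color: string | null
      folderId: string | null
      userId: string
      workspaceId: string | null
      createdAt: string
      updatedAt: string
    }
    executionData: { totalDuration: number | null; enhanced: true } & Record<string, unknown>
    cost: unknown
  }
}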

View File

@@ -99,21 +99,13 @@ export async function GET(request: NextRequest) {
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalInputCost: workflowExecutionLogs.totalInputCost,
totalOutputCost: workflowExecutionLogs.totalOutputCost,
totalTokens: workflowExecutionLogs.totalTokens,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
metadata: workflowExecutionLogs.metadata,
createdAt: workflowExecutionLogs.createdAt,
})
.from(workflowExecutionLogs)

View File

@@ -1,4 +1,4 @@
import { and, desc, eq, gte, inArray, lte, or, type SQL, sql } from 'drizzle-orm'
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -44,8 +44,7 @@ function extractBlockExecutionsFromTraceSpans(traceSpans: any[]): any[] {
export const revalidate = 0
const QueryParamsSchema = z.object({
includeWorkflow: z.coerce.boolean().optional().default(false),
includeBlocks: z.coerce.boolean().optional().default(false),
details: z.enum(['basic', 'full']).optional().default('basic'),
limit: z.coerce.number().optional().default(100),
offset: z.coerce.number().optional().default(0),
level: z.string().optional(),
@@ -81,20 +80,12 @@ export async function GET(request: NextRequest) {
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalInputCost: workflowExecutionLogs.totalInputCost,
totalOutputCost: workflowExecutionLogs.totalOutputCost,
totalTokens: workflowExecutionLogs.totalTokens,
metadata: workflowExecutionLogs.metadata,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
createdAt: workflowExecutionLogs.createdAt,
workflowName: workflow.name,
@@ -163,13 +154,8 @@ export async function GET(request: NextRequest) {
// Filter by search query
if (params.search) {
const searchTerm = `%${params.search}%`
conditions = and(
conditions,
or(
sql`${workflowExecutionLogs.message} ILIKE ${searchTerm}`,
sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`
)
)
// With message removed, restrict search to executionId only
conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`)
}
// Execute the query using the optimized join
@@ -290,31 +276,20 @@ export async function GET(request: NextRequest) {
const enhancedLogs = logs.map((log) => {
const blockExecutions = blockExecutionsByExecution[log.executionId] || []
// Use stored trace spans from metadata if available, otherwise create from block executions
const storedTraceSpans = (log.metadata as any)?.traceSpans
// Use stored trace spans if available, otherwise create from block executions
const storedTraceSpans = (log.executionData as any)?.traceSpans
const traceSpans =
storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
? storedTraceSpans
: createTraceSpans(blockExecutions)
// Use extracted cost summary if available, otherwise use stored values
// Prefer stored cost JSON; otherwise synthesize from blocks
const costSummary =
blockExecutions.length > 0
? extractCostSummary(blockExecutions)
: {
input: Number(log.totalInputCost) || 0,
output: Number(log.totalOutputCost) || 0,
total: Number(log.totalCost) || 0,
tokens: {
total: log.totalTokens || 0,
prompt: (log.metadata as any)?.tokenBreakdown?.prompt || 0,
completion: (log.metadata as any)?.tokenBreakdown?.completion || 0,
},
models: (log.metadata as any)?.models || {},
}
log.cost && Object.keys(log.cost as any).length > 0
? (log.cost as any)
: extractCostSummary(blockExecutions)
// Build workflow object from joined data
const workflow = {
const workflowSummary = {
id: log.workflowId,
name: log.workflowName,
description: log.workflowDescription,
@@ -329,67 +304,28 @@ export async function GET(request: NextRequest) {
return {
id: log.id,
workflowId: log.workflowId,
executionId: log.executionId,
executionId: params.details === 'full' ? log.executionId : undefined,
level: log.level,
message: log.message,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger,
createdAt: log.startedAt.toISOString(),
files: log.files || undefined,
workflow: params.includeWorkflow ? workflow : undefined,
metadata: {
totalDuration: log.totalDurationMs,
cost: costSummary,
blockStats: {
total: log.blockCount,
success: log.successCount,
error: log.errorCount,
skipped: log.skippedCount,
},
traceSpans,
blockExecutions,
enhanced: true,
},
files: params.details === 'full' ? log.files || undefined : undefined,
workflow: workflowSummary,
executionData:
params.details === 'full'
? {
totalDuration: log.totalDurationMs,
traceSpans,
blockExecutions,
enhanced: true,
}
: undefined,
cost:
params.details === 'full'
? (costSummary as any)
: { total: (costSummary as any)?.total || 0 },
}
})
// Include block execution data if requested
if (params.includeBlocks) {
// Block executions are now extracted from stored trace spans in metadata
const blockLogsByExecution: Record<string, any[]> = {}
logs.forEach((log) => {
const storedTraceSpans = (log.metadata as any)?.traceSpans
if (storedTraceSpans && Array.isArray(storedTraceSpans)) {
blockLogsByExecution[log.executionId] =
extractBlockExecutionsFromTraceSpans(storedTraceSpans)
} else {
blockLogsByExecution[log.executionId] = []
}
})
// Add block logs to metadata
const logsWithBlocks = enhancedLogs.map((log) => ({
...log,
metadata: {
...log.metadata,
blockExecutions: blockLogsByExecution[log.executionId] || [],
},
}))
return NextResponse.json(
{
data: logsWithBlocks,
total: Number(count),
page: Math.floor(params.offset / params.limit) + 1,
pageSize: params.limit,
totalPages: Math.ceil(Number(count) / params.limit),
},
{ status: 200 }
)
}
// Return basic logs
return NextResponse.json(
{
data: enhancedLogs,

View File
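A usage sketch for the reworked query contract; the route path and response handling are assumptions, the point is that details=full returns executionId, files, and executionData while the basic default only carries cost.total.

// Hypothetical client call; only the query parameters mirror QueryParamsSchema above
const params = new URLSearchParams({
  details: 'full', // 'basic' (default) omits executionId, files, and executionData
  limit: '50',
  offset: '0',
  search: 'exec-1234', // with message removed, search matches executionId only
})
const res = await fetch(`/api/logs?${params}`) // route path is an assumption
const { data } = await res.json()

for (const log of data) {
  // Always present: id, workflowId, level, trigger, createdAt, workflow summary, cost.total
  // Only with details=full: executionId, files, executionData (traceSpans, blockExecutions)
  console.log(log.id, log.cost?.total)
}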

@@ -39,6 +39,8 @@ export async function POST(request: NextRequest) {
stream,
messages,
environmentVariables,
reasoningEffort,
verbosity,
} = body
logger.info(`[${requestId}] Provider request details`, {
@@ -58,6 +60,8 @@ export async function POST(request: NextRequest) {
messageCount: messages?.length || 0,
hasEnvironmentVariables:
!!environmentVariables && Object.keys(environmentVariables).length > 0,
reasoningEffort,
verbosity,
})
let finalApiKey: string
@@ -99,6 +103,8 @@ export async function POST(request: NextRequest) {
stream,
messages,
environmentVariables,
reasoningEffort,
verbosity,
})
const executionTime = Date.now() - startTime

View File

@@ -1,9 +1,11 @@
import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { hasAdminPermission } from '@/lib/permissions/utils'
import { db } from '@/db'
import { templates } from '@/db/schema'
import { templates, workflow } from '@/db/schema'
const logger = createLogger('TemplateByIdAPI')
@@ -62,3 +64,153 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
const updateTemplateSchema = z.object({
name: z.string().min(1).max(100),
description: z.string().min(1).max(500),
author: z.string().min(1).max(100),
category: z.string().min(1),
icon: z.string().min(1),
color: z.string().regex(/^#[0-9A-F]{6}$/i),
state: z.any().optional(), // Workflow state
})
// PUT /api/templates/[id] - Update a template
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const { id } = await params
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized template update attempt for ID: ${id}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const validationResult = updateTemplateSchema.safeParse(body)
if (!validationResult.success) {
logger.warn(`[${requestId}] Invalid template data for update: ${id}`, validationResult.error)
return NextResponse.json(
{ error: 'Invalid template data', details: validationResult.error.errors },
{ status: 400 }
)
}
const { name, description, author, category, icon, color, state } = validationResult.data
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
logger.warn(`[${requestId}] Template not found for update: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Permission: template owner OR admin of the workflow's workspace (if any)
let canUpdate = existingTemplate[0].userId === session.user.id
if (!canUpdate && existingTemplate[0].workflowId) {
const wfRows = await db
.select({ workspaceId: workflow.workspaceId })
.from(workflow)
.where(eq(workflow.id, existingTemplate[0].workflowId))
.limit(1)
const workspaceId = wfRows[0]?.workspaceId as string | null | undefined
if (workspaceId) {
const hasAdmin = await hasAdminPermission(session.user.id, workspaceId)
if (hasAdmin) canUpdate = true
}
}
if (!canUpdate) {
logger.warn(`[${requestId}] User denied permission to update template ${id}`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
// Update the template
const updatedTemplate = await db
.update(templates)
.set({
name,
description,
author,
category,
icon,
color,
...(state && { state }),
updatedAt: new Date(),
})
.where(eq(templates.id, id))
.returning()
logger.info(`[${requestId}] Successfully updated template: ${id}`)
return NextResponse.json({
data: updatedTemplate[0],
message: 'Template updated successfully',
})
} catch (error: any) {
logger.error(`[${requestId}] Error updating template: ${id}`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
// DELETE /api/templates/[id] - Delete a template
export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const { id } = await params
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized template delete attempt for ID: ${id}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Fetch template
const existing = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existing.length === 0) {
logger.warn(`[${requestId}] Template not found for delete: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
const template = existing[0]
// Permission: owner or admin of the workflow's workspace (if any)
let canDelete = template.userId === session.user.id
if (!canDelete && template.workflowId) {
// Look up workflow to get workspaceId
const wfRows = await db
.select({ workspaceId: workflow.workspaceId })
.from(workflow)
.where(eq(workflow.id, template.workflowId))
.limit(1)
const workspaceId = wfRows[0]?.workspaceId as string | null | undefined
if (workspaceId) {
const hasAdmin = await hasAdminPermission(session.user.id, workspaceId)
if (hasAdmin) canDelete = true
}
}
if (!canDelete) {
logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
await db.delete(templates).where(eq(templates.id, id))
logger.info(`[${requestId}] Deleted template: ${id}`)
return NextResponse.json({ success: true })
} catch (error: any) {
logger.error(`[${requestId}] Error deleting template: ${id}`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
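A hedged usage sketch for the new update endpoint; the payload fields mirror updateTemplateSchema and all values are illustrative.

const templateId = 'template-id' // illustrative
const res = await fetch(`/api/templates/${templateId}`, {
  method: 'PUT',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    name: 'Lead enrichment',
    description: 'Enrich inbound leads and post a summary to Slack',
    author: 'Acme Ops',
    category: 'marketing',
    icon: 'workflow',
    color: '#6F3DFA',
    // state is optional; include it to overwrite the stored workflow state
  }),
})
if (res.status === 403) {
  // Only the template owner or an admin of the workflow's workspace may update it
}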

View File

@@ -80,7 +80,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
workspaceId: workspaceId,
name: `${templateData.name} (copy)`,
description: templateData.description,
state: templateData.state,
color: templateData.color,
userId: session.user.id,
createdAt: now,
@@ -158,9 +157,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}))
}
// Update the workflow with the corrected state
await tx.update(workflow).set({ state: updatedState }).where(eq(workflow.id, newWorkflowId))
// Insert blocks and edges
if (blockEntries.length > 0) {
await tx.insert(workflowBlocks).values(blockEntries)

View File

@@ -77,6 +77,7 @@ const QueryParamsSchema = z.object({
limit: z.coerce.number().optional().default(50),
offset: z.coerce.number().optional().default(0),
search: z.string().optional(),
workflowId: z.string().optional(),
})
// GET /api/templates - Retrieve templates
@@ -111,6 +112,11 @@ export async function GET(request: NextRequest) {
)
}
// Apply workflow filter if provided (for getting template by workflow)
if (params.workflowId) {
conditions.push(eq(templates.workflowId, params.workflowId))
}
// Combine conditions
const whereCondition = conditions.length > 0 ? and(...conditions) : undefined

View File

@@ -45,7 +45,7 @@ export async function GET(request: NextRequest) {
// Fetch the file from Google Drive API
logger.info(`[${requestId}] Fetching file ${fileId} from Google Drive API`)
const response = await fetch(
`https://www.googleapis.com/drive/v3/files/${fileId}?fields=id,name,mimeType,iconLink,webViewLink,thumbnailLink,createdTime,modifiedTime,size,owners,exportLinks`,
`https://www.googleapis.com/drive/v3/files/${fileId}?fields=id,name,mimeType,iconLink,webViewLink,thumbnailLink,createdTime,modifiedTime,size,owners,exportLinks,shortcutDetails&supportsAllDrives=true`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
@@ -77,6 +77,34 @@ export async function GET(request: NextRequest) {
'application/vnd.google-apps.presentation': 'application/pdf', // Google Slides to PDF
}
// Resolve shortcuts transparently for UI stability
if (
file.mimeType === 'application/vnd.google-apps.shortcut' &&
file.shortcutDetails?.targetId
) {
const targetId = file.shortcutDetails.targetId
const shortcutResp = await fetch(
`https://www.googleapis.com/drive/v3/files/${targetId}?fields=id,name,mimeType,iconLink,webViewLink,thumbnailLink,createdTime,modifiedTime,size,owners,exportLinks&supportsAllDrives=true`,
{
headers: { Authorization: `Bearer ${accessToken}` },
}
)
if (shortcutResp.ok) {
const targetFile = await shortcutResp.json()
file.id = targetFile.id
file.name = targetFile.name
file.mimeType = targetFile.mimeType
file.iconLink = targetFile.iconLink
file.webViewLink = targetFile.webViewLink
file.thumbnailLink = targetFile.thumbnailLink
file.createdTime = targetFile.createdTime
file.modifiedTime = targetFile.modifiedTime
file.size = targetFile.size
file.owners = targetFile.owners
file.exportLinks = targetFile.exportLinks
}
}
// If the file is a Google Docs, Sheets, or Slides file, we need to provide the export link
if (file.mimeType.startsWith('application/vnd.google-apps.')) {
const format = exportFormats[file.mimeType] || 'application/pdf'

View File

@@ -1,10 +1,8 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
export const dynamic = 'force-dynamic'
@@ -32,64 +30,48 @@ export async function GET(request: NextRequest) {
const credentialId = searchParams.get('credentialId')
const mimeType = searchParams.get('mimeType')
const query = searchParams.get('query') || ''
const folderId = searchParams.get('folderId') || searchParams.get('parentId') || ''
const workflowId = searchParams.get('workflowId') || undefined
if (!credentialId) {
logger.warn(`[${requestId}] Missing credential ID`)
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
// Get the credential from the database
const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
if (!credentials.length) {
logger.warn(`[${requestId}] Credential not found`, { credentialId })
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
}
const credential = credentials[0]
// Check if the credential belongs to the user
if (credential.userId !== session.user.id) {
logger.warn(`[${requestId}] Unauthorized credential access attempt`, {
credentialUserId: credential.userId,
requestUserId: session.user.id,
})
return NextResponse.json({ error: 'Unauthorized' }, { status: 403 })
// Authorize use of the credential (supports collaborator credentials via workflow)
const authz = await authorizeCredentialUse(request, { credentialId: credentialId!, workflowId })
if (!authz.ok || !authz.credentialOwnerUserId) {
logger.warn(`[${requestId}] Unauthorized credential access attempt`, authz)
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
}
// Refresh access token if needed using the utility function
const accessToken = await refreshAccessTokenIfNeeded(credentialId, session.user.id, requestId)
const accessToken = await refreshAccessTokenIfNeeded(
credentialId!,
authz.credentialOwnerUserId,
requestId
)
if (!accessToken) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
// Build the query parameters for Google Drive API
let queryParams = 'trashed=false'
// Add mimeType filter if provided
// Build Drive 'q' expression safely
const qParts: string[] = ['trashed = false']
if (folderId) {
qParts.push(`'${folderId.replace(/'/g, "\\'")}' in parents`)
}
if (mimeType) {
// For Google Drive API, we need to use 'q' parameter for mimeType filtering
// Instead of using the mimeType parameter directly, we'll add it to the query
if (queryParams.includes('q=')) {
queryParams += ` and mimeType='${mimeType}'`
} else {
queryParams += `&q=mimeType='${mimeType}'`
}
qParts.push(`mimeType = '${mimeType.replace(/'/g, "\\'")}'`)
}
// Add search query if provided
if (query) {
if (queryParams.includes('q=')) {
queryParams += ` and name contains '${query}'`
} else {
queryParams += `&q=name contains '${query}'`
}
qParts.push(`name contains '${query.replace(/'/g, "\\'")}'`)
}
const q = encodeURIComponent(qParts.join(' and '))
// Fetch files from Google Drive API
// Fetch files from Google Drive API with shared drives support
const response = await fetch(
`https://www.googleapis.com/drive/v3/files?${queryParams}&fields=files(id,name,mimeType,iconLink,webViewLink,thumbnailLink,createdTime,modifiedTime,size,owners)`,
`https://www.googleapis.com/drive/v3/files?q=${q}&supportsAllDrives=true&includeItemsFromAllDrives=true&spaces=drive&fields=files(id,name,mimeType,iconLink,webViewLink,thumbnailLink,createdTime,modifiedTime,size,owners,parents)`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
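
A minimal sketch of the query construction shown above: single quotes are escaped, clauses are joined with 'and', and the shared-drive flags are added to the list call. The helper and function names below are illustrative, not part of the change.

// Illustrative helper: escape single quotes inside Drive query string literals.
const escapeDriveLiteral = (value: string) => value.replace(/'/g, "\\'")

// Compose the same kind of 'q' expression the route builds.
function buildDriveQuery(opts: { folderId?: string; mimeType?: string; query?: string }): string {
  const qParts = ['trashed = false']
  if (opts.folderId) qParts.push(`'${escapeDriveLiteral(opts.folderId)}' in parents`)
  if (opts.mimeType) qParts.push(`mimeType = '${escapeDriveLiteral(opts.mimeType)}'`)
  if (opts.query) qParts.push(`name contains '${escapeDriveLiteral(opts.query)}'`)
  return encodeURIComponent(qParts.join(' and '))
}

// Example list request mirroring the route, with shared-drive support enabled.
async function listDriveFiles(accessToken: string): Promise<Array<{ id: string; name: string }>> {
  const q = buildDriveQuery({ mimeType: 'application/vnd.google-apps.spreadsheet', query: 'report' })
  const url =
    `https://www.googleapis.com/drive/v3/files?q=${q}` +
    '&supportsAllDrives=true&includeItemsFromAllDrives=true&spaces=drive' +
    '&fields=files(id,name,mimeType,parents)'
  const res = await fetch(url, { headers: { Authorization: `Bearer ${accessToken}` } })
  const body = (await res.json()) as { files?: Array<{ id: string; name: string }> }
  return body.files ?? []
}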

View File

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'
export const dynamic = 'force-dynamic'
const logger = new Logger('JiraIssueAPI')
const logger = createLogger('JiraIssueAPI')
export async function POST(request: Request) {
try {
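
This and the following Jira routes swap direct Logger construction for the createLogger factory. The real module is not shown in this view; the sketch below only illustrates the shape such a factory usually has.

// Sketch only: the actual @/lib/logs/console/logger is not part of this diff.
class ConsoleLogger {
  constructor(private readonly context: string) {}
  info(message: string, meta?: unknown) {
    console.info(`[${this.context}] ${message}`, meta ?? '')
  }
  warn(message: string, meta?: unknown) {
    console.warn(`[${this.context}] ${message}`, meta ?? '')
  }
  error(message: string, meta?: unknown) {
    console.error(`[${this.context}] ${message}`, meta ?? '')
  }
}

// A factory keeps construction behind one function, so call sites no longer depend on the class itself.
export function createLogger(context: string): ConsoleLogger {
  return new ConsoleLogger(context)
}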

View File

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'
export const dynamic = 'force-dynamic'
const logger = new Logger('JiraIssuesAPI')
const logger = createLogger('JiraIssuesAPI')
export async function POST(request: Request) {
try {

View File

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'
export const dynamic = 'force-dynamic'
const logger = new Logger('JiraProjectsAPI')
const logger = createLogger('JiraProjectsAPI')
export async function GET(request: Request) {
try {

View File

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'
export const dynamic = 'force-dynamic'
const logger = new Logger('JiraUpdateAPI')
const logger = createLogger('JiraUpdateAPI')
export async function PUT(request: Request) {
try {

View File

@@ -1,10 +1,10 @@
import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { getJiraCloudId } from '@/tools/jira/utils'
export const dynamic = 'force-dynamic'
const logger = new Logger('JiraWriteAPI')
const logger = createLogger('JiraWriteAPI')
export async function POST(request: Request) {
try {

View File

@@ -0,0 +1,120 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { user } from '@/db/schema'
const logger = createLogger('UpdateUserProfileAPI')
// Schema for updating user profile
const UpdateProfileSchema = z
.object({
name: z.string().min(1, 'Name is required').optional(),
})
.refine((data) => data.name !== undefined, {
message: 'Name field must be provided',
})
export const dynamic = 'force-dynamic'
export async function PATCH(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized profile update attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
const body = await request.json()
const validatedData = UpdateProfileSchema.parse(body)
// Build update object
const updateData: any = { updatedAt: new Date() }
if (validatedData.name !== undefined) updateData.name = validatedData.name
// Update user profile
const [updatedUser] = await db
.update(user)
.set(updateData)
.where(eq(user.id, userId))
.returning()
if (!updatedUser) {
return NextResponse.json({ error: 'User not found' }, { status: 404 })
}
logger.info(`[${requestId}] User profile updated`, {
userId,
updatedFields: Object.keys(validatedData),
})
return NextResponse.json({
success: true,
user: {
id: updatedUser.id,
name: updatedUser.name,
email: updatedUser.email,
image: updatedUser.image,
},
})
} catch (error: any) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid profile data`, {
errors: error.errors,
})
return NextResponse.json(
{ error: 'Invalid profile data', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Profile update error`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
// GET endpoint to fetch current user profile
export async function GET() {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized profile fetch attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
const [userRecord] = await db
.select({
id: user.id,
name: user.name,
email: user.email,
image: user.image,
emailVerified: user.emailVerified,
})
.from(user)
.where(eq(user.id, userId))
.limit(1)
if (!userRecord) {
return NextResponse.json({ error: 'User not found' }, { status: 404 })
}
return NextResponse.json({
user: userRecord,
})
} catch (error: any) {
logger.error(`[${requestId}] Profile fetch error`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
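
A hypothetical client-side call against the new profile route. The URL is an assumption (the file path is not visible in this view); the payload and response shapes come from the schema and handlers above.

// Hypothetical usage; the route path below is assumed, not taken from the diff.
async function renameCurrentUser(name: string) {
  const res = await fetch('/api/users/me/profile', {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json' },
    // Must satisfy UpdateProfileSchema: at least the name field has to be provided.
    body: JSON.stringify({ name }),
  })
  if (!res.ok) throw new Error(`Profile update failed with status ${res.status}`)
  return (await res.json()) as {
    success: boolean
    user: { id: string; name: string; email: string; image: string | null }
  }
}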

View File

@@ -1,6 +1,6 @@
import { unstable_noStore as noStore } from 'next/cache'
import { type NextRequest, NextResponse } from 'next/server'
import OpenAI from 'openai'
import OpenAI, { AzureOpenAI } from 'openai'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
@@ -10,14 +10,32 @@ export const maxDuration = 60
const logger = createLogger('WandGenerateAPI')
const openai = env.OPENAI_API_KEY
? new OpenAI({
apiKey: env.OPENAI_API_KEY,
})
: null
const azureApiKey = env.AZURE_OPENAI_API_KEY
const azureEndpoint = env.AZURE_OPENAI_ENDPOINT
const azureApiVersion = env.AZURE_OPENAI_API_VERSION
const wandModelName = env.WAND_OPENAI_MODEL_NAME || 'gpt-4o'
const openaiApiKey = env.OPENAI_API_KEY
if (!env.OPENAI_API_KEY) {
logger.warn('OPENAI_API_KEY not found. Wand generation API will not function.')
const useWandAzure = azureApiKey && azureEndpoint && azureApiVersion
const client = useWandAzure
? new AzureOpenAI({
apiKey: azureApiKey,
apiVersion: azureApiVersion,
endpoint: azureEndpoint,
})
: openaiApiKey
? new OpenAI({
apiKey: openaiApiKey,
})
: null
if (!useWandAzure && !openaiApiKey) {
logger.warn(
'Neither Azure OpenAI nor OpenAI API key found. Wand generation API will not function.'
)
} else {
logger.info(`Using ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'} for wand generation`)
}
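
Condensed restatement of the client selection above: Azure OpenAI is used only when all three Azure variables are present, otherwise the plain OpenAI client, otherwise no client at all. Shown as a standalone function for clarity; it is not how the route itself is structured.

import OpenAI, { AzureOpenAI } from 'openai'

// All three Azure values are required; the deployment name is passed later as the `model` argument.
function createWandClient(env: {
  AZURE_OPENAI_API_KEY?: string
  AZURE_OPENAI_ENDPOINT?: string
  AZURE_OPENAI_API_VERSION?: string
  OPENAI_API_KEY?: string
}): AzureOpenAI | OpenAI | null {
  const useAzure = Boolean(
    env.AZURE_OPENAI_API_KEY && env.AZURE_OPENAI_ENDPOINT && env.AZURE_OPENAI_API_VERSION
  )
  if (useAzure) {
    return new AzureOpenAI({
      apiKey: env.AZURE_OPENAI_API_KEY,
      apiVersion: env.AZURE_OPENAI_API_VERSION,
      endpoint: env.AZURE_OPENAI_ENDPOINT,
    })
  }
  return env.OPENAI_API_KEY ? new OpenAI({ apiKey: env.OPENAI_API_KEY }) : null
}
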
interface ChatMessage {
@@ -32,14 +50,12 @@ interface RequestBody {
history?: ChatMessage[]
}
// The endpoint is now generic - system prompts come from wand configs
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
logger.info(`[${requestId}] Received wand generation request`)
if (!openai) {
logger.error(`[${requestId}] OpenAI client not initialized. Missing API key.`)
if (!client) {
logger.error(`[${requestId}] AI client not initialized. Missing API key.`)
return NextResponse.json(
{ success: false, error: 'Wand generation service is not configured.' },
{ status: 503 }
@@ -74,16 +90,19 @@ export async function POST(req: NextRequest) {
// Add the current user prompt
messages.push({ role: 'user', content: prompt })
logger.debug(`[${requestId}] Calling OpenAI API for wand generation`, {
stream,
historyLength: history.length,
})
logger.debug(
`[${requestId}] Calling ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'} API for wand generation`,
{
stream,
historyLength: history.length,
}
)
// For streaming responses
if (stream) {
try {
const streamCompletion = await openai?.chat.completions.create({
model: 'gpt-4o',
const streamCompletion = await client.chat.completions.create({
model: useWandAzure ? wandModelName : 'gpt-4o',
messages: messages,
temperature: 0.3,
max_tokens: 10000,
@@ -141,8 +160,8 @@ export async function POST(req: NextRequest) {
}
// For non-streaming responses
const completion = await openai?.chat.completions.create({
model: 'gpt-4o',
const completion = await client.chat.completions.create({
model: useWandAzure ? wandModelName : 'gpt-4o',
messages: messages,
temperature: 0.3,
max_tokens: 10000,
@@ -151,9 +170,11 @@ export async function POST(req: NextRequest) {
const generatedContent = completion.choices[0]?.message?.content?.trim()
if (!generatedContent) {
logger.error(`[${requestId}] OpenAI response was empty or invalid.`)
logger.error(
`[${requestId}] ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'} response was empty or invalid.`
)
return NextResponse.json(
{ success: false, error: 'Failed to generate content. OpenAI response was empty.' },
{ success: false, error: 'Failed to generate content. AI response was empty.' },
{ status: 500 }
)
}
@@ -171,7 +192,9 @@ export async function POST(req: NextRequest) {
if (error instanceof OpenAI.APIError) {
status = error.status || 500
logger.error(`[${requestId}] OpenAI API Error: ${status} - ${error.message}`)
logger.error(
`[${requestId}] ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'} API Error: ${status} - ${error.message}`
)
if (status === 401) {
clientErrorMessage = 'Authentication failed. Please check your API key configuration.'
@@ -181,6 +204,10 @@ export async function POST(req: NextRequest) {
clientErrorMessage =
'The wand generation service is currently unavailable. Please try again later.'
}
} else if (useWandAzure && error.message?.includes('DeploymentNotFound')) {
clientErrorMessage =
'Azure OpenAI deployment not found. Please check your model deployment configuration.'
status = 404
}
return NextResponse.json(

View File

@@ -1,8 +1,10 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { webhook, workflow } from '@/db/schema'
@@ -242,6 +244,167 @@ export async function DELETE(
const foundWebhook = webhookData.webhook
// If it's an Airtable webhook, delete it from Airtable first
if (foundWebhook.provider === 'airtable') {
try {
const { baseId, externalId } = (foundWebhook.providerConfig || {}) as {
baseId?: string
externalId?: string
}
if (!baseId) {
logger.warn(`[${requestId}] Missing baseId for Airtable webhook deletion.`, {
webhookId: id,
})
return NextResponse.json(
{ error: 'Missing baseId for Airtable webhook deletion' },
{ status: 400 }
)
}
// Get access token for the workflow owner
const userIdForToken = webhookData.workflow.userId
const accessToken = await getOAuthToken(userIdForToken, 'airtable')
if (!accessToken) {
logger.warn(
`[${requestId}] Could not retrieve Airtable access token for user ${userIdForToken}. Cannot delete webhook in Airtable.`,
{ webhookId: id }
)
return NextResponse.json(
{ error: 'Airtable access token not found for webhook deletion' },
{ status: 401 }
)
}
// Resolve externalId if missing by listing webhooks and matching our notificationUrl
let resolvedExternalId: string | undefined = externalId
if (!resolvedExternalId) {
try {
const requestOrigin = new URL(request.url).origin
const effectiveOrigin = requestOrigin.includes('localhost')
? env.NEXT_PUBLIC_APP_URL || requestOrigin
: requestOrigin
const expectedNotificationUrl = `${effectiveOrigin}/api/webhooks/trigger/${foundWebhook.path}`
const listUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`
const listResp = await fetch(listUrl, {
headers: {
Authorization: `Bearer ${accessToken}`,
},
})
const listBody = await listResp.json().catch(() => null)
if (listResp.ok && listBody && Array.isArray(listBody.webhooks)) {
const match = listBody.webhooks.find((w: any) => {
const url: string | undefined = w?.notificationUrl
if (!url) return false
// Prefer exact match; fallback to suffix match to handle origin/host remaps
return (
url === expectedNotificationUrl ||
url.endsWith(`/api/webhooks/trigger/${foundWebhook.path}`)
)
})
if (match?.id) {
resolvedExternalId = match.id as string
// Persist resolved externalId for future operations
try {
await db
.update(webhook)
.set({
providerConfig: {
...(foundWebhook.providerConfig || {}),
externalId: resolvedExternalId,
},
updatedAt: new Date(),
})
.where(eq(webhook.id, id))
} catch {
// non-fatal persistence error
}
logger.info(`[${requestId}] Resolved Airtable externalId by listing webhooks`, {
baseId,
externalId: resolvedExternalId,
})
} else {
logger.warn(`[${requestId}] Could not resolve Airtable externalId from list`, {
baseId,
expectedNotificationUrl,
})
}
} else {
logger.warn(`[${requestId}] Failed to list Airtable webhooks to resolve externalId`, {
baseId,
status: listResp.status,
body: listBody,
})
}
} catch (e: any) {
logger.warn(`[${requestId}] Error attempting to resolve Airtable externalId`, {
error: e?.message,
})
}
}
// If still not resolvable, skip remote deletion but proceed with local delete
if (!resolvedExternalId) {
logger.info(
`[${requestId}] Airtable externalId not found; skipping remote deletion and proceeding to remove local record`,
{ baseId }
)
}
if (resolvedExternalId) {
const airtableDeleteUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks/${resolvedExternalId}`
const airtableResponse = await fetch(airtableDeleteUrl, {
method: 'DELETE',
headers: {
Authorization: `Bearer ${accessToken}`,
},
})
// Attempt to parse error body for better diagnostics
if (!airtableResponse.ok) {
let responseBody: any = null
try {
responseBody = await airtableResponse.json()
} catch {
// ignore parse errors
}
logger.error(
`[${requestId}] Failed to delete Airtable webhook in Airtable. Status: ${airtableResponse.status}`,
{ baseId, externalId: resolvedExternalId, response: responseBody }
)
return NextResponse.json(
{
error: 'Failed to delete webhook from Airtable',
details:
(responseBody && (responseBody.error?.message || responseBody.error)) ||
`Status ${airtableResponse.status}`,
},
{ status: 500 }
)
}
logger.info(`[${requestId}] Successfully deleted Airtable webhook in Airtable`, {
baseId,
externalId: resolvedExternalId,
})
}
} catch (error: any) {
logger.error(`[${requestId}] Error deleting Airtable webhook`, {
webhookId: id,
error: error.message,
stack: error.stack,
})
return NextResponse.json(
{ error: 'Failed to delete webhook from Airtable', details: error.message },
{ status: 500 }
)
}
}
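
The externalId resolution above, extracted into a standalone form for readability. The list endpoint and the response shape are the ones the route already relies on; the function name is illustrative.

// Illustrative extraction of the lookup the route performs when externalId is missing.
async function resolveAirtableWebhookId(
  accessToken: string,
  baseId: string,
  expectedNotificationUrl: string,
  pathSuffix: string
): Promise<string | undefined> {
  const resp = await fetch(`https://api.airtable.com/v0/bases/${baseId}/webhooks`, {
    headers: { Authorization: `Bearer ${accessToken}` },
  })
  if (!resp.ok) return undefined
  const body = (await resp.json().catch(() => null)) as {
    webhooks?: Array<{ id: string; notificationUrl?: string }>
  } | null
  // Prefer an exact notificationUrl match, then fall back to a suffix match on the trigger path.
  const match = body?.webhooks?.find(
    (w) => w.notificationUrl === expectedNotificationUrl || w.notificationUrl?.endsWith(pathSuffix)
  )
  return match?.id
}
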
// If it's a Telegram webhook, delete it from Telegram first
if (foundWebhook.provider === 'telegram') {
try {

View File

@@ -1,11 +1,11 @@
import { nanoid } from 'nanoid'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { acquireLock, releaseLock } from '@/lib/redis'
import { pollGmailWebhooks } from '@/lib/webhooks/gmail-polling-service'
const logger = new Logger('GmailPollingAPI')
const logger = createLogger('GmailPollingAPI')
export const dynamic = 'force-dynamic'
export const maxDuration = 180 // Allow up to 3 minutes for polling to complete

View File

@@ -1,11 +1,11 @@
import { nanoid } from 'nanoid'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { Logger } from '@/lib/logs/console/logger'
import { createLogger } from '@/lib/logs/console/logger'
import { acquireLock, releaseLock } from '@/lib/redis'
import { pollOutlookWebhooks } from '@/lib/webhooks/outlook-polling-service'
const logger = new Logger('OutlookPollingAPI')
const logger = createLogger('OutlookPollingAPI')
export const dynamic = 'force-dynamic'
export const maxDuration = 180 // Allow up to 3 minutes for polling to complete

View File

@@ -329,7 +329,7 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Gmail provider detected. Setting up Gmail webhook configuration.`)
try {
const { configureGmailPolling } = await import('@/lib/webhooks/utils')
// Use workflow owner for OAuth lookups to support collaborator-saved credentials
// Pass workflow owner for backward-compat fallback (utils prefers credentialId if present)
const success = await configureGmailPolling(workflowRecord.userId, savedWebhook, requestId)
if (!success) {
@@ -364,7 +364,7 @@ export async function POST(request: NextRequest) {
)
try {
const { configureOutlookPolling } = await import('@/lib/webhooks/utils')
// Use workflow owner for OAuth lookups to support collaborator-saved credentials
// Pass workflow owner for backward-compat fallback (utils prefers credentialId if present)
const success = await configureOutlookPolling(
workflowRecord.userId,
savedWebhook,

View File

@@ -7,7 +7,6 @@ import { NextRequest } from 'next/server'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, mockExecutionDependencies } from '@/app/api/__test-utils__/utils'
// Define mock functions at the top level to be used in mocks
const hasProcessedMessageMock = vi.fn().mockResolvedValue(false)
const markMessageAsProcessedMock = vi.fn().mockResolvedValue(true)
const closeRedisConnectionMock = vi.fn().mockResolvedValue(undefined)
@@ -33,7 +32,6 @@ const executeMock = vi.fn().mockResolvedValue({
},
})
// Mock the DB schema objects
const webhookMock = {
id: 'webhook-id-column',
path: 'path-column',
@@ -43,10 +41,6 @@ const webhookMock = {
}
const workflowMock = { id: 'workflow-id-column' }
// Mock global timers
vi.useFakeTimers()
// Mock modules at file scope before any tests
vi.mock('@/lib/redis', () => ({
hasProcessedMessage: hasProcessedMessageMock,
markMessageAsProcessed: markMessageAsProcessedMock,
@@ -77,19 +71,6 @@ vi.mock('@/executor', () => ({
})),
}))
// Mock setTimeout and other timer functions
vi.mock('timers', () => {
return {
setTimeout: (callback: any) => {
// Immediately invoke the callback
callback()
// Return a fake timer id
return 123
},
}
})
// Mock the database and schema
vi.mock('@/db', () => {
const dbMock = {
select: vi.fn().mockImplementation((columns) => ({
@@ -128,11 +109,9 @@ describe('Webhook Trigger API Route', () => {
beforeEach(() => {
vi.resetModules()
vi.resetAllMocks()
vi.clearAllTimers()
mockExecutionDependencies()
// Mock services/queue for rate limiting
vi.doMock('@/services/queue', () => ({
RateLimiter: vi.fn().mockImplementation(() => ({
checkRateLimit: vi.fn().mockResolvedValue({
@@ -284,10 +263,340 @@ describe('Webhook Trigger API Route', () => {
expect(text).toMatch(/not found/i) // Response should contain "not found" message
})
/**
* Test Slack-specific webhook handling
* Verifies that Slack signature verification is performed
*/
// TODO: Fix failing test - returns 500 instead of 200
// it('should handle Slack webhooks with signature verification', async () => { ... })
describe('Generic Webhook Authentication', () => {
const setupGenericWebhook = async (config: Record<string, any>) => {
const { db } = await import('@/db')
const limitMock = vi.fn().mockReturnValue([
{
webhook: {
id: 'generic-webhook-id',
provider: 'generic',
path: 'test-path',
isActive: true,
providerConfig: config,
workflowId: 'test-workflow-id',
},
workflow: {
id: 'test-workflow-id',
userId: 'test-user-id',
name: 'Test Workflow',
},
},
])
const whereMock = vi.fn().mockReturnValue({ limit: limitMock })
const innerJoinMock = vi.fn().mockReturnValue({ where: whereMock })
const fromMock = vi.fn().mockReturnValue({ innerJoin: innerJoinMock })
const subscriptionLimitMock = vi.fn().mockReturnValue([{ plan: 'pro' }])
const subscriptionWhereMock = vi.fn().mockReturnValue({ limit: subscriptionLimitMock })
const subscriptionFromMock = vi.fn().mockReturnValue({ where: subscriptionWhereMock })
// @ts-ignore - mocking the query chain
db.select.mockImplementation((columns: any) => {
if (columns.plan) {
return { from: subscriptionFromMock }
}
return { from: fromMock }
})
}
/**
* Test generic webhook without authentication (default behavior)
*/
it('should process generic webhook without authentication', async () => {
await setupGenericWebhook({ requireAuth: false })
const req = createMockRequest('POST', { event: 'test', id: 'test-123' })
const params = Promise.resolve({ path: 'test-path' })
vi.doMock('@trigger.dev/sdk', () => ({
tasks: {
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
},
}))
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
// Authentication passed if we don't get 401
expect(response.status).not.toBe(401)
})
/**
* Test generic webhook with Bearer token authentication (no custom header)
*/
it('should authenticate with Bearer token when no custom header is configured', async () => {
await setupGenericWebhook({
requireAuth: true,
token: 'test-token-123',
// No secretHeaderName - should default to Bearer
})
const headers = {
'Content-Type': 'application/json',
Authorization: 'Bearer test-token-123',
}
const req = createMockRequest('POST', { event: 'bearer.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
vi.doMock('@trigger.dev/sdk', () => ({
tasks: {
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
},
}))
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
// Authentication passed if we don't get 401
expect(response.status).not.toBe(401)
})
/**
* Test generic webhook with custom header authentication
*/
it('should authenticate with custom header when configured', async () => {
await setupGenericWebhook({
requireAuth: true,
token: 'secret-token-456',
secretHeaderName: 'X-Custom-Auth',
})
const headers = {
'Content-Type': 'application/json',
'X-Custom-Auth': 'secret-token-456',
}
const req = createMockRequest('POST', { event: 'custom.header.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
vi.doMock('@trigger.dev/sdk', () => ({
tasks: {
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
},
}))
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
// Authentication passed if we don't get 401
expect(response.status).not.toBe(401)
})
/**
* Test case insensitive Bearer token authentication
*/
it('should handle case insensitive Bearer token authentication', async () => {
await setupGenericWebhook({
requireAuth: true,
token: 'case-test-token',
})
vi.doMock('@trigger.dev/sdk', () => ({
tasks: {
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
},
}))
const testCases = [
'Bearer case-test-token',
'bearer case-test-token',
'BEARER case-test-token',
'BeArEr case-test-token',
]
for (const authHeader of testCases) {
const headers = {
'Content-Type': 'application/json',
Authorization: authHeader,
}
const req = createMockRequest('POST', { event: 'case.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
// Authentication passed if we don't get 401
expect(response.status).not.toBe(401)
}
})
/**
* Test case insensitive custom header authentication
*/
it('should handle case insensitive custom header authentication', async () => {
await setupGenericWebhook({
requireAuth: true,
token: 'custom-token-789',
secretHeaderName: 'X-Secret-Key',
})
vi.doMock('@trigger.dev/sdk', () => ({
tasks: {
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
},
}))
const testCases = ['X-Secret-Key', 'x-secret-key', 'X-SECRET-KEY', 'x-Secret-Key']
for (const headerName of testCases) {
const headers = {
'Content-Type': 'application/json',
[headerName]: 'custom-token-789',
}
const req = createMockRequest('POST', { event: 'custom.case.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
// Authentication passed if we don't get 401
expect(response.status).not.toBe(401)
}
})
/**
* Test rejection of wrong Bearer token
*/
it('should reject wrong Bearer token', async () => {
await setupGenericWebhook({
requireAuth: true,
token: 'correct-token',
})
const headers = {
'Content-Type': 'application/json',
Authorization: 'Bearer wrong-token',
}
const req = createMockRequest('POST', { event: 'wrong.token.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
expect(response.status).toBe(401)
expect(await response.text()).toContain('Unauthorized - Invalid authentication token')
expect(processWebhookMock).not.toHaveBeenCalled()
})
/**
* Test rejection of wrong custom header token
*/
it('should reject wrong custom header token', async () => {
await setupGenericWebhook({
requireAuth: true,
token: 'correct-custom-token',
secretHeaderName: 'X-Auth-Key',
})
const headers = {
'Content-Type': 'application/json',
'X-Auth-Key': 'wrong-custom-token',
}
const req = createMockRequest('POST', { event: 'wrong.custom.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
expect(response.status).toBe(401)
expect(await response.text()).toContain('Unauthorized - Invalid authentication token')
expect(processWebhookMock).not.toHaveBeenCalled()
})
/**
* Test rejection of missing authentication
*/
it('should reject missing authentication when required', async () => {
await setupGenericWebhook({
requireAuth: true,
token: 'required-token',
})
const req = createMockRequest('POST', { event: 'no.auth.test' })
const params = Promise.resolve({ path: 'test-path' })
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
expect(response.status).toBe(401)
expect(await response.text()).toContain('Unauthorized - Invalid authentication token')
expect(processWebhookMock).not.toHaveBeenCalled()
})
/**
* Test exclusivity - Bearer token should be rejected when custom header is configured
*/
it('should reject Bearer token when custom header is configured', async () => {
await setupGenericWebhook({
requireAuth: true,
token: 'exclusive-token',
secretHeaderName: 'X-Only-Header',
})
const headers = {
'Content-Type': 'application/json',
Authorization: 'Bearer exclusive-token', // Correct token but wrong header type
}
const req = createMockRequest('POST', { event: 'exclusivity.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
expect(response.status).toBe(401)
expect(await response.text()).toContain('Unauthorized - Invalid authentication token')
expect(processWebhookMock).not.toHaveBeenCalled()
})
/**
* Test wrong custom header name is rejected
*/
it('should reject wrong custom header name', async () => {
await setupGenericWebhook({
requireAuth: true,
token: 'correct-token',
secretHeaderName: 'X-Expected-Header',
})
const headers = {
'Content-Type': 'application/json',
'X-Wrong-Header': 'correct-token', // Correct token but wrong header name
}
const req = createMockRequest('POST', { event: 'wrong.header.name.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
expect(response.status).toBe(401)
expect(await response.text()).toContain('Unauthorized - Invalid authentication token')
expect(processWebhookMock).not.toHaveBeenCalled()
})
/**
* Test authentication required but no token configured
*/
it('should reject when auth is required but no token is configured', async () => {
await setupGenericWebhook({
requireAuth: true,
// No token configured
})
const headers = {
'Content-Type': 'application/json',
Authorization: 'Bearer any-token',
}
const req = createMockRequest('POST', { event: 'no.token.config.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
expect(response.status).toBe(401)
expect(await response.text()).toContain(
'Unauthorized - Authentication required but not configured'
)
expect(processWebhookMock).not.toHaveBeenCalled()
})
})
})
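
The provider configurations these tests construct reduce to roughly the following shape; this is a summary of the fixtures, not a type defined in the codebase.

// Summary of the providerConfig variants exercised above.
type GenericWebhookAuthConfig =
  | { requireAuth?: false } // open webhook, no token checked
  | { requireAuth: true; token?: string; secretHeaderName?: string } // token required; custom header optional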

View File

@@ -1,4 +1,4 @@
import { tasks } from '@trigger.dev/sdk/v3'
import { tasks } from '@trigger.dev/sdk'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkServerSideUsageLimits } from '@/lib/billing'
@@ -196,6 +196,53 @@ export async function POST(
}
}
// Handle generic webhook authentication if enabled
if (foundWebhook.provider === 'generic') {
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
if (providerConfig.requireAuth) {
const configToken = providerConfig.token
const secretHeaderName = providerConfig.secretHeaderName
// --- Token Validation ---
if (configToken) {
let isTokenValid = false
if (secretHeaderName) {
// Check custom header (headers are case-insensitive)
const headerValue = request.headers.get(secretHeaderName.toLowerCase())
if (headerValue === configToken) {
isTokenValid = true
}
} else {
// Check standard Authorization header (case-insensitive Bearer keyword)
const authHeader = request.headers.get('authorization')
// Case-insensitive comparison for "Bearer" keyword
if (authHeader?.toLowerCase().startsWith('bearer ')) {
const token = authHeader.substring(7) // Remove "Bearer " (7 characters)
if (token === configToken) {
isTokenValid = true
}
}
}
if (!isTokenValid) {
const expectedHeader = secretHeaderName || 'Authorization: Bearer TOKEN'
logger.warn(
`[${requestId}] Generic webhook authentication failed. Expected header: ${expectedHeader}`
)
return new NextResponse('Unauthorized - Invalid authentication token', { status: 401 })
}
} else {
logger.warn(`[${requestId}] Generic webhook requires auth but no token configured`)
return new NextResponse('Unauthorized - Authentication required but not configured', {
status: 401,
})
}
}
}
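
For reference, this is how a sender would authenticate against a generic webhook under each mode handled above; URLs and token values are placeholders.

// Placeholder URLs and tokens; only the header usage matters here.
async function sendTestEvents() {
  const payload = JSON.stringify({ event: 'example' })

  // Bearer mode: no secretHeaderName configured, so the shared token travels in the Authorization header.
  await fetch('https://example.com/api/webhooks/trigger/my-path', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: 'Bearer my-webhook-token' },
    body: payload,
  })

  // Custom-header mode: when secretHeaderName is set (e.g. 'X-Custom-Auth'), only that header is
  // checked; a matching Bearer token is still rejected.
  await fetch('https://example.com/api/webhooks/trigger/my-path', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'X-Custom-Auth': 'my-webhook-token' },
    body: payload,
  })
}
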
// --- PHASE 3: Rate limiting for webhook execution ---
try {
// Get user subscription for rate limiting

View File

@@ -17,12 +17,6 @@ export const dynamic = 'force-dynamic'
const logger = createLogger('AutoLayoutAPI')
// Check API key configuration at module level
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
if (!SIM_AGENT_API_KEY) {
logger.warn('SIM_AGENT_API_KEY not configured - autolayout requests will fail')
}
const AutoLayoutRequestSchema = z.object({
strategy: z
.enum(['smart', 'hierarchical', 'layered', 'force-directed'])
@@ -125,15 +119,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Could not load workflow data' }, { status: 500 })
}
// Apply autolayout
logger.info(
`[${requestId}] Applying autolayout to ${Object.keys(currentWorkflowData.blocks).length} blocks`,
{
hasApiKey: !!SIM_AGENT_API_KEY,
simAgentUrl: process.env.SIM_AGENT_API_URL || 'http://localhost:8000',
}
)
// Create workflow state for autolayout
const workflowState = {
blocks: currentWorkflowData.blocks,
@@ -184,7 +169,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
resolveOutputType: resolveOutputType.toString(),
},
},
apiKey: SIM_AGENT_API_KEY,
})
// Log the full response for debugging

View File

@@ -7,7 +7,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
import type { LoopConfig, ParallelConfig, WorkflowState } from '@/stores/workflows/workflow/types'
import type { LoopConfig, ParallelConfig } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowDuplicateAPI')
@@ -90,7 +90,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
folderId: folderId || source.folderId,
name,
description: description || source.description,
state: source.state, // We'll update this later with new block IDs
color: color || source.color,
lastSynced: now,
createdAt: now,
@@ -112,9 +111,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Create a mapping from old block IDs to new block IDs
const blockIdMapping = new Map<string, string>()
// Initialize state for updating with new block IDs
let updatedState: WorkflowState = source.state as WorkflowState
if (sourceBlocks.length > 0) {
// First pass: Create all block ID mappings
sourceBlocks.forEach((block) => {
@@ -265,86 +261,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
)
}
// Update the JSON state to use new block IDs
if (updatedState && typeof updatedState === 'object') {
updatedState = JSON.parse(JSON.stringify(updatedState)) as WorkflowState
// Update blocks object keys
if (updatedState.blocks && typeof updatedState.blocks === 'object') {
const newBlocks = {} as Record<string, (typeof updatedState.blocks)[string]>
for (const [oldId, blockData] of Object.entries(updatedState.blocks)) {
const newId = blockIdMapping.get(oldId) || oldId
newBlocks[newId] = {
...blockData,
id: newId,
// Update data.parentId and extent in the JSON state as well
data: (() => {
const block = blockData as any
if (block.data && typeof block.data === 'object' && block.data.parentId) {
return {
...block.data,
parentId: blockIdMapping.get(block.data.parentId) || block.data.parentId,
extent: 'parent', // Ensure extent is set for child blocks
}
}
return block.data
})(),
}
}
updatedState.blocks = newBlocks
}
// Update edges array
if (updatedState.edges && Array.isArray(updatedState.edges)) {
updatedState.edges = updatedState.edges.map((edge) => ({
...edge,
id: crypto.randomUUID(),
source: blockIdMapping.get(edge.source) || edge.source,
target: blockIdMapping.get(edge.target) || edge.target,
}))
}
// Update loops and parallels if they exist
if (updatedState.loops && typeof updatedState.loops === 'object') {
const newLoops = {} as Record<string, (typeof updatedState.loops)[string]>
for (const [oldId, loopData] of Object.entries(updatedState.loops)) {
const newId = blockIdMapping.get(oldId) || oldId
const loopConfig = loopData as any
newLoops[newId] = {
...loopConfig,
id: newId,
// Update node references in loop config
nodes: loopConfig.nodes
? loopConfig.nodes.map((nodeId: string) => blockIdMapping.get(nodeId) || nodeId)
: [],
}
}
updatedState.loops = newLoops
}
if (updatedState.parallels && typeof updatedState.parallels === 'object') {
const newParallels = {} as Record<string, (typeof updatedState.parallels)[string]>
for (const [oldId, parallelData] of Object.entries(updatedState.parallels)) {
const newId = blockIdMapping.get(oldId) || oldId
const parallelConfig = parallelData as any
newParallels[newId] = {
...parallelConfig,
id: newId,
// Update node references in parallel config
nodes: parallelConfig.nodes
? parallelConfig.nodes.map((nodeId: string) => blockIdMapping.get(nodeId) || nodeId)
: [],
}
}
updatedState.parallels = newParallels
}
}
// Update the workflow state with the new block IDs
// Update the workflow timestamp
await tx
.update(workflow)
.set({
state: updatedState,
updatedAt: now,
})
.where(eq(workflow.id, newWorkflowId))

View File

@@ -1,4 +1,4 @@
import { tasks } from '@trigger.dev/sdk/v3'
import { tasks } from '@trigger.dev/sdk'
import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'

View File

@@ -89,7 +89,14 @@ describe('Workflow By ID API Route', () => {
userId: 'user-123',
name: 'Test Workflow',
workspaceId: null,
state: { blocks: {}, edges: [] },
}
const mockNormalizedData = {
blocks: {},
edges: [],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}
vi.doMock('@/lib/auth', () => ({
@@ -110,6 +117,10 @@ describe('Workflow By ID API Route', () => {
},
}))
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(mockNormalizedData),
}))
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123')
const params = Promise.resolve({ id: 'workflow-123' })
@@ -127,7 +138,14 @@ describe('Workflow By ID API Route', () => {
userId: 'other-user',
name: 'Test Workflow',
workspaceId: 'workspace-456',
state: { blocks: {}, edges: [] },
}
const mockNormalizedData = {
blocks: {},
edges: [],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}
vi.doMock('@/lib/auth', () => ({
@@ -148,6 +166,10 @@ describe('Workflow By ID API Route', () => {
},
}))
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(mockNormalizedData),
}))
vi.doMock('@/lib/permissions/utils', () => ({
getUserEntityPermissions: vi.fn().mockResolvedValue('read'),
hasAdminPermission: vi.fn().mockResolvedValue(false),
@@ -170,7 +192,6 @@ describe('Workflow By ID API Route', () => {
userId: 'other-user',
name: 'Test Workflow',
workspaceId: 'workspace-456',
state: { blocks: {}, edges: [] },
}
vi.doMock('@/lib/auth', () => ({
@@ -213,7 +234,6 @@ describe('Workflow By ID API Route', () => {
userId: 'user-123',
name: 'Test Workflow',
workspaceId: null,
state: { blocks: {}, edges: [] },
}
const mockNormalizedData = {

View File

@@ -8,7 +8,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions, hasAdminPermission } from '@/lib/permissions/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { db } from '@/db'
import { apiKey as apiKeyTable, workflow } from '@/db/schema'
import { apiKey as apiKeyTable, templates, workflow } from '@/db/schema'
const logger = createLogger('WorkflowByIdAPI')
@@ -120,8 +120,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
logger.debug(`[${requestId}] Attempting to load workflow ${workflowId} from normalized tables`)
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
const finalWorkflowData = { ...workflowData }
if (normalizedData) {
logger.debug(`[${requestId}] Found normalized data for workflow ${workflowId}:`, {
blocksCount: Object.keys(normalizedData.blocks).length,
@@ -131,38 +129,31 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
loops: normalizedData.loops,
})
// Use normalized table data - reconstruct complete state object
// First get any existing state properties, then override with normalized data
const existingState =
workflowData.state && typeof workflowData.state === 'object' ? workflowData.state : {}
finalWorkflowData.state = {
// Default values for expected properties
deploymentStatuses: {},
hasActiveWebhook: false,
// Preserve any existing state properties
...existingState,
// Override with normalized data (this takes precedence)
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
lastSaved: Date.now(),
isDeployed: workflowData.isDeployed || false,
deployedAt: workflowData.deployedAt,
// Construct response object with workflow data and state from normalized tables
const finalWorkflowData = {
...workflowData,
state: {
// Default values for expected properties
deploymentStatuses: {},
hasActiveWebhook: false,
// Data from normalized tables
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
lastSaved: Date.now(),
isDeployed: workflowData.isDeployed || false,
deployedAt: workflowData.deployedAt,
},
}
logger.info(`[${requestId}] Loaded workflow ${workflowId} from normalized tables`)
} else {
// Fallback to JSON blob
logger.info(
`[${requestId}] Using JSON blob for workflow ${workflowId} - no normalized data found`
)
const elapsed = Date.now() - startTime
logger.info(`[${requestId}] Successfully fetched workflow ${workflowId} in ${elapsed}ms`)
return NextResponse.json({ data: finalWorkflowData }, { status: 200 })
}
const elapsed = Date.now() - startTime
logger.info(`[${requestId}] Successfully fetched workflow ${workflowId} in ${elapsed}ms`)
return NextResponse.json({ data: finalWorkflowData }, { status: 200 })
return NextResponse.json({ error: 'Workflow has no normalized data' }, { status: 400 })
} catch (error: any) {
const elapsed = Date.now() - startTime
logger.error(`[${requestId}] Error fetching workflow ${workflowId} after ${elapsed}ms`, error)
@@ -227,6 +218,48 @@ export async function DELETE(
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
// Check if workflow has published templates before deletion
const { searchParams } = new URL(request.url)
const checkTemplates = searchParams.get('check-templates') === 'true'
const deleteTemplatesParam = searchParams.get('deleteTemplates')
if (checkTemplates) {
// Return template information for frontend to handle
const publishedTemplates = await db
.select()
.from(templates)
.where(eq(templates.workflowId, workflowId))
return NextResponse.json({
hasPublishedTemplates: publishedTemplates.length > 0,
count: publishedTemplates.length,
publishedTemplates: publishedTemplates.map((t) => ({
id: t.id,
name: t.name,
views: t.views,
stars: t.stars,
})),
})
}
// Handle template deletion based on user choice
if (deleteTemplatesParam !== null) {
const deleteTemplates = deleteTemplatesParam === 'delete'
if (deleteTemplates) {
// Delete all templates associated with this workflow
await db.delete(templates).where(eq(templates.workflowId, workflowId))
logger.info(`[${requestId}] Deleted templates for workflow ${workflowId}`)
} else {
// Orphan the templates (set workflowId to null)
await db
.update(templates)
.set({ workflowId: null })
.where(eq(templates.workflowId, workflowId))
logger.info(`[${requestId}] Orphaned templates for workflow ${workflowId}`)
}
}
await db.delete(workflow).where(eq(workflow.id, workflowId))
const elapsed = Date.now() - startTime
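
A hypothetical client flow for the two-step deletion above. The /api/workflows/:id path matches the tests elsewhere in this change; the 'delete' versus any-other-value behavior of deleteTemplates comes straight from the handler.

// Hypothetical caller: check for published templates first, then delete with an explicit choice.
async function deleteWorkflowWithTemplateChoice(workflowId: string, removeTemplates: boolean) {
  // Step 1: DELETE with check-templates=true only reports template usage, it does not delete.
  const check = await fetch(`/api/workflows/${workflowId}?check-templates=true`, { method: 'DELETE' })
  const { hasPublishedTemplates } = (await check.json()) as { hasPublishedTemplates: boolean }

  // Step 2: 'delete' removes the templates, any other value orphans them (workflowId set to null).
  const mode = removeTemplates ? 'delete' : 'keep'
  await fetch(`/api/workflows/${workflowId}?deleteTemplates=${mode}`, { method: 'DELETE' })
  return hasPublishedTemplates
}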

View File

@@ -220,7 +220,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
.set({
lastSynced: new Date(),
updatedAt: new Date(),
state: saveResult.jsonBlob, // Also update JSON blob for backward compatibility
})
.where(eq(workflow.id, workflowId))

View File

@@ -1,9 +1,10 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { simAgentClient } from '@/lib/sim-agent'
import { SIM_AGENT_API_URL_DEFAULT, simAgentClient } from '@/lib/sim-agent'
import {
loadWorkflowFromNormalizedTables,
saveWorkflowToNormalizedTables,
@@ -17,15 +18,12 @@ import { db } from '@/db'
import { workflowCheckpoints, workflow as workflowTable } from '@/db/schema'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
export const dynamic = 'force-dynamic'
const logger = createLogger('WorkflowYamlAPI')
// Request schema for YAML workflow operations
const YamlWorkflowRequestSchema = z.object({
yamlContent: z.string().min(1, 'YAML content is required'),
description: z.string().optional(),
@@ -74,7 +72,6 @@ async function createWorkflowCheckpoint(
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
workflowState: currentWorkflowData,
@@ -288,7 +285,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent,
@@ -649,14 +645,13 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
.set({
lastSynced: new Date(),
updatedAt: new Date(),
state: saveResult.jsonBlob,
})
.where(eq(workflowTable.id, workflowId))
// Notify socket server for real-time collaboration (for copilot and editor)
if (source === 'copilot' || source === 'editor') {
try {
const socketUrl = process.env.SOCKET_URL || 'http://localhost:3002'
const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
await fetch(`${socketUrl}/api/copilot-workflow-edit`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },

View File

@@ -151,7 +151,6 @@ export async function POST(req: NextRequest) {
folderId: folderId || null,
name,
description,
state: initialState,
color,
lastSynced: now,
createdAt: now,

View File

@@ -8,9 +8,6 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('WorkflowYamlAPI')
// Get API key at module level like working routes
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
@@ -55,7 +52,6 @@ export async function POST(request: NextRequest) {
resolveOutputType: resolveOutputType.toString(),
},
},
apiKey: SIM_AGENT_API_KEY,
})
if (!result.success || !result.data?.yaml) {

View File

@@ -14,9 +14,6 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('WorkflowYamlExportAPI')
// Get API key at module level like working routes
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const url = new URL(request.url)
@@ -88,14 +85,10 @@ export async function GET(request: NextRequest) {
edgesCount: normalizedData.edges.length,
})
// Use normalized table data - reconstruct complete state object
const existingState =
workflowData.state && typeof workflowData.state === 'object' ? workflowData.state : {}
// Use normalized table data - construct state from normalized tables
workflowState = {
deploymentStatuses: {},
hasActiveWebhook: false,
...existingState,
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
@@ -119,33 +112,10 @@ export async function GET(request: NextRequest) {
logger.info(`[${requestId}] Loaded workflow ${workflowId} from normalized tables`)
} else {
// Fallback to JSON blob
logger.info(
`[${requestId}] Using JSON blob for workflow ${workflowId} - no normalized data found`
return NextResponse.json(
{ success: false, error: 'Workflow has no normalized data' },
{ status: 400 }
)
if (!workflowData.state || typeof workflowData.state !== 'object') {
return NextResponse.json(
{ success: false, error: 'Workflow has no valid state data' },
{ status: 400 }
)
}
workflowState = workflowData.state as any
// Extract subblock values from JSON blob state
if (workflowState.blocks) {
Object.entries(workflowState.blocks).forEach(([blockId, block]: [string, any]) => {
subBlockValues[blockId] = {}
if (block.subBlocks) {
Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]: [string, any]) => {
if (subBlock && typeof subBlock === 'object' && 'value' in subBlock) {
subBlockValues[blockId][subBlockId] = subBlock.value
}
})
}
})
}
}
// Gather block registry and utilities for sim-agent
@@ -176,7 +146,6 @@ export async function GET(request: NextRequest) {
resolveOutputType: resolveOutputType.toString(),
},
},
apiKey: SIM_AGENT_API_KEY,
})
if (!result.success || !result.data?.yaml) {

View File

@@ -1,4 +1,4 @@
import { and, eq } from 'drizzle-orm'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
@@ -8,7 +8,7 @@ const logger = createLogger('WorkspaceByIdAPI')
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { knowledgeBase, permissions, workspace } from '@/db/schema'
import { knowledgeBase, permissions, templates, workspace } from '@/db/schema'
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const { id } = await params
@@ -19,6 +19,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
}
const workspaceId = id
const url = new URL(request.url)
const checkTemplates = url.searchParams.get('check-templates') === 'true'
// Check if user has any access to this workspace
const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
@@ -26,6 +28,42 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 })
}
// If checking for published templates before deletion
if (checkTemplates) {
try {
// Get all workflows in this workspace
const workspaceWorkflows = await db
.select({ id: workflow.id })
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))
if (workspaceWorkflows.length === 0) {
return NextResponse.json({ hasPublishedTemplates: false, publishedTemplates: [] })
}
const workflowIds = workspaceWorkflows.map((w) => w.id)
// Check for published templates that reference these workflows
const publishedTemplates = await db
.select({
id: templates.id,
name: templates.name,
workflowId: templates.workflowId,
})
.from(templates)
.where(inArray(templates.workflowId, workflowIds))
return NextResponse.json({
hasPublishedTemplates: publishedTemplates.length > 0,
publishedTemplates,
count: publishedTemplates.length,
})
} catch (error) {
logger.error(`Error checking published templates for workspace ${workspaceId}:`, error)
return NextResponse.json({ error: 'Failed to check published templates' }, { status: 500 })
}
}
// Get workspace details
const workspaceDetails = await db
.select()
@@ -108,6 +146,8 @@ export async function DELETE(
}
const workspaceId = id
const body = await request.json().catch(() => ({}))
const { deleteTemplates = false } = body // User's choice: false = keep templates (recommended), true = delete templates
// Check if user has admin permissions to delete workspace
const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
@@ -116,10 +156,39 @@ export async function DELETE(
}
try {
logger.info(`Deleting workspace ${workspaceId} for user ${session.user.id}`)
logger.info(
`Deleting workspace ${workspaceId} for user ${session.user.id}, deleteTemplates: ${deleteTemplates}`
)
// Delete workspace and all related data in a transaction
await db.transaction(async (tx) => {
// Get all workflows in this workspace before deletion
const workspaceWorkflows = await tx
.select({ id: workflow.id })
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))
if (workspaceWorkflows.length > 0) {
const workflowIds = workspaceWorkflows.map((w) => w.id)
// Handle templates based on user choice
if (deleteTemplates) {
// Delete published templates that reference these workflows
await tx.delete(templates).where(inArray(templates.workflowId, workflowIds))
logger.info(`Deleted templates for workflows in workspace ${workspaceId}`)
} else {
// Set workflowId to null for templates to create "orphaned" templates
// This allows templates to remain in marketplace but without source workflows
await tx
.update(templates)
.set({ workflowId: null })
.where(inArray(templates.workflowId, workflowIds))
logger.info(
`Updated templates to orphaned status for workflows in workspace ${workspaceId}`
)
}
}
// Delete all workflows in the workspace - database cascade will handle all workflow-related data
// The database cascade will handle deleting related workflow_blocks, workflow_edges, workflow_subflows,
// workflow_logs, workflow_execution_snapshots, workflow_execution_logs, workflow_execution_trace_spans,
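
A hypothetical client flow for workspace deletion with the template choice above; the /api/workspaces/:id path is assumed, while the query parameter and body field come from the handlers.

// Hypothetical caller for the workspace deletion flow.
async function deleteWorkspace(workspaceId: string, deleteTemplates: boolean) {
  // Optional pre-check: does any workflow in the workspace back a published template?
  const check = await fetch(`/api/workspaces/${workspaceId}?check-templates=true`)
  const info = (await check.json()) as {
    hasPublishedTemplates: boolean
    publishedTemplates: Array<{ id: string; name: string }>
    count?: number
  }

  // false (the default) keeps templates by orphaning them; true deletes them with the workspace.
  await fetch(`/api/workspaces/${workspaceId}`, {
    method: 'DELETE',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ deleteTemplates }),
  })
  return info
}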

View File

@@ -91,6 +91,7 @@ describe('Workspace Invitations API Route', () => {
env: {
RESEND_API_KEY: 'test-resend-key',
NEXT_PUBLIC_APP_URL: 'https://test.sim.ai',
FROM_EMAIL_ADDRESS: 'Sim <noreply@test.sim.ai>',
EMAIL_DOMAIN: 'test.sim.ai',
},
}))

View File

@@ -2,12 +2,12 @@ import { randomUUID } from 'crypto'
import { render } from '@react-email/render'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { Resend } from 'resend'
import { WorkspaceInvitationEmail } from '@/components/emails/workspace-invitation'
import { getSession } from '@/lib/auth'
import { sendEmail } from '@/lib/email/mailer'
import { getFromEmailAddress } from '@/lib/email/utils'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getEmailDomain } from '@/lib/urls/utils'
import { db } from '@/db'
import {
permissions,
@@ -20,7 +20,6 @@ import {
export const dynamic = 'force-dynamic'
const logger = createLogger('WorkspaceInvitationsAPI')
const resend = env.RESEND_API_KEY ? new Resend(env.RESEND_API_KEY) : null
type PermissionType = (typeof permissionTypeEnum.enumValues)[number]
@@ -241,30 +240,23 @@ async function sendInvitationEmail({
})
)
if (!resend) {
logger.error('RESEND_API_KEY not configured')
return NextResponse.json(
{
error:
'Email service not configured. Please set RESEND_API_KEY in environment variables.',
},
{ status: 500 }
)
}
const emailDomain = env.EMAIL_DOMAIN || getEmailDomain()
const fromAddress = `noreply@${emailDomain}`
const fromAddress = getFromEmailAddress()
logger.info(`Attempting to send email from ${fromAddress} to ${to}`)
const result = await resend.emails.send({
from: fromAddress,
const result = await sendEmail({
to,
subject: `You've been invited to join "${workspaceName}" on Sim`,
html: emailHtml,
from: fromAddress,
emailType: 'transactional',
})
logger.info(`Invitation email sent successfully to ${to}`, { result })
if (result.success) {
logger.info(`Invitation email sent successfully to ${to}`, { result })
} else {
logger.error(`Failed to send invitation email to ${to}`, { error: result.message })
}
} catch (error) {
logger.error('Error sending invitation email:', error)
// Continue even if email fails - the invitation is still created

View File

@@ -113,64 +113,6 @@ async function createWorkspace(userId: string, name: string) {
// Create initial workflow for the workspace with start block
const starterId = crypto.randomUUID()
const initialState = {
blocks: {
[starterId]: {
id: starterId,
type: 'starter',
name: 'Start',
position: { x: 100, y: 100 },
subBlocks: {
startWorkflow: {
id: 'startWorkflow',
type: 'dropdown',
value: 'manual',
},
webhookPath: {
id: 'webhookPath',
type: 'short-input',
value: '',
},
webhookSecret: {
id: 'webhookSecret',
type: 'short-input',
value: '',
},
scheduleType: {
id: 'scheduleType',
type: 'dropdown',
value: 'daily',
},
minutesInterval: {
id: 'minutesInterval',
type: 'short-input',
value: '',
},
minutesStartingAt: {
id: 'minutesStartingAt',
type: 'short-input',
value: '',
},
},
outputs: {
response: { type: { input: 'any' } },
},
enabled: true,
horizontalHandles: true,
isWide: false,
advancedMode: false,
height: 95,
},
},
edges: [],
subflows: {},
variables: {},
metadata: {
version: '1.0.0',
createdAt: now.toISOString(),
updatedAt: now.toISOString(),
},
}
// Create the workflow
await tx.insert(workflow).values({
@@ -180,7 +122,6 @@ async function createWorkspace(userId: string, name: string) {
folderId: null,
name: 'default-agent',
description: 'Your first workflow - start building here!',
state: initialState,
color: '#3972F6',
lastSynced: now,
createdAt: now,

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -16,8 +18,7 @@ import {
const logger = createLogger('YamlAutoLayoutAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const AutoLayoutRequestSchema = z.object({
workflowState: z.object({
@@ -58,7 +59,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Applying auto layout`, {
blockCount: Object.keys(workflowState.blocks).length,
edgeCount: workflowState.edges.length,
hasApiKey: !!SIM_AGENT_API_KEY,
strategy: options?.strategy || 'smart',
simAgentUrl: SIM_AGENT_API_URL,
})
@@ -102,7 +102,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
workflowState: {
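These YAML routes all switch from reading process.env directly to the validated env helper plus a shared SIM_AGENT_API_URL_DEFAULT constant, and drop the per-route x-api-key header. A minimal sketch of what the shared @/lib/sim-agent module might export; its real contents are not shown in this diff, and the default value below is only assumed to match the old localhost fallback.

// Hypothetical contents of '@/lib/sim-agent' (illustrative only).
export const SIM_AGENT_API_URL_DEFAULT = 'http://localhost:8000'

// Example helper showing the shared request shape the routes above use.
export async function callSimAgent<T>(baseUrl: string, path: string, body: unknown): Promise<T> {
  const response = await fetch(`${baseUrl}${path}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  })
  if (!response.ok) {
    throw new Error(`Sim agent request failed: ${response.status}`)
  }
  return (await response.json()) as T
}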

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -16,8 +18,7 @@ import {
const logger = createLogger('YamlDiffCreateAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const CreateDiffRequestSchema = z.object({
yamlContent: z.string().min(1),
@@ -89,7 +90,6 @@ export async function POST(request: NextRequest) {
hasDiffAnalysis: !!diffAnalysis,
hasOptions: !!options,
options: options,
hasApiKey: !!SIM_AGENT_API_KEY,
hasCurrentWorkflowState: !!currentWorkflowState,
currentBlockCount: currentWorkflowState
? Object.keys(currentWorkflowState.blocks || {}).length
@@ -117,7 +117,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent,

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -16,8 +18,7 @@ import {
const logger = createLogger('YamlDiffMergeAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const MergeDiffRequestSchema = z.object({
existingDiff: z.object({
@@ -64,7 +65,6 @@ export async function POST(request: NextRequest) {
hasDiffAnalysis: !!diffAnalysis,
hasOptions: !!options,
options: options,
hasApiKey: !!SIM_AGENT_API_KEY,
})
// Gather block registry
@@ -88,7 +88,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
existingDiff,

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -9,8 +11,7 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('YamlGenerateAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const GenerateRequestSchema = z.object({
workflowState: z.any(), // Let the yaml service handle validation
@@ -27,7 +28,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Generating YAML from workflow`, {
blocksCount: workflowState.blocks ? Object.keys(workflowState.blocks).length : 0,
edgesCount: workflowState.edges ? workflowState.edges.length : 0,
hasApiKey: !!SIM_AGENT_API_KEY,
})
// Gather block registry and utilities
@@ -51,7 +51,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
workflowState,

View File

@@ -1,26 +1,24 @@
import { NextResponse } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
const logger = createLogger('YamlHealthAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
export async function GET() {
const requestId = crypto.randomUUID().slice(0, 8)
try {
logger.info(`[${requestId}] Checking YAML service health`, {
hasApiKey: !!SIM_AGENT_API_KEY,
})
logger.info(`[${requestId}] Checking YAML service health`)
// Check sim-agent health
const response = await fetch(`${SIM_AGENT_API_URL}/health`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
})

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -9,11 +11,10 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('YamlParseAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const ParseRequestSchema = z.object({
yamlContent: z.string().min(1),
yamlContent: z.string(),
})
export async function POST(request: NextRequest) {
@@ -25,7 +26,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Parsing YAML`, {
contentLength: yamlContent.length,
hasApiKey: !!SIM_AGENT_API_KEY,
})
// Gather block registry and utilities
@@ -49,7 +49,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent,

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -9,8 +11,7 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('YamlToWorkflowAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const ConvertRequestSchema = z.object({
yamlContent: z.string().min(1),
@@ -33,7 +34,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Converting YAML to workflow`, {
contentLength: yamlContent.length,
hasOptions: !!options,
hasApiKey: !!SIM_AGENT_API_KEY,
})
// Gather block registry and utilities
@@ -57,7 +57,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent,

View File

@@ -14,7 +14,8 @@
}
.workflow-container .react-flow__node-loopNode,
.workflow-container .react-flow__node-parallelNode {
.workflow-container .react-flow__node-parallelNode,
.workflow-container .react-flow__node-subflowNode {
z-index: -1 !important;
}
@@ -205,23 +206,22 @@
}
::-webkit-scrollbar-track {
background-color: hsl(var(--scrollbar-track));
border-radius: var(--radius);
background: transparent;
}
::-webkit-scrollbar-thumb {
background-color: hsl(var(--scrollbar-thumb));
background-color: hsl(var(--muted-foreground) / 0.3);
border-radius: var(--radius);
}
::-webkit-scrollbar-thumb:hover {
background-color: hsl(var(--scrollbar-thumb-hover));
background-color: hsl(var(--muted-foreground) / 0.3);
}
/* For Firefox */
* {
scrollbar-width: thin;
scrollbar-color: hsl(var(--scrollbar-thumb)) hsl(var(--scrollbar-track));
scrollbar-color: hsl(var(--muted-foreground) / 0.3) transparent;
}
}

View File

@@ -3,6 +3,7 @@ import { SpeedInsights } from '@vercel/speed-insights/next'
import type { Metadata, Viewport } from 'next'
import { PublicEnvScript } from 'next-runtime-env'
import { BrandedLayout } from '@/components/branded-layout'
import { generateThemeCSS } from '@/lib/branding/inject-theme'
import { generateBrandedMetadata, generateStructuredData } from '@/lib/branding/metadata'
import { env } from '@/lib/env'
import { isHosted } from '@/lib/environment'
@@ -10,6 +11,8 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getAssetUrl } from '@/lib/utils'
import '@/app/globals.css'
import { SessionProvider } from '@/lib/session-context'
import { ThemeProvider } from '@/app/theme-provider'
import { ZoomPrevention } from '@/app/zoom-prevention'
const logger = createLogger('RootLayout')
@@ -45,11 +48,14 @@ if (typeof window !== 'undefined') {
}
export const viewport: Viewport = {
themeColor: '#ffffff',
width: 'device-width',
initialScale: 1,
maximumScale: 1,
userScalable: false,
themeColor: [
{ media: '(prefers-color-scheme: light)', color: '#ffffff' },
{ media: '(prefers-color-scheme: dark)', color: '#0c0c0c' },
],
}
// Generate dynamic metadata based on brand configuration
@@ -57,6 +63,7 @@ export const metadata: Metadata = generateBrandedMetadata()
export default function RootLayout({ children }: { children: React.ReactNode }) {
const structuredData = generateStructuredData()
const themeCSS = generateThemeCSS()
return (
<html lang='en' suppressHydrationWarning>
@@ -69,9 +76,18 @@ export default function RootLayout({ children }: { children: React.ReactNode })
}}
/>
{/* Theme CSS Override */}
{themeCSS && (
<style
id='theme-override'
dangerouslySetInnerHTML={{
__html: themeCSS,
}}
/>
)}
{/* Meta tags for better SEO */}
<meta name='theme-color' content='#ffffff' />
<meta name='color-scheme' content='light' />
<meta name='color-scheme' content='light dark' />
<meta name='format-detection' content='telephone=no' />
<meta httpEquiv='x-ua-compatible' content='ie=edge' />
@@ -107,16 +123,20 @@ export default function RootLayout({ children }: { children: React.ReactNode })
)}
</head>
<body suppressHydrationWarning>
<BrandedLayout>
<ZoomPrevention />
{children}
{isHosted && (
<>
<SpeedInsights />
<Analytics />
</>
)}
</BrandedLayout>
<ThemeProvider>
<SessionProvider>
<BrandedLayout>
<ZoomPrevention />
{children}
{isHosted && (
<>
<SpeedInsights />
<Analytics />
</>
)}
</BrandedLayout>
</SessionProvider>
</ThemeProvider>
</body>
</html>
)
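The layout now injects a <style id='theme-override'> tag whenever generateThemeCSS() returns something. A rough sketch of how such a helper could map brand theme configuration to CSS custom properties; the real @/lib/branding/inject-theme implementation is not shown here, so the accessor name is an assumption, while the variable names and theme fields are taken from their usage elsewhere in this diff (--brand-primary-hex, brand.theme?.primaryColor, brand.theme?.backgroundColor).

// Illustrative sketch only; actual property names and brand config shape may differ.
import { getBrandConfig } from '@/lib/branding/branding' // assumed accessor

export function generateThemeCSS(): string {
  const theme = getBrandConfig().theme
  if (!theme) return ''

  const vars: string[] = []
  if (theme.primaryColor) vars.push(`--brand-primary-hex: ${theme.primaryColor};`)
  if (theme.primaryHoverColor) vars.push(`--brand-primary-hover-hex: ${theme.primaryHoverColor};`)
  if (theme.backgroundColor) vars.push(`--brand-background-hex: ${theme.backgroundColor};`)

  return vars.length > 0 ? `:root { ${vars.join(' ')} }` : ''
}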

View File

@@ -11,8 +11,8 @@ export default function manifest(): MetadataRoute.Manifest {
'Build and deploy AI agents using our Figma-like canvas. Build, write evals, and deploy AI agent workflows that automate workflows and streamline your business processes.',
start_url: '/',
display: 'standalone',
background_color: '#701FFC', // Default Sim brand primary color
theme_color: '#701FFC', // Default Sim brand primary color
background_color: brand.theme?.backgroundColor || '#701FFC',
theme_color: brand.theme?.primaryColor || '#701FFC',
icons: [
{
src: '/favicon/android-chrome-192x192.png',

View File

@@ -0,0 +1,19 @@
'use client'
import type { ThemeProviderProps } from 'next-themes'
import { ThemeProvider as NextThemesProvider } from 'next-themes'
export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
return (
<NextThemesProvider
attribute='class'
defaultTheme='system'
enableSystem
disableTransitionOnChange
storageKey='sim-theme'
{...props}
>
{children}
</NextThemesProvider>
)
}
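With next-themes mounted at the root, client components can read and set the theme through its useTheme hook rather than toggling classes manually, for example:

'use client'

import { useTheme } from 'next-themes'

// Small usage example: next-themes persists the selection under the
// 'sim-theme' storage key and applies the `class` attribute for us.
export function ThemeToggle() {
  const { resolvedTheme, setTheme } = useTheme()
  return (
    <button onClick={() => setTheme(resolvedTheme === 'dark' ? 'light' : 'dark')}>
      Toggle theme
    </button>
  )
}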

View File

@@ -86,7 +86,7 @@ const getStatusDisplay = (doc: DocumentData) => {
</>
),
className:
'inline-flex items-center rounded-md bg-[var(--brand-primary-hex)]/10 px-2 py-1 text-xs font-medium text-[var(--brand-primary-hex)] dark:bg-[var(--brand-primary-hex)]/20 dark:text-[var(--brand-primary-hex)]',
'inline-flex items-center rounded-md bg-purple-100 px-2 py-1 text-xs font-medium text-[var(--brand-primary-hex)] dark:bg-purple-900/30 dark:text-[var(--brand-primary-hex)]',
}
case 'failed':
return {

View File

@@ -7,6 +7,7 @@ import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/u
import { Label } from '@/components/ui/label'
import { Progress } from '@/components/ui/progress'
import { createLogger } from '@/lib/logs/console/logger'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
const logger = createLogger('UploadModal')
@@ -152,6 +153,19 @@ export function UploadModal({
}
}
const getFileIcon = (mimeType: string, filename: string) => {
const IconComponent = getDocumentIcon(mimeType, filename)
return <IconComponent className='h-10 w-8' />
}
const formatFileSize = (bytes: number): string => {
if (bytes === 0) return '0 B'
const k = 1024
const sizes = ['B', 'KB', 'MB', 'GB']
const i = Math.floor(Math.log(bytes) / Math.log(k))
return `${Number.parseFloat((bytes / k ** i).toFixed(1))} ${sizes[i]}`
}
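// For illustration, formatFileSize produces values like:
//   formatFileSize(0)         -> '0 B'
//   formatFileSize(1536)      -> '1.5 KB'
//   formatFileSize(5_242_880) -> '5 MB'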
// Calculate progress percentage
const progressPercentage =
uploadProgress.totalFiles > 0
@@ -221,11 +235,11 @@ export function UploadModal({
multiple
/>
<p className='text-sm'>
{isDragging ? 'Drop more files here!' : 'Add more files'}
{isDragging ? 'Drop more files here!' : 'Drop more files or click to browse'}
</p>
</div>
<div className='max-h-60 space-y-1.5 overflow-auto'>
<div className='max-h-60 space-y-2 overflow-auto'>
{files.map((file, index) => {
const fileStatus = uploadProgress.fileStatuses?.[index]
const isCurrentlyUploading = fileStatus?.status === 'uploading'
@@ -233,26 +247,31 @@ export function UploadModal({
const isFailed = fileStatus?.status === 'failed'
return (
<div key={index} className='space-y-1.5 rounded-md border p-2'>
<div className='flex items-center justify-between'>
<div key={index} className='rounded-md border p-3'>
<div className='flex items-center gap-3'>
{getFileIcon(file.type, file.name)}
<div className='min-w-0 flex-1'>
<div className='flex items-center gap-2'>
{isCurrentlyUploading && (
<Loader2 className='h-4 w-4 animate-spin text-blue-500' />
<Loader2 className='h-4 w-4 animate-spin text-[var(--brand-primary-hex)]' />
)}
{isCompleted && <Check className='h-4 w-4 text-green-500' />}
{isFailed && <X className='h-4 w-4 text-red-500' />}
{!isCurrentlyUploading && !isCompleted && !isFailed && (
<div className='h-4 w-4' />
)}
<p className='truncate text-sm'>
<span className='font-medium'>{file.name}</span>
<span className='text-muted-foreground'>
{' '}
{(file.size / 1024 / 1024).toFixed(2)} MB
</span>
</p>
<p className='truncate font-medium text-sm'>{file.name}</p>
</div>
<div className='flex items-center gap-2'>
<p className='text-muted-foreground text-xs'>
{formatFileSize(file.size)}
</p>
{isCurrentlyUploading && (
<div className='min-w-0 max-w-32 flex-1'>
<Progress value={fileStatus?.progress || 0} className='h-1' />
</div>
)}
</div>
{isFailed && fileStatus?.error && (
<p className='mt-1 text-red-500 text-xs'>{fileStatus.error}</p>
)}
</div>
<Button
type='button'
@@ -260,17 +279,11 @@ export function UploadModal({
size='sm'
onClick={() => removeFile(index)}
disabled={isUploading}
className='h-8 w-8 p-0'
className='h-8 w-8 p-0 text-muted-foreground hover:text-destructive'
>
<X className='h-4 w-4' />
</Button>
</div>
{isCurrentlyUploading && (
<Progress value={fileStatus?.progress || 0} className='h-1' />
)}
{isFailed && fileStatus?.error && (
<p className='text-red-500 text-xs'>{fileStatus.error}</p>
)}
</div>
)
})}
@@ -287,7 +300,11 @@ export function UploadModal({
<Button variant='outline' onClick={handleClose} disabled={isUploading}>
Cancel
</Button>
<Button onClick={handleUpload} disabled={files.length === 0 || isUploading}>
<Button
onClick={handleUpload}
disabled={files.length === 0 || isUploading}
className='bg-[var(--brand-primary-hex)] font-[480] text-primary-foreground shadow-[0_0_0_0_var(--brand-primary-hex)] transition-all duration-200 hover:bg-[var(--brand-primary-hover-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
>
{isUploading
? uploadProgress.stage === 'uploading'
? `Uploading ${uploadProgress.filesCompleted + 1}/${uploadProgress.totalFiles}...`

View File

@@ -2,7 +2,7 @@
import { useEffect, useRef, useState } from 'react'
import { zodResolver } from '@hookform/resolvers/zod'
import { AlertCircle, CheckCircle2, X } from 'lucide-react'
import { AlertCircle, X } from 'lucide-react'
import { useParams } from 'next/navigation'
import { useForm } from 'react-hook-form'
import { z } from 'zod'
@@ -109,6 +109,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
register,
handleSubmit,
reset,
watch,
formState: { errors },
} = useForm<FormValues>({
resolver: zodResolver(FormSchema),
@@ -119,9 +120,32 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
maxChunkSize: 1024,
overlapSize: 200,
},
mode: 'onChange',
mode: 'onSubmit',
})
// Watch the name field to enable/disable the submit button
const nameValue = watch('name')
// Reset state when modal opens/closes
useEffect(() => {
if (open) {
// Reset states when modal opens
setSubmitStatus(null)
setFileError(null)
setFiles([])
setIsDragging(false)
setDragCounter(0)
// Reset form to default values
reset({
name: '',
description: '',
minChunkSize: 1,
maxChunkSize: 1024,
overlapSize: 200,
})
}
}, [open, reset])
const processFiles = async (fileList: FileList | File[]) => {
setFileError(null)
@@ -292,18 +316,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
logger.info(`Started processing ${uploadedFiles.length} documents in the background`)
}
setSubmitStatus({
type: 'success',
message: 'Your knowledge base has been created successfully!',
})
reset({
name: '',
description: '',
minChunkSize: 1,
maxChunkSize: 1024,
overlapSize: 200,
})
// Clean up file previews
files.forEach((file) => URL.revokeObjectURL(file.preview))
setFiles([])
@@ -313,10 +325,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
onKnowledgeBaseCreated(newKnowledgeBase)
}
// Close modal after a short delay to show success message
setTimeout(() => {
onOpenChange(false)
}, 1500)
// Close modal immediately - no need for success message
onOpenChange(false)
} catch (error) {
logger.error('Error creating knowledge base:', error)
setSubmitStatus({
@@ -357,31 +367,13 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
className='scrollbar-thin scrollbar-thumb-muted-foreground/20 hover:scrollbar-thumb-muted-foreground/25 scrollbar-track-transparent min-h-0 flex-1 overflow-y-auto px-6'
>
<div className='flex min-h-full flex-col py-4'>
{submitStatus && submitStatus.type === 'success' ? (
<Alert className='mb-6 border-border border-green-200 bg-green-50 dark:border-green-900 dark:bg-green-950/30'>
<div className='flex items-start gap-4 py-1'>
<div className='mt-[-1.5px] flex-shrink-0'>
<CheckCircle2 className='h-4 w-4 text-green-600 dark:text-green-400' />
</div>
<div className='mr-4 flex-1 space-y-2'>
<AlertTitle className='-mt-0.5 flex items-center justify-between'>
<span className='font-medium text-green-600 dark:text-green-400'>
Success
</span>
</AlertTitle>
<AlertDescription className='text-green-600 dark:text-green-400'>
{submitStatus.message}
</AlertDescription>
</div>
</div>
</Alert>
) : submitStatus && submitStatus.type === 'error' ? (
{submitStatus && submitStatus.type === 'error' && (
<Alert variant='destructive' className='mb-6'>
<AlertCircle className='h-4 w-4' />
<AlertTitle>Error</AlertTitle>
<AlertDescription>{submitStatus.message}</AlertDescription>
</Alert>
) : null}
)}
{/* Form Fields Section - Fixed at top */}
<div className='flex-shrink-0 space-y-4'>
@@ -611,8 +603,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
</Button>
<Button
type='submit'
disabled={isSubmitting}
className='bg-[var(--brand-primary-hex)] font-[480] text-primary-foreground shadow-[0_0_0_0_var(--brand-primary-hex)] transition-all duration-200 hover:bg-[var(--brand-primary-hover-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
disabled={isSubmitting || !nameValue?.trim()}
className='bg-[var(--brand-primary-hex)] font-[480] text-primary-foreground shadow-[0_0_0_0_var(--brand-primary-hex)] transition-all duration-200 hover:bg-[var(--brand-primary-hover-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)] disabled:opacity-50 disabled:hover:shadow-none'
>
{isSubmitting ? 'Creating...' : 'Create Knowledge Base'}
</Button>
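The reset-on-open effect above is a pattern that could be pulled into a small reusable hook if it keeps appearing in other dialogs. A minimal sketch under that assumption (hypothetical helper, not part of this change):

import { useEffect } from 'react'
import type { FieldValues, UseFormReset } from 'react-hook-form'

// Hypothetical helper: reset a react-hook-form instance to its defaults
// whenever a dialog transitions to open. Pass a stable `defaults` object
// (defined outside the component) so the effect does not re-run every render.
export function useResetOnOpen<T extends FieldValues>(
  open: boolean,
  reset: UseFormReset<T>,
  defaults: T
) {
  useEffect(() => {
    if (open) {
      reset(defaults)
    }
  }, [open, reset, defaults])
}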

View File

@@ -1,7 +1,7 @@
'use client'
import { useEffect, useMemo, useRef, useState } from 'react'
import { ChevronDown, ChevronUp, Eye, X } from 'lucide-react'
import { ChevronDown, ChevronUp, Eye, Loader2, X } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { CopyButton } from '@/components/ui/copy-button'
import { ScrollArea } from '@/components/ui/scroll-area'
@@ -209,29 +209,30 @@ export function Sidebar({
}
}, [log?.id])
const isLoadingDetails = useMemo(() => {
if (!log) return false
// Only show while we expect details to arrive (has executionId)
if (!log.executionId) return false
const hasEnhanced = !!log.executionData?.enhanced
const hasAnyDetails = hasEnhanced || !!log.cost || Array.isArray(log.executionData?.traceSpans)
return !hasAnyDetails
}, [log])
const formattedContent = useMemo(() => {
if (!log) return null
let blockInput: Record<string, any> | undefined
if (log.metadata?.blockInput) {
blockInput = log.metadata.blockInput
} else if (log.metadata?.traceSpans) {
const blockIdMatch = log.message.match(/Block .+?(\d+)/i)
const blockId = blockIdMatch ? blockIdMatch[1] : null
if (blockId) {
const matchingSpan = log.metadata.traceSpans.find(
(span) => span.blockId === blockId || span.name.includes(`Block ${blockId}`)
)
if (matchingSpan?.input) {
blockInput = matchingSpan.input
}
if (log.executionData?.blockInput) {
blockInput = log.executionData.blockInput
} else if (log.executionData?.traceSpans) {
const firstSpanWithInput = log.executionData.traceSpans.find((s) => s.input)
if (firstSpanWithInput?.input) {
blockInput = firstSpanWithInput.input as any
}
}
return formatJsonContent(log.message, blockInput)
return null
}, [log])
useEffect(() => {
@@ -243,22 +244,16 @@ export function Sidebar({
// Determine if this is a workflow execution log
const isWorkflowExecutionLog = useMemo(() => {
if (!log) return false
// Check if message contains workflow execution phrases (success or failure)
return (
log.message.toLowerCase().includes('workflow executed') ||
log.message.toLowerCase().includes('execution completed') ||
log.message.toLowerCase().includes('workflow execution failed') ||
log.message.toLowerCase().includes('execution failed') ||
(log.trigger === 'manual' && log.duration) ||
// Also check if we have enhanced logging metadata with trace spans
(log.metadata?.enhanced && log.metadata?.traceSpans)
(log.trigger === 'manual' && !!log.duration) ||
(log.executionData?.enhanced && log.executionData?.traceSpans)
)
}, [log])
// Helper to determine if we have cost information to display
// All workflow executions now have cost info (base charge + any model costs)
const hasCostInfo = useMemo(() => {
return isWorkflowExecutionLog && log?.metadata?.cost
return isWorkflowExecutionLog && log?.cost
}, [log, isWorkflowExecutionLog])
const isWorkflowWithCost = useMemo(() => {
@@ -490,6 +485,14 @@ export function Sidebar({
</div>
)}
{/* Loading indicator while details load (positioned after summary fields) */}
{isLoadingDetails && (
<div className='flex w-full items-center justify-start gap-2 py-2 text-muted-foreground'>
<Loader2 className='h-4 w-4 animate-spin' />
<span className='text-sm'>Loading details</span>
</div>
)}
{/* Files */}
{log.files && log.files.length > 0 && (
<div>
@@ -541,19 +544,15 @@ export function Sidebar({
</div>
)}
{/* Message Content */}
<div className='w-full pb-2'>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Message</h3>
<div className='w-full'>{formattedContent}</div>
</div>
{/* end loading indicator */}
{/* Trace Spans (if available and this is a workflow execution log) */}
{isWorkflowExecutionLog && log.metadata?.traceSpans && (
{isWorkflowExecutionLog && log.executionData?.traceSpans && (
<div className='w-full'>
<div className='w-full overflow-x-hidden'>
<TraceSpansDisplay
traceSpans={log.metadata.traceSpans}
totalDuration={log.metadata.totalDuration}
traceSpans={log.executionData.traceSpans}
totalDuration={log.executionData.totalDuration}
onExpansionChange={handleTraceSpanToggle}
/>
</div>
@@ -561,11 +560,11 @@ export function Sidebar({
)}
{/* Tool Calls (if available) */}
{log.metadata?.toolCalls && log.metadata.toolCalls.length > 0 && (
{log.executionData?.toolCalls && log.executionData.toolCalls.length > 0 && (
<div className='w-full'>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Tool Calls</h3>
<div className='w-full overflow-x-hidden rounded-md bg-secondary/30 p-3'>
<ToolCallsDisplay metadata={log.metadata} />
<ToolCallsDisplay metadata={log.executionData} />
</div>
</div>
)}
@@ -584,86 +583,80 @@ export function Sidebar({
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-sm'>Model Input:</span>
<span className='text-sm'>
{formatCost(log.metadata?.cost?.input || 0)}
</span>
<span className='text-sm'>{formatCost(log.cost?.input || 0)}</span>
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-sm'>Model Output:</span>
<span className='text-sm'>
{formatCost(log.metadata?.cost?.output || 0)}
</span>
<span className='text-sm'>{formatCost(log.cost?.output || 0)}</span>
</div>
<div className='mt-1 flex items-center justify-between border-t pt-2'>
<span className='text-muted-foreground text-sm'>Total:</span>
<span className='text-foreground text-sm'>
{formatCost(log.metadata?.cost?.total || 0)}
{formatCost(log.cost?.total || 0)}
</span>
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-xs'>Tokens:</span>
<span className='text-muted-foreground text-xs'>
{log.metadata?.cost?.tokens?.prompt || 0} in /{' '}
{log.metadata?.cost?.tokens?.completion || 0} out
{log.cost?.tokens?.prompt || 0} in / {log.cost?.tokens?.completion || 0}{' '}
out
</span>
</div>
</div>
{/* Models Breakdown */}
{log.metadata?.cost?.models &&
Object.keys(log.metadata?.cost?.models).length > 0 && (
<div className='border-t'>
<button
onClick={() => setIsModelsExpanded(!isModelsExpanded)}
className='flex w-full items-center justify-between p-3 text-left transition-colors hover:bg-muted/50'
>
<span className='font-medium text-muted-foreground text-xs'>
Model Breakdown (
{Object.keys(log.metadata?.cost?.models || {}).length})
</span>
{isModelsExpanded ? (
<ChevronUp className='h-3 w-3 text-muted-foreground' />
) : (
<ChevronDown className='h-3 w-3 text-muted-foreground' />
)}
</button>
{log.cost?.models && Object.keys(log.cost?.models).length > 0 && (
<div className='border-t'>
<button
onClick={() => setIsModelsExpanded(!isModelsExpanded)}
className='flex w-full items-center justify-between p-3 text-left transition-colors hover:bg-muted/50'
>
<span className='font-medium text-muted-foreground text-xs'>
Model Breakdown ({Object.keys(log.cost?.models || {}).length})
</span>
{isModelsExpanded ? (
<ChevronUp className='h-3 w-3 text-muted-foreground' />
) : (
<ChevronDown className='h-3 w-3 text-muted-foreground' />
)}
</button>
{isModelsExpanded && (
<div className='space-y-3 border-t bg-muted/30 p-3'>
{Object.entries(log.metadata?.cost?.models || {}).map(
([model, cost]: [string, any]) => (
<div key={model} className='space-y-1'>
<div className='font-medium font-mono text-xs'>{model}</div>
<div className='space-y-1 text-xs'>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Input:</span>
<span>{formatCost(cost.input || 0)}</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Output:</span>
<span>{formatCost(cost.output || 0)}</span>
</div>
<div className='flex justify-between border-t pt-1'>
<span className='text-muted-foreground'>Total:</span>
<span className='font-medium'>
{formatCost(cost.total || 0)}
</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Tokens:</span>
<span>
{cost.tokens?.prompt || 0} in /{' '}
{cost.tokens?.completion || 0} out
</span>
</div>
{isModelsExpanded && (
<div className='space-y-3 border-t bg-muted/30 p-3'>
{Object.entries(log.cost?.models || {}).map(
([model, cost]: [string, any]) => (
<div key={model} className='space-y-1'>
<div className='font-medium font-mono text-xs'>{model}</div>
<div className='space-y-1 text-xs'>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Input:</span>
<span>{formatCost(cost.input || 0)}</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Output:</span>
<span>{formatCost(cost.output || 0)}</span>
</div>
<div className='flex justify-between border-t pt-1'>
<span className='text-muted-foreground'>Total:</span>
<span className='font-medium'>
{formatCost(cost.total || 0)}
</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Tokens:</span>
<span>
{cost.tokens?.prompt || 0} in /{' '}
{cost.tokens?.completion || 0} out
</span>
</div>
</div>
)
)}
</div>
)}
</div>
)}
</div>
)
)}
</div>
)}
</div>
)}
{isWorkflowWithCost && (
<div className='border-t bg-muted p-3 text-muted-foreground text-xs'>
@@ -688,7 +681,7 @@ export function Sidebar({
executionId={log.executionId}
workflowName={log.workflow?.name}
trigger={log.trigger || undefined}
traceSpans={log.metadata?.traceSpans}
traceSpans={log.executionData?.traceSpans}
isOpen={isFrozenCanvasOpen}
onClose={() => setIsFrozenCanvasOpen(false)}
/>
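The sidebar now reads everything from log.executionData and log.cost instead of log.metadata. A rough sketch of the shape these reads imply, inferred from the component above rather than from the actual type definitions (TraceSpan is assumed to come from the existing trace-spans types):

// Illustrative only; field names are inferred from usage in this component.
interface LogCost {
  input?: number
  output?: number
  total?: number
  tokens?: { prompt?: number; completion?: number }
  models?: Record<string, LogCost>
}

interface LogExecutionData {
  enhanced?: boolean
  traceSpans?: TraceSpan[]
  totalDuration?: number
  toolCalls?: unknown[]
  blockInput?: Record<string, any>
}

interface WorkflowLogDetails {
  id: string
  executionId?: string
  trigger?: string
  duration?: number | string
  cost?: LogCost
  executionData?: LogExecutionData
}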

View File

@@ -82,14 +82,21 @@ function transformBlockData(data: any, blockType: string, isInput: boolean) {
interface CollapsibleInputOutputProps {
span: TraceSpan
spanId: string
depth: number
}
function CollapsibleInputOutput({ span, spanId }: CollapsibleInputOutputProps) {
function CollapsibleInputOutput({ span, spanId, depth }: CollapsibleInputOutputProps) {
const [inputExpanded, setInputExpanded] = useState(false)
const [outputExpanded, setOutputExpanded] = useState(false)
// Calculate the left margin based on depth to match the parent span's indentation
const leftMargin = depth * 16 + 8 + 24 // Base depth indentation + icon width + extra padding
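// For example (illustrative): depth 0 -> 32px, depth 1 -> 48px, depth 2 -> 64px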
return (
<div className='mt-2 mr-4 mb-4 ml-8 space-y-3 overflow-hidden'>
<div
className='mt-2 mr-4 mb-4 space-y-3 overflow-hidden'
style={{ marginLeft: `${leftMargin}px` }}
>
{/* Input Data - Collapsible */}
{span.input && (
<div>
@@ -162,26 +169,30 @@ function BlockDataDisplay({
if (value === undefined) return <span className='text-muted-foreground italic'>undefined</span>
if (typeof value === 'string') {
return <span className='break-all text-green-700 dark:text-green-400'>"{value}"</span>
return <span className='break-all text-emerald-700 dark:text-emerald-400'>"{value}"</span>
}
if (typeof value === 'number') {
return <span className='text-blue-700 dark:text-blue-400'>{value}</span>
return <span className='font-mono text-blue-700 dark:text-blue-400'>{value}</span>
}
if (typeof value === 'boolean') {
return <span className='text-purple-700 dark:text-purple-400'>{value.toString()}</span>
return (
<span className='font-mono text-amber-700 dark:text-amber-400'>{value.toString()}</span>
)
}
if (Array.isArray(value)) {
if (value.length === 0) return <span className='text-muted-foreground'>[]</span>
return (
<div className='space-y-1'>
<div className='space-y-0.5'>
<span className='text-muted-foreground'>[</span>
<div className='ml-4 space-y-1'>
<div className='ml-2 space-y-0.5'>
{value.map((item, index) => (
<div key={index} className='flex min-w-0 gap-2'>
<span className='flex-shrink-0 text-muted-foreground text-xs'>{index}:</span>
<div key={index} className='flex min-w-0 gap-1.5'>
<span className='flex-shrink-0 font-mono text-slate-600 text-xs dark:text-slate-400'>
{index}:
</span>
<div className='min-w-0 flex-1 overflow-hidden'>{renderValue(item)}</div>
</div>
))}
@@ -196,10 +207,10 @@ function BlockDataDisplay({
if (entries.length === 0) return <span className='text-muted-foreground'>{'{}'}</span>
return (
<div className='space-y-1'>
<div className='space-y-0.5'>
{entries.map(([objKey, objValue]) => (
<div key={objKey} className='flex min-w-0 gap-2'>
<span className='flex-shrink-0 font-medium text-orange-700 dark:text-orange-400'>
<div key={objKey} className='flex min-w-0 gap-1.5'>
<span className='flex-shrink-0 font-medium text-indigo-700 dark:text-indigo-400'>
{objKey}:
</span>
<div className='min-w-0 flex-1 overflow-hidden'>{renderValue(objValue, objKey)}</div>
@@ -227,12 +238,12 @@ function BlockDataDisplay({
{transformedData &&
Object.keys(transformedData).filter((key) => key !== 'error' && key !== 'success')
.length > 0 && (
<div className='space-y-1'>
<div className='space-y-0.5'>
{Object.entries(transformedData)
.filter(([key]) => key !== 'error' && key !== 'success')
.map(([key, value]) => (
<div key={key} className='flex gap-2'>
<span className='font-medium text-orange-700 dark:text-orange-400'>{key}:</span>
<div key={key} className='flex gap-1.5'>
<span className='font-medium text-indigo-700 dark:text-indigo-400'>{key}:</span>
{renderValue(value, key)}
</div>
))}
@@ -592,7 +603,9 @@ function TraceSpanItem({
{expanded && (
<div>
{/* Block Input/Output Data - Collapsible */}
{(span.input || span.output) && <CollapsibleInputOutput span={span} spanId={spanId} />}
{(span.input || span.output) && (
<CollapsibleInputOutput span={span} spanId={spanId} depth={depth} />
)}
{/* Children and tool calls */}
{/* Render child spans */}

View File

@@ -85,6 +85,10 @@ export default function Logs() {
const [selectedLog, setSelectedLog] = useState<WorkflowLog | null>(null)
const [selectedLogIndex, setSelectedLogIndex] = useState<number>(-1)
const [isSidebarOpen, setIsSidebarOpen] = useState(false)
const [isDetailsLoading, setIsDetailsLoading] = useState(false)
const detailsCacheRef = useRef<Map<string, any>>(new Map())
const detailsAbortRef = useRef<AbortController | null>(null)
const currentDetailsIdRef = useRef<string | null>(null)
const selectedRowRef = useRef<HTMLTableRowElement | null>(null)
const loaderRef = useRef<HTMLDivElement>(null)
const scrollContainerRef = useRef<HTMLDivElement>(null)
@@ -116,13 +120,122 @@ export default function Logs() {
const index = logs.findIndex((l) => l.id === log.id)
setSelectedLogIndex(index)
setIsSidebarOpen(true)
setIsDetailsLoading(true)
// Fetch details for the current, previous, and next logs concurrently, consulting the cache first
const currentId = log.id
const prevId = index > 0 ? logs[index - 1]?.id : undefined
const nextId = index < logs.length - 1 ? logs[index + 1]?.id : undefined
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
currentDetailsIdRef.current = currentId
const idsToFetch: Array<{ id: string; merge: boolean }> = []
const cachedCurrent = currentId ? detailsCacheRef.current.get(currentId) : undefined
if (currentId && !cachedCurrent) idsToFetch.push({ id: currentId, merge: true })
if (prevId && !detailsCacheRef.current.has(prevId))
idsToFetch.push({ id: prevId, merge: false })
if (nextId && !detailsCacheRef.current.has(nextId))
idsToFetch.push({ id: nextId, merge: false })
// Merge cached current immediately
if (cachedCurrent) {
setSelectedLog((prev) =>
prev && prev.id === currentId
? ({ ...(prev as any), ...(cachedCurrent as any) } as any)
: prev
)
setIsDetailsLoading(false)
}
if (idsToFetch.length === 0) return
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === currentId) {
setSelectedLog((prev) =>
prev && prev.id === id ? ({ ...(prev as any), ...(detailed as any) } as any) : prev
)
if (currentDetailsIdRef.current === id) setIsDetailsLoading(false)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
const handleNavigateNext = useCallback(() => {
if (selectedLogIndex < logs.length - 1) {
const nextIndex = selectedLogIndex + 1
setSelectedLogIndex(nextIndex)
setSelectedLog(logs[nextIndex])
const nextLog = logs[nextIndex]
setSelectedLog(nextLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
const cached = detailsCacheRef.current.get(nextLog.id)
if (cached) {
setSelectedLog((prev) =>
prev && prev.id === nextLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
)
} else {
const prevId = nextIndex > 0 ? logs[nextIndex - 1]?.id : undefined
const afterId = nextIndex < logs.length - 1 ? logs[nextIndex + 1]?.id : undefined
const idsToFetch: Array<{ id: string; merge: boolean }> = []
if (nextLog.id && !detailsCacheRef.current.has(nextLog.id))
idsToFetch.push({ id: nextLog.id, merge: true })
if (prevId && !detailsCacheRef.current.has(prevId))
idsToFetch.push({ id: prevId, merge: false })
if (afterId && !detailsCacheRef.current.has(afterId))
idsToFetch.push({ id: afterId, merge: false })
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === nextLog.id) {
setSelectedLog((prev) =>
prev && prev.id === id
? ({ ...(prev as any), ...(detailed as any) } as any)
: prev
)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
}
}, [selectedLogIndex, logs])
@@ -130,7 +243,57 @@ export default function Logs() {
if (selectedLogIndex > 0) {
const prevIndex = selectedLogIndex - 1
setSelectedLogIndex(prevIndex)
setSelectedLog(logs[prevIndex])
const prevLog = logs[prevIndex]
setSelectedLog(prevLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
const cached = detailsCacheRef.current.get(prevLog.id)
if (cached) {
setSelectedLog((prev) =>
prev && prev.id === prevLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
)
} else {
const beforeId = prevIndex > 0 ? logs[prevIndex - 1]?.id : undefined
const afterId = prevIndex < logs.length - 1 ? logs[prevIndex + 1]?.id : undefined
const idsToFetch: Array<{ id: string; merge: boolean }> = []
if (prevLog.id && !detailsCacheRef.current.has(prevLog.id))
idsToFetch.push({ id: prevLog.id, merge: true })
if (beforeId && !detailsCacheRef.current.has(beforeId))
idsToFetch.push({ id: beforeId, merge: false })
if (afterId && !detailsCacheRef.current.has(afterId))
idsToFetch.push({ id: afterId, merge: false })
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === prevLog.id) {
setSelectedLog((prev) =>
prev && prev.id === id
? ({ ...(prev as any), ...(detailed as any) } as any)
: prev
)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
}
}, [selectedLogIndex, logs])
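handleLogClick, handleNavigateNext, and handleNavigatePrev repeat the same cache-check / abort / prefetch-neighbors dance. A possible consolidation, sketched under the assumption that the same /api/logs/by-id/:id endpoint, cache map, and merge behavior shown above are kept:

// Hypothetical refactor of the repeated prefetch logic above.
async function fetchLogDetailsBatch(
  ids: Array<{ id: string; merge: boolean }>,
  controller: AbortController,
  cache: Map<string, any>,
  mergeInto: (id: string, detailed: any) => void
) {
  await Promise.all(
    ids.map(async ({ id, merge }) => {
      try {
        const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
        if (!res.ok) return
        const body = await res.json()
        const detailed = body?.data
        if (!detailed) return
        cache.set(id, detailed)
        if (merge) mergeInto(id, detailed)
      } catch (e: any) {
        // Aborted batches are expected when the selection changes quickly.
        if (e?.name === 'AbortError') return
      }
    })
  )
}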
@@ -160,7 +323,7 @@ export default function Logs() {
// Get fresh query params by calling buildQueryParams from store
const { buildQueryParams: getCurrentQueryParams } = useFilterStore.getState()
const queryParams = getCurrentQueryParams(pageNum, LOGS_PER_PAGE)
const response = await fetch(`/api/logs?${queryParams}`)
const response = await fetch(`/api/logs?${queryParams}&details=basic`)
if (!response.ok) {
throw new Error(`Error fetching logs: ${response.statusText}`)
@@ -262,7 +425,7 @@ export default function Logs() {
// Build query params inline to avoid dependency issues
const params = new URLSearchParams()
params.set('includeWorkflow', 'true')
params.set('details', 'basic')
params.set('limit', LOGS_PER_PAGE.toString())
params.set('offset', '0') // Always start from page 1
params.set('workspaceId', workspaceId)
@@ -482,7 +645,7 @@ export default function Logs() {
{/* Header */}
<div>
<div className='border-border border-b'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_80px_1fr] gap-2 px-2 pb-3 md:grid-cols-[140px_90px_140px_90px_1fr] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_100px_1fr] lg:gap-4 xl:grid-cols-[160px_100px_160px_100px_100px_1fr_100px]'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] gap-2 px-2 pb-3 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Time
</div>
@@ -493,14 +656,12 @@ export default function Logs() {
Workflow
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
ID
Cost
</div>
<div className='hidden font-[480] font-sans text-[13px] text-muted-foreground leading-normal xl:block'>
Trigger
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Message
</div>
<div className='hidden font-[480] font-sans text-[13px] text-muted-foreground leading-normal xl:block'>
Duration
</div>
@@ -547,7 +708,7 @@ export default function Logs() {
}`}
onClick={() => handleLogClick(log)}
>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_80px_1fr] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_90px_1fr] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_100px_1fr] lg:gap-4 xl:grid-cols-[160px_100px_160px_100px_100px_1fr_100px]'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'>
{/* Time */}
<div>
<div className='text-[13px]'>
@@ -584,10 +745,12 @@ export default function Logs() {
</div>
</div>
{/* ID */}
{/* Cost */}
<div>
<div className='font-medium text-muted-foreground text-xs'>
#{log.id.slice(-4)}
{typeof (log as any)?.cost?.total === 'number'
? `$${((log as any).cost.total as number).toFixed(4)}`
: '—'}
</div>
</div>
@@ -614,11 +777,6 @@ export default function Logs() {
)}
</div>
{/* Message */}
<div className='min-w-0'>
<div className='truncate font-[420] text-[13px]'>{log.message}</div>
</div>
{/* Duration */}
<div className='hidden xl:block'>
<div className='text-muted-foreground text-xs'>

View File

@@ -2,8 +2,8 @@
import React from 'react'
import { TooltipProvider } from '@/components/ui/tooltip'
import { ThemeProvider } from '@/app/workspace/[workspaceId]/providers/theme-provider'
import { WorkspacePermissionsProvider } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { SettingsLoader } from './settings-loader'
interface ProvidersProps {
children: React.ReactNode
@@ -11,11 +11,12 @@ interface ProvidersProps {
const Providers = React.memo<ProvidersProps>(({ children }) => {
return (
<ThemeProvider>
<>
<SettingsLoader />
<TooltipProvider delayDuration={100} skipDelayDuration={0}>
<WorkspacePermissionsProvider>{children}</WorkspacePermissionsProvider>
</TooltipProvider>
</ThemeProvider>
</>
)
})

View File

@@ -0,0 +1,27 @@
'use client'
import { useEffect, useRef } from 'react'
import { useSession } from '@/lib/auth-client'
import { useGeneralStore } from '@/stores/settings/general/store'
/**
* Loads user settings from database once per workspace session.
* This ensures settings are synced from the DB on initial load, while the
* localStorage cache is used for subsequent navigation within the app.
*/
export function SettingsLoader() {
const { data: session, isPending: isSessionPending } = useSession()
const loadSettings = useGeneralStore((state) => state.loadSettings)
const hasLoadedRef = useRef(false)
useEffect(() => {
// Only load settings once per session for authenticated users
if (!isSessionPending && session?.user && !hasLoadedRef.current) {
hasLoadedRef.current = true
// Force load from DB on initial workspace entry
loadSettings(true)
}
}, [isSessionPending, session?.user, loadSettings])
return null
}
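SettingsLoader assumes the general store exposes a loadSettings(force?) action. A minimal sketch of what that store contract could look like; the real @/stores/settings/general/store is not shown in this diff, so the endpoint and the exact force semantics below are assumptions:

import { create } from 'zustand'

// Illustrative store shape only; the actual store likely tracks more settings.
interface GeneralState {
  theme: 'light' | 'dark' | 'system'
  isLoading: boolean
  loadSettings: (force?: boolean) => Promise<void>
}

export const useGeneralStore = create<GeneralState>((set, get) => ({
  theme: 'system',
  isLoading: false,
  loadSettings: async (force = false) => {
    // `force` bypasses any locally cached value (assumption).
    if (get().isLoading && !force) return
    set({ isLoading: true })
    try {
      const res = await fetch('/api/users/me/settings') // assumed endpoint
      if (res.ok) {
        const { data } = await res.json()
        set({ theme: data?.theme ?? 'system' })
      }
    } finally {
      set({ isLoading: false })
    }
  },
}))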

View File

@@ -1,23 +0,0 @@
'use client'
import { useEffect } from 'react'
import { useGeneralStore } from '@/stores/settings/general/store'
export function ThemeProvider({ children }: { children: React.ReactNode }) {
const theme = useGeneralStore((state) => state.theme)
useEffect(() => {
const root = window.document.documentElement
root.classList.remove('light', 'dark')
// If theme is system, check system preference
if (theme === 'system') {
const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches
root.classList.add(prefersDark ? 'dark' : 'light')
} else {
root.classList.add(theme)
}
}, [theme])
return children
}

View File

@@ -29,7 +29,7 @@ export type CategoryValue = (typeof categories)[number]['value']
// Template data structure
export interface Template {
id: string
workflowId: string
workflowId: string | null
userId: string
name: string
description: string | null

View File

@@ -1,7 +1,7 @@
'use client'
import { useState } from 'react'
import { Download } from 'lucide-react'
import { Upload } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
@@ -81,7 +81,7 @@ export function ExportControls({ disabled = false }: ExportControlsProps) {
<TooltipTrigger asChild>
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Download className='h-5 w-5' />
<Upload className='h-5 w-5' />
</div>
) : (
<Button
@@ -89,7 +89,7 @@ export function ExportControls({ disabled = false }: ExportControlsProps) {
onClick={handleExportYaml}
className='h-12 w-12 rounded-[11px] border bg-card text-card-foreground shadow-xs hover:bg-secondary'
>
<Download className='h-5 w-5' />
<Upload className='h-5 w-5' />
<span className='sr-only'>Export as YAML</span>
</Button>
)}

View File

@@ -1,6 +1,6 @@
'use client'
import { useState } from 'react'
import { useEffect, useState } from 'react'
import { zodResolver } from '@hookform/resolvers/zod'
import {
Award,
@@ -18,6 +18,7 @@ import {
Database,
DollarSign,
Edit,
Eye,
FileText,
Folder,
Globe,
@@ -48,6 +49,16 @@ import {
} from 'lucide-react'
import { useForm } from 'react-hook-form'
import { z } from 'zod'
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
} from '@/components/ui/alert-dialog'
import { Button } from '@/components/ui/button'
import { ColorPicker } from '@/components/ui/color-picker'
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
@@ -68,6 +79,7 @@ import {
SelectTrigger,
SelectValue,
} from '@/components/ui/select'
import { Skeleton } from '@/components/ui/skeleton'
import { Textarea } from '@/components/ui/textarea'
import { useSession } from '@/lib/auth-client'
import { createLogger } from '@/lib/logs/console/logger'
@@ -100,7 +112,6 @@ interface TemplateModalProps {
workflowId: string
}
// Enhanced icon selection with category-relevant icons
const icons = [
// Content & Documentation
{ value: 'FileText', label: 'File Text', component: FileText },
@@ -165,6 +176,10 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
const { data: session } = useSession()
const [isSubmitting, setIsSubmitting] = useState(false)
const [iconPopoverOpen, setIconPopoverOpen] = useState(false)
const [existingTemplate, setExistingTemplate] = useState<any>(null)
const [isLoadingTemplate, setIsLoadingTemplate] = useState(false)
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [isDeleting, setIsDeleting] = useState(false)
const form = useForm<TemplateFormData>({
resolver: zodResolver(templateSchema),
@@ -178,6 +193,63 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
},
})
// Watch form state to determine if all required fields are valid
const formValues = form.watch()
const isFormValid =
form.formState.isValid &&
formValues.name?.trim() &&
formValues.description?.trim() &&
formValues.author?.trim() &&
formValues.category
// Check for existing template when modal opens
useEffect(() => {
if (open && workflowId) {
checkExistingTemplate()
}
}, [open, workflowId])
const checkExistingTemplate = async () => {
setIsLoadingTemplate(true)
try {
const response = await fetch(`/api/templates?workflowId=${workflowId}&limit=1`)
if (response.ok) {
const result = await response.json()
const template = result.data?.[0] || null
setExistingTemplate(template)
// Pre-fill form with existing template data
if (template) {
form.reset({
name: template.name,
description: template.description,
author: template.author,
category: template.category,
icon: template.icon,
color: template.color,
})
} else {
// No existing template found
setExistingTemplate(null)
// Reset form to defaults
form.reset({
name: '',
description: '',
author: session?.user?.name || session?.user?.email || '',
category: '',
icon: 'FileText',
color: '#3972F6',
})
}
}
} catch (error) {
logger.error('Error checking existing template:', error)
setExistingTemplate(null)
} finally {
setIsLoadingTemplate(false)
}
}
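// For context, templateSchema (defined elsewhere in this file and not shown in
// the diff) presumably validates the same fields the form renders. An
// illustrative shape only — field names are taken from the form, the
// constraints are assumptions:
//
// const templateSchema = z.object({
//   name: z.string().min(1),
//   description: z.string().min(1),
//   author: z.string().min(1),
//   category: z.string().min(1),
//   icon: z.string(),
//   color: z.string(),
// })
// type TemplateFormData = z.infer<typeof templateSchema>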
const onSubmit = async (data: TemplateFormData) => {
if (!session?.user) {
logger.error('User not authenticated')
@@ -201,21 +273,36 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
state: templateState,
}
const response = await fetch('/api/templates', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(templateData),
})
let response
if (existingTemplate) {
// Update existing template
response = await fetch(`/api/templates/${existingTemplate.id}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(templateData),
})
} else {
// Create new template
response = await fetch('/api/templates', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(templateData),
})
}
if (!response.ok) {
const errorData = await response.json()
throw new Error(errorData.error || 'Failed to create template')
throw new Error(
errorData.error || `Failed to ${existingTemplate ? 'update' : 'create'} template`
)
}
const result = await response.json()
logger.info('Template created successfully:', result)
logger.info(`Template ${existingTemplate ? 'updated' : 'created'} successfully:`, result)
// Reset form and close modal
form.reset()
@@ -241,7 +328,35 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
>
<DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
<div className='flex items-center justify-between'>
<DialogTitle className='font-medium text-lg'>Publish Template</DialogTitle>
<div className='flex items-center gap-3'>
<DialogTitle className='font-medium text-lg'>
{isLoadingTemplate
? 'Loading...'
: existingTemplate
? 'Update Template'
: 'Publish Template'}
</DialogTitle>
{existingTemplate && (
<div className='flex items-center gap-2'>
{existingTemplate.stars > 0 && (
<div className='flex items-center gap-1 rounded-full bg-yellow-50 px-2 py-1 dark:bg-yellow-900/20'>
<Star className='h-3 w-3 fill-yellow-400 text-yellow-400' />
<span className='font-medium text-xs text-yellow-700 dark:text-yellow-300'>
{existingTemplate.stars}
</span>
</div>
)}
{existingTemplate.views > 0 && (
<div className='flex items-center gap-1 rounded-full bg-blue-50 px-2 py-1 dark:bg-blue-900/20'>
<Eye className='h-3 w-3 text-blue-500' />
<span className='font-medium text-blue-700 text-xs dark:text-blue-300'>
{existingTemplate.views}
</span>
</div>
)}
</div>
)}
</div>
<Button
variant='ghost'
size='icon'
@@ -259,65 +374,189 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
onSubmit={form.handleSubmit(onSubmit)}
className='flex flex-1 flex-col overflow-hidden'
>
<div className='flex-1 overflow-y-auto px-6 py-4'>
<div className='space-y-6'>
<div className='flex gap-3'>
<div className='flex-1 overflow-y-auto px-6 py-6'>
{isLoadingTemplate ? (
<div className='space-y-6'>
{/* Icon and Color row */}
<div className='flex gap-3'>
<div className='w-20'>
<Skeleton className='mb-2 h-4 w-8' /> {/* Label */}
<Skeleton className='h-10 w-20' /> {/* Icon picker */}
</div>
<div className='w-20'>
<Skeleton className='mb-2 h-4 w-10' /> {/* Label */}
<Skeleton className='h-10 w-20' /> {/* Color picker */}
</div>
</div>
{/* Name field */}
<div>
<Skeleton className='mb-2 h-4 w-12' /> {/* Label */}
<Skeleton className='h-10 w-full' /> {/* Input */}
</div>
{/* Author and Category row */}
<div className='grid grid-cols-2 gap-4'>
<div>
<Skeleton className='mb-2 h-4 w-14' /> {/* Label */}
<Skeleton className='h-10 w-full' /> {/* Input */}
</div>
<div>
<Skeleton className='mb-2 h-4 w-16' /> {/* Label */}
<Skeleton className='h-10 w-full' /> {/* Select */}
</div>
</div>
{/* Description field */}
<div>
<Skeleton className='mb-2 h-4 w-20' /> {/* Label */}
<Skeleton className='h-20 w-full' /> {/* Textarea */}
</div>
</div>
) : (
<div className='space-y-6'>
<div className='flex gap-3'>
<FormField
control={form.control}
name='icon'
render={({ field }) => (
<FormItem className='w-20'>
<FormLabel className='!text-foreground font-medium text-sm'>
Icon
</FormLabel>
<Popover open={iconPopoverOpen} onOpenChange={setIconPopoverOpen}>
<PopoverTrigger asChild>
<Button variant='outline' role='combobox' className='h-10 w-20 p-0'>
<SelectedIconComponent className='h-4 w-4' />
</Button>
</PopoverTrigger>
<PopoverContent className='z-50 w-84 p-0' align='start'>
<div className='p-3'>
<div className='grid max-h-80 grid-cols-8 gap-2 overflow-y-auto'>
{icons.map((icon) => {
const IconComponent = icon.component
return (
<button
key={icon.value}
type='button'
onClick={() => {
field.onChange(icon.value)
setIconPopoverOpen(false)
}}
className={cn(
'flex h-8 w-8 items-center justify-center rounded-md border transition-colors hover:bg-muted',
field.value === icon.value &&
'bg-primary text-primary-foreground'
)}
>
<IconComponent className='h-4 w-4' />
</button>
)
})}
</div>
</div>
</PopoverContent>
</Popover>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name='color'
render={({ field }) => (
<FormItem className='w-20'>
<FormLabel className='!text-foreground font-medium text-sm'>
Color
</FormLabel>
<FormControl>
<ColorPicker
value={field.value}
onChange={field.onChange}
onBlur={field.onBlur}
className='h-10 w-20'
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
</div>
<FormField
control={form.control}
name='icon'
name='name'
render={({ field }) => (
<FormItem className='w-20'>
<FormLabel>Icon</FormLabel>
<Popover open={iconPopoverOpen} onOpenChange={setIconPopoverOpen}>
<PopoverTrigger asChild>
<Button variant='outline' role='combobox' className='h-10 w-20 p-0'>
<SelectedIconComponent className='h-4 w-4' />
</Button>
</PopoverTrigger>
<PopoverContent className='z-50 w-84 p-0' align='start'>
<div className='p-3'>
<div className='grid max-h-80 grid-cols-8 gap-2 overflow-y-auto'>
{icons.map((icon) => {
const IconComponent = icon.component
return (
<button
key={icon.value}
type='button'
onClick={() => {
field.onChange(icon.value)
setIconPopoverOpen(false)
}}
className={cn(
'flex h-8 w-8 items-center justify-center rounded-md border transition-colors hover:bg-muted',
field.value === icon.value &&
'bg-primary text-primary-foreground'
)}
>
<IconComponent className='h-4 w-4' />
</button>
)
})}
</div>
</div>
</PopoverContent>
</Popover>
<FormItem>
<FormLabel className='!text-foreground font-medium text-sm'>Name</FormLabel>
<FormControl>
<Input placeholder='Enter template name' {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<div className='grid grid-cols-2 gap-4'>
<FormField
control={form.control}
name='author'
render={({ field }) => (
<FormItem>
<FormLabel className='!text-foreground font-medium text-sm'>
Author
</FormLabel>
<FormControl>
<Input placeholder='Enter author name' {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name='category'
render={({ field }) => (
<FormItem>
<FormLabel className='!text-foreground font-medium text-sm'>
Category
</FormLabel>
<Select onValueChange={field.onChange} defaultValue={field.value}>
<FormControl>
<SelectTrigger>
<SelectValue placeholder='Select a category' />
</SelectTrigger>
</FormControl>
<SelectContent>
{categories.map((category) => (
<SelectItem key={category.value} value={category.value}>
{category.label}
</SelectItem>
))}
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
</div>
<FormField
control={form.control}
name='color'
name='description'
render={({ field }) => (
<FormItem className='w-20'>
<FormLabel>Color</FormLabel>
<FormItem>
<FormLabel className='!text-foreground font-medium text-sm'>
Description
</FormLabel>
<FormControl>
<ColorPicker
value={field.value}
onChange={field.onChange}
onBlur={field.onBlur}
className='h-10 w-20'
<Textarea
placeholder='Describe what this template does...'
className='resize-none'
rows={3}
{...field}
/>
</FormControl>
<FormMessage />
@@ -325,91 +564,28 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
)}
/>
</div>
<FormField
control={form.control}
name='name'
render={({ field }) => (
<FormItem>
<FormLabel>Name</FormLabel>
<FormControl>
<Input placeholder='Enter template name' {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<div className='grid grid-cols-2 gap-4'>
<FormField
control={form.control}
name='author'
render={({ field }) => (
<FormItem>
<FormLabel>Author</FormLabel>
<FormControl>
<Input placeholder='Enter author name' {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name='category'
render={({ field }) => (
<FormItem>
<FormLabel>Category</FormLabel>
<Select onValueChange={field.onChange} defaultValue={field.value}>
<FormControl>
<SelectTrigger>
<SelectValue placeholder='Select a category' />
</SelectTrigger>
</FormControl>
<SelectContent>
{categories.map((category) => (
<SelectItem key={category.value} value={category.value}>
{category.label}
</SelectItem>
))}
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
</div>
<FormField
control={form.control}
name='description'
render={({ field }) => (
<FormItem>
<FormLabel>Description</FormLabel>
<FormControl>
<Textarea
placeholder='Describe what this template does...'
className='resize-none'
rows={3}
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
</div>
)}
</div>
{/* Fixed Footer */}
<div className='mt-auto border-t px-6 pt-4 pb-6'>
<div className='flex justify-end'>
<div className='flex items-center'>
{existingTemplate && (
<Button
type='button'
variant='destructive'
onClick={() => setShowDeleteDialog(true)}
disabled={isSubmitting || isLoadingTemplate}
className='h-10 rounded-md px-4 py-2'
>
Delete
</Button>
)}
<Button
type='submit'
disabled={isSubmitting}
disabled={isSubmitting || !isFormValid || isLoadingTemplate}
className={cn(
'font-medium',
'ml-auto font-medium',
'bg-[var(--brand-primary-hex)] hover:bg-[var(--brand-primary-hover-hex)]',
'shadow-[0_0_0_0_var(--brand-primary-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]',
'text-white transition-all duration-200',
@@ -420,16 +596,59 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
{isSubmitting ? (
<>
<Loader2 className='mr-2 h-4 w-4 animate-spin' />
Publishing...
{existingTemplate ? 'Updating...' : 'Publishing...'}
</>
) : existingTemplate ? (
'Update Template'
) : (
'Publish'
'Publish Template'
)}
</Button>
</div>
</div>
</form>
</Form>
{existingTemplate && (
<AlertDialog open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Delete Template?</AlertDialogTitle>
<AlertDialogDescription>
Deleting this template will remove it from the gallery. This action cannot be
undone.
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel disabled={isDeleting}>Cancel</AlertDialogCancel>
<AlertDialogAction
className='bg-destructive text-destructive-foreground hover:bg-destructive/90'
disabled={isDeleting}
onClick={async () => {
if (!existingTemplate) return
setIsDeleting(true)
try {
const resp = await fetch(`/api/templates/${existingTemplate.id}`, {
method: 'DELETE',
})
if (!resp.ok) {
const err = await resp.json().catch(() => ({}))
throw new Error(err.error || 'Failed to delete template')
}
setShowDeleteDialog(false)
onOpenChange(false)
} catch (err) {
logger.error('Failed to delete template', err)
} finally {
setIsDeleting(false)
}
}}
>
{isDeleting ? 'Deleting...' : 'Delete'}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
)}
</DialogContent>
</Dialog>
)
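For reference, the delete action wired into the confirmation dialog above boils down to a single API call; a minimal sketch of it as a standalone helper (the endpoint and error shape are the ones used in the component, the helper name is illustrative):

// Editorial sketch: the delete call from the dialog above, extracted as a helper.
async function deleteTemplate(templateId: string): Promise<void> {
  const resp = await fetch(`/api/templates/${templateId}`, { method: 'DELETE' })
  if (!resp.ok) {
    // Fall back to a generic message when the error body is not valid JSON.
    const err = await resp.json().catch(() => ({}))
    throw new Error((err as { error?: string }).error || 'Failed to delete template')
  }
}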



@@ -18,7 +18,6 @@ import {
import { useParams, useRouter } from 'next/navigation'
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
@@ -32,7 +31,6 @@ import {
TooltipTrigger,
} from '@/components/ui'
import { useSession } from '@/lib/auth-client'
import { isDev } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
@@ -114,6 +112,15 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
const [isTemplateModalOpen, setIsTemplateModalOpen] = useState(false)
const [isAutoLayouting, setIsAutoLayouting] = useState(false)
// Delete workflow state - grouped for better organization
const [deleteState, setDeleteState] = useState({
showDialog: false,
isDeleting: false,
hasPublishedTemplates: false,
publishedTemplates: [] as any[],
showTemplateChoice: false,
})
// Deployed state management
const [deployedState, setDeployedState] = useState<WorkflowState | null>(null)
const [isLoadingDeployedState, setIsLoadingDeployedState] = useState<boolean>(false)
@@ -338,34 +345,170 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
}
/**
* Handle deleting the current workflow
* Reset delete state
*/
const handleDeleteWorkflow = () => {
if (!activeWorkflowId || !userPermissions.canEdit) return
const resetDeleteState = useCallback(() => {
setDeleteState({
showDialog: false,
isDeleting: false,
hasPublishedTemplates: false,
publishedTemplates: [],
showTemplateChoice: false,
})
}, [])
const sidebarWorkflows = getSidebarOrderedWorkflows()
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === activeWorkflowId)
/**
* Navigate to next workflow after deletion
*/
const navigateAfterDeletion = useCallback(
(currentWorkflowId: string) => {
const sidebarWorkflows = getSidebarOrderedWorkflows()
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === currentWorkflowId)
// Find next workflow: try next, then previous
let nextWorkflowId: string | null = null
if (sidebarWorkflows.length > 1) {
if (currentIndex < sidebarWorkflows.length - 1) {
nextWorkflowId = sidebarWorkflows[currentIndex + 1].id
} else if (currentIndex > 0) {
nextWorkflowId = sidebarWorkflows[currentIndex - 1].id
// Find next workflow: try next, then previous
let nextWorkflowId: string | null = null
if (sidebarWorkflows.length > 1) {
if (currentIndex < sidebarWorkflows.length - 1) {
nextWorkflowId = sidebarWorkflows[currentIndex + 1].id
} else if (currentIndex > 0) {
nextWorkflowId = sidebarWorkflows[currentIndex - 1].id
}
}
// Navigate to next workflow or workspace home
if (nextWorkflowId) {
router.push(`/workspace/${workspaceId}/w/${nextWorkflowId}`)
} else {
router.push(`/workspace/${workspaceId}`)
}
},
[workspaceId, router]
)
/**
* Check if workflow has published templates
*/
const checkPublishedTemplates = useCallback(async (workflowId: string) => {
const checkResponse = await fetch(`/api/workflows/${workflowId}?check-templates=true`, {
method: 'DELETE',
})
if (!checkResponse.ok) {
throw new Error(`Failed to check templates: ${checkResponse.statusText}`)
}
// Navigate to next workflow or workspace home
if (nextWorkflowId) {
router.push(`/workspace/${workspaceId}/w/${nextWorkflowId}`)
} else {
router.push(`/workspace/${workspaceId}`)
}
return await checkResponse.json()
}, [])
// Remove the workflow from the registry
useWorkflowRegistry.getState().removeWorkflow(activeWorkflowId)
}
/**
* Delete workflow with optional template handling
*/
const deleteWorkflowWithTemplates = useCallback(
async (workflowId: string, templateAction?: 'keep' | 'delete') => {
const endpoint = templateAction
? `/api/workflows/${workflowId}?deleteTemplates=${templateAction}`
: null
if (endpoint) {
// Use custom endpoint for template handling
const response = await fetch(endpoint, { method: 'DELETE' })
if (!response.ok) {
throw new Error(`Failed to delete workflow: ${response.statusText}`)
}
// Manual registry cleanup since we used custom API
useWorkflowRegistry.setState((state) => {
const newWorkflows = { ...state.workflows }
delete newWorkflows[workflowId]
return {
...state,
workflows: newWorkflows,
activeWorkflowId: state.activeWorkflowId === workflowId ? null : state.activeWorkflowId,
}
})
} else {
// Use registry's built-in deletion (handles database + state)
await useWorkflowRegistry.getState().removeWorkflow(workflowId)
}
},
[]
)
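Taken together, the two callbacks above implement a check-then-delete protocol against the same DELETE endpoint. A minimal standalone sketch of that flow, assuming the plain DELETE (no query) is what the registry's removeWorkflow issues internally; promptTemplateAction is a hypothetical stand-in for the dialog choice:

// Editorial sketch (not part of the diff): the same two-step flow as one async function.
async function deleteWorkflowFlow(
  workflowId: string,
  promptTemplateAction: () => Promise<'keep' | 'delete'>
): Promise<void> {
  // Step 1: ask the API whether published templates reference this workflow.
  const check = await fetch(`/api/workflows/${workflowId}?check-templates=true`, {
    method: 'DELETE',
  })
  if (!check.ok) throw new Error(`Failed to check templates: ${check.statusText}`)
  const { hasPublishedTemplates } = await check.json()

  // Step 2: delete, passing the user's template choice only when one is needed.
  const query = hasPublishedTemplates ? `?deleteTemplates=${await promptTemplateAction()}` : ''
  const res = await fetch(`/api/workflows/${workflowId}${query}`, { method: 'DELETE' })
  if (!res.ok) throw new Error(`Failed to delete workflow: ${res.statusText}`)
}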
/**
* Handle deleting the current workflow - called after user confirms
*/
const handleDeleteWorkflow = useCallback(async () => {
const currentWorkflowId = params.workflowId as string
if (!currentWorkflowId || !userPermissions.canEdit) return
setDeleteState((prev) => ({ ...prev, isDeleting: true }))
try {
// Check if workflow has published templates
const checkData = await checkPublishedTemplates(currentWorkflowId)
if (checkData.hasPublishedTemplates) {
setDeleteState((prev) => ({
...prev,
hasPublishedTemplates: true,
publishedTemplates: checkData.publishedTemplates || [],
showTemplateChoice: true,
isDeleting: false, // Stop showing "Deleting..." and show template choice
}))
return
}
// No templates, proceed with standard deletion
navigateAfterDeletion(currentWorkflowId)
await deleteWorkflowWithTemplates(currentWorkflowId)
resetDeleteState()
} catch (error) {
logger.error('Error deleting workflow:', error)
setDeleteState((prev) => ({ ...prev, isDeleting: false }))
}
}, [
params.workflowId,
userPermissions.canEdit,
checkPublishedTemplates,
navigateAfterDeletion,
deleteWorkflowWithTemplates,
resetDeleteState,
])
/**
* Handle template action selection
*/
const handleTemplateAction = useCallback(
async (action: 'keep' | 'delete') => {
const currentWorkflowId = params.workflowId as string
if (!currentWorkflowId || !userPermissions.canEdit) return
setDeleteState((prev) => ({ ...prev, isDeleting: true }))
try {
logger.info(`Deleting workflow ${currentWorkflowId} with template action: ${action}`)
navigateAfterDeletion(currentWorkflowId)
await deleteWorkflowWithTemplates(currentWorkflowId, action)
logger.info(
`Successfully deleted workflow ${currentWorkflowId} with template action: ${action}`
)
resetDeleteState()
} catch (error) {
logger.error('Error deleting workflow:', error)
setDeleteState((prev) => ({ ...prev, isDeleting: false }))
}
},
[
params.workflowId,
userPermissions.canEdit,
navigateAfterDeletion,
deleteWorkflowWithTemplates,
resetDeleteState,
]
)
// Helper function to open subscription settings
const openSubscriptionSettings = () => {
@@ -413,7 +556,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<Tooltip>
<TooltipTrigger asChild>
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Trash2 className='h-5 w-5' />
<Trash2 className='h-4 w-4' />
</div>
</TooltipTrigger>
<TooltipContent>{getTooltipText()}</TooltipContent>
@@ -422,7 +565,23 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
}
return (
<AlertDialog>
<AlertDialog
open={deleteState.showDialog}
onOpenChange={(open) => {
if (open) {
// Reset all state when opening dialog to ensure clean start
setDeleteState({
showDialog: true,
isDeleting: false,
hasPublishedTemplates: false,
publishedTemplates: [],
showTemplateChoice: false,
})
} else {
resetDeleteState()
}
}}
>
<Tooltip>
<TooltipTrigger asChild>
<AlertDialogTrigger asChild>
@@ -444,21 +603,71 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Delete workflow?</AlertDialogTitle>
<AlertDialogDescription>
Deleting this workflow will permanently remove all associated blocks, executions, and
configuration.{' '}
<span className='text-red-500 dark:text-red-500'>This action cannot be undone.</span>
</AlertDialogDescription>
<AlertDialogTitle>
{deleteState.showTemplateChoice ? 'Published Templates Found' : 'Delete workflow?'}
</AlertDialogTitle>
{deleteState.showTemplateChoice ? (
<div className='space-y-3'>
<AlertDialogDescription>
This workflow has {deleteState.publishedTemplates.length} published template
{deleteState.publishedTemplates.length > 1 ? 's' : ''}:
</AlertDialogDescription>
{deleteState.publishedTemplates.length > 0 && (
<ul className='list-disc space-y-1 pl-6'>
{deleteState.publishedTemplates.map((template) => (
<li key={template.id}>{template.name}</li>
))}
</ul>
)}
<AlertDialogDescription>
What would you like to do with the published template
{deleteState.publishedTemplates.length > 1 ? 's' : ''}?
</AlertDialogDescription>
</div>
) : (
<AlertDialogDescription>
Deleting this workflow will permanently remove all associated blocks, executions,
and configuration.{' '}
<span className='text-red-500 dark:text-red-500'>
This action cannot be undone.
</span>
</AlertDialogDescription>
)}
</AlertDialogHeader>
<AlertDialogFooter className='flex'>
<AlertDialogCancel className='h-9 w-full rounded-[8px]'>Cancel</AlertDialogCancel>
<AlertDialogAction
onClick={handleDeleteWorkflow}
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
>
Delete
</AlertDialogAction>
{deleteState.showTemplateChoice ? (
<div className='flex w-full gap-2'>
<Button
variant='outline'
onClick={() => handleTemplateAction('keep')}
disabled={deleteState.isDeleting}
className='h-9 flex-1 rounded-[8px]'
>
Keep templates
</Button>
<Button
onClick={() => handleTemplateAction('delete')}
disabled={deleteState.isDeleting}
className='h-9 flex-1 rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
>
{deleteState.isDeleting ? 'Deleting...' : 'Delete templates'}
</Button>
</div>
) : (
<>
<AlertDialogCancel className='h-9 w-full rounded-[8px]'>Cancel</AlertDialogCancel>
<Button
onClick={(e) => {
e.preventDefault()
handleDeleteWorkflow()
}}
disabled={deleteState.isDeleting}
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
>
{deleteState.isDeleting ? 'Deleting...' : 'Delete'}
</Button>
</>
)}
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
@@ -498,7 +707,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<TooltipTrigger asChild>
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Copy className='h-5 w-5' />
<Copy className='h-4 w-4' />
</div>
) : (
<Button
@@ -563,9 +772,9 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
{isAutoLayouting ? (
<RefreshCw className='h-5 w-5 animate-spin' />
<RefreshCw className='h-4 w-4 animate-spin' />
) : (
<Layers className='h-5 w-5' />
<Layers className='h-4 w-4' />
)}
</div>
) : (
@@ -721,7 +930,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<TooltipTrigger asChild>
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Store className='h-5 w-5' />
<Store className='h-4 w-4' />
</div>
) : (
<Button
@@ -775,7 +984,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
isDebugging && 'text-amber-500'
)}
>
<Bug className='h-5 w-5' />
<Bug className='h-4 w-4' />
</div>
) : (
<Button variant='outline' onClick={handleDebugToggle} className={buttonClass}>
@@ -999,14 +1208,13 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
return (
<div className='fixed top-4 right-4 z-20 flex items-center gap-1'>
{renderDisconnectionNotice()}
{!isDev && renderToggleButton()}
{isExpanded && !isDev && <ExportControls />}
{isExpanded && !isDev && renderAutoLayoutButton()}
{!isDev && isExpanded && renderDuplicateButton()}
{isDev && renderDuplicateButton()}
{renderToggleButton()}
{isExpanded && <ExportControls />}
{isExpanded && renderAutoLayoutButton()}
{isExpanded && renderPublishButton()}
{renderDeleteButton()}
{renderDuplicateButton()}
{!isDebugging && renderDebugModeToggle()}
{renderPublishButton()}
{renderDeployButton()}
{isDebugging ? renderDebugControlsBar() : renderRunButton()}


@@ -191,27 +191,27 @@ export function DiffControls() {
logger.info('Accepting proposed changes with backup protection')
try {
// Create a checkpoint before applying changes so it appears under the triggering user message
await createCheckpoint().catch((error) => {
logger.warn('Failed to create checkpoint before accept:', error)
})
// Clear preview YAML immediately
await clearPreviewYaml().catch((error) => {
logger.warn('Failed to clear preview YAML:', error)
})
// Accept changes with automatic backup and rollback on failure
await acceptChanges()
// Accept changes without blocking the UI; errors will be logged by the store handler
acceptChanges().catch((error) => {
logger.error('Failed to accept changes (background):', error)
})
logger.info('Successfully accepted and saved workflow changes')
// Show success feedback if needed
logger.info('Accept triggered; UI will update optimistically')
} catch (error) {
logger.error('Failed to accept changes:', error)
// Show error notification to user
// Note: The acceptChanges function has already rolled back the state
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
// You could add toast notification here
console.error('Workflow update failed:', errorMessage)
// Optionally show user-facing error dialog
alert(`Failed to save workflow changes: ${errorMessage}`)
}
}
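The accept handler above switches to a fire-and-forget style: the promise is not awaited, and failures only surface through the logger. A tiny sketch of that pattern as a helper, purely illustrative:

// Editorial sketch: the fire-and-forget pattern used above, extracted as a helper.
// The UI proceeds optimistically; failures are logged, never awaited.
function runInBackground(task: Promise<unknown>, onError: (err: unknown) => void): void {
  task.catch(onError)
}
// e.g. runInBackground(acceptChanges(), (err) => logger.error('Failed to accept changes (background):', err))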
@@ -224,10 +224,10 @@ export function DiffControls() {
logger.warn('Failed to clear preview YAML:', error)
})
// Reject is immediate (no server save needed)
rejectChanges()
logger.info('Successfully rejected proposed changes')
// Reject changes optimistically
rejectChanges().catch((error) => {
logger.error('Failed to reject changes (background):', error)
})
}
return (


@@ -4,6 +4,8 @@ import { Component, type ReactNode, useEffect } from 'react'
import { BotIcon } from 'lucide-react'
import { Card } from '@/components/ui/card'
import { createLogger } from '@/lib/logs/console/logger'
import { ControlBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/control-bar/control-bar'
import { Panel } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel'
const logger = createLogger('ErrorBoundary')
@@ -22,18 +24,32 @@ export function ErrorUI({
fullScreen = false,
}: ErrorUIProps) {
const containerClass = fullScreen
? 'flex items-center justify-center w-full h-screen bg-muted/40'
: 'flex items-center justify-center w-full h-full bg-muted/40'
? 'flex flex-col w-full h-screen bg-muted/40'
: 'flex flex-col w-full h-full bg-muted/40'
return (
<div className={containerClass}>
<Card className='max-w-md space-y-4 p-6 text-center'>
<div className='flex justify-center'>
<BotIcon className='h-16 w-16 text-muted-foreground' />
{/* Control bar */}
<ControlBar hasValidationErrors={false} />
{/* Main content area */}
<div className='relative flex flex-1'>
{/* Error message */}
<div className='flex flex-1 items-center justify-center'>
<Card className='max-w-md space-y-4 p-6 text-center'>
<div className='flex justify-center'>
<BotIcon className='h-16 w-16 text-muted-foreground' />
</div>
<h3 className='font-semibold text-lg'>{title}</h3>
<p className='text-muted-foreground'>{message}</p>
</Card>
</div>
<h3 className='font-semibold text-lg'>{title}</h3>
<p className='text-muted-foreground'>{message}</p>
</Card>
{/* Console panel */}
<div className='fixed top-0 right-0 z-10'>
<Panel />
</div>
</div>
</div>
)
}


@@ -2,8 +2,7 @@ export { ControlBar } from './control-bar/control-bar'
export { ErrorBoundary } from './error/index'
export { Panel } from './panel/panel'
export { SkeletonLoading } from './skeleton-loading/skeleton-loading'
export { LoopNodeComponent } from './subflows/loop/loop-node'
export { ParallelNodeComponent } from './subflows/parallel/parallel-node'
export { SubflowNodeComponent } from './subflows/subflow-node'
export { WandPromptBar } from './wand-prompt-bar/wand-prompt-bar'
export { WorkflowBlock } from './workflow-block/workflow-block'
export { WorkflowEdge } from './workflow-edge/workflow-edge'


@@ -13,6 +13,7 @@ import {
import Image from 'next/image'
import { Button } from '@/components/ui/button'
import { createLogger } from '@/lib/logs/console/logger'
import { redactApiKeys } from '@/lib/utils'
import {
CodeDisplay,
JSONView,
@@ -349,9 +350,10 @@ export function ConsoleEntry({ entry, consoleWidth }: ConsoleEntryProps) {
// For code display, copy just the code string
textToCopy = entry.input.code
} else {
// For regular JSON display, copy the full JSON
// For regular JSON display, copy the full JSON with redaction applied
const dataToCopy = showInput ? entry.input : entry.output
textToCopy = JSON.stringify(dataToCopy, null, 2)
const redactedData = redactApiKeys(dataToCopy)
textToCopy = JSON.stringify(redactedData, null, 2)
}
navigator.clipboard.writeText(textToCopy)
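The redactApiKeys helper imported from '@/lib/utils' is not part of this diff, so its behaviour here is an assumption; a minimal sketch of what such a redaction pass might look like, recursively masking values whose keys look sensitive:

// Editorial sketch only: an assumed shape for redactApiKeys, not the actual implementation.
const SENSITIVE_KEY = /(api[_-]?key|token|secret|password|authorization)/i

function redactApiKeysSketch(value: unknown): unknown {
  if (Array.isArray(value)) return value.map(redactApiKeysSketch)
  if (value && typeof value === 'object') {
    return Object.fromEntries(
      Object.entries(value as Record<string, unknown>).map(([key, val]) => [
        key,
        SENSITIVE_KEY.test(key) ? '***REDACTED***' : redactApiKeysSketch(val),
      ])
    )
  }
  return value
}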


@@ -1,5 +1,6 @@
import { useEffect, useState } from 'react'
import { Button } from '@/components/ui/button'
import { redactApiKeys } from '@/lib/utils'
interface JSONViewProps {
data: any
@@ -154,6 +155,9 @@ export const JSONView = ({ data }: JSONViewProps) => {
y: number
} | null>(null)
// Apply redaction to the data before displaying
const redactedData = redactApiKeys(data)
const handleContextMenu = (e: React.MouseEvent) => {
e.preventDefault()
setContextMenuPosition({ x: e.clientX, y: e.clientY })
@@ -167,18 +171,18 @@ export const JSONView = ({ data }: JSONViewProps) => {
}
}, [contextMenuPosition])
if (data === null)
if (redactedData === null)
return <span className='font-[380] text-muted-foreground leading-normal'>null</span>
// For non-object data, show simple JSON
if (typeof data !== 'object') {
const stringValue = JSON.stringify(data)
if (typeof redactedData !== 'object') {
const stringValue = JSON.stringify(redactedData)
return (
<span
onContextMenu={handleContextMenu}
className='relative max-w-full overflow-hidden break-all font-[380] font-mono text-muted-foreground leading-normal'
>
{typeof data === 'string' ? (
{typeof redactedData === 'string' ? (
<TruncatedValue value={stringValue} />
) : (
<span className='break-all font-[380] text-muted-foreground leading-normal'>
@@ -192,7 +196,7 @@ export const JSONView = ({ data }: JSONViewProps) => {
>
<button
className='w-full px-3 py-1.5 text-left font-[380] text-sm hover:bg-accent'
onClick={() => copyToClipboard(data)}
onClick={() => copyToClipboard(redactedData)}
>
Copy value
</button>
@@ -206,7 +210,7 @@ export const JSONView = ({ data }: JSONViewProps) => {
return (
<div onContextMenu={handleContextMenu}>
<pre className='max-w-full overflow-hidden whitespace-pre-wrap break-all font-mono'>
<CollapsibleJSON data={data} />
<CollapsibleJSON data={redactedData} />
</pre>
{contextMenuPosition && (
<div
@@ -215,7 +219,7 @@ export const JSONView = ({ data }: JSONViewProps) => {
>
<button
className='w-full px-3 py-1.5 text-left font-[380] text-sm hover:bg-accent'
onClick={() => copyToClipboard(data)}
onClick={() => copyToClipboard(redactedData)}
>
Copy object
</button>


@@ -27,6 +27,18 @@ export function ThinkingBlock({
}
}, [persistedStartTime])
useEffect(() => {
// Auto-collapse when streaming ends
if (!isStreaming) {
setIsExpanded(false)
return
}
// Expand once there is visible content while streaming
if (content && content.trim().length > 0) {
setIsExpanded(true)
}
}, [isStreaming, content])
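The effect above encodes a small expansion rule; stated as a pure function for clarity (a sketch, the name is illustrative):

// Editorial sketch: the expansion rule the effect above applies on each render.
function thinkingBlockExpanded(isStreaming: boolean, content: string, wasExpanded: boolean): boolean {
  if (!isStreaming) return false // auto-collapse once streaming ends
  if (content.trim().length > 0) return true // expand as soon as visible content arrives
  return wasExpanded // otherwise keep the previous state
}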
useEffect(() => {
// If we already have a persisted duration, just use it
if (typeof persistedDuration === 'number') {
@@ -52,29 +64,10 @@ export function ThinkingBlock({
return `${seconds}s`
}
if (!isExpanded) {
return (
<button
onClick={() => setIsExpanded(true)}
className={cn(
'inline-flex items-center gap-1 text-gray-400 text-xs transition-colors hover:text-gray-500',
'font-normal italic'
)}
type='button'
>
<Brain className='h-3 w-3' />
<span>Thought for {formatDuration(duration)}</span>
{isStreaming && (
<span className='inline-flex h-1 w-1 animate-pulse rounded-full bg-gray-400' />
)}
</button>
)
}
return (
<div className='my-1'>
<button
onClick={() => setIsExpanded(false)}
onClick={() => setIsExpanded((v) => !v)}
className={cn(
'mb-1 inline-flex items-center gap-1 text-gray-400 text-xs transition-colors hover:text-gray-500',
'font-normal italic'
@@ -82,14 +75,25 @@ export function ThinkingBlock({
type='button'
>
<Brain className='h-3 w-3' />
<span>Thought for {formatDuration(duration)} (click to collapse)</span>
<span>
Thought for {formatDuration(duration)}
{isExpanded ? ' (click to collapse)' : ''}
</span>
{isStreaming && (
<span className='inline-flex h-1 w-1 animate-pulse rounded-full bg-gray-400' />
)}
</button>
<div className='ml-1 border-gray-200 border-l-2 pl-2 dark:border-gray-700'>
<pre className='whitespace-pre-wrap font-mono text-gray-400 text-xs dark:text-gray-500'>
{content}
{isStreaming && <span className='ml-1 inline-block h-2 w-1 animate-pulse bg-gray-400' />}
</pre>
</div>
{isExpanded && (
<div className='ml-1 border-gray-200 border-l-2 pl-2 dark:border-gray-700'>
<pre className='whitespace-pre-wrap font-mono text-gray-400 text-xs dark:text-gray-500'>
{content}
{isStreaming && (
<span className='ml-1 inline-block h-2 w-1 animate-pulse bg-gray-400' />
)}
</pre>
</div>
)}
</div>
)
}


@@ -643,41 +643,49 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
{/* Checkpoints below message */}
{hasCheckpoints && (
<div className='mt-1 flex justify-end'>
{showRestoreConfirmation ? (
<div className='flex items-center gap-2'>
<span className='text-muted-foreground text-xs'>Restore?</span>
<button
onClick={handleConfirmRevert}
disabled={isRevertingCheckpoint}
className='text-muted-foreground text-xs transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
title='Confirm restore'
>
{isRevertingCheckpoint ? (
<Loader2 className='h-3 w-3 animate-spin' />
) : (
<Check className='h-3 w-3' />
)}
</button>
<button
onClick={handleCancelRevert}
disabled={isRevertingCheckpoint}
className='text-muted-foreground text-xs transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
title='Cancel restore'
>
<X className='h-3 w-3' />
</button>
<div className='inline-flex items-center gap-0.5 text-muted-foreground text-xs'>
<span className='select-none'>
Restore{showRestoreConfirmation && <span className='ml-0.5'>?</span>}
</span>
<div className='inline-flex w-8 items-center justify-center'>
{showRestoreConfirmation ? (
<div className='inline-flex items-center gap-1'>
<button
onClick={handleConfirmRevert}
disabled={isRevertingCheckpoint}
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
title='Confirm restore'
aria-label='Confirm restore'
>
{isRevertingCheckpoint ? (
<Loader2 className='h-3 w-3 animate-spin' />
) : (
<Check className='h-3 w-3' />
)}
</button>
<button
onClick={handleCancelRevert}
disabled={isRevertingCheckpoint}
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
title='Cancel restore'
aria-label='Cancel restore'
>
<X className='h-3 w-3' />
</button>
</div>
) : (
<button
onClick={handleRevertToCheckpoint}
disabled={isRevertingCheckpoint}
className='text-muted-foreground transition-colors hover:bg-muted disabled:cursor-not-allowed disabled:opacity-50'
title='Restore workflow to this checkpoint state'
aria-label='Restore'
>
<RotateCcw className='h-3 w-3' />
</button>
)}
</div>
) : (
<button
onClick={handleRevertToCheckpoint}
disabled={isRevertingCheckpoint}
className='flex items-center gap-1.5 rounded-md px-2 py-1 text-muted-foreground text-xs transition-colors hover:bg-muted hover:text-foreground disabled:cursor-not-allowed disabled:opacity-50'
title='Restore workflow to this checkpoint state'
>
<RotateCcw className='h-3 w-3' />
Restore
</button>
)}
</div>
</div>
)}
</div>


@@ -0,0 +1,25 @@
'use client'
import * as React from 'react'
import * as SliderPrimitive from '@radix-ui/react-slider'
import { cn } from '@/lib/utils'
export const CopilotSlider = React.forwardRef<
React.ElementRef<typeof SliderPrimitive.Root>,
React.ComponentPropsWithoutRef<typeof SliderPrimitive.Root>
>(({ className, ...props }, ref) => (
<SliderPrimitive.Root
ref={ref}
className={cn(
'relative flex w-full cursor-pointer touch-none select-none items-center',
className
)}
{...props}
>
<SliderPrimitive.Track className='relative h-2 w-full grow cursor-pointer overflow-hidden rounded-full bg-input'>
<SliderPrimitive.Range className='absolute h-full bg-primary' />
</SliderPrimitive.Track>
<SliderPrimitive.Thumb className='block h-5 w-5 cursor-pointer rounded-full border-2 border-primary bg-background ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50' />
</SliderPrimitive.Root>
))
CopilotSlider.displayName = 'CopilotSlider'


@@ -10,12 +10,13 @@ import {
} from 'react'
import {
ArrowUp,
Boxes,
Brain,
BrainCircuit,
BrainCog,
Check,
FileText,
Image,
Infinity as InfinityIcon,
Info,
Loader2,
MessageCircle,
Package,
@@ -30,11 +31,13 @@ import {
DropdownMenuItem,
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu'
import { Switch } from '@/components/ui/switch'
import { Textarea } from '@/components/ui/textarea'
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
import { useSession } from '@/lib/auth-client'
import { cn } from '@/lib/utils'
import { useCopilotStore } from '@/stores/copilot/store'
import { CopilotSlider as Slider } from './copilot-slider'
export interface MessageFileAttachment {
id: string
@@ -426,32 +429,31 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
// Depth toggle state comes from global store; access via useCopilotStore
const { agentDepth, setAgentDepth } = useCopilotStore()
const { agentDepth, agentPrefetch, setAgentDepth, setAgentPrefetch } = useCopilotStore()
const cycleDepth = () => {
// Allowed UI values: 0 (Lite), 1 (Default), 2 (Pro), 3 (Max)
const next = agentDepth === 0 ? 1 : agentDepth === 1 ? 2 : agentDepth === 2 ? 3 : 0
setAgentDepth(next)
// 8 modes: depths 0-3, each with prefetch off/on. Cycle depth, then toggle prefetch when wrapping.
const nextDepth = agentDepth === 3 ? 0 : ((agentDepth + 1) as 0 | 1 | 2 | 3)
if (nextDepth === 0 && agentDepth === 3) {
setAgentPrefetch(!agentPrefetch)
}
setAgentDepth(nextDepth)
}
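A small worked example of the 8-mode cycle the new cycleDepth produces, written as a pure function (a sketch; only the depth/prefetch semantics from the code above are assumed):

// Editorial sketch: pure version of the same cycle, to make the 8 modes explicit.
type Depth = 0 | 1 | 2 | 3
function nextMode(depth: Depth, prefetch: boolean): { depth: Depth; prefetch: boolean } {
  const nextDepth = (depth === 3 ? 0 : depth + 1) as Depth
  // Prefetch flips only when the depth wraps from 3 back to 0.
  return { depth: nextDepth, prefetch: nextDepth === 0 ? !prefetch : prefetch }
}
// Starting at { depth: 0, prefetch: true } the cycle visits:
// 1/on -> 2/on -> 3/on -> 0/off -> 1/off -> 2/off -> 3/off -> 0/on -> ...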
const getDepthLabel = () => {
if (agentDepth === 0) return 'Lite'
if (agentDepth === 1) return 'Auto'
if (agentDepth === 2) return 'Pro'
return 'Max'
const getCollapsedModeLabel = () => {
const base = getDepthLabelFor(agentDepth)
return !agentPrefetch ? `${base} MAX` : base
}
const getDepthLabelFor = (value: 0 | 1 | 2 | 3) => {
if (value === 0) return 'Lite'
if (value === 1) return 'Auto'
if (value === 2) return 'Pro'
return 'Max'
return value === 0 ? 'Fast' : value === 1 ? 'Balanced' : value === 2 ? 'Advanced' : 'Expert'
}
// Removed descriptive suffixes; concise labels only
const getDepthDescription = (value: 0 | 1 | 2 | 3) => {
if (value === 0)
return 'Fastest and cheapest. Good for small edits, simple workflows, and small tasks.'
if (value === 1) return 'Automatically balances speed and reasoning. Good fit for most tasks.'
if (value === 1) return 'Balances speed and reasoning. Good fit for most tasks.'
if (value === 2)
return 'More reasoning for larger workflows and complex edits, still balanced for speed.'
return 'Maximum reasoning power. Best for complex workflow building and debugging.'
@@ -459,9 +461,9 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const getDepthIconFor = (value: 0 | 1 | 2 | 3) => {
if (value === 0) return <Zap className='h-3 w-3 text-muted-foreground' />
if (value === 1) return <Boxes className='h-3 w-3 text-muted-foreground' />
if (value === 2) return <BrainCircuit className='h-3 w-3 text-muted-foreground' />
return <BrainCog className='h-3 w-3 text-muted-foreground' />
if (value === 1) return <InfinityIcon className='h-3 w-3 text-muted-foreground' />
if (value === 2) return <Brain className='h-3 w-3 text-muted-foreground' />
return <BrainCircuit className='h-3 w-3 text-muted-foreground' />
}
const getDepthIcon = () => getDepthIconFor(agentDepth)
@@ -548,7 +550,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
placeholder={isDragging ? 'Drop files here...' : placeholder}
disabled={disabled}
rows={1}
className='mb-2 min-h-[32px] w-full resize-none overflow-hidden border-0 bg-transparent px-[2px] py-1 text-muted-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
className='mb-2 min-h-[32px] w-full resize-none overflow-hidden border-0 bg-transparent px-[2px] py-1 text-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
style={{ height: 'auto' }}
/>
@@ -635,126 +637,72 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
variant='ghost'
size='sm'
className='flex h-6 items-center gap-1.5 rounded-full border px-2 py-1 font-medium text-xs'
title='Choose depth'
title='Choose mode'
>
{getDepthIcon()}
<span>{getDepthLabel()}</span>
<span>{getCollapsedModeLabel()}</span>
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align='start' className='p-0'>
<TooltipProvider>
<div className='w-[180px] p-1'>
<Tooltip>
<TooltipTrigger asChild>
<DropdownMenuItem
onSelect={() => setAgentDepth(1)}
className={cn(
'flex items-center justify-between rounded-sm px-2 py-1.5 text-xs leading-4',
agentDepth === 1 && 'bg-muted/40'
)}
>
<span className='flex items-center gap-1.5'>
<Boxes className='h-3 w-3 text-muted-foreground' />
Auto
</span>
{agentDepth === 1 && (
<Check className='h-3 w-3 text-muted-foreground' />
)}
</DropdownMenuItem>
</TooltipTrigger>
<TooltipContent
side='right'
sideOffset={6}
align='center'
className='max-w-[220px] border bg-popover p-2 text-[11px] text-popover-foreground leading-snug shadow-md'
>
Automatically balances speed and reasoning. Good fit for most tasks.
</TooltipContent>
</Tooltip>
<Tooltip>
<TooltipTrigger asChild>
<DropdownMenuItem
onSelect={() => setAgentDepth(0)}
className={cn(
'flex items-center justify-between rounded-sm px-2 py-1.5 text-xs leading-4',
agentDepth === 0 && 'bg-muted/40'
)}
>
<span className='flex items-center gap-1.5'>
<Zap className='h-3 w-3 text-muted-foreground' />
Lite
</span>
{agentDepth === 0 && (
<Check className='h-3 w-3 text-muted-foreground' />
)}
</DropdownMenuItem>
</TooltipTrigger>
<TooltipContent
side='right'
sideOffset={6}
align='center'
className='max-w-[220px] border bg-popover p-2 text-[11px] text-popover-foreground leading-snug shadow-md'
>
Fastest and cheapest. Good for small edits, simple workflows, and small
tasks.
</TooltipContent>
</Tooltip>
<Tooltip>
<TooltipTrigger asChild>
<DropdownMenuItem
onSelect={() => setAgentDepth(2)}
className={cn(
'flex items-center justify-between rounded-sm px-2 py-1.5 text-xs leading-4',
agentDepth === 2 && 'bg-muted/40'
)}
>
<span className='flex items-center gap-1.5'>
<BrainCircuit className='h-3 w-3 text-muted-foreground' />
Pro
</span>
{agentDepth === 2 && (
<Check className='h-3 w-3 text-muted-foreground' />
)}
</DropdownMenuItem>
</TooltipTrigger>
<TooltipContent
side='right'
sideOffset={6}
align='center'
className='max-w-[220px] border bg-popover p-2 text-[11px] text-popover-foreground leading-snug shadow-md'
>
More reasoning for larger workflows and complex edits, still balanced
for speed.
</TooltipContent>
</Tooltip>
<Tooltip>
<TooltipTrigger asChild>
<DropdownMenuItem
onSelect={() => setAgentDepth(3)}
className={cn(
'flex items-center justify-between rounded-sm px-2 py-1.5 text-xs leading-4',
agentDepth === 3 && 'bg-muted/40'
)}
>
<span className='flex items-center gap-1.5'>
<BrainCog className='h-3 w-3 text-muted-foreground' />
Max
</span>
{agentDepth === 3 && (
<Check className='h-3 w-3 text-muted-foreground' />
)}
</DropdownMenuItem>
</TooltipTrigger>
<TooltipContent
side='right'
sideOffset={6}
align='center'
className='max-w-[220px] border bg-popover p-2 text-[11px] text-popover-foreground leading-snug shadow-md'
>
Maximum reasoning power. Best for complex workflow building and
debugging.
</TooltipContent>
</Tooltip>
<TooltipProvider delayDuration={100} skipDelayDuration={0}>
<div className='w-[260px] p-3'>
<div className='mb-3 flex items-center justify-between'>
<div className='flex items-center gap-1.5'>
<span className='font-medium text-xs'>MAX mode</span>
<Tooltip>
<TooltipTrigger asChild>
<button
type='button'
className='h-3.5 w-3.5 rounded text-muted-foreground transition-colors hover:text-foreground'
aria-label='MAX mode info'
>
<Info className='h-3.5 w-3.5' />
</button>
</TooltipTrigger>
<TooltipContent
side='right'
sideOffset={6}
align='center'
className='max-w-[220px] border bg-popover p-2 text-[11px] text-popover-foreground leading-snug shadow-md'
>
Significantly increases depth of reasoning
</TooltipContent>
</Tooltip>
</div>
<Switch
checked={!agentPrefetch}
onCheckedChange={(checked) => setAgentPrefetch(!checked)}
/>
</div>
<div className='my-2 flex justify-center'>
<div className='h-px w-[100%] bg-border' />
</div>
<div className='mb-3'>
<div className='mb-2 flex items-center justify-between'>
<span className='font-medium text-xs'>Mode</span>
<span className='text-muted-foreground text-xs'>
{getDepthLabelFor(agentDepth)}
</span>
</div>
<div className='relative'>
<Slider
min={0}
max={3}
step={1}
value={[agentDepth]}
onValueChange={(val) =>
setAgentDepth((val?.[0] ?? 0) as 0 | 1 | 2 | 3)
}
/>
<div className='pointer-events-none absolute inset-0'>
<div className='-translate-x-1/2 -translate-y-1/2 absolute top-1/2 left-[33.333%] h-2 w-[3px] bg-background' />
<div className='-translate-x-1/2 -translate-y-1/2 absolute top-1/2 left-[66.667%] h-2 w-[3px] bg-background' />
</div>
</div>
</div>
<div className='mt-3 text-[11px] text-muted-foreground'>
{getDepthDescription(agentDepth)}
</div>
</div>
</TooltipProvider>
</DropdownMenuContent>


@@ -44,6 +44,9 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
// Scroll state
const [isNearBottom, setIsNearBottom] = useState(true)
const [showScrollButton, setShowScrollButton] = useState(false)
// New state to track if user has intentionally scrolled during streaming
const [userHasScrolledDuringStream, setUserHasScrolledDuringStream] = useState(false)
const isUserScrollingRef = useRef(false) // Track if scroll event is user-initiated
const { activeWorkflowId } = useWorkflowRegistry()
@@ -119,6 +122,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
'[data-radix-scroll-area-viewport]'
)
if (scrollContainer) {
// Mark that we're programmatically scrolling
isUserScrollingRef.current = false
scrollContainer.scrollTo({
top: scrollContainer.scrollHeight,
behavior: 'smooth',
@@ -143,7 +148,15 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
const nearBottom = distanceFromBottom <= 100
setIsNearBottom(nearBottom)
setShowScrollButton(!nearBottom)
}, [])
// If user scrolled up during streaming, mark it
if (isSendingMessage && !nearBottom && isUserScrollingRef.current) {
setUserHasScrolledDuringStream(true)
}
// Reset the user scrolling flag after processing
isUserScrollingRef.current = true
}, [isSendingMessage])
// Attach scroll listener
useEffect(() => {
@@ -154,7 +167,13 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
const viewport = scrollArea.querySelector('[data-radix-scroll-area-viewport]')
if (!viewport) return
viewport.addEventListener('scroll', handleScroll, { passive: true })
// Mark user-initiated scrolls
const handleUserScroll = () => {
isUserScrollingRef.current = true
handleScroll()
}
viewport.addEventListener('scroll', handleUserScroll, { passive: true })
// Also listen for scrollend event if available (for smooth scrolling)
if ('onscrollend' in viewport) {
@@ -165,34 +184,63 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
setTimeout(handleScroll, 100)
return () => {
viewport.removeEventListener('scroll', handleScroll)
viewport.removeEventListener('scroll', handleUserScroll)
if ('onscrollend' in viewport) {
viewport.removeEventListener('scrollend', handleScroll)
}
}
}, [handleScroll])
// Smart auto-scroll: only scroll if user is near bottom or for user messages
// Smart auto-scroll: only scroll if user hasn't intentionally scrolled up during streaming
useEffect(() => {
if (messages.length === 0) return
const lastMessage = messages[messages.length - 1]
const isNewUserMessage = lastMessage?.role === 'user'
// Always scroll for new user messages, or only if near bottom for assistant messages
if ((isNewUserMessage || isNearBottom) && scrollAreaRef.current) {
// Conditions for auto-scrolling:
// 1. Always scroll for new user messages (resets the user scroll state)
// 2. For assistant messages during streaming: only if user hasn't scrolled up
// 3. For assistant messages when not streaming: only if near bottom
const shouldAutoScroll =
isNewUserMessage ||
(isSendingMessage && !userHasScrolledDuringStream) ||
(!isSendingMessage && isNearBottom)
if (shouldAutoScroll && scrollAreaRef.current) {
const scrollContainer = scrollAreaRef.current.querySelector(
'[data-radix-scroll-area-viewport]'
)
if (scrollContainer) {
// Mark that we're programmatically scrolling
isUserScrollingRef.current = false
scrollContainer.scrollTo({
top: scrollContainer.scrollHeight,
behavior: 'smooth',
})
// Let the scroll event handler update the state naturally after animation completes
}
}
}, [messages, isNearBottom])
}, [messages, isNearBottom, isSendingMessage, userHasScrolledDuringStream])
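The three auto-scroll conditions listed in the comments read more easily as a single predicate; a sketch of the same decision with illustrative names (isStreaming corresponds to isSendingMessage in the component):

// Editorial sketch: the auto-scroll decision above as a pure predicate.
function shouldAutoScrollSketch(opts: {
  isNewUserMessage: boolean
  isStreaming: boolean
  userScrolledUpDuringStream: boolean
  isNearBottom: boolean
}): boolean {
  if (opts.isNewUserMessage) return true // always follow the user's own message
  if (opts.isStreaming) return !opts.userScrolledUpDuringStream // respect a deliberate scroll-up mid-stream
  return opts.isNearBottom // otherwise only stick when already at the bottom
}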
// Reset user scroll state when streaming starts or when user sends a message
useEffect(() => {
const lastMessage = messages[messages.length - 1]
if (lastMessage?.role === 'user') {
// User sent a new message - reset scroll state
setUserHasScrolledDuringStream(false)
isUserScrollingRef.current = false
}
}, [messages])
// Reset user scroll state when streaming completes
const prevIsSendingRef = useRef(false)
useEffect(() => {
// When streaming transitions from true to false, reset the user scroll state
if (prevIsSendingRef.current && !isSendingMessage) {
setUserHasScrolledDuringStream(false)
}
prevIsSendingRef.current = isSendingMessage
}, [isSendingMessage])
// Auto-scroll to bottom when chat loads in
useEffect(() => {


@@ -9,7 +9,6 @@ import {
} from '@/components/ui/dropdown-menu'
import { ScrollArea } from '@/components/ui/scroll-area'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { isDev } from '@/lib/environment'
import { useCopilotStore } from '@/stores/copilot/store'
import { useChatStore } from '@/stores/panel/chat/store'
import { useConsoleStore } from '@/stores/panel/console/store'
@@ -305,16 +304,14 @@ export function Panel() {
>
Console
</button>
{!isDev && (
<button
onClick={() => handleTabClick('copilot')}
className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${
isOpen && activeTab === 'copilot' ? 'panel-tab-active' : 'panel-tab-inactive'
}`}
>
Copilot
</button>
)}
<button
onClick={() => handleTabClick('copilot')}
className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${
isOpen && activeTab === 'copilot' ? 'panel-tab-active' : 'panel-tab-inactive'
}`}
>
Copilot
</button>
<button
onClick={() => handleTabClick('variables')}
className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${


@@ -0,0 +1,388 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
// Mock hooks
const mockCollaborativeUpdates = {
collaborativeUpdateLoopType: vi.fn(),
collaborativeUpdateParallelType: vi.fn(),
collaborativeUpdateIterationCount: vi.fn(),
collaborativeUpdateIterationCollection: vi.fn(),
}
const mockStoreData = {
loops: {},
parallels: {},
}
vi.mock('@/hooks/use-collaborative-workflow', () => ({
useCollaborativeWorkflow: () => mockCollaborativeUpdates,
}))
vi.mock('@/stores/workflows/workflow/store', () => ({
useWorkflowStore: () => mockStoreData,
}))
vi.mock('@/components/ui/badge', () => ({
Badge: ({ children, ...props }: any) => (
<div data-testid='badge' {...props}>
{children}
</div>
),
}))
vi.mock('@/components/ui/input', () => ({
Input: (props: any) => <input data-testid='input' {...props} />,
}))
vi.mock('@/components/ui/popover', () => ({
Popover: ({ children }: any) => <div data-testid='popover'>{children}</div>,
PopoverContent: ({ children }: any) => <div data-testid='popover-content'>{children}</div>,
PopoverTrigger: ({ children }: any) => <div data-testid='popover-trigger'>{children}</div>,
}))
vi.mock('@/components/ui/tag-dropdown', () => ({
checkTagTrigger: vi.fn(() => ({ show: false })),
TagDropdown: ({ children }: any) => <div data-testid='tag-dropdown'>{children}</div>,
}))
vi.mock('react-simple-code-editor', () => ({
default: (props: any) => <textarea data-testid='code-editor' {...props} />,
}))
describe('IterationBadges', () => {
const defaultProps = {
nodeId: 'test-node-1',
data: {
width: 500,
height: 300,
isPreview: false,
},
iterationType: 'loop' as const,
}
beforeEach(() => {
vi.clearAllMocks()
mockStoreData.loops = {}
mockStoreData.parallels = {}
})
describe('Component Interface', () => {
it.concurrent('should accept required props', () => {
expect(defaultProps.nodeId).toBeDefined()
expect(defaultProps.data).toBeDefined()
expect(defaultProps.iterationType).toBeDefined()
})
it.concurrent('should handle loop iteration type prop', () => {
const loopProps = { ...defaultProps, iterationType: 'loop' as const }
expect(loopProps.iterationType).toBe('loop')
})
it.concurrent('should handle parallel iteration type prop', () => {
const parallelProps = { ...defaultProps, iterationType: 'parallel' as const }
expect(parallelProps.iterationType).toBe('parallel')
})
})
describe('Configuration System', () => {
it.concurrent('should use correct config for loop type', () => {
const CONFIG = {
loop: {
typeLabels: { for: 'For Loop', forEach: 'For Each' },
typeKey: 'loopType' as const,
storeKey: 'loops' as const,
maxIterations: 100,
configKeys: {
iterations: 'iterations' as const,
items: 'forEachItems' as const,
},
},
}
expect(CONFIG.loop.typeLabels.for).toBe('For Loop')
expect(CONFIG.loop.typeLabels.forEach).toBe('For Each')
expect(CONFIG.loop.maxIterations).toBe(100)
expect(CONFIG.loop.storeKey).toBe('loops')
})
it.concurrent('should use correct config for parallel type', () => {
const CONFIG = {
parallel: {
typeLabels: { count: 'Parallel Count', collection: 'Parallel Each' },
typeKey: 'parallelType' as const,
storeKey: 'parallels' as const,
maxIterations: 20,
configKeys: {
iterations: 'count' as const,
items: 'distribution' as const,
},
},
}
expect(CONFIG.parallel.typeLabels.count).toBe('Parallel Count')
expect(CONFIG.parallel.typeLabels.collection).toBe('Parallel Each')
expect(CONFIG.parallel.maxIterations).toBe(20)
expect(CONFIG.parallel.storeKey).toBe('parallels')
})
})
describe('Type Determination Logic', () => {
it.concurrent('should default to "for" for loop type', () => {
type IterationType = 'loop' | 'parallel'
const determineDefaultType = (iterationType: IterationType) => {
return iterationType === 'loop' ? 'for' : 'count'
}
const currentType = determineDefaultType('loop')
expect(currentType).toBe('for')
})
it.concurrent('should default to "count" for parallel type', () => {
type IterationType = 'loop' | 'parallel'
const determineDefaultType = (iterationType: IterationType) => {
return iterationType === 'loop' ? 'for' : 'count'
}
const currentType = determineDefaultType('parallel')
expect(currentType).toBe('count')
})
it.concurrent('should use explicit loopType when provided', () => {
type IterationType = 'loop' | 'parallel'
const determineType = (explicitType: string | undefined, iterationType: IterationType) => {
return explicitType || (iterationType === 'loop' ? 'for' : 'count')
}
const currentType = determineType('forEach', 'loop')
expect(currentType).toBe('forEach')
})
it.concurrent('should use explicit parallelType when provided', () => {
type IterationType = 'loop' | 'parallel'
const determineType = (explicitType: string | undefined, iterationType: IterationType) => {
return explicitType || (iterationType === 'loop' ? 'for' : 'count')
}
const currentType = determineType('collection', 'parallel')
expect(currentType).toBe('collection')
})
})
describe('Count Mode Detection', () => {
it.concurrent('should be in count mode for loop + for combination', () => {
type IterationType = 'loop' | 'parallel'
type LoopType = 'for' | 'forEach'
type ParallelType = 'count' | 'collection'
const iterationType: IterationType = 'loop'
const currentType: LoopType = 'for'
const isCountMode = iterationType === 'loop' && currentType === 'for'
expect(isCountMode).toBe(true)
})
it.concurrent('should be in count mode for parallel + count combination', () => {
type IterationType = 'loop' | 'parallel'
type ParallelType = 'count' | 'collection'
const iterationType: IterationType = 'parallel'
const currentType: ParallelType = 'count'
const isCountMode = iterationType === 'parallel' && currentType === 'count'
expect(isCountMode).toBe(true)
})
it.concurrent('should not be in count mode for loop + forEach combination', () => {
type IterationType = 'loop' | 'parallel'
const testCountMode = (iterationType: IterationType, currentType: string) => {
return iterationType === 'loop' && currentType === 'for'
}
const isCountMode = testCountMode('loop', 'forEach')
expect(isCountMode).toBe(false)
})
it.concurrent('should not be in count mode for parallel + collection combination', () => {
type IterationType = 'loop' | 'parallel'
const testCountMode = (iterationType: IterationType, currentType: string) => {
return iterationType === 'parallel' && currentType === 'count'
}
const isCountMode = testCountMode('parallel', 'collection')
expect(isCountMode).toBe(false)
})
})
describe('Configuration Values', () => {
it.concurrent('should handle default iteration count', () => {
const data = { count: undefined }
const configIterations = data.count ?? 5
expect(configIterations).toBe(5)
})
it.concurrent('should use provided iteration count', () => {
const data = { count: 10 }
const configIterations = data.count ?? 5
expect(configIterations).toBe(10)
})
it.concurrent('should handle string collection', () => {
const collection = '[1, 2, 3, 4, 5]'
const collectionString =
typeof collection === 'string' ? collection : JSON.stringify(collection) || ''
expect(collectionString).toBe('[1, 2, 3, 4, 5]')
})
it.concurrent('should handle object collection', () => {
const collection = { items: [1, 2, 3] }
const collectionString =
typeof collection === 'string' ? collection : JSON.stringify(collection) || ''
expect(collectionString).toBe('{"items":[1,2,3]}')
})
it.concurrent('should handle array collection', () => {
const collection = [1, 2, 3, 4, 5]
const collectionString =
typeof collection === 'string' ? collection : JSON.stringify(collection) || ''
expect(collectionString).toBe('[1,2,3,4,5]')
})
})
describe('Preview Mode Handling', () => {
it.concurrent('should handle preview mode for loops', () => {
const previewProps = {
...defaultProps,
data: { ...defaultProps.data, isPreview: true },
iterationType: 'loop' as const,
}
expect(previewProps.data.isPreview).toBe(true)
// In preview mode, collaborative functions shouldn't be called
expect(mockCollaborativeUpdates.collaborativeUpdateLoopType).not.toHaveBeenCalled()
})
it.concurrent('should handle preview mode for parallels', () => {
const previewProps = {
...defaultProps,
data: { ...defaultProps.data, isPreview: true },
iterationType: 'parallel' as const,
}
expect(previewProps.data.isPreview).toBe(true)
// In preview mode, collaborative functions shouldn't be called
expect(mockCollaborativeUpdates.collaborativeUpdateParallelType).not.toHaveBeenCalled()
})
})
describe('Store Integration', () => {
it.concurrent('should access loops store for loop iteration type', () => {
const nodeId = 'loop-node-1'
;(mockStoreData.loops as any)[nodeId] = { iterations: 10 }
const nodeConfig = (mockStoreData.loops as any)[nodeId]
expect(nodeConfig).toBeDefined()
expect(nodeConfig.iterations).toBe(10)
})
it.concurrent('should access parallels store for parallel iteration type', () => {
const nodeId = 'parallel-node-1'
;(mockStoreData.parallels as any)[nodeId] = { count: 5 }
const nodeConfig = (mockStoreData.parallels as any)[nodeId]
expect(nodeConfig).toBeDefined()
expect(nodeConfig.count).toBe(5)
})
it.concurrent('should handle missing node configuration gracefully', () => {
const nodeId = 'missing-node'
const nodeConfig = (mockStoreData.loops as any)[nodeId]
expect(nodeConfig).toBeUndefined()
})
})
describe('Max Iterations Limits', () => {
it.concurrent('should enforce max iterations for loops (100)', () => {
const maxIterations = 100
const testValue = 150
const clampedValue = Math.min(maxIterations, testValue)
expect(clampedValue).toBe(100)
})
it.concurrent('should enforce max iterations for parallels (20)', () => {
const maxIterations = 20
const testValue = 50
const clampedValue = Math.min(maxIterations, testValue)
expect(clampedValue).toBe(20)
})
it.concurrent('should allow values within limits', () => {
const loopMaxIterations = 100
const parallelMaxIterations = 20
expect(Math.min(loopMaxIterations, 50)).toBe(50)
expect(Math.min(parallelMaxIterations, 10)).toBe(10)
})
})
describe('Collaborative Update Functions', () => {
it.concurrent('should have the correct collaborative functions available', () => {
expect(mockCollaborativeUpdates.collaborativeUpdateLoopType).toBeDefined()
expect(mockCollaborativeUpdates.collaborativeUpdateParallelType).toBeDefined()
expect(mockCollaborativeUpdates.collaborativeUpdateIterationCount).toBeDefined()
expect(mockCollaborativeUpdates.collaborativeUpdateIterationCollection).toBeDefined()
})
it.concurrent('should call correct function for loop type updates', () => {
const handleTypeChange = (newType: string, iterationType: string, nodeId: string) => {
if (iterationType === 'loop') {
mockCollaborativeUpdates.collaborativeUpdateLoopType(nodeId, newType)
} else {
mockCollaborativeUpdates.collaborativeUpdateParallelType(nodeId, newType)
}
}
handleTypeChange('forEach', 'loop', 'test-node')
expect(mockCollaborativeUpdates.collaborativeUpdateLoopType).toHaveBeenCalledWith(
'test-node',
'forEach'
)
})
it.concurrent('should call correct function for parallel type updates', () => {
const handleTypeChange = (newType: string, iterationType: string, nodeId: string) => {
if (iterationType === 'loop') {
mockCollaborativeUpdates.collaborativeUpdateLoopType(nodeId, newType)
} else {
mockCollaborativeUpdates.collaborativeUpdateParallelType(nodeId, newType)
}
}
handleTypeChange('collection', 'parallel', 'test-node')
expect(mockCollaborativeUpdates.collaborativeUpdateParallelType).toHaveBeenCalledWith(
'test-node',
'collection'
)
})
})
describe('Input Sanitization', () => {
it.concurrent('should sanitize numeric input by removing non-digits', () => {
const testInput = 'abc123def456'
const sanitized = testInput.replace(/[^0-9]/g, '')
expect(sanitized).toBe('123456')
})
it.concurrent('should handle empty input', () => {
const testInput = ''
const sanitized = testInput.replace(/[^0-9]/g, '')
expect(sanitized).toBe('')
})
it.concurrent('should preserve valid numeric input', () => {
const testInput = '42'
const sanitized = testInput.replace(/[^0-9]/g, '')
expect(sanitized).toBe('42')
})
})
})
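As a compact illustration of the behaviours these tests pin down, a sketch of how the sanitization and clamping would combine in an input handler (the function name and signature are assumptions, not the component's actual code; maxIterations is 100 for loops and 20 for parallels):

// Editorial sketch: sanitize the raw input, then clamp to the per-type maximum.
function parseIterationInput(raw: string, maxIterations: number): number | null {
  const digitsOnly = raw.replace(/[^0-9]/g, '') // strip non-numeric characters
  if (digitsOnly === '') return null // nothing usable entered
  return Math.min(maxIterations, Number.parseInt(digitsOnly, 10))
}
// parseIterationInput('abc150', 100) -> 100; parseIterationInput('42', 100) -> 42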

Some files were not shown because too many files have changed in this diff.