Compare commits

...

85 Commits

Author SHA1 Message Date
Theodore Li
eea22258e6 fix(ui): remove ugly grey background from docx preview 2026-04-10 17:11:56 -07:00
Siddharth Ganesan
0f6c16dc64 Fixes 2026-04-10 15:54:53 -07:00
Siddharth Ganesan
22cd5e4814 POSSIBLE BREAKAGE: SCROLLING 2026-04-10 15:34:17 -07:00
Vikhyath Mondreti
d11d3df349 one more fix 2026-04-10 15:31:02 -07:00
Vikhyath Mondreti
09f06cb683 durable stream for files 2026-04-10 14:58:36 -07:00
Vikhyath Mondreti
ac84c6224b Merge branch 'dev' of github.com:simstudioai/sim into dev 2026-04-10 12:13:14 -07:00
Vikhyath Mondreti
da28f8a4b8 improved autolayout 2026-04-10 12:11:52 -07:00
Waleed Latif
3690dc265c fix(chat): make inline code inherit parent font size in markdown headers
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-10 11:04:22 -07:00
Siddharth Ganesan
386d0aa6fc Html code 2026-04-10 10:59:53 -07:00
Vikhyath Mondreti
e2b4eb370d fix type error 2026-04-10 10:01:16 -07:00
Siddharth Ganesan
734a4d1f5b Fix editable 2026-04-10 03:03:03 -07:00
Siddharth Ganesan
91301df20a Update request tracing and skills and handlers 2026-04-10 02:15:23 -07:00
Vikhyath Mondreti
949601ca02 lint 2026-04-09 23:46:43 -07:00
Siddharth Ganesan
ca361a3115 Fix 2026-04-09 21:02:01 -07:00
Siddharth Ganesan
c74c4a915f Restore and mass delete tools 2026-04-09 20:30:12 -07:00
Siddharth Ganesan
2156f49951 Folder vfs 2026-04-09 19:40:35 -07:00
Siddharth Ganesan
5b94db6905 Options ordering 2026-04-09 19:38:42 -07:00
Siddharth Ganesan
c026ce715a Clickable resources 2026-04-09 19:34:48 -07:00
Vikhyath Mondreti
33d1342452 remove worker code 2026-04-09 19:15:02 -07:00
Vikhyath Mondreti
c61cbb04a5 Merge branch 'staging' into dev 2026-04-09 18:48:45 -07:00
Siddharth Ganesan
32527363f3 Fix 2026-04-09 18:40:59 -07:00
Siddharth Ganesan
9272b1546c fix 2026-04-09 18:14:56 -07:00
Vikhyath Mondreti
24abd8764e Merge branch 'dev' of github.com:simstudioai/sim into dev 2026-04-09 17:56:16 -07:00
Vikhyath Mondreti
7835df4c99 fix snapshot crash bug 2026-04-09 17:55:03 -07:00
Siddharth Ganesan
8f3c8e4b73 Fix name 2026-04-09 17:36:01 -07:00
Vikhyath Mondreti
e321e99c09 Merge branch 'dev' of github.com:simstudioai/sim into dev 2026-04-09 17:28:25 -07:00
Vikhyath Mondreti
f509e33601 fix payload accesses 2026-04-09 17:28:16 -07:00
Siddharth Ganesan
485dce7bed Tool call names 2026-04-09 17:15:45 -07:00
Siddharth Ganesan
2d2f7828c9 Fix persistence 2026-04-09 15:56:06 -07:00
Siddharth Ganesan
3ef87e55a3 Add req id 2026-04-09 15:23:41 -07:00
Siddharth Ganesan
649ee9c869 Speed up dev ci 2026-04-09 14:54:33 -07:00
Siddharth Ganesan
9da574a112 Fix dev 2026-04-09 14:43:01 -07:00
Siddharth Ganesan
0638604780 Fix tests 2026-04-09 14:32:18 -07:00
Siddharth Ganesan
f0d3819093 Fix dev 2026-04-09 14:23:34 -07:00
Siddharth Ganesan
fe5baf7569 Tool display intetns 2026-04-09 13:45:02 -07:00
Vikhyath Mondreti
e610df6843 Merge branch 'dev' of github.com:simstudioai/sim into dev 2026-04-09 13:35:43 -07:00
Vikhyath Mondreti
c77f204d02 streamed text change 2026-04-09 13:35:38 -07:00
Vikhyath Mondreti
b1caeb0b28 cleanup subagent + streaming issues 2026-04-09 13:33:18 -07:00
Siddharth Ganesan
6f04c48c31 tool joining 2026-04-09 13:01:49 -07:00
Siddharth Ganesan
d25c2430dc Fix file tools 2026-04-09 12:53:03 -07:00
Siddharth Ganesan
69d69ee95b temp 2026-04-09 11:57:49 -07:00
Vikhyath Mondreti
a738a6d711 fix stream persistence 2026-04-09 11:54:00 -07:00
Vikhyath Mondreti
2abf6ac245 Merge branch 'dev' of github.com:simstudioai/sim into dev 2026-04-09 11:35:40 -07:00
Vikhyath Mondreti
81ac66f104 fix code block 2026-04-09 11:35:27 -07:00
Siddharth Ganesan
2ba4228669 Checkpoitn 2026-04-09 11:28:05 -07:00
Vikhyath Mondreti
817833c3a3 replace react markdown with streamdown 2026-04-09 11:27:49 -07:00
Siddharth Ganesan
f2fcfe7e6e Bad hook 2026-04-09 10:55:24 -07:00
Siddharth Ganesan
b49d67e46c Remove tool truncation limits 2026-04-08 18:24:22 -07:00
Siddharth Ganesan
d22f3678fe Fix 2026-04-08 17:41:57 -07:00
Siddharth Ganesan
fd4fa1ce8d File writes 2026-04-08 16:39:02 -07:00
Siddharth Ganesan
ce1f00c8a3 Checkpoint 2026-04-08 16:17:12 -07:00
Siddharth Ganesan
3893afd424 Temp 2026-04-08 12:24:07 -07:00
Siddharth Ganesan
89f88426e2 Fix patch tool 2026-04-08 12:24:07 -07:00
Siddharth Ganesan
5be55d2b15 Patch 2026-04-08 12:24:07 -07:00
Siddharth Ganesan
5b22f1f8c3 Remove debug logs 2026-04-08 12:24:06 -07:00
Siddharth Ganesan
2b799f3beb Appends 2026-04-08 12:24:06 -07:00
Siddharth Ganesan
ed2dad0eed Fix 2026-04-08 12:24:06 -07:00
Vikhyath Mondreti
75d5d13876 revert back to bun runtime 2026-04-08 12:24:06 -07:00
Vikhyath Mondreti
674695edc4 change to node runtime 2026-04-08 12:24:06 -07:00
Siddharth Ganesan
1a18ebb82e Fix run workflow 2026-04-08 12:24:06 -07:00
Siddharth Ganesan
a6fbb51478 Empty folders should show in vfs 2026-04-08 12:24:06 -07:00
Siddharth Ganesan
c31ae46a30 Auth vfs 2026-04-08 12:24:06 -07:00
Siddharth Ganesan
ebc030be7e Fix spacing 2026-04-08 12:24:06 -07:00
Siddharth Ganesan
5c47c1f1d7 Dont show internal tool result reads 2026-04-08 12:24:06 -07:00
Siddharth Ganesan
d25632cc32 Hide function code inline 2026-04-08 12:24:05 -07:00
Siddharth Ganesan
0dd1ee041c Folder tools 2026-04-08 12:24:05 -07:00
Siddharth Ganesan
c52d63303d file/folder tools 2026-04-08 12:24:05 -07:00
Siddharth Ganesan
2da0cbe365 Remove dead tools 2026-04-08 12:24:05 -07:00
Siddharth Ganesan
c29941e701 Fix trailing leak 2026-04-08 12:24:05 -07:00
Siddharth Ganesan
8dfbe8ae3f Fix 10mb tool response limit 2026-04-08 12:24:05 -07:00
Siddharth Ganesan
ca2afaa5a2 Remove auto injected tasks 2026-04-08 12:22:39 -07:00
Siddharth Ganesan
33433b1149 Fix shell var injection 2026-04-08 12:22:39 -07:00
Siddharth Ganesan
4ee6fa8524 Security 2026-04-08 12:22:39 -07:00
Siddharth Ganesan
87ff68ce11 Fix client side tools 2026-04-08 12:22:39 -07:00
Siddharth Ganesan
b74cf283c8 Eager tool streaming 2026-04-08 12:22:39 -07:00
Siddharth Ganesan
d85775ead5 Fix stream reconnect 2026-04-08 12:22:39 -07:00
Siddharth Ganesan
0a41b8b287 Add client retry logic 2026-04-08 12:22:39 -07:00
Siddharth Ganesan
0b3f3ed728 File types 2026-04-08 12:22:39 -07:00
Siddharth Ganesan
0d09d11c33 improvement(mothership): docs 2026-04-08 12:22:38 -07:00
Siddharth Ganesan
cac100a145 Add deps 2026-04-08 12:22:38 -07:00
Siddharth Ganesan
acc00df6a5 feat(mothership): append 2026-04-08 12:22:18 -07:00
Siddharth Ganesan
b6e1df4ffd feat(motheship): add docx support 2026-04-08 12:22:18 -07:00
Siddharth Ganesan
e2de4d2a9a Force redeploy 2026-04-08 12:21:28 -07:00
Siddharth Ganesan
1fc84b8e4d improvement(mothership): new agent loop (#3920)
* feat(transport): replace shared chat transport with mothership-stream module

* improvement(contracts): regenerate contracts from go

* feat(tools): add tool catalog codegen from go tool contracts

* feat(tools): add tool-executor dispatch framework for sim side tool routing

* feat(orchestrator): rewrite tool dispatch with catalog-driven executor and simplified resume loop

* feat(orchestrator): checkpoint resume flow

* refactor(copilot): consolidate orchestrator into request/ layer

* refactor(mothership): reorganize lib/copilot into structured subdirectories

* refactor(mothership): canonical transcript layer, dead code cleanup, type consolidation

* refactor(mothership): rebase onto latest staging

* refactor(mothership): rename request continue to lifecycle

* feat(trace): add initial version of request traces

* improvement(stream): batch stream from redis

* fix(resume): fix the resume checkpoint

* fix(resume): fix resume client tool

* fix(subagents): subagent resume should join on existing subagent text block

* improvement(reconnect): harden reconnect logic

* fix(superagent): fix superagent integration tools

* improvement(stream): improve stream perf

* Rebase with origin dev

* fix(tests): fix failing test

* fix(build): fix type errors

* fix(build): fix build errors

* fix(build): fix type errors

* feat(mothership): add cli execution

* fix(mothership): fix function execute tests
2026-04-08 12:21:28 -07:00
Vikhyath Mondreti
2d2448fe9a fix build error 2026-04-08 12:16:35 -07:00
339 changed files with 28347 additions and 17254 deletions

View File

@@ -16,6 +16,7 @@ permissions:
jobs:
test-build:
name: Test and Build
if: github.ref != 'refs/heads/dev' || github.event_name == 'pull_request'
uses: ./.github/workflows/test-build.yml
secrets: inherit
@@ -45,11 +46,66 @@ jobs:
echo " Not a release commit"
fi
# Build AMD64 images and push to ECR immediately (+ GHCR for main)
# Dev: build all 3 images for ECR only (no GHCR, no ARM64)
build-dev:
name: Build Dev ECR
needs: [detect-version]
if: github.event_name == 'push' && github.ref == 'refs/heads/dev'
runs-on: blacksmith-8vcpu-ubuntu-2404
permissions:
contents: read
id-token: write
strategy:
fail-fast: false
matrix:
include:
- dockerfile: ./docker/app.Dockerfile
ecr_repo_secret: ECR_APP
- dockerfile: ./docker/db.Dockerfile
ecr_repo_secret: ECR_MIGRATIONS
- dockerfile: ./docker/realtime.Dockerfile
ecr_repo_secret: ECR_REALTIME
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: ${{ secrets.DEV_AWS_ROLE_TO_ASSUME }}
aws-region: ${{ secrets.DEV_AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: useblacksmith/setup-docker-builder@v1
- name: Build and push
uses: useblacksmith/build-push-action@v2
with:
context: .
file: ${{ matrix.dockerfile }}
platforms: linux/amd64
push: true
tags: ${{ steps.login-ecr.outputs.registry }}/${{ secrets[matrix.ecr_repo_secret] }}:dev
provenance: false
sbom: false
# Main/staging: build AMD64 images and push to ECR + GHCR
build-amd64:
name: Build AMD64
needs: [detect-version]
if: github.event_name == 'push' && (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/staging' || github.ref == 'refs/heads/dev')
needs: [test-build, detect-version]
if: >-
github.event_name == 'push' &&
(github.ref == 'refs/heads/main' || github.ref == 'refs/heads/staging')
runs-on: blacksmith-8vcpu-ubuntu-2404
permissions:
contents: read
@@ -75,8 +131,8 @@ jobs:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: ${{ github.ref == 'refs/heads/main' && secrets.AWS_ROLE_TO_ASSUME || github.ref == 'refs/heads/dev' && secrets.DEV_AWS_ROLE_TO_ASSUME || secrets.STAGING_AWS_ROLE_TO_ASSUME }}
aws-region: ${{ github.ref == 'refs/heads/main' && secrets.AWS_REGION || github.ref == 'refs/heads/dev' && secrets.DEV_AWS_REGION || secrets.STAGING_AWS_REGION }}
role-to-assume: ${{ github.ref == 'refs/heads/main' && secrets.AWS_ROLE_TO_ASSUME || secrets.STAGING_AWS_ROLE_TO_ASSUME }}
aws-region: ${{ github.ref == 'refs/heads/main' && secrets.AWS_REGION || secrets.STAGING_AWS_REGION }}
- name: Login to Amazon ECR
id: login-ecr
@@ -106,26 +162,20 @@ jobs:
ECR_REPO="${{ secrets[matrix.ecr_repo_secret] }}"
GHCR_IMAGE="${{ matrix.ghcr_image }}"
# ECR tags (always build for ECR)
if [ "${{ github.ref }}" = "refs/heads/main" ]; then
ECR_TAG="latest"
elif [ "${{ github.ref }}" = "refs/heads/dev" ]; then
ECR_TAG="dev"
else
ECR_TAG="staging"
fi
ECR_IMAGE="${ECR_REGISTRY}/${ECR_REPO}:${ECR_TAG}"
# Build tags list
TAGS="${ECR_IMAGE}"
# Add GHCR tags only for main branch
if [ "${{ github.ref }}" = "refs/heads/main" ]; then
GHCR_AMD64="${GHCR_IMAGE}:latest-amd64"
GHCR_SHA="${GHCR_IMAGE}:${{ github.sha }}-amd64"
TAGS="${TAGS},$GHCR_AMD64,$GHCR_SHA"
# Add version tag if this is a release commit
if [ "${{ needs.detect-version.outputs.is_release }}" = "true" ]; then
VERSION="${{ needs.detect-version.outputs.version }}"
GHCR_VERSION="${GHCR_IMAGE}:${VERSION}-amd64"
@@ -256,6 +306,14 @@ jobs:
docker manifest push "${IMAGE_BASE}:${VERSION}"
fi
# Run database migrations for dev
migrate-dev:
name: Migrate Dev DB
needs: [build-dev]
if: github.event_name == 'push' && github.ref == 'refs/heads/dev'
uses: ./.github/workflows/migrations.yml
secrets: inherit
# Check if docs changed
check-docs-changes:
name: Check Docs Changes

View File

@@ -38,5 +38,5 @@ jobs:
- name: Apply migrations
working-directory: ./packages/db
env:
DATABASE_URL: ${{ github.ref == 'refs/heads/main' && secrets.DATABASE_URL || secrets.STAGING_DATABASE_URL }}
DATABASE_URL: ${{ github.ref == 'refs/heads/main' && secrets.DATABASE_URL || github.ref == 'refs/heads/dev' && secrets.DEV_DATABASE_URL || secrets.STAGING_DATABASE_URL }}
run: bunx drizzle-kit migrate --config=./drizzle.config.ts

View File

@@ -74,10 +74,6 @@ docker compose -f docker-compose.prod.yml up -d
Open [http://localhost:3000](http://localhost:3000)
#### Background worker note
The Docker Compose stack starts a dedicated worker container by default. If `REDIS_URL` is not configured, the worker will start, log that it is idle, and do no queue processing. This is expected. Queue-backed API, webhook, and schedule execution requires Redis; installs without Redis continue to use the inline execution path.
Sim also supports local models via [Ollama](https://ollama.ai) and [vLLM](https://docs.vllm.ai/) — see the [Docker self-hosting docs](https://docs.sim.ai/self-hosting/docker) for setup details.
### Self-hosted: Manual Setup
@@ -123,12 +119,10 @@ cd packages/db && bun run db:migrate
5. Start development servers:
```bash
bun run dev:full # Starts Next.js app, realtime socket server, and the BullMQ worker
bun run dev:full # Starts Next.js app and realtime socket server
```
If `REDIS_URL` is not configured, the worker will remain idle and execution continues inline.
Or run separately: `bun run dev` (Next.js), `cd apps/sim && bun run dev:sockets` (realtime), and `cd apps/sim && bun run worker` (BullMQ worker).
Or run separately: `bun run dev` (Next.js) and `cd apps/sim && bun run dev:sockets` (realtime).
## Copilot API Keys

View File

@@ -2,8 +2,8 @@
import { type SVGProps, useEffect, useRef, useState } from 'react'
import { AnimatePresence, motion, useInView } from 'framer-motion'
import ReactMarkdown, { type Components } from 'react-markdown'
import remarkGfm from 'remark-gfm'
import { Streamdown } from 'streamdown'
import 'streamdown/styles.css'
import { ChevronDown } from '@/components/emcn'
import { Database, File, Library, Table } from '@/components/emcn/icons'
import {
@@ -557,8 +557,8 @@ The team agreed to prioritize the new onboarding flow. Key decisions:
Follow up with engineering on the timeline for the API v2 migration. Draft the proposal for the board meeting next week.`
const MD_COMPONENTS: Components = {
h1: ({ children }) => (
const MD_COMPONENTS = {
h1: ({ children }: { children?: React.ReactNode }) => (
<p
role='presentation'
className='mb-4 border-[#E5E5E5] border-b pb-2 font-semibold text-[#1C1C1C] text-[20px]'
@@ -566,17 +566,23 @@ const MD_COMPONENTS: Components = {
{children}
</p>
),
h2: ({ children }) => (
h2: ({ children }: { children?: React.ReactNode }) => (
<h2 className='mt-5 mb-3 border-[#E5E5E5] border-b pb-1.5 font-semibold text-[#1C1C1C] text-[16px]'>
{children}
</h2>
),
ul: ({ children }) => <ul className='mb-3 list-disc pl-6'>{children}</ul>,
ol: ({ children }) => <ol className='mb-3 list-decimal pl-6'>{children}</ol>,
li: ({ children }) => (
ul: ({ children }: { children?: React.ReactNode }) => (
<ul className='mb-3 list-disc pl-6'>{children}</ul>
),
ol: ({ children }: { children?: React.ReactNode }) => (
<ol className='mb-3 list-decimal pl-6'>{children}</ol>
),
li: ({ children }: { children?: React.ReactNode }) => (
<li className='mb-1 text-[#1C1C1C] text-[14px] leading-[1.6]'>{children}</li>
),
p: ({ children }) => <p className='mb-3 text-[#1C1C1C] text-[14px] leading-[1.6]'>{children}</p>,
p: ({ children }: { children?: React.ReactNode }) => (
<p className='mb-3 text-[#1C1C1C] text-[14px] leading-[1.6]'>{children}</p>
),
}
function MockFullFiles() {
@@ -618,9 +624,9 @@ function MockFullFiles() {
transition={{ duration: 0.4, delay: 0.5 }}
>
<div className='h-full overflow-auto p-6'>
<ReactMarkdown remarkPlugins={[remarkGfm]} components={MD_COMPONENTS}>
<Streamdown mode='static' components={MD_COMPONENTS}>
{source}
</ReactMarkdown>
</Streamdown>
</div>
</motion.div>
</div>

View File

@@ -0,0 +1,144 @@
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/core/config/env'
/** Maps a deployment environment name to its mothership base URL env var (may be unset). */
const ENV_URLS: Record<string, string | undefined> = {
  dev: env.MOTHERSHIP_DEV_URL,
  staging: env.MOTHERSHIP_STAGING_URL,
  prod: env.MOTHERSHIP_PROD_URL,
}

/**
 * Resolve the mothership base URL for an environment.
 * Returns null for unknown environments and for environments whose URL
 * env var is not configured.
 */
function getMothershipUrl(environment: string): string | null {
  const configured = ENV_URLS[environment]
  return configured === undefined ? null : configured
}
/**
 * True only when the current session belongs to a user whose DB role is
 * 'admin'. Unauthenticated requests and missing user rows both fail closed.
 */
async function isAdminRequestAuthorized(): Promise<boolean> {
  const session = await getSession()
  const sessionUserId = session?.user?.id
  if (!sessionUserId) {
    return false
  }
  const rows = await db
    .select({ role: user.role })
    .from(user)
    .where(eq(user.id, sessionUserId))
    .limit(1)
  return rows[0]?.role === 'admin'
}
/**
* Proxy to the mothership admin API.
*
* Query params:
* env - "dev" | "staging" | "prod"
* endpoint - the admin endpoint path, e.g. "requests", "licenses", "traces"
*
* The request body (for POST) is forwarded as-is. Additional query params
* (e.g. requestId for GET /traces) are forwarded.
*/
/**
 * POST proxy to the mothership admin API.
 *
 * Requires an admin session and MOTHERSHIP_API_ADMIN_KEY. The raw request
 * body is forwarded as-is to `${baseUrl}/api/admin/${endpoint}` for the
 * environment selected by the `env` query param (default "dev").
 *
 * Responses: 401 unauthorized, 500 missing key, 400 bad env/endpoint,
 * 502 upstream unreachable; otherwise the upstream status is passed through.
 */
export async function POST(req: NextRequest) {
  if (!(await isAdminRequestAuthorized())) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  const adminKey = env.MOTHERSHIP_API_ADMIN_KEY
  if (!adminKey) {
    return NextResponse.json({ error: 'MOTHERSHIP_API_ADMIN_KEY not configured' }, { status: 500 })
  }

  const { searchParams } = new URL(req.url)
  const environment = searchParams.get('env') || 'dev'
  const endpoint = searchParams.get('endpoint')
  if (!endpoint) {
    return NextResponse.json({ error: 'endpoint query param required' }, { status: 400 })
  }

  const baseUrl = getMothershipUrl(environment)
  if (!baseUrl) {
    return NextResponse.json(
      { error: `No URL configured for environment: ${environment}` },
      { status: 400 }
    )
  }

  // NOTE(review): `endpoint` is interpolated into the upstream path without
  // sanitization — admin-only today, but confirm path traversal is acceptable.
  const targetUrl = `${baseUrl}/api/admin/${endpoint}`
  try {
    const body = await req.text()
    const upstream = await fetch(targetUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'x-api-key': adminKey,
      },
      // Only attach a body when the caller actually sent one.
      ...(body ? { body } : {}),
    })
    // Parse defensively: a non-JSON upstream body (HTML error page, empty
    // body) previously threw in upstream.json() and was misreported by the
    // catch below as a 502 "failed to reach" error, masking the real
    // upstream status. Preserve the upstream status instead.
    const raw = await upstream.text()
    let data: unknown
    try {
      data = raw ? JSON.parse(raw) : null
    } catch {
      data = { error: 'Upstream returned non-JSON response', body: raw.slice(0, 2048) }
    }
    return NextResponse.json(data, { status: upstream.status })
  } catch (error) {
    return NextResponse.json(
      {
        error: `Failed to reach mothership (${environment}): ${error instanceof Error ? error.message : 'Unknown error'}`,
      },
      { status: 502 }
    )
  }
}
/**
 * GET proxy to the mothership admin API.
 *
 * Requires an admin session and MOTHERSHIP_API_ADMIN_KEY. Every query param
 * except the routing params `env` and `endpoint` is forwarded to
 * `${baseUrl}/api/admin/${endpoint}` for the selected environment.
 *
 * Responses: 401 unauthorized, 500 missing key, 400 bad env/endpoint,
 * 502 upstream unreachable; otherwise the upstream status is passed through.
 */
export async function GET(req: NextRequest) {
  if (!(await isAdminRequestAuthorized())) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  const adminKey = env.MOTHERSHIP_API_ADMIN_KEY
  if (!adminKey) {
    return NextResponse.json({ error: 'MOTHERSHIP_API_ADMIN_KEY not configured' }, { status: 500 })
  }

  const { searchParams } = new URL(req.url)
  const environment = searchParams.get('env') || 'dev'
  const endpoint = searchParams.get('endpoint')
  if (!endpoint) {
    return NextResponse.json({ error: 'endpoint query param required' }, { status: 400 })
  }

  const baseUrl = getMothershipUrl(environment)
  if (!baseUrl) {
    return NextResponse.json(
      { error: `No URL configured for environment: ${environment}` },
      { status: 400 }
    )
  }

  // Forward all query params except the two consumed by this proxy.
  const forwardParams = new URLSearchParams()
  searchParams.forEach((value, key) => {
    if (key !== 'env' && key !== 'endpoint') {
      forwardParams.set(key, value)
    }
  })
  const qs = forwardParams.toString()
  const targetUrl = `${baseUrl}/api/admin/${endpoint}${qs ? `?${qs}` : ''}`

  try {
    const upstream = await fetch(targetUrl, {
      method: 'GET',
      headers: { 'x-api-key': adminKey },
    })
    // Parse defensively: a non-JSON upstream body (HTML error page, empty
    // body) previously threw in upstream.json() and was misreported by the
    // catch below as a 502 "failed to reach" error, masking the real
    // upstream status. Preserve the upstream status instead.
    const raw = await upstream.text()
    let data: unknown
    try {
      data = raw ? JSON.parse(raw) : null
    } catch {
      data = { error: 'Upstream returned non-JSON response', body: raw.slice(0, 2048) }
    }
    return NextResponse.json(data, { status: upstream.status })
  } catch (error) {
    return NextResponse.json(
      {
        error: `Failed to reach mothership (${environment}): ${error instanceof Error ? error.message : 'Unknown error'}`,
      },
      { status: 502 }
    )
  }
}

View File

@@ -4,7 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { recordUsage } from '@/lib/billing/core/usage-log'
import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
import { checkInternalApiKey } from '@/lib/copilot/utils'
import { checkInternalApiKey } from '@/lib/copilot/request/http'
import { isBillingEnabled } from '@/lib/core/config/feature-flags'
import { generateRequestId } from '@/lib/core/utils/request'

View File

@@ -1,8 +1,11 @@
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkServerSideUsageLimits } from '@/lib/billing/calculations/usage-monitor'
import { checkInternalApiKey } from '@/lib/copilot/utils'
import { checkInternalApiKey } from '@/lib/copilot/request/http'
const logger = createLogger('CopilotApiKeysValidate')
@@ -34,6 +37,12 @@ export async function POST(req: NextRequest) {
const { userId } = validationResult.data
const [existingUser] = await db.select().from(user).where(eq(user.id, userId)).limit(1)
if (!existingUser) {
logger.warn('[API VALIDATION] userId does not exist', { userId })
return NextResponse.json({ error: 'User not found' }, { status: 403 })
}
logger.info('[API VALIDATION] Validating usage limit', { userId })
const { isExceeded, currentUsage, limit } = await checkServerSideUsageLimits(userId)

View File

@@ -1,10 +1,12 @@
import { createLogger } from '@sim/logger'
import { NextResponse } from 'next/server'
import { getLatestRunForStream } from '@/lib/copilot/async-runs/repository'
import { abortActiveStream, waitForPendingChatStream } from '@/lib/copilot/chat-streaming'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request/http'
import { abortActiveStream } from '@/lib/copilot/request/session/abort'
import { env } from '@/lib/core/config/env'
const logger = createLogger('CopilotChatAbortAPI')
const GO_EXPLICIT_ABORT_TIMEOUT_MS = 3000
export async function POST(request: Request) {
@@ -15,7 +17,12 @@ export async function POST(request: Request) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json().catch(() => ({}))
const body = await request.json().catch((err) => {
logger.warn('Abort request body parse failed; continuing with empty object', {
error: err instanceof Error ? err.message : String(err),
})
return {}
})
const streamId = typeof body.streamId === 'string' ? body.streamId : ''
let chatId = typeof body.chatId === 'string' ? body.chatId : ''
@@ -24,7 +31,13 @@ export async function POST(request: Request) {
}
if (!chatId) {
const run = await getLatestRunForStream(streamId, authenticatedUserId).catch(() => null)
const run = await getLatestRunForStream(streamId, authenticatedUserId).catch((err) => {
logger.warn('getLatestRunForStream failed while resolving chatId for abort', {
streamId,
error: err instanceof Error ? err.message : String(err),
})
return null
})
if (run?.chatId) {
chatId = run.chatId
}
@@ -36,7 +49,10 @@ export async function POST(request: Request) {
headers['x-api-key'] = env.COPILOT_API_KEY
}
const controller = new AbortController()
const timeout = setTimeout(() => controller.abort(), GO_EXPLICIT_ABORT_TIMEOUT_MS)
const timeout = setTimeout(
() => controller.abort('timeout:go_explicit_abort_fetch'),
GO_EXPLICIT_ABORT_TIMEOUT_MS
)
const response = await fetch(`${SIM_AGENT_API_URL}/api/streams/explicit-abort`, {
method: 'POST',
headers,
@@ -50,15 +66,13 @@ export async function POST(request: Request) {
if (!response.ok) {
throw new Error(`Explicit abort marker request failed: ${response.status}`)
}
} catch {
// best effort: local abort should still proceed even if Go marker fails
} catch (err) {
logger.warn('Explicit abort marker request failed; proceeding with local abort', {
streamId,
error: err instanceof Error ? err.message : String(err),
})
}
const aborted = await abortActiveStream(streamId)
if (chatId) {
await waitForPendingChatStream(chatId, GO_EXPLICIT_ABORT_TIMEOUT_MS + 1000, streamId).catch(
() => false
)
}
return NextResponse.json({ aborted })
}

View File

@@ -36,11 +36,11 @@ vi.mock('drizzle-orm', () => ({
eq: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'eq' })),
}))
vi.mock('@/lib/copilot/chat-lifecycle', () => ({
vi.mock('@/lib/copilot/chat/lifecycle', () => ({
getAccessibleCopilotChat: mockGetAccessibleCopilotChat,
}))
vi.mock('@/lib/copilot/task-events', () => ({
vi.mock('@/lib/copilot/tasks', () => ({
taskPubSub: { publishStatusChanged: vi.fn() },
}))

View File

@@ -5,8 +5,8 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat-lifecycle'
import { taskPubSub } from '@/lib/copilot/task-events'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat/lifecycle'
import { taskPubSub } from '@/lib/copilot/tasks'
const logger = createLogger('DeleteChatAPI')

View File

@@ -0,0 +1,175 @@
import { db } from '@sim/db'
import { copilotChats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getLatestRunForStream } from '@/lib/copilot/async-runs/repository'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat/lifecycle'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
createInternalServerErrorResponse,
createUnauthorizedResponse,
} from '@/lib/copilot/request/http'
import { readFilePreviewSessions } from '@/lib/copilot/request/session'
import { readEvents } from '@/lib/copilot/request/session/buffer'
import { toStreamBatchEvent } from '@/lib/copilot/request/session/types'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { assertActiveWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('CopilotChatAPI')
/**
 * Shape a copilot chat DB row into the API response payload.
 *
 * - `messages` is coerced to an array; `messageCount` is derived from it.
 * - `planArtifact` / `config` fall back to null when falsy.
 * - When the row carries a `conversationId` property it is surfaced as
 *   `activeStreamId` (null when empty); when it carries `resources` they are
 *   coerced to an array. Rows selected without those columns omit the keys.
 */
function transformChat(chat: {
  id: string
  title: string | null
  model: string | null
  messages: unknown
  planArtifact?: unknown
  config?: unknown
  conversationId?: string | null
  resources?: unknown
  createdAt: Date | null
  updatedAt: Date | null
}) {
  const messageList = Array.isArray(chat.messages) ? chat.messages : []
  const hasStreamId = 'conversationId' in chat
  const hasResources = 'resources' in chat
  return {
    id: chat.id,
    title: chat.title,
    model: chat.model,
    messages: messageList,
    messageCount: messageList.length,
    planArtifact: chat.planArtifact || null,
    config: chat.config || null,
    ...(hasStreamId ? { activeStreamId: chat.conversationId || null } : {}),
    ...(hasResources
      ? { resources: Array.isArray(chat.resources) ? chat.resources : [] }
      : {}),
    createdAt: chat.createdAt,
    updatedAt: chat.updatedAt,
  }
}
/**
 * GET /api/copilot/chats
 *
 * Three lookup modes, checked in priority order:
 *   1. ?chatId=...      -> return that single chat, plus a live stream
 *      snapshot when the chat has an active conversationId
 *   2. ?workflowId=...  -> list the caller's chats scoped to a workflow
 *   3. ?workspaceId=... -> list the caller's chats scoped to a workspace
 *
 * Requires a session. Responses: 401 unauthorized, 404 chat not found,
 * 400 when no scoping param is given, 500 on unexpected errors.
 */
export async function GET(req: NextRequest) {
  try {
    const { searchParams } = new URL(req.url)
    const workflowId = searchParams.get('workflowId')
    const workspaceId = searchParams.get('workspaceId')
    const chatId = searchParams.get('chatId')
    const { userId: authenticatedUserId, isAuthenticated } =
      await authenticateCopilotRequestSessionOnly()
    if (!isAuthenticated || !authenticatedUserId) {
      return createUnauthorizedResponse()
    }
    if (chatId) {
      // Single-chat mode. Access control is delegated to
      // getAccessibleCopilotChat, which returns null when the chat does not
      // exist or the user may not see it.
      const chat = await getAccessibleCopilotChat(chatId, authenticatedUserId)
      if (!chat) {
        return NextResponse.json({ success: false, error: 'Chat not found' }, { status: 404 })
      }
      let streamSnapshot: {
        events: ReturnType<typeof toStreamBatchEvent>[]
        previewSessions: Awaited<ReturnType<typeof readFilePreviewSessions>>
        status: string
      } | null = null
      if (chat.conversationId) {
        try {
          // Fetch buffered events, preview sessions, and the latest run in
          // parallel. Preview-session and run failures degrade to [] / null
          // (logged) so a partial snapshot can still be returned; a failure
          // in readEvents aborts the whole snapshot via the outer catch.
          const [events, previewSessions, run] = await Promise.all([
            readEvents(chat.conversationId, '0'),
            readFilePreviewSessions(chat.conversationId).catch((error) => {
              logger.warn('Failed to read preview sessions for copilot chat', {
                chatId,
                conversationId: chat.conversationId,
                error: error instanceof Error ? error.message : String(error),
              })
              return []
            }),
            getLatestRunForStream(chat.conversationId, authenticatedUserId).catch((error) => {
              logger.warn('Failed to fetch latest run for copilot chat snapshot', {
                chatId,
                conversationId: chat.conversationId,
                error: error instanceof Error ? error.message : String(error),
              })
              return null
            }),
          ])
          streamSnapshot = {
            events: events.map(toStreamBatchEvent),
            previewSessions,
            // Prefer the run's own status; otherwise infer 'active' from the
            // presence of buffered events, else 'unknown'.
            status:
              typeof run?.status === 'string'
                ? run.status
                : events.length > 0
                  ? 'active'
                  : 'unknown',
          }
        } catch (error) {
          // Snapshot is best-effort: the chat itself is still returned.
          logger.warn('Failed to load copilot chat stream snapshot', {
            chatId,
            conversationId: chat.conversationId,
            error: error instanceof Error ? error.message : String(error),
          })
        }
      }
      logger.info(`Retrieved chat ${chatId}`)
      return NextResponse.json({
        success: true,
        chat: {
          ...transformChat(chat),
          ...(streamSnapshot ? { streamSnapshot } : {}),
        },
      })
    }
    if (!workflowId && !workspaceId) {
      return createBadRequestResponse('workflowId, workspaceId, or chatId is required')
    }
    // List mode: verify access to the requested scope before querying.
    if (workspaceId) {
      await assertActiveWorkspaceAccess(workspaceId, authenticatedUserId)
    }
    if (workflowId) {
      const authorization = await authorizeWorkflowByWorkspacePermission({
        workflowId,
        userId: authenticatedUserId,
        action: 'read',
      })
      if (!authorization.allowed) {
        return createUnauthorizedResponse()
      }
    }
    // workflowId takes precedence; workspaceId! is safe here because the
    // earlier guard ensures at least one of the two is present.
    const scopeFilter = workflowId
      ? eq(copilotChats.workflowId, workflowId)
      : eq(copilotChats.workspaceId, workspaceId!)
    // Only the caller's own chats within the scope, newest first. Note this
    // select omits conversationId/resources, so transformChat will skip the
    // activeStreamId/resources keys for list results.
    const chats = await db
      .select({
        id: copilotChats.id,
        title: copilotChats.title,
        model: copilotChats.model,
        messages: copilotChats.messages,
        planArtifact: copilotChats.planArtifact,
        config: copilotChats.config,
        createdAt: copilotChats.createdAt,
        updatedAt: copilotChats.updatedAt,
      })
      .from(copilotChats)
      .where(and(eq(copilotChats.userId, authenticatedUserId), scopeFilter))
      .orderBy(desc(copilotChats.updatedAt))
    const scope = workflowId ? `workflow ${workflowId}` : `workspace ${workspaceId}`
    logger.info(`Retrieved ${chats.length} chats for ${scope}`)
    return NextResponse.json({
      success: true,
      chats: chats.map(transformChat),
    })
  } catch (error) {
    logger.error('Error fetching copilot chats:', error)
    return createInternalServerErrorResponse('Failed to fetch chats')
  }
}

View File

@@ -0,0 +1,65 @@
import { db } from '@sim/db'
import { copilotChats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat/lifecycle'
import { taskPubSub } from '@/lib/copilot/tasks'
const logger = createLogger('RenameChatAPI')
// Request body for the PATCH handler: the chat to rename and its new title
// (title limited to 1-200 characters).
const RenameChatSchema = z.object({
  chatId: z.string().min(1),
  title: z.string().min(1).max(200),
})
/**
 * PATCH /api/copilot/chats/rename
 *
 * Renames a copilot chat owned by the session user. Validates the body with
 * RenameChatSchema, re-checks access via getAccessibleCopilotChat, updates
 * the row (also bumping updatedAt/lastSeenAt), and notifies workspace
 * listeners of the rename.
 *
 * Responses: 401 unauthorized, 404 chat not found, 400 invalid body,
 * 500 on unexpected errors.
 */
export async function PATCH(request: NextRequest) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
    }
    const body = await request.json()
    const { chatId, title } = RenameChatSchema.parse(body)
    const chat = await getAccessibleCopilotChat(chatId, session.user.id)
    if (!chat) {
      return NextResponse.json({ success: false, error: 'Chat not found' }, { status: 404 })
    }
    const now = new Date()
    // The update is additionally constrained to userId, so even an
    // accessible-but-not-owned chat cannot be renamed here.
    const [updated] = await db
      .update(copilotChats)
      .set({ title, updatedAt: now, lastSeenAt: now })
      .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, session.user.id)))
      .returning({ id: copilotChats.id, workspaceId: copilotChats.workspaceId })
    if (!updated) {
      return NextResponse.json({ success: false, error: 'Chat not found' }, { status: 404 })
    }
    logger.info('Chat renamed', { chatId, title })
    if (updated.workspaceId) {
      // Fire-and-forget workspace notification so other clients refresh.
      taskPubSub?.publishStatusChanged({
        workspaceId: updated.workspaceId,
        chatId,
        type: 'renamed',
      })
    }
    return NextResponse.json({ success: true })
  } catch (error) {
    // Schema violations become a 400 with zod's issue details.
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { success: false, error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }
    logger.error('Error renaming chat:', error)
    return NextResponse.json({ success: false, error: 'Failed to rename chat' }, { status: 500 })
  }
}

View File

@@ -10,8 +10,8 @@ import {
createInternalServerErrorResponse,
createNotFoundResponse,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import type { ChatResource, ResourceType } from '@/lib/copilot/resources'
} from '@/lib/copilot/request/http'
import type { ChatResource, ResourceType } from '@/lib/copilot/resources/persistence'
const logger = createLogger('CopilotChatResourcesAPI')

View File

@@ -1,47 +1,46 @@
import { db } from '@sim/db'
import { copilotChats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, sql } from 'drizzle-orm'
import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createRunSegment } from '@/lib/copilot/async-runs/repository'
import { getAccessibleCopilotChat, resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
import { buildCopilotRequestPayload } from '@/lib/copilot/chat-payload'
import { type ChatLoadResult, resolveOrCreateChat } from '@/lib/copilot/chat/lifecycle'
import { buildCopilotRequestPayload } from '@/lib/copilot/chat/payload'
import {
acquirePendingChatStream,
createSSEStream,
releasePendingChatStream,
requestChatTitle,
SSE_RESPONSE_HEADERS,
} from '@/lib/copilot/chat-streaming'
import { COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
import { getStreamMeta, readStreamEvents } from '@/lib/copilot/orchestrator/stream/buffer'
import type { OrchestratorResult } from '@/lib/copilot/orchestrator/types'
import { resolveActiveResourceContext } from '@/lib/copilot/process-contents'
buildPersistedAssistantMessage,
buildPersistedUserMessage,
} from '@/lib/copilot/chat/persisted-message'
import {
processContextsServer,
resolveActiveResourceContext,
} from '@/lib/copilot/chat/process-contents'
import { finalizeAssistantTurn } from '@/lib/copilot/chat/terminal-state'
import { COPILOT_REQUEST_MODES } from '@/lib/copilot/constants'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
createInternalServerErrorResponse,
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { generateId } from '@/lib/core/utils/uuid'
import { captureServerEvent } from '@/lib/posthog/server'
} from '@/lib/copilot/request/http'
import { createSSEStream, SSE_RESPONSE_HEADERS } from '@/lib/copilot/request/lifecycle/start'
import {
authorizeWorkflowByWorkspacePermission,
resolveWorkflowIdForUser,
} from '@/lib/workflows/utils'
import {
assertActiveWorkspaceAccess,
getUserEntityPermissions,
} from '@/lib/workspaces/permissions/utils'
acquirePendingChatStream,
getPendingChatStreamId,
releasePendingChatStream,
} from '@/lib/copilot/request/session'
import type { OrchestratorResult } from '@/lib/copilot/request/types'
import { getWorkflowById, resolveWorkflowIdForUser } from '@/lib/workflows/utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import type { ChatContext } from '@/stores/panel'
export const maxDuration = 3600
const logger = createLogger('CopilotChatAPI')
// ---------------------------------------------------------------------------
// Schemas
// ---------------------------------------------------------------------------
const FileAttachmentSchema = z.object({
id: z.string(),
key: z.string(),
@@ -68,7 +67,6 @@ const ChatMessageSchema = z.object({
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
prefetch: z.boolean().optional(),
createNewChat: z.boolean().optional().default(false),
stream: z.boolean().optional().default(true),
implicitFeedback: z.string().optional(),
fileAttachments: z.array(FileAttachmentSchema).optional(),
resourceAttachments: z.array(ResourceAttachmentSchema).optional(),
@@ -108,27 +106,25 @@ const ChatMessageSchema = z.object({
userTimezone: z.string().optional(),
})
/**
* POST /api/copilot/chat
* Send messages to sim agent and handle chat persistence
*/
// ---------------------------------------------------------------------------
// POST /api/copilot/chat
// ---------------------------------------------------------------------------
export async function POST(req: NextRequest) {
const tracker = createRequestTracker()
let actualChatId: string | undefined
let pendingChatStreamAcquired = false
let pendingChatStreamHandedOff = false
let pendingChatStreamID: string | undefined
let chatStreamLockAcquired = false
let userMessageIdToUse = ''
try {
// Get session to access user information including name
// 1. Auth
const session = await getSession()
if (!session?.user?.id) {
return createUnauthorizedResponse()
}
const authenticatedUserId = session.user.id
// 2. Parse & validate
const body = await req.json()
const {
message,
@@ -141,7 +137,6 @@ export async function POST(req: NextRequest) {
mode,
prefetch,
createNewChat,
stream,
implicitFeedback,
fileAttachments,
resourceAttachments,
@@ -155,17 +150,12 @@ export async function POST(req: NextRequest) {
? contexts.map((ctx) => {
if (ctx.kind !== 'blocks') return ctx
if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx
if (ctx.blockId) {
return {
...ctx,
blockIds: [ctx.blockId],
}
}
if (ctx.blockId) return { ...ctx, blockIds: [ctx.blockId] }
return ctx
})
: contexts
// Copilot route always requires a workflow scope
// 3. Resolve workflow & workspace
const resolved = await resolveWorkflowIdForUser(
authenticatedUserId,
providedWorkflowId,
@@ -177,64 +167,29 @@ export async function POST(req: NextRequest) {
'No workflows found. Create a workflow first or provide a valid workflowId.'
)
}
const workflowId = resolved.workflowId
const workflowResolvedName = resolved.workflowName
const { workflowId, workflowName: workflowResolvedName } = resolved
// Resolve workspace from workflow so it can be sent as implicit context to the copilot.
let resolvedWorkspaceId: string | undefined
try {
const { getWorkflowById } = await import('@/lib/workflows/utils')
const wf = await getWorkflowById(workflowId)
resolvedWorkspaceId = wf?.workspaceId ?? undefined
} catch {
logger
.withMetadata({ requestId: tracker.requestId, messageId: userMessageId })
.warn('Failed to resolve workspaceId from workflow')
logger.warn(`[${tracker.requestId}] Failed to resolve workspaceId from workflow`)
}
captureServerEvent(
authenticatedUserId,
'copilot_chat_sent',
{
workflow_id: workflowId,
workspace_id: resolvedWorkspaceId ?? '',
has_file_attachments: Array.isArray(fileAttachments) && fileAttachments.length > 0,
has_contexts: Array.isArray(contexts) && contexts.length > 0,
mode,
},
{
groups: resolvedWorkspaceId ? { workspace: resolvedWorkspaceId } : undefined,
setOnce: { first_copilot_use_at: new Date().toISOString() },
}
)
const userMessageIdToUse = userMessageId || generateId()
const reqLogger = logger.withMetadata({
requestId: tracker.requestId,
messageId: userMessageIdToUse,
})
try {
reqLogger.info('Received chat POST', {
workflowId,
hasContexts: Array.isArray(normalizedContexts),
contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
contextsPreview: Array.isArray(normalizedContexts)
? normalizedContexts.map((c: any) => ({
kind: c?.kind,
chatId: c?.chatId,
workflowId: c?.workflowId,
executionId: (c as any)?.executionId,
label: c?.label,
}))
: undefined,
})
} catch {}
let currentChat: any = null
let conversationHistory: any[] = []
actualChatId = chatId
userMessageIdToUse = userMessageId || crypto.randomUUID()
const selectedModel = model || 'claude-opus-4-6'
logger.info(`[${tracker.requestId}] Received chat POST`, {
workflowId,
contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
})
// 4. Resolve or create chat
let currentChat: ChatLoadResult['chat'] = null
let conversationHistory: unknown[] = []
actualChatId = chatId
if (chatId || createNewChat) {
const chatResult = await resolveOrCreateChat({
chatId,
@@ -253,37 +208,48 @@ export async function POST(req: NextRequest) {
}
}
if (actualChatId) {
chatStreamLockAcquired = await acquirePendingChatStream(actualChatId, userMessageIdToUse)
if (!chatStreamLockAcquired) {
const activeStreamId = await getPendingChatStreamId(actualChatId)
return NextResponse.json(
{
error: 'A response is already in progress for this chat.',
...(activeStreamId ? { activeStreamId } : {}),
},
{ status: 409 }
)
}
}
// 5. Process contexts
let agentContexts: Array<{ type: string; content: string }> = []
if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) {
try {
const { processContextsServer } = await import('@/lib/copilot/process-contents')
const processed = await processContextsServer(
normalizedContexts as any,
normalizedContexts as ChatContext[],
authenticatedUserId,
message,
resolvedWorkspaceId,
actualChatId
)
agentContexts = processed
reqLogger.info('Contexts processed for request', {
logger.info(`[${tracker.requestId}] Contexts processed`, {
processedCount: agentContexts.length,
kinds: agentContexts.map((c) => c.type),
lengthPreview: agentContexts.map((c) => c.content?.length ?? 0),
})
if (
Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 &&
agentContexts.length === 0
) {
reqLogger.warn(
'Contexts provided but none processed. Check executionId for logs contexts.'
if (agentContexts.length === 0) {
logger.warn(
`[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.`
)
}
} catch (e) {
reqLogger.error('Failed to process contexts', e)
logger.error(`[${tracker.requestId}] Failed to process contexts`, e)
}
}
// 5b. Process resource attachments
if (
Array.isArray(resourceAttachments) &&
resourceAttachments.length > 0 &&
@@ -299,26 +265,30 @@ export async function POST(req: NextRequest) {
actualChatId
)
if (!ctx) return null
return {
...ctx,
tag: r.active ? '@active_tab' : '@open_tab',
}
return { ...ctx, tag: r.active ? '@active_tab' : '@open_tab' }
})
)
for (const result of results) {
if (result.status === 'fulfilled' && result.value) {
agentContexts.push(result.value)
} else if (result.status === 'rejected') {
reqLogger.error('Failed to resolve resource attachment', result.reason)
logger.error(
`[${tracker.requestId}] Failed to resolve resource attachment`,
result.reason
)
}
}
}
const effectiveMode = mode === 'agent' ? 'build' : mode
// 6. Build copilot request payload
const userPermission = resolvedWorkspaceId
? await getUserEntityPermissions(authenticatedUserId, 'workspace', resolvedWorkspaceId).catch(
() => null
(err) => {
logger.warn('Failed to load user permissions', {
error: err instanceof Error ? err.message : String(err),
})
return null
}
)
: null
@@ -342,55 +312,24 @@ export async function POST(req: NextRequest) {
userPermission: userPermission ?? undefined,
userTimezone,
},
{
selectedModel,
}
{ selectedModel }
)
try {
reqLogger.info('About to call Sim Agent', {
hasContext: agentContexts.length > 0,
contextCount: agentContexts.length,
hasFileAttachments: Array.isArray(requestPayload.fileAttachments),
messageLength: message.length,
mode: effectiveMode,
hasTools: Array.isArray(requestPayload.tools),
toolCount: Array.isArray(requestPayload.tools) ? requestPayload.tools.length : 0,
hasBaseTools: Array.isArray(requestPayload.baseTools),
baseToolCount: Array.isArray(requestPayload.baseTools)
? requestPayload.baseTools.length
: 0,
hasCredentials: !!requestPayload.credentials,
})
} catch {}
if (stream && actualChatId) {
const acquired = await acquirePendingChatStream(actualChatId, userMessageIdToUse)
if (!acquired) {
return NextResponse.json(
{
error:
'A response is already in progress for this chat. Wait for it to finish or use Stop.',
},
{ status: 409 }
)
}
pendingChatStreamAcquired = true
pendingChatStreamID = userMessageIdToUse
}
logger.info(`[${tracker.requestId}] About to call Sim Agent`, {
contextCount: agentContexts.length,
hasFileAttachments: Array.isArray(requestPayload.fileAttachments),
messageLength: message.length,
mode,
})
// 7. Persist user message
if (actualChatId) {
const userMsg = {
const userMsg = buildPersistedUserMessage({
id: userMessageIdToUse,
role: 'user' as const,
content: message,
timestamp: new Date().toISOString(),
...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
...(Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 && {
contexts: normalizedContexts,
}),
}
fileAttachments,
contexts: normalizedContexts,
})
const [updated] = await db
.update(copilotChats)
@@ -403,268 +342,67 @@ export async function POST(req: NextRequest) {
.returning({ messages: copilotChats.messages })
if (updated) {
const freshMessages: any[] = Array.isArray(updated.messages) ? updated.messages : []
conversationHistory = freshMessages.filter((m: any) => m.id !== userMessageIdToUse)
const freshMessages: Record<string, unknown>[] = Array.isArray(updated.messages)
? updated.messages
: []
conversationHistory = freshMessages.filter(
(m: Record<string, unknown>) => m.id !== userMessageIdToUse
)
}
}
if (stream) {
const executionId = generateId()
const runId = generateId()
const sseStream = createSSEStream({
requestPayload,
userId: authenticatedUserId,
streamId: userMessageIdToUse,
executionId,
runId,
chatId: actualChatId,
currentChat,
isNewChat: conversationHistory.length === 0,
message,
titleModel: selectedModel,
titleProvider: provider,
requestId: tracker.requestId,
workspaceId: resolvedWorkspaceId,
pendingChatStreamAlreadyRegistered: Boolean(actualChatId && stream),
orchestrateOptions: {
userId: authenticatedUserId,
workflowId,
chatId: actualChatId,
executionId,
runId,
goRoute: '/api/copilot',
autoExecuteTools: true,
interactive: true,
onComplete: async (result: OrchestratorResult) => {
if (!actualChatId) return
if (!result.success) return
// 8. Create SSE stream with onComplete for assistant message persistence
const executionId = crypto.randomUUID()
const runId = crypto.randomUUID()
const assistantMessage: Record<string, unknown> = {
id: generateId(),
role: 'assistant' as const,
content: result.content,
timestamp: new Date().toISOString(),
...(result.requestId ? { requestId: result.requestId } : {}),
}
if (result.toolCalls.length > 0) {
assistantMessage.toolCalls = result.toolCalls
}
if (result.contentBlocks.length > 0) {
assistantMessage.contentBlocks = result.contentBlocks.map((block) => {
const stored: Record<string, unknown> = { type: block.type }
if (block.content) stored.content = block.content
if (block.type === 'tool_call' && block.toolCall) {
const state =
block.toolCall.result?.success !== undefined
? block.toolCall.result.success
? 'success'
: 'error'
: block.toolCall.status
const isSubagentTool = !!block.calledBy
const isNonTerminal =
state === 'cancelled' || state === 'pending' || state === 'executing'
stored.toolCall = {
id: block.toolCall.id,
name: block.toolCall.name,
state,
...(isSubagentTool && isNonTerminal ? {} : { result: block.toolCall.result }),
...(isSubagentTool && isNonTerminal
? {}
: block.toolCall.params
? { params: block.toolCall.params }
: {}),
...(block.calledBy ? { calledBy: block.calledBy } : {}),
}
}
return stored
})
}
try {
const [row] = await db
.select({ messages: copilotChats.messages })
.from(copilotChats)
.where(eq(copilotChats.id, actualChatId))
.limit(1)
const msgs: any[] = Array.isArray(row?.messages) ? row.messages : []
const userIdx = msgs.findIndex((m: any) => m.id === userMessageIdToUse)
const alreadyHasResponse =
userIdx >= 0 &&
userIdx + 1 < msgs.length &&
(msgs[userIdx + 1] as any)?.role === 'assistant'
if (!alreadyHasResponse) {
await db
.update(copilotChats)
.set({
messages: sql`${copilotChats.messages} || ${JSON.stringify([assistantMessage])}::jsonb`,
conversationId: sql`CASE WHEN ${copilotChats.conversationId} = ${userMessageIdToUse} THEN NULL ELSE ${copilotChats.conversationId} END`,
updatedAt: new Date(),
})
.where(eq(copilotChats.id, actualChatId))
}
} catch (error) {
reqLogger.error('Failed to persist chat messages', {
chatId: actualChatId,
error: error instanceof Error ? error.message : 'Unknown error',
})
}
},
},
})
pendingChatStreamHandedOff = true
return new Response(sseStream, { headers: SSE_RESPONSE_HEADERS })
}
const nsExecutionId = generateId()
const nsRunId = generateId()
if (actualChatId) {
await createRunSegment({
id: nsRunId,
executionId: nsExecutionId,
chatId: actualChatId,
const sseStream = createSSEStream({
requestPayload,
userId: authenticatedUserId,
streamId: userMessageIdToUse,
executionId,
runId,
chatId: actualChatId,
currentChat,
isNewChat: conversationHistory.length === 0,
message,
titleModel: selectedModel,
titleProvider: provider,
requestId: tracker.requestId,
workspaceId: resolvedWorkspaceId,
orchestrateOptions: {
userId: authenticatedUserId,
workflowId,
streamId: userMessageIdToUse,
}).catch(() => {})
}
const nonStreamingResult = await orchestrateCopilotStream(requestPayload, {
userId: authenticatedUserId,
workflowId,
chatId: actualChatId,
executionId: nsExecutionId,
runId: nsRunId,
goRoute: '/api/copilot',
autoExecuteTools: true,
interactive: true,
})
const responseData = {
content: nonStreamingResult.content,
toolCalls: nonStreamingResult.toolCalls,
model: selectedModel,
provider: typeof requestPayload?.provider === 'string' ? requestPayload.provider : undefined,
}
reqLogger.info('Non-streaming response from orchestrator', {
hasContent: !!responseData.content,
contentLength: responseData.content?.length || 0,
model: responseData.model,
provider: responseData.provider,
toolCallsCount: responseData.toolCalls?.length || 0,
})
// Save messages if we have a chat
if (currentChat && responseData.content) {
const userMessage = {
id: userMessageIdToUse, // Consistent ID used for request and persistence
role: 'user',
content: message,
timestamp: new Date().toISOString(),
...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
...(Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 && {
contexts: normalizedContexts,
}),
...(Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 && {
contentBlocks: [
{ type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() },
],
}),
}
const assistantMessage = {
id: generateId(),
role: 'assistant',
content: responseData.content,
timestamp: new Date().toISOString(),
}
const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
// Start title generation in parallel if this is first message (non-streaming)
if (actualChatId && !currentChat.title && conversationHistory.length === 0) {
reqLogger.info('Starting title generation for non-streaming response')
requestChatTitle({ message, model: selectedModel, provider, messageId: userMessageIdToUse })
.then(async (title) => {
if (title) {
await db
.update(copilotChats)
.set({
title,
updatedAt: new Date(),
})
.where(eq(copilotChats.id, actualChatId!))
reqLogger.info(`Generated and saved title: ${title}`)
}
})
.catch((error) => {
reqLogger.error('Title generation failed', error)
})
}
// Update chat in database immediately (without blocking for title)
await db
.update(copilotChats)
.set({
messages: updatedMessages,
updatedAt: new Date(),
})
.where(eq(copilotChats.id, actualChatId!))
}
reqLogger.info('Returning non-streaming response', {
duration: tracker.getDuration(),
chatId: actualChatId,
responseLength: responseData.content?.length || 0,
})
return NextResponse.json({
success: true,
response: responseData,
chatId: actualChatId,
metadata: {
requestId: tracker.requestId,
message,
duration: tracker.getDuration(),
chatId: actualChatId,
executionId,
runId,
goRoute: '/api/copilot',
autoExecuteTools: true,
interactive: true,
onComplete: buildOnComplete(actualChatId, userMessageIdToUse, tracker.requestId),
onError: buildOnError(actualChatId, userMessageIdToUse, tracker.requestId),
},
})
return new Response(sseStream, { headers: SSE_RESPONSE_HEADERS })
} catch (error) {
if (
actualChatId &&
pendingChatStreamAcquired &&
!pendingChatStreamHandedOff &&
pendingChatStreamID
) {
await releasePendingChatStream(actualChatId, pendingChatStreamID).catch(() => {})
if (chatStreamLockAcquired && actualChatId && userMessageIdToUse) {
await releasePendingChatStream(actualChatId, userMessageIdToUse)
}
const duration = tracker.getDuration()
if (error instanceof z.ZodError) {
logger
.withMetadata({ requestId: tracker.requestId, messageId: pendingChatStreamID ?? undefined })
.error('Validation error', {
duration,
errors: error.errors,
})
logger.error(`[${tracker.requestId}] Validation error:`, { duration, errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
logger
.withMetadata({ requestId: tracker.requestId, messageId: pendingChatStreamID ?? undefined })
.error('Error handling copilot chat', {
duration,
error: error instanceof Error ? error.message : 'Unknown error',
stack: error instanceof Error ? error.stack : undefined,
})
logger.error(`[${tracker.requestId}] Error handling copilot chat:`, {
duration,
error: error instanceof Error ? error.message : 'Unknown error',
stack: error instanceof Error ? error.stack : undefined,
})
return NextResponse.json(
{ error: error instanceof Error ? error.message : 'Internal server error' },
@@ -673,132 +411,56 @@ export async function POST(req: NextRequest) {
}
}
export async function GET(req: NextRequest) {
try {
const { searchParams } = new URL(req.url)
const workflowId = searchParams.get('workflowId')
const workspaceId = searchParams.get('workspaceId')
const chatId = searchParams.get('chatId')
// ---------------------------------------------------------------------------
// onComplete: persist assistant message after streaming finishes
// ---------------------------------------------------------------------------
const { userId: authenticatedUserId, isAuthenticated } =
await authenticateCopilotRequestSessionOnly()
if (!isAuthenticated || !authenticatedUserId) {
return createUnauthorizedResponse()
}
function buildOnComplete(
chatId: string | undefined,
userMessageId: string,
requestId: string
): (result: OrchestratorResult) => Promise<void> {
return async (result) => {
if (!chatId) return
if (chatId) {
const chat = await getAccessibleCopilotChat(chatId, authenticatedUserId)
if (!chat) {
return NextResponse.json({ success: false, error: 'Chat not found' }, { status: 404 })
}
let streamSnapshot: {
events: Array<{ eventId: number; streamId: string; event: Record<string, unknown> }>
status: string
} | null = null
if (chat.conversationId) {
try {
const [meta, events] = await Promise.all([
getStreamMeta(chat.conversationId),
readStreamEvents(chat.conversationId, 0),
])
streamSnapshot = {
events: events || [],
status: meta?.status || 'unknown',
}
} catch (err) {
logger
.withMetadata({ messageId: chat.conversationId || undefined })
.warn('Failed to read stream snapshot for chat', {
chatId,
conversationId: chat.conversationId,
error: err instanceof Error ? err.message : String(err),
})
}
}
const transformedChat = {
id: chat.id,
title: chat.title,
model: chat.model,
messages: Array.isArray(chat.messages) ? chat.messages : [],
messageCount: Array.isArray(chat.messages) ? chat.messages.length : 0,
planArtifact: chat.planArtifact || null,
config: chat.config || null,
conversationId: chat.conversationId || null,
resources: Array.isArray(chat.resources) ? chat.resources : [],
createdAt: chat.createdAt,
updatedAt: chat.updatedAt,
...(streamSnapshot ? { streamSnapshot } : {}),
}
logger
.withMetadata({ messageId: chat.conversationId || undefined })
.info(`Retrieved chat ${chatId}`)
return NextResponse.json({ success: true, chat: transformedChat })
}
if (!workflowId && !workspaceId) {
return createBadRequestResponse('workflowId, workspaceId, or chatId is required')
}
if (workspaceId) {
await assertActiveWorkspaceAccess(workspaceId, authenticatedUserId)
}
if (workflowId) {
const authorization = await authorizeWorkflowByWorkspacePermission({
workflowId,
userId: authenticatedUserId,
action: 'read',
try {
await finalizeAssistantTurn({
chatId,
userMessageId,
...(result.success
? { assistantMessage: buildPersistedAssistantMessage(result, result.requestId) }
: {}),
})
if (!authorization.allowed) {
return createUnauthorizedResponse()
}
}
const scopeFilter = workflowId
? eq(copilotChats.workflowId, workflowId)
: eq(copilotChats.workspaceId, workspaceId!)
const chats = await db
.select({
id: copilotChats.id,
title: copilotChats.title,
model: copilotChats.model,
messages: copilotChats.messages,
planArtifact: copilotChats.planArtifact,
config: copilotChats.config,
createdAt: copilotChats.createdAt,
updatedAt: copilotChats.updatedAt,
} catch (error) {
logger.error(`[${requestId}] Failed to persist chat messages`, {
chatId,
error: error instanceof Error ? error.message : 'Unknown error',
})
.from(copilotChats)
.where(and(eq(copilotChats.userId, authenticatedUserId), scopeFilter))
.orderBy(desc(copilotChats.updatedAt))
const transformedChats = chats.map((chat) => ({
id: chat.id,
title: chat.title,
model: chat.model,
messages: Array.isArray(chat.messages) ? chat.messages : [],
messageCount: Array.isArray(chat.messages) ? chat.messages.length : 0,
planArtifact: chat.planArtifact || null,
config: chat.config || null,
createdAt: chat.createdAt,
updatedAt: chat.updatedAt,
}))
const scope = workflowId ? `workflow ${workflowId}` : `workspace ${workspaceId}`
logger.info(`Retrieved ${transformedChats.length} chats for ${scope}`)
return NextResponse.json({
success: true,
chats: transformedChats,
})
} catch (error) {
logger.error('Error fetching copilot chats', error)
return createInternalServerErrorResponse('Failed to fetch chats')
}
}
}
/**
 * Builds the stream onError callback: when the orchestrated turn fails,
 * finalize the assistant turn for the chat (called without an assistant
 * message) so the conversation is not left in a pending state. Finalization
 * failures are logged with the request id rather than rethrown.
 */
function buildOnError(
  chatId: string | undefined,
  userMessageId: string,
  requestId: string
): () => Promise<void> {
  return async () => {
    // Nothing to finalize when the request never resolved a chat.
    if (!chatId) {
      return
    }
    try {
      await finalizeAssistantTurn({ chatId, userMessageId })
    } catch (finalizeErr) {
      const message = finalizeErr instanceof Error ? finalizeErr.message : 'Unknown error'
      logger.error(`[${requestId}] Failed to finalize errored chat stream`, {
        chatId,
        error: message,
      })
    }
  }
}
// ---------------------------------------------------------------------------
// GET handler (read-only queries, extracted to queries.ts)
// ---------------------------------------------------------------------------
export { GET } from './queries'

View File

@@ -4,25 +4,70 @@
import { NextRequest } from 'next/server'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import {
MothershipStreamV1CompletionStatus,
MothershipStreamV1EventType,
} from '@/lib/copilot/generated/mothership-stream-v1'
const { getStreamMeta, readStreamEvents, authenticateCopilotRequestSessionOnly } = vi.hoisted(
() => ({
getStreamMeta: vi.fn(),
readStreamEvents: vi.fn(),
authenticateCopilotRequestSessionOnly: vi.fn(),
})
)
vi.mock('@/lib/copilot/orchestrator/stream/buffer', () => ({
getStreamMeta,
readStreamEvents,
const {
getLatestRunForStream,
readEvents,
readFilePreviewSessions,
checkForReplayGap,
authenticateCopilotRequestSessionOnly,
} = vi.hoisted(() => ({
getLatestRunForStream: vi.fn(),
readEvents: vi.fn(),
readFilePreviewSessions: vi.fn(),
checkForReplayGap: vi.fn(),
authenticateCopilotRequestSessionOnly: vi.fn(),
}))
vi.mock('@/lib/copilot/request-helpers', () => ({
vi.mock('@/lib/copilot/async-runs/repository', () => ({
getLatestRunForStream,
}))
vi.mock('@/lib/copilot/request/session', () => ({
readEvents,
readFilePreviewSessions,
checkForReplayGap,
createEvent: (event: Record<string, unknown>) => ({
stream: {
streamId: event.streamId,
cursor: event.cursor,
},
seq: event.seq,
trace: { requestId: event.requestId ?? '' },
type: event.type,
payload: event.payload,
}),
encodeSSEEnvelope: (event: Record<string, unknown>) =>
new TextEncoder().encode(`data: ${JSON.stringify(event)}\n\n`),
SSE_RESPONSE_HEADERS: {
'Content-Type': 'text/event-stream',
},
}))
vi.mock('@/lib/copilot/request/http', () => ({
authenticateCopilotRequestSessionOnly,
}))
import { GET } from '@/app/api/copilot/chat/stream/route'
import { GET } from './route'
async function readAllChunks(response: Response): Promise<string[]> {
const reader = response.body?.getReader()
expect(reader).toBeTruthy()
const chunks: string[] = []
while (true) {
const { done, value } = await reader!.read()
if (done) {
break
}
chunks.push(new TextDecoder().decode(value))
}
return chunks
}
describe('copilot chat stream replay route', () => {
beforeEach(() => {
@@ -31,29 +76,95 @@ describe('copilot chat stream replay route', () => {
userId: 'user-1',
isAuthenticated: true,
})
readStreamEvents.mockResolvedValue([])
readEvents.mockResolvedValue([])
readFilePreviewSessions.mockResolvedValue([])
checkForReplayGap.mockResolvedValue(null)
})
it('stops replay polling when stream meta becomes cancelled', async () => {
getStreamMeta
it('returns preview sessions in batch mode', async () => {
getLatestRunForStream.mockResolvedValue({
status: 'active',
executionId: 'exec-1',
id: 'run-1',
})
readFilePreviewSessions.mockResolvedValue([
{
schemaVersion: 1,
id: 'preview-1',
streamId: 'stream-1',
toolCallId: 'preview-1',
status: 'streaming',
fileName: 'draft.md',
previewText: 'hello',
previewVersion: 2,
updatedAt: '2026-04-10T00:00:00.000Z',
},
])
const response = await GET(
new NextRequest(
'http://localhost:3000/api/copilot/chat/stream?streamId=stream-1&after=0&batch=true'
)
)
expect(response.status).toBe(200)
await expect(response.json()).resolves.toMatchObject({
success: true,
previewSessions: [
expect.objectContaining({
id: 'preview-1',
previewText: 'hello',
previewVersion: 2,
}),
],
status: 'active',
})
})
it('stops replay polling when run becomes cancelled', async () => {
getLatestRunForStream
.mockResolvedValueOnce({
status: 'active',
userId: 'user-1',
executionId: 'exec-1',
id: 'run-1',
})
.mockResolvedValueOnce({
status: 'cancelled',
userId: 'user-1',
executionId: 'exec-1',
id: 'run-1',
})
const response = await GET(
new NextRequest('http://localhost:3000/api/copilot/chat/stream?streamId=stream-1')
new NextRequest('http://localhost:3000/api/copilot/chat/stream?streamId=stream-1&after=0')
)
const reader = response.body?.getReader()
expect(reader).toBeTruthy()
const chunks = await readAllChunks(response)
expect(chunks.join('')).toContain(
JSON.stringify({
status: MothershipStreamV1CompletionStatus.cancelled,
reason: 'terminal_status',
})
)
expect(getLatestRunForStream).toHaveBeenCalledTimes(2)
})
const first = await reader!.read()
expect(first.done).toBe(true)
expect(getStreamMeta).toHaveBeenCalledTimes(2)
it('emits structured terminal replay error when run metadata disappears', async () => {
getLatestRunForStream
.mockResolvedValueOnce({
status: 'active',
executionId: 'exec-1',
id: 'run-1',
})
.mockResolvedValueOnce(null)
const response = await GET(
new NextRequest('http://localhost:3000/api/copilot/chat/stream?streamId=stream-1&after=0')
)
const chunks = await readAllChunks(response)
const body = chunks.join('')
expect(body).toContain(`"type":"${MothershipStreamV1EventType.error}"`)
expect(body).toContain('"code":"resume_run_unavailable"')
expect(body).toContain(`"type":"${MothershipStreamV1EventType.complete}"`)
})
})

View File

@@ -1,12 +1,20 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { getLatestRunForStream } from '@/lib/copilot/async-runs/repository'
import {
getStreamMeta,
readStreamEvents,
type StreamMeta,
} from '@/lib/copilot/orchestrator/stream/buffer'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
MothershipStreamV1CompletionStatus,
MothershipStreamV1EventType,
} from '@/lib/copilot/generated/mothership-stream-v1'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request/http'
import {
checkForReplayGap,
createEvent,
encodeSSEEnvelope,
readEvents,
readFilePreviewSessions,
SSE_RESPONSE_HEADERS,
} from '@/lib/copilot/request/session'
import { toStreamBatchEvent } from '@/lib/copilot/request/session/types'
export const maxDuration = 3600
@@ -14,8 +22,59 @@ const logger = createLogger('CopilotChatStreamAPI')
const POLL_INTERVAL_MS = 250
const MAX_STREAM_MS = 60 * 60 * 1000
function encodeEvent(event: Record<string, any>): Uint8Array {
return new TextEncoder().encode(`data: ${JSON.stringify(event)}\n\n`)
function isTerminalStatus(
status: string | null | undefined
): status is MothershipStreamV1CompletionStatus {
return (
status === MothershipStreamV1CompletionStatus.complete ||
status === MothershipStreamV1CompletionStatus.error ||
status === MothershipStreamV1CompletionStatus.cancelled
)
}
function buildResumeTerminalEnvelopes(options: {
streamId: string
afterCursor: string
status: MothershipStreamV1CompletionStatus
message?: string
code: string
reason?: string
}) {
const baseSeq = Number(options.afterCursor || '0')
const seq = Number.isFinite(baseSeq) ? baseSeq : 0
const envelopes: ReturnType<typeof createEvent>[] = []
if (options.status === MothershipStreamV1CompletionStatus.error) {
envelopes.push(
createEvent({
streamId: options.streamId,
cursor: String(seq + 1),
seq: seq + 1,
requestId: '',
type: MothershipStreamV1EventType.error,
payload: {
message: options.message || 'Stream recovery failed before completion.',
code: options.code,
},
})
)
}
envelopes.push(
createEvent({
streamId: options.streamId,
cursor: String(seq + envelopes.length + 1),
seq: seq + envelopes.length + 1,
requestId: '',
type: MothershipStreamV1EventType.complete,
payload: {
status: options.status,
...(options.reason ? { reason: options.reason } : {}),
},
})
)
return envelopes
}
export async function GET(request: NextRequest) {
@@ -28,58 +87,56 @@ export async function GET(request: NextRequest) {
const url = new URL(request.url)
const streamId = url.searchParams.get('streamId') || ''
const fromParam = url.searchParams.get('from') || '0'
const fromEventId = Number(fromParam || 0)
// If batch=true, return buffered events as JSON instead of SSE
const afterCursor = url.searchParams.get('after') || ''
const batchMode = url.searchParams.get('batch') === 'true'
const toParam = url.searchParams.get('to')
const toEventId = toParam ? Number(toParam) : undefined
const reqLogger = logger.withMetadata({ messageId: streamId || undefined })
reqLogger.info('[Resume] Received resume request', {
streamId: streamId || undefined,
fromEventId,
toEventId,
batchMode,
})
if (!streamId) {
return NextResponse.json({ error: 'streamId is required' }, { status: 400 })
}
const meta = (await getStreamMeta(streamId)) as StreamMeta | null
reqLogger.info('[Resume] Stream lookup', {
streamId,
fromEventId,
toEventId,
batchMode,
hasMeta: !!meta,
metaStatus: meta?.status,
const run = await getLatestRunForStream(streamId, authenticatedUserId).catch((err) => {
logger.warn('Failed to fetch latest run for stream', {
streamId,
error: err instanceof Error ? err.message : String(err),
})
return null
})
if (!meta) {
logger.info('[Resume] Stream lookup', {
streamId,
afterCursor,
batchMode,
hasRun: !!run,
runStatus: run?.status,
})
if (!run) {
return NextResponse.json({ error: 'Stream not found' }, { status: 404 })
}
if (meta.userId && meta.userId !== authenticatedUserId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 403 })
}
// Batch mode: return all buffered events as JSON
if (batchMode) {
const events = await readStreamEvents(streamId, fromEventId)
const filteredEvents = toEventId ? events.filter((e) => e.eventId <= toEventId) : events
reqLogger.info('[Resume] Batch response', {
const afterSeq = afterCursor || '0'
const [events, previewSessions] = await Promise.all([
readEvents(streamId, afterSeq),
readFilePreviewSessions(streamId).catch((error) => {
logger.warn('Failed to read preview sessions for stream batch', {
streamId,
error: error instanceof Error ? error.message : String(error),
})
return []
}),
])
const batchEvents = events.map(toStreamBatchEvent)
logger.info('[Resume] Batch response', {
streamId,
fromEventId,
toEventId,
eventCount: filteredEvents.length,
afterCursor: afterSeq,
eventCount: batchEvents.length,
previewSessionCount: previewSessions.length,
runStatus: run.status,
})
return NextResponse.json({
success: true,
events: filteredEvents,
status: meta.status,
executionId: meta.executionId,
runId: meta.runId,
events: batchEvents,
previewSessions,
status: run.status,
})
}
@@ -87,9 +144,9 @@ export async function GET(request: NextRequest) {
const stream = new ReadableStream({
async start(controller) {
let lastEventId = Number.isFinite(fromEventId) ? fromEventId : 0
let latestMeta = meta
let cursor = afterCursor || '0'
let controllerClosed = false
let sawTerminalEvent = false
const closeController = () => {
if (controllerClosed) return
@@ -97,14 +154,14 @@ export async function GET(request: NextRequest) {
try {
controller.close()
} catch {
// Controller already closed by runtime/client - treat as normal.
// Controller already closed by runtime/client
}
}
const enqueueEvent = (payload: Record<string, any>) => {
const enqueueEvent = (payload: unknown) => {
if (controllerClosed) return false
try {
controller.enqueue(encodeEvent(payload))
controller.enqueue(encodeSSEEnvelope(payload))
return true
} catch {
controllerClosed = true
@@ -118,47 +175,96 @@ export async function GET(request: NextRequest) {
request.signal.addEventListener('abort', abortListener, { once: true })
const flushEvents = async () => {
const events = await readStreamEvents(streamId, lastEventId)
const events = await readEvents(streamId, cursor)
if (events.length > 0) {
reqLogger.info('[Resume] Flushing events', {
logger.info('[Resume] Flushing events', {
streamId,
fromEventId: lastEventId,
afterCursor: cursor,
eventCount: events.length,
})
}
for (const entry of events) {
lastEventId = entry.eventId
const payload = {
...entry.event,
eventId: entry.eventId,
streamId: entry.streamId,
executionId: latestMeta?.executionId,
runId: latestMeta?.runId,
for (const envelope of events) {
cursor = envelope.stream.cursor ?? String(envelope.seq)
if (envelope.type === MothershipStreamV1EventType.complete) {
sawTerminalEvent = true
}
if (!enqueueEvent(payload)) {
if (!enqueueEvent(envelope)) {
break
}
}
}
const emitTerminalIfMissing = (
status: MothershipStreamV1CompletionStatus,
options?: { message?: string; code: string; reason?: string }
) => {
if (controllerClosed || sawTerminalEvent) {
return
}
for (const envelope of buildResumeTerminalEnvelopes({
streamId,
afterCursor: cursor,
status,
message: options?.message,
code: options?.code ?? 'resume_terminal',
reason: options?.reason,
})) {
cursor = envelope.stream.cursor ?? String(envelope.seq)
if (envelope.type === MothershipStreamV1EventType.complete) {
sawTerminalEvent = true
}
if (!enqueueEvent(envelope)) {
break
}
}
}
try {
const gap = await checkForReplayGap(streamId, afterCursor)
if (gap) {
for (const envelope of gap.envelopes) {
enqueueEvent(envelope)
}
return
}
await flushEvents()
while (!controllerClosed && Date.now() - startTime < MAX_STREAM_MS) {
const currentMeta = await getStreamMeta(streamId)
if (!currentMeta) break
latestMeta = currentMeta
const currentRun = await getLatestRunForStream(streamId, authenticatedUserId).catch(
(err) => {
logger.warn('Failed to poll latest run for stream', {
streamId,
error: err instanceof Error ? err.message : String(err),
})
return null
}
)
if (!currentRun) {
emitTerminalIfMissing(MothershipStreamV1CompletionStatus.error, {
message: 'The stream could not be recovered because its run metadata is unavailable.',
code: 'resume_run_unavailable',
reason: 'run_unavailable',
})
break
}
await flushEvents()
if (controllerClosed) {
break
}
if (
currentMeta.status === 'complete' ||
currentMeta.status === 'error' ||
currentMeta.status === 'cancelled'
) {
if (isTerminalStatus(currentRun.status)) {
emitTerminalIfMissing(currentRun.status, {
message:
currentRun.status === MothershipStreamV1CompletionStatus.error
? typeof currentRun.error === 'string'
? currentRun.error
: 'The recovered stream ended with an error.'
: undefined,
code: 'resume_terminal_status',
reason: 'terminal_status',
})
break
}
@@ -169,12 +275,24 @@ export async function GET(request: NextRequest) {
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
}
if (!controllerClosed && Date.now() - startTime >= MAX_STREAM_MS) {
emitTerminalIfMissing(MothershipStreamV1CompletionStatus.error, {
message: 'The stream recovery timed out before completion.',
code: 'resume_timeout',
reason: 'timeout',
})
}
} catch (error) {
if (!controllerClosed && !request.signal.aborted) {
reqLogger.warn('Stream replay failed', {
logger.warn('Stream replay failed', {
streamId,
error: error instanceof Error ? error.message : String(error),
})
emitTerminalIfMissing(MothershipStreamV1CompletionStatus.error, {
message: 'The stream replay failed before completion.',
code: 'resume_internal',
reason: 'stream_replay_failed',
})
}
} finally {
request.signal.removeEventListener('abort', abortListener)
@@ -183,5 +301,5 @@ export async function GET(request: NextRequest) {
},
})
return new Response(stream, { headers: SSE_HEADERS })
return new Response(stream, { headers: SSE_RESPONSE_HEADERS })
}

View File

@@ -327,7 +327,35 @@ describe('Copilot Chat Update Messages API Route', () => {
})
expect(mockSet).toHaveBeenCalledWith({
messages,
messages: [
{
id: 'msg-1',
role: 'user',
content: 'Hello',
timestamp: '2024-01-01T10:00:00.000Z',
},
{
id: 'msg-2',
role: 'assistant',
content: 'Hi there!',
timestamp: '2024-01-01T10:01:00.000Z',
contentBlocks: [
{
type: 'text',
content: 'Here is the weather information',
},
{
type: 'tool',
phase: 'call',
toolCall: {
id: 'tool-1',
name: 'get_weather',
state: 'pending',
},
},
],
},
],
updatedAt: expect.any(Date),
})
})

View File

@@ -4,15 +4,16 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat-lifecycle'
import { COPILOT_MODES } from '@/lib/copilot/models'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat/lifecycle'
import { normalizeMessage, type PersistedMessage } from '@/lib/copilot/chat/persisted-message'
import { COPILOT_MODES } from '@/lib/copilot/constants'
import {
authenticateCopilotRequestSessionOnly,
createInternalServerErrorResponse,
createNotFoundResponse,
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
} from '@/lib/copilot/request/http'
const logger = createLogger('CopilotChatUpdateAPI')
@@ -78,12 +79,15 @@ export async function POST(req: NextRequest) {
}
const { chatId, messages, planArtifact, config } = UpdateMessagesSchema.parse(body)
const normalizedMessages: PersistedMessage[] = messages.map((message) =>
normalizeMessage(message as Record<string, unknown>)
)
// Debug: Log what we're about to save
const lastMsgParsed = messages[messages.length - 1]
const lastMsgParsed = normalizedMessages[normalizedMessages.length - 1]
if (lastMsgParsed?.role === 'assistant') {
logger.info(`[${tracker.requestId}] Parsed messages to save`, {
messageCount: messages.length,
messageCount: normalizedMessages.length,
lastMsgId: lastMsgParsed.id,
lastMsgContentLength: lastMsgParsed.content?.length || 0,
lastMsgContentBlockCount: lastMsgParsed.contentBlocks?.length || 0,
@@ -99,8 +103,8 @@ export async function POST(req: NextRequest) {
}
// Update chat with new messages, plan artifact, and config
const updateData: Record<string, any> = {
messages: messages,
const updateData: Record<string, unknown> = {
messages: normalizedMessages,
updatedAt: new Date(),
}
@@ -116,14 +120,14 @@ export async function POST(req: NextRequest) {
logger.info(`[${tracker.requestId}] Successfully updated chat`, {
chatId,
newMessageCount: messages.length,
newMessageCount: normalizedMessages.length,
hasPlanArtifact: !!planArtifact,
hasConfig: !!config,
})
return NextResponse.json({
success: true,
messageCount: messages.length,
messageCount: normalizedMessages.length,
})
} catch (error) {
logger.error(`[${tracker.requestId}] Error updating chat messages:`, error)

View File

@@ -66,7 +66,7 @@ vi.mock('drizzle-orm', () => ({
sql: vi.fn(),
}))
vi.mock('@/lib/copilot/request-helpers', () => ({
vi.mock('@/lib/copilot/request/http', () => ({
authenticateCopilotRequestSessionOnly: mockAuthenticate,
createUnauthorizedResponse: mockCreateUnauthorizedResponse,
createInternalServerErrorResponse: mockCreateInternalServerErrorResponse,

View File

@@ -4,14 +4,14 @@ import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
import { resolveOrCreateChat } from '@/lib/copilot/chat/lifecycle'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
createInternalServerErrorResponse,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { taskPubSub } from '@/lib/copilot/task-events'
} from '@/lib/copilot/request/http'
import { taskPubSub } from '@/lib/copilot/tasks'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { assertActiveWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
@@ -37,7 +37,7 @@ export async function GET(_request: NextRequest) {
title: copilotChats.title,
workflowId: copilotChats.workflowId,
workspaceId: copilotChats.workspaceId,
conversationId: copilotChats.conversationId,
activeStreamId: copilotChats.conversationId,
updatedAt: copilotChats.updatedAt,
})
.from(copilotChats)

View File

@@ -43,7 +43,7 @@ vi.mock('@/lib/workflows/utils', () => ({
authorizeWorkflowByWorkspacePermission: mockAuthorize,
}))
vi.mock('@/lib/copilot/chat-lifecycle', () => ({
vi.mock('@/lib/copilot/chat/lifecycle', () => ({
getAccessibleCopilotChat: mockGetAccessibleCopilotChat,
}))

View File

@@ -4,14 +4,14 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat-lifecycle'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat/lifecycle'
import {
authenticateCopilotRequestSessionOnly,
createInternalServerErrorResponse,
createNotFoundResponse,
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
} from '@/lib/copilot/request/http'
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { isUuidV4 } from '@/executor/constants'

View File

@@ -62,7 +62,7 @@ vi.mock('drizzle-orm', () => ({
desc: vi.fn((field: unknown) => ({ field, type: 'desc' })),
}))
vi.mock('@/lib/copilot/chat-lifecycle', () => ({
vi.mock('@/lib/copilot/chat/lifecycle', () => ({
getAccessibleCopilotChat: mockGetAccessibleCopilotChat,
}))

View File

@@ -4,14 +4,14 @@ import { createLogger } from '@sim/logger'
import { and, desc, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat-lifecycle'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat/lifecycle'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
createInternalServerErrorResponse,
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
} from '@/lib/copilot/request/http'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
const logger = createLogger('WorkflowCheckpointsAPI')

View File

@@ -38,7 +38,7 @@ const {
publishToolConfirmation: vi.fn(),
}))
vi.mock('@/lib/copilot/request-helpers', () => ({
vi.mock('@/lib/copilot/request/http', () => ({
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
createInternalServerErrorResponse,
@@ -54,7 +54,7 @@ vi.mock('@/lib/copilot/async-runs/repository', () => ({
completeAsyncToolCall,
}))
vi.mock('@/lib/copilot/orchestrator/persistence', () => ({
vi.mock('@/lib/copilot/persistence/tool-confirm', () => ({
publishToolConfirmation,
}))

View File

@@ -1,13 +1,14 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { ASYNC_TOOL_STATUS } from '@/lib/copilot/async-runs/lifecycle'
import {
completeAsyncToolCall,
getAsyncToolCall,
getRunSegment,
upsertAsyncToolCall,
} from '@/lib/copilot/async-runs/repository'
import { publishToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
import { publishToolConfirmation } from '@/lib/copilot/persistence/tool-confirm'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
@@ -16,7 +17,7 @@ import {
createRequestTracker,
createUnauthorizedResponse,
type NotificationStatus,
} from '@/lib/copilot/request-helpers'
} from '@/lib/copilot/request/http'
const logger = createLogger('CopilotConfirmAPI')
@@ -42,17 +43,17 @@ async function updateToolCallStatus(
const toolCallId = existing.toolCallId
const durableStatus =
status === 'success'
? 'completed'
? ASYNC_TOOL_STATUS.completed
: status === 'cancelled'
? 'cancelled'
? ASYNC_TOOL_STATUS.cancelled
: status === 'error' || status === 'rejected'
? 'failed'
: 'pending'
? ASYNC_TOOL_STATUS.failed
: ASYNC_TOOL_STATUS.pending
try {
if (
durableStatus === 'completed' ||
durableStatus === 'failed' ||
durableStatus === 'cancelled'
durableStatus === ASYNC_TOOL_STATUS.completed ||
durableStatus === ASYNC_TOOL_STATUS.failed ||
durableStatus === ASYNC_TOOL_STATUS.cancelled
) {
await completeAsyncToolCall({
toolCallId,
@@ -107,13 +108,25 @@ export async function POST(req: NextRequest) {
const body = await req.json()
const { toolCallId, status, message, data } = ConfirmationSchema.parse(body)
const existing = await getAsyncToolCall(toolCallId).catch(() => null)
const existing = await getAsyncToolCall(toolCallId).catch((err) => {
logger.warn('Failed to fetch async tool call', {
toolCallId,
error: err instanceof Error ? err.message : String(err),
})
return null
})
if (!existing) {
return createNotFoundResponse('Tool call not found')
}
const run = await getRunSegment(existing.runId).catch(() => null)
const run = await getRunSegment(existing.runId).catch((err) => {
logger.warn('Failed to fetch run segment', {
runId: existing.runId,
error: err instanceof Error ? err.message : String(err),
})
return null
})
if (!run) {
return createNotFoundResponse('Tool call run not found')
}

View File

@@ -1,5 +1,5 @@
import { type NextRequest, NextResponse } from 'next/server'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request/http'
import { routeExecution } from '@/lib/copilot/tools/server/router'
/**

View File

@@ -57,7 +57,7 @@ vi.mock('drizzle-orm', () => ({
eq: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'eq' })),
}))
vi.mock('@/lib/copilot/request-helpers', () => ({
vi.mock('@/lib/copilot/request/http', () => ({
authenticateCopilotRequestSessionOnly: mockAuthenticate,
createUnauthorizedResponse: mockCreateUnauthorizedResponse,
createBadRequestResponse: mockCreateBadRequestResponse,

View File

@@ -10,7 +10,7 @@ import {
createInternalServerErrorResponse,
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
} from '@/lib/copilot/request/http'
import { captureServerEvent } from '@/lib/posthog/server'
const logger = createLogger('CopilotFeedbackAPI')

View File

@@ -1,8 +1,14 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
import type { AvailableModel } from '@/lib/copilot/types'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request/http'
interface AvailableModel {
id: string
friendlyName: string
provider: string
}
import { env } from '@/lib/core/config/env'
const logger = createLogger('CopilotModelsAPI')

View File

@@ -23,7 +23,7 @@ const {
mockFetch: vi.fn(),
}))
vi.mock('@/lib/copilot/request-helpers', () => ({
vi.mock('@/lib/copilot/request/http', () => ({
authenticateCopilotRequestSessionOnly: mockAuthenticateCopilotRequestSessionOnly,
createUnauthorizedResponse: mockCreateUnauthorizedResponse,
createBadRequestResponse: mockCreateBadRequestResponse,

View File

@@ -7,7 +7,7 @@ import {
createInternalServerErrorResponse,
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
} from '@/lib/copilot/request/http'
import { env } from '@/lib/core/config/env'
const BodySchema = z.object({

View File

@@ -4,7 +4,7 @@ import { z } from 'zod'
import {
authenticateCopilotRequestSessionOnly,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
} from '@/lib/copilot/request/http'
import { env } from '@/lib/core/config/env'
const logger = createLogger('CopilotTrainingExamplesAPI')

View File

@@ -4,7 +4,7 @@ import { z } from 'zod'
import {
authenticateCopilotRequestSessionOnly,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
} from '@/lib/copilot/request/http'
import { env } from '@/lib/core/config/env'
const logger = createLogger('CopilotTrainingAPI')

View File

@@ -75,6 +75,16 @@ vi.mock('@/lib/uploads/utils/file-utils', () => ({
vi.mock('@/lib/uploads/setup.server', () => ({}))
vi.mock('@/lib/execution/doc-vm', () => ({
generatePdfFromCode: vi.fn().mockResolvedValue(Buffer.from('%PDF-compiled')),
generateDocxFromCode: vi.fn().mockResolvedValue(Buffer.from('PK\x03\x04compiled')),
generatePptxFromCode: vi.fn().mockResolvedValue(Buffer.from('PK\x03\x04compiled')),
}))
vi.mock('@/lib/uploads/contexts/workspace/workspace-file-manager', () => ({
parseWorkspaceFileKey: vi.fn().mockReturnValue(undefined),
}))
vi.mock('@/app/api/files/utils', () => ({
FileNotFoundError,
createFileResponse: mockCreateFileResponse,

View File

@@ -4,7 +4,11 @@ import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server'
import { NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generatePptxFromCode } from '@/lib/execution/pptx-vm'
import {
generateDocxFromCode,
generatePdfFromCode,
generatePptxFromCode,
} from '@/lib/execution/doc-vm'
import { CopilotFiles, isUsingCloudStorage } from '@/lib/uploads'
import type { StorageContext } from '@/lib/uploads/config'
import { parseWorkspaceFileKey } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
@@ -22,47 +26,73 @@ import {
const logger = createLogger('FilesServeAPI')
const ZIP_MAGIC = Buffer.from([0x50, 0x4b, 0x03, 0x04])
const PDF_MAGIC = Buffer.from([0x25, 0x50, 0x44, 0x46, 0x2d]) // %PDF-
const MAX_COMPILED_PPTX_CACHE = 10
const compiledPptxCache = new Map<string, Buffer>()
function compiledCacheSet(key: string, buffer: Buffer): void {
if (compiledPptxCache.size >= MAX_COMPILED_PPTX_CACHE) {
compiledPptxCache.delete(compiledPptxCache.keys().next().value as string)
}
compiledPptxCache.set(key, buffer)
interface CompilableFormat {
magic: Buffer
compile: (code: string, workspaceId: string) => Promise<Buffer>
contentType: string
}
async function compilePptxIfNeeded(
const COMPILABLE_FORMATS: Record<string, CompilableFormat> = {
'.pptx': {
magic: ZIP_MAGIC,
compile: generatePptxFromCode,
contentType: 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
},
'.docx': {
magic: ZIP_MAGIC,
compile: generateDocxFromCode,
contentType: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
},
'.pdf': {
magic: PDF_MAGIC,
compile: generatePdfFromCode,
contentType: 'application/pdf',
},
}
const MAX_COMPILED_DOC_CACHE = 10
const compiledDocCache = new Map<string, Buffer>()
function compiledCacheSet(key: string, buffer: Buffer): void {
if (compiledDocCache.size >= MAX_COMPILED_DOC_CACHE) {
compiledDocCache.delete(compiledDocCache.keys().next().value as string)
}
compiledDocCache.set(key, buffer)
}
async function compileDocumentIfNeeded(
buffer: Buffer,
filename: string,
workspaceId?: string,
raw?: boolean
): Promise<{ buffer: Buffer; contentType: string }> {
const isPptx = filename.toLowerCase().endsWith('.pptx')
if (raw || !isPptx || buffer.subarray(0, 4).equals(ZIP_MAGIC)) {
if (raw) return { buffer, contentType: getContentType(filename) }
const ext = filename.slice(filename.lastIndexOf('.')).toLowerCase()
const format = COMPILABLE_FORMATS[ext]
if (!format) return { buffer, contentType: getContentType(filename) }
const magicLen = format.magic.length
if (buffer.length >= magicLen && buffer.subarray(0, magicLen).equals(format.magic)) {
return { buffer, contentType: getContentType(filename) }
}
const code = buffer.toString('utf-8')
const cacheKey = createHash('sha256')
.update(ext)
.update(code)
.update(workspaceId ?? '')
.digest('hex')
const cached = compiledPptxCache.get(cacheKey)
const cached = compiledDocCache.get(cacheKey)
if (cached) {
return {
buffer: cached,
contentType: 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
}
return { buffer: cached, contentType: format.contentType }
}
const compiled = await generatePptxFromCode(code, workspaceId || '')
const compiled = await format.compile(code, workspaceId || '')
compiledCacheSet(cacheKey, compiled)
return {
buffer: compiled,
contentType: 'application/vnd.openxmlformats-officedocument.presentationml.presentation',
}
return { buffer: compiled, contentType: format.contentType }
}
const STORAGE_KEY_PREFIX_RE = /^\d{13}-[a-z0-9]{7}-/
@@ -169,7 +199,7 @@ async function handleLocalFile(
const segment = filename.split('/').pop() || filename
const displayName = stripStorageKeyPrefix(segment)
const workspaceId = getWorkspaceIdForCompile(filename)
const { buffer: fileBuffer, contentType } = await compilePptxIfNeeded(
const { buffer: fileBuffer, contentType } = await compileDocumentIfNeeded(
rawBuffer,
displayName,
workspaceId,
@@ -226,7 +256,7 @@ async function handleCloudProxy(
const segment = cloudKey.split('/').pop() || 'download'
const displayName = stripStorageKeyPrefix(segment)
const workspaceId = getWorkspaceIdForCompile(cloudKey)
const { buffer: fileBuffer, contentType } = await compilePptxIfNeeded(
const { buffer: fileBuffer, contentType } = await compileDocumentIfNeeded(
rawBuffer,
displayName,
workspaceId,

View File

@@ -24,6 +24,27 @@ vi.mock('@/lib/auth/hybrid', () => ({
vi.mock('@/lib/execution/e2b', () => ({
executeInE2B: mockExecuteInE2B,
executeShellInE2B: vi.fn(),
}))
vi.mock('@/lib/copilot/request/tools/files', () => ({
FORMAT_TO_CONTENT_TYPE: {
json: 'application/json',
csv: 'text/csv',
txt: 'text/plain',
md: 'text/markdown',
html: 'text/html',
},
normalizeOutputWorkspaceFileName: vi.fn((p: string) => p.replace(/^files\//, '')),
resolveOutputFormat: vi.fn(() => 'json'),
}))
vi.mock('@/lib/uploads/contexts/workspace/workspace-file-manager', () => ({
uploadWorkspaceFile: vi.fn(),
}))
vi.mock('@/lib/workflows/utils', () => ({
getWorkflowById: vi.fn(),
}))
vi.mock('@/lib/core/config/feature-flags', () => ({
@@ -32,6 +53,7 @@ vi.mock('@/lib/core/config/feature-flags', () => ({
isProd: false,
isDev: false,
isTest: true,
isEmailVerificationEnabled: false,
}))
import { validateProxyUrl } from '@/lib/core/security/input-validation'

View File

@@ -1,11 +1,18 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
FORMAT_TO_CONTENT_TYPE,
normalizeOutputWorkspaceFileName,
resolveOutputFormat,
} from '@/lib/copilot/request/tools/files'
import { isE2bEnabled } from '@/lib/core/config/feature-flags'
import { generateRequestId } from '@/lib/core/utils/request'
import { executeInE2B } from '@/lib/execution/e2b'
import { executeInE2B, executeShellInE2B } from '@/lib/execution/e2b'
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
import { uploadWorkspaceFile } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { getWorkflowById } from '@/lib/workflows/utils'
import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
import { type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
import { formatLiteralForCode } from '@/executor/utils/code-formatting'
@@ -580,6 +587,107 @@ function cleanStdout(stdout: string): string {
return stdout
}
async function maybeExportSandboxFileToWorkspace(args: {
authUserId: string
workflowId?: string
workspaceId?: string
outputPath?: string
outputFormat?: string
outputMimeType?: string
outputSandboxPath?: string
exportedFileContent?: string
stdout: string
executionTime: number
}) {
const {
authUserId,
workflowId,
workspaceId,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
exportedFileContent,
stdout,
executionTime,
} = args
if (!outputSandboxPath) return null
if (!outputPath) {
return NextResponse.json(
{
success: false,
error:
'outputSandboxPath requires outputPath. Set outputPath to the destination workspace file, e.g. "files/result.csv".',
output: { result: null, stdout: cleanStdout(stdout), executionTime },
},
{ status: 400 }
)
}
const resolvedWorkspaceId =
workspaceId || (workflowId ? (await getWorkflowById(workflowId))?.workspaceId : undefined)
if (!resolvedWorkspaceId) {
return NextResponse.json(
{
success: false,
error: 'Workspace context required to save sandbox file to workspace',
output: { result: null, stdout: cleanStdout(stdout), executionTime },
},
{ status: 400 }
)
}
if (exportedFileContent === undefined) {
return NextResponse.json(
{
success: false,
error: `Sandbox file "${outputSandboxPath}" was not found or could not be read`,
output: { result: null, stdout: cleanStdout(stdout), executionTime },
},
{ status: 500 }
)
}
const fileName = normalizeOutputWorkspaceFileName(outputPath)
const TEXT_MIMES = new Set(Object.values(FORMAT_TO_CONTENT_TYPE))
const resolvedMimeType =
outputMimeType ||
FORMAT_TO_CONTENT_TYPE[resolveOutputFormat(fileName, outputFormat)] ||
'application/octet-stream'
const isBinary = !TEXT_MIMES.has(resolvedMimeType)
const fileBuffer = isBinary
? Buffer.from(exportedFileContent, 'base64')
: Buffer.from(exportedFileContent, 'utf-8')
const uploaded = await uploadWorkspaceFile(
resolvedWorkspaceId,
authUserId,
fileBuffer,
fileName,
resolvedMimeType
)
return NextResponse.json({
success: true,
output: {
result: {
message: `Sandbox file exported to files/${fileName}`,
fileId: uploaded.id,
fileName,
downloadUrl: uploaded.url,
sandboxPath: outputSandboxPath,
},
stdout: cleanStdout(stdout),
executionTime,
},
resources: [{ type: 'file', id: uploaded.id, title: fileName }],
})
}
export async function POST(req: NextRequest) {
const requestId = generateRequestId()
const startTime = Date.now()
@@ -603,12 +711,17 @@ export async function POST(req: NextRequest) {
params = {},
timeout = DEFAULT_EXECUTION_TIMEOUT_MS,
language = DEFAULT_CODE_LANGUAGE,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
envVars = {},
blockData = {},
blockNameMapping = {},
blockOutputSchemas = {},
workflowVariables = {},
workflowId,
workspaceId,
isCustomTool = false,
_sandboxFiles,
} = body
@@ -626,18 +739,25 @@ export async function POST(req: NextRequest) {
const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
const codeResolution = resolveCodeVariables(
code,
executionParams,
envVars,
blockData,
blockNameMapping,
blockOutputSchemas,
workflowVariables,
lang
)
resolvedCode = codeResolution.resolvedCode
const contextVariables = codeResolution.contextVariables
let contextVariables: Record<string, unknown> = {}
if (lang === CodeLanguage.Shell) {
// For shell, env vars are injected as OS env vars via shellEnvs.
// Replace {{VAR}} placeholders with $VAR so the shell can access them natively.
resolvedCode = code.replace(/\{\{([A-Za-z_][A-Za-z0-9_]*)\}\}/g, '$$$1')
} else {
const codeResolution = resolveCodeVariables(
code,
executionParams,
envVars,
blockData,
blockNameMapping,
blockOutputSchemas,
workflowVariables,
lang
)
resolvedCode = codeResolution.resolvedCode
contextVariables = codeResolution.contextVariables
}
let jsImports = ''
let jsRemainingCode = resolvedCode
@@ -652,6 +772,83 @@ export async function POST(req: NextRequest) {
hasImports = jsImports.trim().length > 0 || hasRequireStatements
}
if (lang === CodeLanguage.Shell) {
if (!isE2bEnabled) {
throw new Error(
'Shell execution requires E2B to be enabled. Please contact your administrator to enable E2B.'
)
}
const shellEnvs: Record<string, string> = {}
for (const [k, v] of Object.entries(envVars)) {
shellEnvs[k] = String(v)
}
for (const [k, v] of Object.entries(contextVariables)) {
shellEnvs[k] = String(v)
}
logger.info(`[${requestId}] E2B shell execution`, {
enabled: isE2bEnabled,
hasApiKey: Boolean(process.env.E2B_API_KEY),
envVarCount: Object.keys(shellEnvs).length,
})
const execStart = Date.now()
const {
result: shellResult,
stdout: shellStdout,
sandboxId,
error: shellError,
exportedFileContent,
} = await executeShellInE2B({
code: resolvedCode,
envs: shellEnvs,
timeoutMs: timeout,
sandboxFiles: _sandboxFiles,
outputSandboxPath,
})
const executionTime = Date.now() - execStart
logger.info(`[${requestId}] E2B shell sandbox`, {
sandboxId,
stdoutPreview: shellStdout?.slice(0, 200),
error: shellError,
executionTime,
})
if (shellError) {
return NextResponse.json(
{
success: false,
error: shellError,
output: { result: null, stdout: cleanStdout(shellStdout), executionTime },
},
{ status: 500 }
)
}
if (outputSandboxPath) {
const fileExportResponse = await maybeExportSandboxFileToWorkspace({
authUserId: auth.userId,
workflowId,
workspaceId,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
exportedFileContent,
stdout: shellStdout,
executionTime,
})
if (fileExportResponse) return fileExportResponse
}
return NextResponse.json({
success: true,
output: { result: shellResult ?? null, stdout: cleanStdout(shellStdout), executionTime },
})
}
if (lang === CodeLanguage.Python && !isE2bEnabled) {
throw new Error(
'Python execution requires E2B to be enabled. Please contact your administrator to enable E2B, or use JavaScript instead.'
@@ -719,11 +916,13 @@ export async function POST(req: NextRequest) {
stdout: e2bStdout,
sandboxId,
error: e2bError,
exportedFileContent,
} = await executeInE2B({
code: codeForE2B,
language: CodeLanguage.JavaScript,
timeoutMs: timeout,
sandboxFiles: _sandboxFiles,
outputSandboxPath,
})
const executionTime = Date.now() - execStart
stdout += e2bStdout
@@ -752,6 +951,22 @@ export async function POST(req: NextRequest) {
)
}
if (outputSandboxPath) {
const fileExportResponse = await maybeExportSandboxFileToWorkspace({
authUserId: auth.userId,
workflowId,
workspaceId,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
exportedFileContent,
stdout,
executionTime,
})
if (fileExportResponse) return fileExportResponse
}
return NextResponse.json({
success: true,
output: { result: e2bResult ?? null, stdout: cleanStdout(stdout), executionTime },
@@ -783,11 +998,13 @@ export async function POST(req: NextRequest) {
stdout: e2bStdout,
sandboxId,
error: e2bError,
exportedFileContent,
} = await executeInE2B({
code: codeForE2B,
language: CodeLanguage.Python,
timeoutMs: timeout,
sandboxFiles: _sandboxFiles,
outputSandboxPath,
})
const executionTime = Date.now() - execStart
stdout += e2bStdout
@@ -816,6 +1033,22 @@ export async function POST(req: NextRequest) {
)
}
if (outputSandboxPath) {
const fileExportResponse = await maybeExportSandboxFileToWorkspace({
authUserId: auth.userId,
workflowId,
workspaceId,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
exportedFileContent,
stdout,
executionTime,
})
if (fileExportResponse) return fileExportResponse
}
return NextResponse.json({
success: true,
output: { result: e2bResult ?? null, stdout: cleanStdout(stdout), executionTime },

View File

@@ -6,16 +6,16 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'
const {
mockCheckHybridAuth,
mockGetDispatchJobRecord,
mockGetJobQueue,
mockVerifyWorkflowAccess,
mockGetWorkflowById,
mockGetJob,
} = vi.hoisted(() => ({
mockCheckHybridAuth: vi.fn(),
mockGetDispatchJobRecord: vi.fn(),
mockGetJobQueue: vi.fn(),
mockVerifyWorkflowAccess: vi.fn(),
mockGetWorkflowById: vi.fn(),
mockGetJob: vi.fn(),
}))
vi.mock('@sim/logger', () => ({
@@ -32,19 +32,9 @@ vi.mock('@/lib/auth/hybrid', () => ({
}))
vi.mock('@/lib/core/async-jobs', () => ({
JOB_STATUS: {
PENDING: 'pending',
PROCESSING: 'processing',
COMPLETED: 'completed',
FAILED: 'failed',
},
getJobQueue: mockGetJobQueue,
}))
vi.mock('@/lib/core/workspace-dispatch/store', () => ({
getDispatchJobRecord: mockGetDispatchJobRecord,
}))
vi.mock('@/lib/core/utils/request', () => ({
generateRequestId: vi.fn().mockReturnValue('request-1'),
}))
@@ -85,71 +75,51 @@ describe('GET /api/jobs/[jobId]', () => {
})
mockGetJobQueue.mockResolvedValue({
getJob: vi.fn().mockResolvedValue(null),
getJob: mockGetJob,
})
})
it('returns dispatcher-aware waiting status with metadata', async () => {
mockGetDispatchJobRecord.mockResolvedValue({
id: 'dispatch-1',
workspaceId: 'workspace-1',
lane: 'runtime',
queueName: 'workflow-execution',
bullmqJobName: 'workflow-execution',
bullmqPayload: {},
it('returns job status with metadata', async () => {
mockGetJob.mockResolvedValue({
id: 'job-1',
status: 'pending',
metadata: {
workflowId: 'workflow-1',
},
priority: 10,
status: 'waiting',
createdAt: 1000,
admittedAt: 2000,
})
const response = await GET(createMockRequest(), {
params: Promise.resolve({ jobId: 'dispatch-1' }),
params: Promise.resolve({ jobId: 'job-1' }),
})
const body = await response.json()
expect(response.status).toBe(200)
expect(body.status).toBe('waiting')
expect(body.metadata.queueName).toBe('workflow-execution')
expect(body.metadata.lane).toBe('runtime')
expect(body.metadata.workspaceId).toBe('workspace-1')
expect(body.status).toBe('pending')
expect(body.metadata.workflowId).toBe('workflow-1')
})
it('returns completed output from dispatch state', async () => {
mockGetDispatchJobRecord.mockResolvedValue({
id: 'dispatch-2',
workspaceId: 'workspace-1',
lane: 'interactive',
queueName: 'workflow-execution',
bullmqJobName: 'direct-workflow-execution',
bullmqPayload: {},
it('returns completed output from job', async () => {
mockGetJob.mockResolvedValue({
id: 'job-2',
status: 'completed',
metadata: {
workflowId: 'workflow-1',
},
priority: 1,
status: 'completed',
createdAt: 1000,
startedAt: 2000,
completedAt: 7000,
output: { success: true },
})
const response = await GET(createMockRequest(), {
params: Promise.resolve({ jobId: 'dispatch-2' }),
params: Promise.resolve({ jobId: 'job-2' }),
})
const body = await response.json()
expect(response.status).toBe(200)
expect(body.status).toBe('completed')
expect(body.output).toEqual({ success: true })
expect(body.metadata.duration).toBe(5000)
})
it('returns 404 when neither dispatch nor BullMQ job exists', async () => {
mockGetDispatchJobRecord.mockResolvedValue(null)
it('returns 404 when job does not exist', async () => {
mockGetJob.mockResolvedValue(null)
const response = await GET(createMockRequest(), {
params: Promise.resolve({ jobId: 'missing-job' }),

View File

@@ -3,8 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getJobQueue } from '@/lib/core/async-jobs'
import { generateRequestId } from '@/lib/core/utils/request'
import { presentDispatchOrJobStatus } from '@/lib/core/workspace-dispatch/status'
import { getDispatchJobRecord } from '@/lib/core/workspace-dispatch/store'
import { createErrorResponse } from '@/app/api/workflows/utils'
const logger = createLogger('TaskStatusAPI')
@@ -25,15 +23,14 @@ export async function GET(
const authenticatedUserId = authResult.userId
const dispatchJob = await getDispatchJobRecord(taskId)
const jobQueue = await getJobQueue()
const job = dispatchJob ? null : await jobQueue.getJob(taskId)
const job = await jobQueue.getJob(taskId)
if (!job && !dispatchJob) {
if (!job) {
return createErrorResponse('Task not found', 404)
}
const metadataToCheck = dispatchJob?.metadata ?? job?.metadata
const metadataToCheck = job.metadata
if (metadataToCheck?.workflowId) {
const { verifyWorkflowAccess } = await import('@/socket/middleware/permissions')
@@ -61,25 +58,22 @@ export async function GET(
return createErrorResponse('Access denied', 403)
}
const presented = presentDispatchOrJobStatus(dispatchJob, job)
const response: any = {
const response: Record<string, unknown> = {
success: true,
taskId,
status: presented.status,
metadata: presented.metadata,
status: job.status,
metadata: job.metadata,
}
if (presented.output !== undefined) response.output = presented.output
if (presented.error !== undefined) response.error = presented.error
if (presented.estimatedDuration !== undefined) {
response.estimatedDuration = presented.estimatedDuration
}
if (job.output !== undefined) response.output = job.output
if (job.error !== undefined) response.error = job.error
return NextResponse.json(response)
} catch (error: any) {
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error)
logger.error(`[${requestId}] Error fetching task status:`, error)
if (error.message?.includes('not found') || error.status === 404) {
if (errorMessage?.includes('not found')) {
return createErrorResponse('Task not found', 404)
}

View File

@@ -17,14 +17,11 @@ import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { validateOAuthAccessToken } from '@/lib/auth/oauth-token'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { createRunSegment } from '@/lib/copilot/async-runs/repository'
import { ORCHESTRATION_TIMEOUT_MS, SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
import { orchestrateSubagentStream } from '@/lib/copilot/orchestrator/subagent'
import {
executeToolServerSide,
prepareExecutionContext,
} from '@/lib/copilot/orchestrator/tool-executor'
import { runCopilotLifecycle } from '@/lib/copilot/request/lifecycle/run'
import { orchestrateSubagentStream } from '@/lib/copilot/request/subagent'
import { ensureHandlersRegistered, executeTool } from '@/lib/copilot/tool-executor'
import { prepareExecutionContext } from '@/lib/copilot/tools/handlers/context'
import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'
import { env } from '@/lib/core/config/env'
import { RateLimiter } from '@/lib/core/rate-limiter'
@@ -125,12 +122,10 @@ Sim is a workflow automation platform. Workflows are visual pipelines of connect
1. \`list_workspaces\` → know where to work
2. \`create_workflow(name, workspaceId)\` → get a workflowId
3. \`sim_build(request, workflowId)\` → plan and build in one pass
3. \`sim_workflow(request, workflowId)\` → plan and build in one pass
4. \`sim_test(request, workflowId)\` → verify it works
5. \`sim_deploy("deploy as api", workflowId)\` → make it accessible externally (optional)
For fine-grained control, use \`sim_plan\`\`sim_edit\` instead of \`sim_build\`. Pass the plan object from sim_plan EXACTLY as-is to sim_edit's context.plan field.
### Working with Existing Workflows
When the user refers to a workflow by name or description ("the email one", "my Slack bot"):
@@ -148,8 +143,8 @@ When the user refers to a workflow by name or description ("the email one", "my
### Key Rules
- You can test workflows immediately after building — deployment is only needed for external access (API, chat, MCP).
- All copilot tools (build, plan, edit, deploy, test, debug) require workflowId.
- If the user reports errors → use \`sim_debug\` first, don't guess.
- All workflow-scoped copilot tools require \`workflowId\`.
- If the user reports errors, route through \`sim_workflow\` and ask it to reproduce, inspect logs, and fix the issue end to end.
- Variable syntax: \`<blockname.field>\` for block outputs, \`{{ENV_VAR}}\` for env vars.
`
@@ -645,7 +640,8 @@ async function handleDirectToolCall(
startTime: Date.now(),
}
const result = await executeToolServerSide(toolCall, execContext)
ensureHandlersRegistered()
const result = await executeTool(toolCall.name, toolCall.params || {}, execContext)
return {
content: [
@@ -672,7 +668,7 @@ async function handleDirectToolCall(
/**
* Build mode uses the main chat orchestrator with the 'fast' command instead of
* the subagent endpoint. In Go, 'build' is not a registered subagent — it's a mode
* the subagent endpoint. In Go, 'workflow' is not a registered subagent — it's a mode
* (ModeFast) on the main chat processor that bypasses subagent orchestration and
* executes all tools directly.
*/
@@ -728,25 +724,10 @@ async function handleBuildToolCall(
chatId,
}
const executionId = generateId()
const runId = generateId()
const messageId = requestPayload.messageId as string
await createRunSegment({
id: runId,
executionId,
chatId,
userId,
workflowId: resolved.workflowId,
streamId: messageId,
}).catch(() => {})
const result = await orchestrateCopilotStream(requestPayload, {
const result = await runCopilotLifecycle(requestPayload, {
userId,
workflowId: resolved.workflowId,
chatId,
executionId,
runId,
goRoute: '/api/mcp',
autoExecuteTools: true,
timeout: ORCHESTRATION_TIMEOUT_MS,
@@ -785,7 +766,7 @@ async function handleSubagentToolCall(
userId: string,
abortSignal?: AbortSignal
): Promise<CallToolResult> {
if (toolDef.agentId === 'build') {
if (toolDef.agentId === 'workflow') {
return handleBuildToolCall(args, userId, abortSignal)
}

View File

@@ -5,19 +5,27 @@ import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
import { buildCopilotRequestPayload } from '@/lib/copilot/chat-payload'
import { resolveOrCreateChat } from '@/lib/copilot/chat/lifecycle'
import { buildCopilotRequestPayload } from '@/lib/copilot/chat/payload'
import {
buildPersistedAssistantMessage,
buildPersistedUserMessage,
} from '@/lib/copilot/chat/persisted-message'
import {
processContextsServer,
resolveActiveResourceContext,
} from '@/lib/copilot/chat/process-contents'
import { finalizeAssistantTurn } from '@/lib/copilot/chat/terminal-state'
import { generateWorkspaceContext } from '@/lib/copilot/chat/workspace-context'
import { createRequestTracker, createUnauthorizedResponse } from '@/lib/copilot/request/http'
import { createSSEStream, SSE_RESPONSE_HEADERS } from '@/lib/copilot/request/lifecycle/start'
import {
acquirePendingChatStream,
createSSEStream,
SSE_RESPONSE_HEADERS,
} from '@/lib/copilot/chat-streaming'
import type { OrchestratorResult } from '@/lib/copilot/orchestrator/types'
import { processContextsServer, resolveActiveResourceContext } from '@/lib/copilot/process-contents'
import { createRequestTracker, createUnauthorizedResponse } from '@/lib/copilot/request-helpers'
import { taskPubSub } from '@/lib/copilot/task-events'
import { generateWorkspaceContext } from '@/lib/copilot/workspace-context'
import { generateId } from '@/lib/core/utils/uuid'
getPendingChatStreamId,
releasePendingChatStream,
} from '@/lib/copilot/request/session'
import type { OrchestratorResult } from '@/lib/copilot/request/types'
import { taskPubSub } from '@/lib/copilot/tasks'
import {
assertActiveWorkspaceAccess,
getUserEntityPermissions,
@@ -38,7 +46,6 @@ const FileAttachmentSchema = z.object({
const ResourceAttachmentSchema = z.object({
type: z.enum(['workflow', 'table', 'file', 'knowledgebase', 'folder']),
id: z.string().min(1),
title: z.string().optional(),
active: z.boolean().optional(),
})
@@ -90,7 +97,9 @@ const MothershipMessageSchema = z.object({
*/
export async function POST(req: NextRequest) {
const tracker = createRequestTracker()
let userMessageIdForLogs: string | undefined
let lockChatId: string | undefined
let lockStreamId = ''
let chatStreamLockAcquired = false
try {
const session = await getSession()
@@ -112,28 +121,24 @@ export async function POST(req: NextRequest) {
userTimezone,
} = MothershipMessageSchema.parse(body)
const userMessageId = providedMessageId || generateId()
userMessageIdForLogs = userMessageId
const reqLogger = logger.withMetadata({
requestId: tracker.requestId,
messageId: userMessageId,
})
const userMessageId = providedMessageId || crypto.randomUUID()
lockStreamId = userMessageId
reqLogger.info('Received mothership chat start request', {
workspaceId,
chatId,
createNewChat,
hasContexts: Array.isArray(contexts) && contexts.length > 0,
contextsCount: Array.isArray(contexts) ? contexts.length : 0,
hasResourceAttachments: Array.isArray(resourceAttachments) && resourceAttachments.length > 0,
resourceAttachmentCount: Array.isArray(resourceAttachments) ? resourceAttachments.length : 0,
hasFileAttachments: Array.isArray(fileAttachments) && fileAttachments.length > 0,
fileAttachmentCount: Array.isArray(fileAttachments) ? fileAttachments.length : 0,
})
// Phase 1: workspace access + chat resolution in parallel
const [accessResult, chatResult] = await Promise.allSettled([
assertActiveWorkspaceAccess(workspaceId, authenticatedUserId),
chatId || createNewChat
? resolveOrCreateChat({
chatId,
userId: authenticatedUserId,
workspaceId,
model: 'claude-opus-4-6',
type: 'mothership',
})
: null,
])
try {
await assertActiveWorkspaceAccess(workspaceId, authenticatedUserId)
} catch {
if (accessResult.status === 'rejected') {
return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 403 })
}
@@ -141,18 +146,12 @@ export async function POST(req: NextRequest) {
let conversationHistory: any[] = []
let actualChatId = chatId
if (chatId || createNewChat) {
const chatResult = await resolveOrCreateChat({
chatId,
userId: authenticatedUserId,
workspaceId,
model: 'claude-opus-4-6',
type: 'mothership',
})
currentChat = chatResult.chat
actualChatId = chatResult.chatId || chatId
conversationHistory = Array.isArray(chatResult.conversationHistory)
? chatResult.conversationHistory
if (chatResult.status === 'fulfilled' && chatResult.value) {
const resolved = chatResult.value
currentChat = resolved.chat
actualChatId = resolved.chatId || chatId
conversationHistory = Array.isArray(resolved.conversationHistory)
? resolved.conversationHistory
: []
if (chatId && !currentChat) {
@@ -160,77 +159,73 @@ export async function POST(req: NextRequest) {
}
}
let agentContexts: Array<{ type: string; content: string }> = []
if (Array.isArray(contexts) && contexts.length > 0) {
try {
agentContexts = await processContextsServer(
contexts as any,
authenticatedUserId,
message,
workspaceId,
actualChatId
if (actualChatId) {
chatStreamLockAcquired = await acquirePendingChatStream(actualChatId, userMessageId)
if (!chatStreamLockAcquired) {
const activeStreamId = await getPendingChatStreamId(actualChatId)
return NextResponse.json(
{
error: 'A response is already in progress for this chat.',
...(activeStreamId ? { activeStreamId } : {}),
},
{ status: 409 }
)
} catch (e) {
reqLogger.error('Failed to process contexts', e)
}
lockChatId = actualChatId
}
if (Array.isArray(resourceAttachments) && resourceAttachments.length > 0) {
const results = await Promise.allSettled(
resourceAttachments.map(async (r) => {
const ctx = await resolveActiveResourceContext(
r.type,
r.id,
workspaceId,
// Phase 2: contexts + workspace context + user message persistence in parallel
const contextPromise = (async () => {
let agentCtxs: Array<{ type: string; content: string }> = []
if (Array.isArray(contexts) && contexts.length > 0) {
try {
agentCtxs = await processContextsServer(
contexts as any,
authenticatedUserId,
message,
workspaceId,
actualChatId
)
if (!ctx) return null
return {
...ctx,
tag: r.active ? '@active_tab' : '@open_tab',
}
})
)
for (const result of results) {
if (result.status === 'fulfilled' && result.value) {
agentContexts.push(result.value)
} else if (result.status === 'rejected') {
reqLogger.error('Failed to resolve resource attachment', result.reason)
} catch (e) {
logger.error(`[${tracker.requestId}] Failed to process contexts`, e)
}
}
}
if (actualChatId) {
const userMsg = {
id: userMessageId,
role: 'user' as const,
content: message,
timestamp: new Date().toISOString(),
...(fileAttachments &&
fileAttachments.length > 0 && {
fileAttachments: fileAttachments.map((f) => ({
id: f.id,
key: f.key,
filename: f.filename,
media_type: f.media_type,
size: f.size,
})),
}),
...(contexts &&
contexts.length > 0 && {
contexts: contexts.map((c) => ({
kind: c.kind,
label: c.label,
...(c.workflowId && { workflowId: c.workflowId }),
...(c.knowledgeId && { knowledgeId: c.knowledgeId }),
...(c.tableId && { tableId: c.tableId }),
...(c.fileId && { fileId: c.fileId }),
...(c.folderId && { folderId: c.folderId }),
})),
}),
if (Array.isArray(resourceAttachments) && resourceAttachments.length > 0) {
const results = await Promise.allSettled(
resourceAttachments.map(async (r) => {
const ctx = await resolveActiveResourceContext(
r.type,
r.id,
workspaceId,
authenticatedUserId,
actualChatId
)
if (!ctx) return null
return { ...ctx, tag: r.active ? '@active_tab' : '@open_tab' }
})
)
for (const result of results) {
if (result.status === 'fulfilled' && result.value) {
agentCtxs.push(result.value)
} else if (result.status === 'rejected') {
logger.error(
`[${tracker.requestId}] Failed to resolve resource attachment`,
result.reason
)
}
}
}
return agentCtxs
})()
const userMsgPromise = (async () => {
if (!actualChatId) return
const userMsg = buildPersistedUserMessage({
id: userMessageId,
content: message,
fileAttachments,
contexts,
})
const [updated] = await db
.update(copilotChats)
.set({
@@ -246,11 +241,15 @@ export async function POST(req: NextRequest) {
conversationHistory = freshMessages.filter((m: any) => m.id !== userMessageId)
taskPubSub?.publishStatusChanged({ workspaceId, chatId: actualChatId, type: 'started' })
}
}
})()
const [workspaceContext, userPermission] = await Promise.all([
generateWorkspaceContext(workspaceId, authenticatedUserId),
getUserEntityPermissions(authenticatedUserId, 'workspace', workspaceId).catch(() => null),
const [agentContexts, [workspaceContext, userPermission]] = await Promise.all([
contextPromise,
Promise.all([
generateWorkspaceContext(workspaceId, authenticatedUserId),
getUserEntityPermissions(authenticatedUserId, 'workspace', workspaceId).catch(() => null),
]),
userMsgPromise,
])
const requestPayload = await buildCopilotRequestPayload(
@@ -271,21 +270,8 @@ export async function POST(req: NextRequest) {
{ selectedModel: '' }
)
if (actualChatId) {
const acquired = await acquirePendingChatStream(actualChatId, userMessageId)
if (!acquired) {
return NextResponse.json(
{
error:
'A response is already in progress for this chat. Wait for it to finish or use Stop.',
},
{ status: 409 }
)
}
}
const executionId = generateId()
const runId = generateId()
const executionId = crypto.randomUUID()
const runId = crypto.randomUUID()
const stream = createSSEStream({
requestPayload,
userId: authenticatedUserId,
@@ -299,7 +285,6 @@ export async function POST(req: NextRequest) {
titleModel: 'claude-opus-4-6',
requestId: tracker.requestId,
workspaceId,
pendingChatStreamAlreadyRegistered: Boolean(actualChatId),
orchestrateOptions: {
userId: authenticatedUserId,
workspaceId,
@@ -311,81 +296,41 @@ export async function POST(req: NextRequest) {
interactive: true,
onComplete: async (result: OrchestratorResult) => {
if (!actualChatId) return
if (!result.success) return
const assistantMessage: Record<string, unknown> = {
id: generateId(),
role: 'assistant' as const,
content: result.content,
timestamp: new Date().toISOString(),
...(result.requestId ? { requestId: result.requestId } : {}),
}
if (result.toolCalls.length > 0) {
assistantMessage.toolCalls = result.toolCalls
}
if (result.contentBlocks.length > 0) {
assistantMessage.contentBlocks = result.contentBlocks.map((block) => {
const stored: Record<string, unknown> = { type: block.type }
if (block.content) stored.content = block.content
if (block.type === 'tool_call' && block.toolCall) {
const state =
block.toolCall.result?.success !== undefined
? block.toolCall.result.success
? 'success'
: 'error'
: block.toolCall.status
const isSubagentTool = !!block.calledBy
const isNonTerminal =
state === 'cancelled' || state === 'pending' || state === 'executing'
stored.toolCall = {
id: block.toolCall.id,
name: block.toolCall.name,
state,
...(isSubagentTool && isNonTerminal ? {} : { result: block.toolCall.result }),
...(isSubagentTool && isNonTerminal
? {}
: block.toolCall.params
? { params: block.toolCall.params }
: {}),
...(block.calledBy ? { calledBy: block.calledBy } : {}),
}
}
return stored
})
}
try {
const [row] = await db
.select({ messages: copilotChats.messages })
.from(copilotChats)
.where(eq(copilotChats.id, actualChatId))
.limit(1)
const msgs: any[] = Array.isArray(row?.messages) ? row.messages : []
const userIdx = msgs.findIndex((m: any) => m.id === userMessageId)
const alreadyHasResponse =
userIdx >= 0 &&
userIdx + 1 < msgs.length &&
(msgs[userIdx + 1] as any)?.role === 'assistant'
if (!alreadyHasResponse) {
await db
.update(copilotChats)
.set({
messages: sql`${copilotChats.messages} || ${JSON.stringify([assistantMessage])}::jsonb`,
conversationId: sql`CASE WHEN ${copilotChats.conversationId} = ${userMessageId} THEN NULL ELSE ${copilotChats.conversationId} END`,
updatedAt: new Date(),
})
.where(eq(copilotChats.id, actualChatId))
taskPubSub?.publishStatusChanged({
workspaceId,
chatId: actualChatId,
type: 'completed',
})
}
await finalizeAssistantTurn({
chatId: actualChatId,
userMessageId,
...(result.success
? { assistantMessage: buildPersistedAssistantMessage(result, result.requestId) }
: {}),
})
taskPubSub?.publishStatusChanged({
workspaceId,
chatId: actualChatId,
type: 'completed',
})
} catch (error) {
reqLogger.error('Failed to persist chat messages', {
logger.error(`[${tracker.requestId}] Failed to persist chat messages`, {
chatId: actualChatId,
error: error instanceof Error ? error.message : 'Unknown error',
})
}
},
onError: async () => {
if (!actualChatId) return
try {
await finalizeAssistantTurn({
chatId: actualChatId,
userMessageId,
})
taskPubSub?.publishStatusChanged({
workspaceId,
chatId: actualChatId,
type: 'completed',
})
} catch (error) {
logger.error(`[${tracker.requestId}] Failed to finalize errored chat stream`, {
chatId: actualChatId,
error: error instanceof Error ? error.message : 'Unknown error',
})
@@ -396,6 +341,9 @@ export async function POST(req: NextRequest) {
return new Response(stream, { headers: SSE_RESPONSE_HEADERS })
} catch (error) {
if (chatStreamLockAcquired && lockChatId && lockStreamId) {
await releasePendingChatStream(lockChatId, lockStreamId)
}
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
@@ -403,11 +351,9 @@ export async function POST(req: NextRequest) {
)
}
logger
.withMetadata({ requestId: tracker.requestId, messageId: userMessageIdForLogs })
.error('Error handling mothership chat', {
error: error instanceof Error ? error.message : 'Unknown error',
})
logger.error(`[${tracker.requestId}] Error handling mothership chat:`, {
error: error instanceof Error ? error.message : 'Unknown error',
})
return NextResponse.json(
{ error: error instanceof Error ? error.message : 'Internal server error' },

View File

@@ -5,9 +5,9 @@ import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { releasePendingChatStream } from '@/lib/copilot/chat-streaming'
import { taskPubSub } from '@/lib/copilot/task-events'
import { generateId } from '@/lib/core/utils/uuid'
import { normalizeMessage, type PersistedMessage } from '@/lib/copilot/chat/persisted-message'
import { releasePendingChatStream } from '@/lib/copilot/request/session'
import { taskPubSub } from '@/lib/copilot/tasks'
const logger = createLogger('MothershipChatStopAPI')
@@ -27,15 +27,25 @@ const StoredToolCallSchema = z
display: z
.object({
text: z.string().optional(),
title: z.string().optional(),
phaseLabel: z.string().optional(),
})
.optional(),
calledBy: z.string().optional(),
durationMs: z.number().optional(),
error: z.string().optional(),
})
.nullable()
const ContentBlockSchema = z.object({
type: z.string(),
lane: z.enum(['main', 'subagent']).optional(),
content: z.string().optional(),
channel: z.enum(['assistant', 'thinking']).optional(),
phase: z.enum(['call', 'args_delta', 'result']).optional(),
kind: z.enum(['subagent', 'structured_result', 'subagent_result']).optional(),
lifecycle: z.enum(['start', 'end']).optional(),
status: z.enum(['complete', 'error', 'cancelled']).optional(),
toolCall: StoredToolCallSchema.optional(),
})
@@ -60,41 +70,57 @@ export async function POST(req: NextRequest) {
const { chatId, streamId, content, contentBlocks } = StopSchema.parse(await req.json())
await releasePendingChatStream(chatId, streamId)
const [row] = await db
.select({
workspaceId: copilotChats.workspaceId,
messages: copilotChats.messages,
})
.from(copilotChats)
.where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, session.user.id)))
.limit(1)
if (!row) {
await releasePendingChatStream(chatId, streamId)
return NextResponse.json({ success: true })
}
const messages: Record<string, unknown>[] = Array.isArray(row.messages) ? row.messages : []
const userIdx = messages.findIndex((message) => message.id === streamId)
const alreadyHasResponse =
userIdx >= 0 &&
userIdx + 1 < messages.length &&
(messages[userIdx + 1] as Record<string, unknown>)?.role === 'assistant'
const canAppendAssistant =
userIdx >= 0 && userIdx === messages.length - 1 && !alreadyHasResponse
const setClause: Record<string, unknown> = {
conversationId: null,
conversationId: sql`CASE WHEN ${copilotChats.conversationId} = ${streamId} THEN NULL ELSE ${copilotChats.conversationId} END`,
updatedAt: new Date(),
}
const hasContent = content.trim().length > 0
const hasBlocks = Array.isArray(contentBlocks) && contentBlocks.length > 0
if (hasContent || hasBlocks) {
const assistantMessage: Record<string, unknown> = {
id: generateId(),
role: 'assistant' as const,
if ((hasContent || hasBlocks) && canAppendAssistant) {
const normalized = normalizeMessage({
id: crypto.randomUUID(),
role: 'assistant',
content,
timestamp: new Date().toISOString(),
}
if (hasBlocks) {
assistantMessage.contentBlocks = contentBlocks
}
...(hasBlocks ? { contentBlocks } : {}),
})
const assistantMessage: PersistedMessage = normalized
setClause.messages = sql`${copilotChats.messages} || ${JSON.stringify([assistantMessage])}::jsonb`
}
const [updated] = await db
.update(copilotChats)
.set(setClause)
.where(
and(
eq(copilotChats.id, chatId),
eq(copilotChats.userId, session.user.id),
eq(copilotChats.conversationId, streamId)
)
)
.where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, session.user.id)))
.returning({ workspaceId: copilotChats.workspaceId })
await releasePendingChatStream(chatId, streamId)
if (updated?.workspaceId) {
taskPubSub?.publishStatusChanged({
workspaceId: updated.workspaceId,

View File

@@ -4,15 +4,19 @@ import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat-lifecycle'
import { getStreamMeta, readStreamEvents } from '@/lib/copilot/orchestrator/stream/buffer'
import { getLatestRunForStream } from '@/lib/copilot/async-runs/repository'
import { getAccessibleCopilotChat } from '@/lib/copilot/chat/lifecycle'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
createInternalServerErrorResponse,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { taskPubSub } from '@/lib/copilot/task-events'
} from '@/lib/copilot/request/http'
import type { FilePreviewSession } from '@/lib/copilot/request/session'
import { readEvents } from '@/lib/copilot/request/session/buffer'
import { readFilePreviewSessions } from '@/lib/copilot/request/session/file-preview-session'
import { type StreamBatchEvent, toStreamBatchEvent } from '@/lib/copilot/request/session/types'
import { taskPubSub } from '@/lib/copilot/tasks'
import { captureServerEvent } from '@/lib/posthog/server'
const logger = createLogger('MothershipChatAPI')
@@ -47,29 +51,45 @@ export async function GET(
}
let streamSnapshot: {
events: Array<{ eventId: number; streamId: string; event: Record<string, unknown> }>
events: StreamBatchEvent[]
previewSessions: FilePreviewSession[]
status: string
} | null = null
if (chat.conversationId) {
try {
const [meta, events] = await Promise.all([
getStreamMeta(chat.conversationId),
readStreamEvents(chat.conversationId, 0),
const [events, previewSessions] = await Promise.all([
readEvents(chat.conversationId, '0'),
readFilePreviewSessions(chat.conversationId).catch((error) => {
logger.warn('Failed to read preview sessions for mothership chat', {
chatId,
conversationId: chat.conversationId,
error: error instanceof Error ? error.message : String(error),
})
return []
}),
])
streamSnapshot = {
events: events || [],
status: meta?.status || 'unknown',
}
} catch (error) {
logger
.withMetadata({ messageId: chat.conversationId || undefined })
.warn('Failed to read stream snapshot for mothership chat', {
const run = await getLatestRunForStream(chat.conversationId, userId).catch((error) => {
logger.warn('Failed to fetch latest run for mothership chat snapshot', {
chatId,
conversationId: chat.conversationId,
error: error instanceof Error ? error.message : String(error),
})
return null
})
streamSnapshot = {
events: events.map(toStreamBatchEvent),
previewSessions,
status:
typeof run?.status === 'string' ? run.status : events.length > 0 ? 'active' : 'unknown',
}
} catch (error) {
logger.warn('Failed to read stream snapshot for mothership chat', {
chatId,
conversationId: chat.conversationId,
error: error instanceof Error ? error.message : String(error),
})
}
}

View File

@@ -0,0 +1,43 @@
import { db } from '@sim/db'
import { copilotChats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import {
authenticateCopilotRequestSessionOnly,
createBadRequestResponse,
createInternalServerErrorResponse,
createUnauthorizedResponse,
} from '@/lib/copilot/request/http'
const logger = createLogger('MarkTaskReadAPI')
const MarkReadSchema = z.object({
chatId: z.string().min(1),
})
export async function POST(request: NextRequest) {
try {
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
if (!isAuthenticated || !userId) {
return createUnauthorizedResponse()
}
const body = await request.json()
const { chatId } = MarkReadSchema.parse(body)
await db
.update(copilotChats)
.set({ lastSeenAt: sql`GREATEST(${copilotChats.updatedAt}, NOW())` })
.where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId)))
return NextResponse.json({ success: true })
} catch (error) {
if (error instanceof z.ZodError) {
return createBadRequestResponse('chatId is required')
}
logger.error('Error marking task as read:', error)
return createInternalServerErrorResponse('Failed to mark task as read')
}
}

View File

@@ -9,8 +9,8 @@ import {
createBadRequestResponse,
createInternalServerErrorResponse,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { taskPubSub } from '@/lib/copilot/task-events'
} from '@/lib/copilot/request/http'
import { taskPubSub } from '@/lib/copilot/tasks'
import { captureServerEvent } from '@/lib/posthog/server'
import { assertActiveWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
@@ -39,7 +39,7 @@ export async function GET(request: NextRequest) {
id: copilotChats.id,
title: copilotChats.title,
updatedAt: copilotChats.updatedAt,
conversationId: copilotChats.conversationId,
activeStreamId: copilotChats.conversationId,
lastSeenAt: copilotChats.lastSeenAt,
})
.from(copilotChats)

View File

@@ -7,7 +7,7 @@
* Auth is handled via session cookies (EventSource sends cookies automatically).
*/
import { taskPubSub } from '@/lib/copilot/task-events'
import { taskPubSub } from '@/lib/copilot/tasks'
import { createWorkspaceSSE } from '@/lib/events/sse-endpoint'
export const dynamic = 'force-dynamic'

View File

@@ -2,10 +2,9 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createRunSegment } from '@/lib/copilot/async-runs/repository'
import { buildIntegrationToolSchemas } from '@/lib/copilot/chat-payload'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
import { generateWorkspaceContext } from '@/lib/copilot/workspace-context'
import { buildIntegrationToolSchemas } from '@/lib/copilot/chat/payload'
import { generateWorkspaceContext } from '@/lib/copilot/chat/workspace-context'
import { runCopilotLifecycle } from '@/lib/copilot/request/lifecycle/run'
import { generateId } from '@/lib/core/utils/uuid'
import {
assertActiveWorkspaceAccess,
@@ -73,34 +72,25 @@ export async function POST(req: NextRequest) {
...(userPermission ? { userPermission } : {}),
}
const executionId = generateId()
const runId = generateId()
await createRunSegment({
id: runId,
executionId,
chatId: effectiveChatId,
userId,
workspaceId,
streamId: messageId,
}).catch(() => {})
const result = await orchestrateCopilotStream(requestPayload, {
const result = await runCopilotLifecycle(requestPayload, {
userId,
workspaceId,
chatId: effectiveChatId,
executionId,
runId,
goRoute: '/api/mothership/execute',
autoExecuteTools: true,
interactive: false,
})
if (!result.success) {
reqLogger.error('Mothership execute failed', {
error: result.error,
errors: result.errors,
})
logger.error(
messageId
? `Mothership execute failed [messageId:${messageId}]`
: 'Mothership execute failed',
{
error: result.error,
errors: result.errors,
}
)
return NextResponse.json(
{
error: result.error || 'Mothership execution failed',
@@ -136,9 +126,12 @@ export async function POST(req: NextRequest) {
)
}
logger.withMetadata({ messageId }).error('Mothership execute error', {
error: error instanceof Error ? error.message : 'Unknown error',
})
logger.error(
messageId ? `Mothership execute error [messageId:${messageId}]` : 'Mothership execute error',
{
error: error instanceof Error ? error.message : 'Unknown error',
}
)
return NextResponse.json(
{ error: error instanceof Error ? error.message : 'Internal server error' },

View File

@@ -1,13 +1,11 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { AuthType } from '@/lib/auth/hybrid'
import { getJobQueue, shouldUseBullMQ } from '@/lib/core/async-jobs'
import { createBullMQJobData } from '@/lib/core/bullmq'
import { getJobQueue } from '@/lib/core/async-jobs'
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { generateId } from '@/lib/core/utils/uuid'
import { enqueueWorkspaceDispatch } from '@/lib/core/workspace-dispatch'
import { setExecutionMeta } from '@/lib/execution/event-buffer'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
@@ -227,26 +225,10 @@ export async function POST(
let jobId: string
try {
const useBullMQ = shouldUseBullMQ()
if (useBullMQ) {
jobId = await enqueueWorkspaceDispatch({
id: enqueueResult.resumeExecutionId,
workspaceId: workflow.workspaceId,
lane: 'runtime',
queueName: 'resume-execution',
bullmqJobName: 'resume-execution',
bullmqPayload: createBullMQJobData(resumePayload, {
workflowId,
userId,
}),
metadata: { workflowId, userId },
})
} else {
const jobQueue = await getJobQueue()
jobId = await jobQueue.enqueue('resume-execution', resumePayload, {
metadata: { workflowId, workspaceId: workflow.workspaceId, userId },
})
}
const jobQueue = await getJobQueue()
jobId = await jobQueue.enqueue('resume-execution', resumePayload, {
metadata: { workflowId, workspaceId: workflow.workspaceId, userId },
})
logger.info('Enqueued async resume execution', {
jobId,
resumeExecutionId: enqueueResult.resumeExecutionId,

View File

@@ -14,7 +14,6 @@ const {
mockDbReturning,
mockDbUpdate,
mockEnqueue,
mockEnqueueWorkspaceDispatch,
mockStartJob,
mockCompleteJob,
mockMarkJobFailed,
@@ -24,7 +23,6 @@ const {
const mockDbSet = vi.fn().mockReturnValue({ where: mockDbWhere })
const mockDbUpdate = vi.fn().mockReturnValue({ set: mockDbSet })
const mockEnqueue = vi.fn().mockResolvedValue('job-id-1')
const mockEnqueueWorkspaceDispatch = vi.fn().mockResolvedValue('job-id-1')
const mockStartJob = vi.fn().mockResolvedValue(undefined)
const mockCompleteJob = vi.fn().mockResolvedValue(undefined)
const mockMarkJobFailed = vi.fn().mockResolvedValue(undefined)
@@ -42,7 +40,6 @@ const {
mockDbReturning,
mockDbUpdate,
mockEnqueue,
mockEnqueueWorkspaceDispatch,
mockStartJob,
mockCompleteJob,
mockMarkJobFailed,
@@ -75,15 +72,6 @@ vi.mock('@/lib/core/async-jobs', () => ({
shouldExecuteInline: vi.fn().mockReturnValue(false),
}))
vi.mock('@/lib/core/bullmq', () => ({
isBullMQEnabled: vi.fn().mockReturnValue(true),
createBullMQJobData: vi.fn((payload: unknown) => ({ payload })),
}))
vi.mock('@/lib/core/workspace-dispatch', () => ({
enqueueWorkspaceDispatch: mockEnqueueWorkspaceDispatch,
}))
vi.mock('@/lib/workflows/utils', () => ({
getWorkflowById: vi.fn().mockResolvedValue({
id: 'workflow-1',
@@ -175,8 +163,6 @@ const SINGLE_JOB = [
cronExpression: '0 * * * *',
failedCount: 0,
lastQueuedAt: undefined,
sourceUserId: 'user-1',
sourceWorkspaceId: 'workspace-1',
sourceType: 'job',
},
]
@@ -250,56 +236,48 @@ describe('Scheduled Workflow Execution API Route', () => {
expect(data).toHaveProperty('executedCount', 2)
})
it('should queue mothership jobs to BullMQ when available', async () => {
it('should execute mothership jobs inline', async () => {
mockDbReturning.mockReturnValueOnce([]).mockReturnValueOnce(SINGLE_JOB)
const response = await GET(createMockRequest())
expect(response.status).toBe(200)
expect(mockEnqueueWorkspaceDispatch).toHaveBeenCalledWith(
expect(mockExecuteJobInline).toHaveBeenCalledWith(
expect.objectContaining({
workspaceId: 'workspace-1',
lane: 'runtime',
queueName: 'mothership-job-execution',
bullmqJobName: 'mothership-job-execution',
bullmqPayload: {
payload: {
scheduleId: 'job-1',
cronExpression: '0 * * * *',
failedCount: 0,
now: expect.any(String),
},
},
scheduleId: 'job-1',
cronExpression: '0 * * * *',
failedCount: 0,
now: expect.any(String),
})
)
expect(mockExecuteJobInline).not.toHaveBeenCalled()
})
it('should enqueue preassigned correlation metadata for schedules', async () => {
mockDbReturning.mockReturnValue(SINGLE_SCHEDULE)
it('should enqueue schedule with correlation metadata via job queue', async () => {
mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([])
const response = await GET(createMockRequest())
expect(response.status).toBe(200)
expect(mockEnqueueWorkspaceDispatch).toHaveBeenCalledWith(
expect(mockEnqueue).toHaveBeenCalledWith(
'schedule-execution',
expect.objectContaining({
id: 'schedule-execution-1',
workspaceId: 'workspace-1',
lane: 'runtime',
queueName: 'schedule-execution',
bullmqJobName: 'schedule-execution',
metadata: {
scheduleId: 'schedule-1',
workflowId: 'workflow-1',
executionId: 'schedule-execution-1',
requestId: 'test-request-id',
}),
expect.objectContaining({
metadata: expect.objectContaining({
workflowId: 'workflow-1',
correlation: {
workspaceId: 'workspace-1',
correlation: expect.objectContaining({
executionId: 'schedule-execution-1',
requestId: 'test-request-id',
source: 'schedule',
workflowId: 'workflow-1',
scheduleId: 'schedule-1',
triggerType: 'schedule',
scheduledFor: '2025-01-01T00:00:00.000Z',
},
},
}),
}),
})
)
})

View File

@@ -4,11 +4,8 @@ import { and, eq, isNull, lt, lte, ne, not, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs'
import { createBullMQJobData, isBullMQEnabled } from '@/lib/core/bullmq'
import { generateRequestId } from '@/lib/core/utils/request'
import { generateId } from '@/lib/core/utils/uuid'
import { enqueueWorkspaceDispatch } from '@/lib/core/workspace-dispatch'
import { getWorkflowById } from '@/lib/workflows/utils'
import {
executeJobInline,
executeScheduleJob,
@@ -76,8 +73,6 @@ export async function GET(request: NextRequest) {
cronExpression: workflowSchedule.cronExpression,
failedCount: workflowSchedule.failedCount,
lastQueuedAt: workflowSchedule.lastQueuedAt,
sourceWorkspaceId: workflowSchedule.sourceWorkspaceId,
sourceUserId: workflowSchedule.sourceUserId,
sourceType: workflowSchedule.sourceType,
})
@@ -88,6 +83,9 @@ export async function GET(request: NextRequest) {
const jobQueue = await getJobQueue()
const workflowUtils =
dueSchedules.length > 0 ? await import('@/lib/workflows/utils') : undefined
const schedulePromises = dueSchedules.map(async (schedule) => {
const queueTime = schedule.lastQueuedAt ?? queuedAt
const executionId = generateId()
@@ -117,42 +115,17 @@ export async function GET(request: NextRequest) {
try {
const resolvedWorkflow = schedule.workflowId
? await getWorkflowById(schedule.workflowId)
? await workflowUtils?.getWorkflowById(schedule.workflowId)
: null
const resolvedWorkspaceId = resolvedWorkflow?.workspaceId
let jobId: string
if (isBullMQEnabled()) {
if (!resolvedWorkspaceId) {
throw new Error(
`Missing workspace for scheduled workflow ${schedule.workflowId}; refusing to bypass workspace admission`
)
}
jobId = await enqueueWorkspaceDispatch({
id: executionId,
workspaceId: resolvedWorkspaceId,
lane: 'runtime',
queueName: 'schedule-execution',
bullmqJobName: 'schedule-execution',
bullmqPayload: createBullMQJobData(payload, {
workflowId: schedule.workflowId ?? undefined,
correlation,
}),
metadata: {
workflowId: schedule.workflowId ?? undefined,
correlation,
},
})
} else {
jobId = await jobQueue.enqueue('schedule-execution', payload, {
metadata: {
workflowId: schedule.workflowId ?? undefined,
workspaceId: resolvedWorkspaceId ?? undefined,
correlation,
},
})
}
const jobId = await jobQueue.enqueue('schedule-execution', payload, {
metadata: {
workflowId: schedule.workflowId ?? undefined,
workspaceId: resolvedWorkspaceId ?? undefined,
correlation,
},
})
logger.info(
`[${requestId}] Queued schedule execution task ${jobId} for workflow ${schedule.workflowId}`
)
@@ -204,7 +177,7 @@ export async function GET(request: NextRequest) {
}
})
// Mothership jobs use BullMQ when available, otherwise direct inline execution.
// Mothership jobs are executed inline directly.
const jobPromises = dueJobs.map(async (job) => {
const queueTime = job.lastQueuedAt ?? queuedAt
const payload = {
@@ -215,24 +188,7 @@ export async function GET(request: NextRequest) {
}
try {
if (isBullMQEnabled()) {
if (!job.sourceWorkspaceId || !job.sourceUserId) {
throw new Error(`Mothership job ${job.id} is missing workspace/user ownership`)
}
await enqueueWorkspaceDispatch({
workspaceId: job.sourceWorkspaceId!,
lane: 'runtime',
queueName: 'mothership-job-execution',
bullmqJobName: 'mothership-job-execution',
bullmqPayload: createBullMQJobData(payload),
metadata: {
userId: job.sourceUserId,
},
})
} else {
await executeJobInline(payload)
}
await executeJobInline(payload)
} catch (error) {
logger.error(`[${requestId}] Job execution failed for ${job.id}`, {
error: error instanceof Error ? error.message : String(error),

View File

@@ -3,7 +3,7 @@ import { templates } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalApiKey } from '@/lib/copilot/utils'
import { checkInternalApiKey } from '@/lib/copilot/request/http'
import { generateRequestId } from '@/lib/core/utils/request'
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'

View File

@@ -1,9 +1,8 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createRunSegment } from '@/lib/copilot/async-runs/repository'
import { COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
import { COPILOT_REQUEST_MODES } from '@/lib/copilot/constants'
import { runCopilotLifecycle } from '@/lib/copilot/request/lifecycle/run'
import { generateId } from '@/lib/core/utils/uuid'
import { getWorkflowById, resolveWorkflowIdForUser } from '@/lib/workflows/utils'
import { authenticateV1Request } from '@/app/api/v1/auth'
@@ -83,16 +82,20 @@ export async function POST(req: NextRequest) {
// Always generate a chatId - required for artifacts system to work with subagents
const chatId = parsed.chatId || generateId()
messageId = generateId()
const reqLogger = logger.withMetadata({ messageId })
reqLogger.info('Received headless copilot chat start request', {
workflowId: resolved.workflowId,
workflowName: parsed.workflowName,
chatId,
mode: transportMode,
autoExecuteTools: parsed.autoExecuteTools,
timeout: parsed.timeout,
})
messageId = crypto.randomUUID()
logger.info(
messageId
? `Received headless copilot chat start request [messageId:${messageId}]`
: 'Received headless copilot chat start request',
{
workflowId: resolved.workflowId,
workflowName: parsed.workflowName,
chatId,
mode: transportMode,
autoExecuteTools: parsed.autoExecuteTools,
timeout: parsed.timeout,
}
)
const requestPayload = {
message: parsed.message,
workflowId: resolved.workflowId,
@@ -103,24 +106,10 @@ export async function POST(req: NextRequest) {
chatId,
}
const executionId = generateId()
const runId = generateId()
await createRunSegment({
id: runId,
executionId,
chatId,
userId: auth.userId,
workflowId: resolved.workflowId,
streamId: messageId,
}).catch(() => {})
const result = await orchestrateCopilotStream(requestPayload, {
const result = await runCopilotLifecycle(requestPayload, {
userId: auth.userId,
workflowId: resolved.workflowId,
chatId,
executionId,
runId,
goRoute: '/api/mcp',
autoExecuteTools: parsed.autoExecuteTools,
timeout: parsed.timeout,
@@ -142,9 +131,14 @@ export async function POST(req: NextRequest) {
)
}
logger.withMetadata({ messageId }).error('Headless copilot request failed', {
error: error instanceof Error ? error.message : String(error),
})
logger.error(
messageId
? `Headless copilot request failed [messageId:${messageId}]`
: 'Headless copilot request failed',
{
error: error instanceof Error ? error.message : String(error),
}
)
return NextResponse.json({ success: false, error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { admissionRejectedResponse, tryAdmit } from '@/lib/core/admission/gate'
import { generateRequestId } from '@/lib/core/utils/request'
import { DispatchQueueFullError } from '@/lib/core/workspace-dispatch'
import {
checkWebhookPreprocessing,
findAllWebhooksForPath,
@@ -156,29 +155,14 @@ async function handleWebhookPost(
if (shouldSkipWebhookEvent(foundWebhook, body, requestId)) {
continue
}
try {
const response = await queueWebhookExecution(foundWebhook, foundWorkflow, body, request, {
requestId,
path,
actorUserId: preprocessResult.actorUserId,
executionId: preprocessResult.executionId,
correlation: preprocessResult.correlation,
})
responses.push(response)
} catch (error) {
if (error instanceof DispatchQueueFullError) {
return NextResponse.json(
{
error: 'Service temporarily at capacity',
message: error.message,
retryAfterSeconds: 10,
},
{ status: 503, headers: { 'Retry-After': '10' } }
)
}
throw error
}
const response = await queueWebhookExecution(foundWebhook, foundWorkflow, body, request, {
requestId,
path,
actorUserId: preprocessResult.actorUserId,
executionId: preprocessResult.executionId,
correlation: preprocessResult.correlation,
})
responses.push(response)
}
if (responses.length === 0) {

View File

@@ -10,13 +10,11 @@ const {
mockAuthorizeWorkflowByWorkspacePermission,
mockPreprocessExecution,
mockEnqueue,
mockEnqueueWorkspaceDispatch,
} = vi.hoisted(() => ({
mockCheckHybridAuth: vi.fn(),
mockAuthorizeWorkflowByWorkspacePermission: vi.fn(),
mockPreprocessExecution: vi.fn(),
mockEnqueue: vi.fn().mockResolvedValue('job-123'),
mockEnqueueWorkspaceDispatch: vi.fn().mockResolvedValue('job-123'),
}))
vi.mock('@/lib/auth/hybrid', () => ({
@@ -47,16 +45,6 @@ vi.mock('@/lib/core/async-jobs', () => ({
markJobFailed: vi.fn(),
}),
shouldExecuteInline: vi.fn().mockReturnValue(false),
shouldUseBullMQ: vi.fn().mockReturnValue(true),
}))
vi.mock('@/lib/core/bullmq', () => ({
createBullMQJobData: vi.fn((payload: unknown, metadata?: unknown) => ({ payload, metadata })),
}))
vi.mock('@/lib/core/workspace-dispatch', () => ({
enqueueWorkspaceDispatch: mockEnqueueWorkspaceDispatch,
waitForDispatchJob: vi.fn(),
}))
vi.mock('@/lib/core/utils/request', () => ({
@@ -150,24 +138,28 @@ describe('workflow execute async route', () => {
expect(response.status).toBe(202)
expect(body.executionId).toBe('execution-123')
expect(body.jobId).toBe('job-123')
expect(mockEnqueueWorkspaceDispatch).toHaveBeenCalledWith(
expect(mockEnqueue).toHaveBeenCalledWith(
'workflow-execution',
expect.objectContaining({
id: 'execution-123',
workflowId: 'workflow-1',
userId: 'actor-1',
workspaceId: 'workspace-1',
lane: 'runtime',
queueName: 'workflow-execution',
bullmqJobName: 'workflow-execution',
metadata: {
executionId: 'execution-123',
executionMode: 'async',
}),
expect.objectContaining({
metadata: expect.objectContaining({
workflowId: 'workflow-1',
userId: 'actor-1',
correlation: {
workspaceId: 'workspace-1',
correlation: expect.objectContaining({
executionId: 'execution-123',
requestId: 'req-12345678',
source: 'workflow',
workflowId: 'workflow-1',
triggerType: 'manual',
},
},
}),
}),
})
)
})

View File

@@ -3,8 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuthType, checkHybridAuth, hasExternalApiCredentials } from '@/lib/auth/hybrid'
import { admissionRejectedResponse, tryAdmit } from '@/lib/core/admission/gate'
import { getJobQueue, shouldExecuteInline, shouldUseBullMQ } from '@/lib/core/async-jobs'
import { createBullMQJobData } from '@/lib/core/bullmq'
import { getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs'
import {
createTimeoutAbortController,
getTimeoutErrorMessage,
@@ -14,13 +13,6 @@ import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { generateId, isValidUuid } from '@/lib/core/utils/uuid'
import {
DispatchQueueFullError,
enqueueWorkspaceDispatch,
type WorkspaceDispatchLane,
waitForDispatchJob,
} from '@/lib/core/workspace-dispatch'
import { createBufferedExecutionStream } from '@/lib/execution/buffered-stream'
import {
buildNextCallChain,
parseCallChain,
@@ -43,11 +35,6 @@ import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
import { handlePostExecutionPauseState } from '@/lib/workflows/executor/pause-persistence'
import {
DIRECT_WORKFLOW_JOB_NAME,
type QueuedWorkflowExecutionPayload,
type QueuedWorkflowExecutionResult,
} from '@/lib/workflows/executor/queued-workflow-execution'
import {
loadDeployedWorkflowState,
loadWorkflowFromNormalizedTables,
@@ -119,8 +106,6 @@ const ExecuteWorkflowSchema = z.object({
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
const INLINE_TRIGGER_TYPES = new Set<CoreTriggerType>(['manual', 'workflow'])
function resolveOutputIds(
selectedOutputs: string[] | undefined,
blocks: Record<string, any>
@@ -218,39 +203,19 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextR
}
try {
const useBullMQ = shouldUseBullMQ()
const jobQueue = useBullMQ ? null : await getJobQueue()
const jobId = useBullMQ
? await enqueueWorkspaceDispatch({
id: executionId,
workspaceId,
lane: 'runtime',
queueName: 'workflow-execution',
bullmqJobName: 'workflow-execution',
bullmqPayload: createBullMQJobData(payload, {
workflowId,
userId,
correlation,
}),
metadata: {
workflowId,
userId,
correlation,
},
})
: await jobQueue!.enqueue('workflow-execution', payload, {
metadata: { workflowId, workspaceId, userId, correlation },
})
const jobQueue = await getJobQueue()
const jobId = await jobQueue.enqueue('workflow-execution', payload, {
metadata: { workflowId, workspaceId, userId, correlation },
})
asyncLogger.info('Queued async workflow execution', { jobId })
if (shouldExecuteInline() && jobQueue) {
const inlineJobQueue = jobQueue
if (shouldExecuteInline()) {
void (async () => {
try {
await inlineJobQueue.startJob(jobId)
await jobQueue.startJob(jobId)
const output = await executeWorkflowJob(payload)
await inlineJobQueue.completeJob(jobId, output)
await jobQueue.completeJob(jobId, output)
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
asyncLogger.error('Async workflow execution failed', {
@@ -258,7 +223,7 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextR
error: errorMessage,
})
try {
await inlineJobQueue.markJobFailed(jobId, errorMessage)
await jobQueue.markJobFailed(jobId, errorMessage)
} catch (markFailedError) {
asyncLogger.error('Failed to mark job as failed', {
jobId,
@@ -284,17 +249,6 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextR
{ status: 202 }
)
} catch (error: any) {
if (error instanceof DispatchQueueFullError) {
return NextResponse.json(
{
error: 'Service temporarily at capacity',
message: error.message,
retryAfterSeconds: 10,
},
{ status: 503, headers: { 'Retry-After': '10' } }
)
}
asyncLogger.error('Failed to queue async execution', error)
return NextResponse.json(
{ error: `Failed to queue async execution: ${error.message}` },
@@ -303,31 +257,6 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextR
}
}
async function enqueueDirectWorkflowExecution(
payload: QueuedWorkflowExecutionPayload,
priority: number,
lane: WorkspaceDispatchLane
) {
return enqueueWorkspaceDispatch({
id: payload.metadata.executionId,
workspaceId: payload.metadata.workspaceId,
lane,
queueName: 'workflow-execution',
bullmqJobName: DIRECT_WORKFLOW_JOB_NAME,
bullmqPayload: createBullMQJobData(payload, {
workflowId: payload.metadata.workflowId,
userId: payload.metadata.userId,
correlation: payload.metadata.correlation,
}),
metadata: {
workflowId: payload.metadata.workflowId,
userId: payload.metadata.userId,
correlation: payload.metadata.correlation,
},
priority,
})
}
/**
* POST /api/workflows/[id]/execute
*
@@ -796,92 +725,6 @@ async function handleExecutePost(
const executionVariables = cachedWorkflowData?.variables ?? workflow.variables ?? {}
if (shouldUseBullMQ() && !INLINE_TRIGGER_TYPES.has(triggerType)) {
try {
const dispatchJobId = await enqueueDirectWorkflowExecution(
{
workflow,
metadata,
input: processedInput,
variables: executionVariables,
selectedOutputs,
includeFileBase64,
base64MaxBytes,
stopAfterBlockId,
timeoutMs: preprocessResult.executionTimeout?.sync,
runFromBlock: resolvedRunFromBlock,
},
5,
'interactive'
)
const resultRecord = await waitForDispatchJob(
dispatchJobId,
(preprocessResult.executionTimeout?.sync ?? 300000) + 30000
)
if (resultRecord.status === 'failed') {
return NextResponse.json(
{
success: false,
executionId,
error: resultRecord.error ?? 'Workflow execution failed',
},
{ status: 500 }
)
}
const result = resultRecord.output as QueuedWorkflowExecutionResult
const resultForResponseBlock = {
success: result.success,
logs: result.logs,
output: result.output,
}
if (
auth.authType !== AuthType.INTERNAL_JWT &&
workflowHasResponseBlock(resultForResponseBlock)
) {
return createHttpResponseFromBlock(resultForResponseBlock)
}
return NextResponse.json(
{
success: result.success,
executionId,
output: result.output,
error: result.error,
metadata: result.metadata,
},
{ status: result.statusCode ?? 200 }
)
} catch (error: unknown) {
if (error instanceof DispatchQueueFullError) {
return NextResponse.json(
{
error: 'Service temporarily at capacity',
message: error.message,
retryAfterSeconds: 10,
},
{ status: 503, headers: { 'Retry-After': '10' } }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
reqLogger.error(`Queued non-SSE execution failed: ${errorMessage}`)
return NextResponse.json(
{
success: false,
error: errorMessage,
},
{ status: 500 }
)
}
}
const timeoutController = createTimeoutAbortController(
preprocessResult.executionTimeout?.sync
)
@@ -998,54 +841,6 @@ async function handleExecutePost(
}
if (shouldUseDraftState) {
const shouldDispatchViaQueue = shouldUseBullMQ() && !INLINE_TRIGGER_TYPES.has(triggerType)
if (shouldDispatchViaQueue) {
const metadata: ExecutionMetadata = {
requestId,
executionId,
workflowId,
workspaceId,
userId: actorUserId,
sessionUserId: isClientSession ? userId : undefined,
workflowUserId: workflow.userId,
triggerType,
useDraftState: shouldUseDraftState,
startTime: new Date().toISOString(),
isClientSession,
enforceCredentialAccess: useAuthenticatedUserAsActor,
workflowStateOverride: effectiveWorkflowStateOverride,
callChain,
executionMode: 'sync',
}
const executionVariables = cachedWorkflowData?.variables ?? workflow.variables ?? {}
await enqueueDirectWorkflowExecution(
{
workflow,
metadata,
input: processedInput,
variables: executionVariables,
selectedOutputs,
includeFileBase64,
base64MaxBytes,
stopAfterBlockId,
timeoutMs: preprocessResult.executionTimeout?.sync,
runFromBlock: resolvedRunFromBlock,
streamEvents: true,
},
1,
'interactive'
)
return new NextResponse(createBufferedExecutionStream(executionId), {
headers: {
...SSE_HEADERS,
'X-Execution-Id': executionId,
},
})
}
reqLogger.info('Using SSE console log streaming (manual execution)')
} else {
reqLogger.info('Using streaming API response')
@@ -1524,17 +1319,6 @@ async function handleExecutePost(
},
})
} catch (error: any) {
if (error instanceof DispatchQueueFullError) {
return NextResponse.json(
{
error: 'Service temporarily at capacity',
message: error.message,
retryAfterSeconds: 10,
},
{ status: 503, headers: { 'Retry-After': '10' } }
)
}
reqLogger.error('Failed to start workflow execution:', error)
return NextResponse.json(
{ error: error.message || 'Failed to start workflow execution' },

View File

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generatePptxFromCode } from '@/lib/execution/pptx-vm'
import { generatePptxFromCode } from '@/lib/execution/doc-vm'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
export const dynamic = 'force-dynamic'

View File

@@ -1,7 +1,8 @@
'use client'
import React from 'react'
import ReactMarkdown from 'react-markdown'
import { Streamdown } from 'streamdown'
import 'streamdown/styles.css'
import { Avatar, AvatarFallback, AvatarImage } from '@/components/emcn'
import type { ChangelogEntry } from '@/app/changelog/components/changelog-content'
@@ -141,7 +142,8 @@ export default function ChangelogList({ initialEntries }: Props) {
</div>
<div className='max-w-none'>
<ReactMarkdown
<Streamdown
mode='static'
components={{
h2: ({ children, ...props }) =>
isContributorsLabel(children) ? null : (
@@ -192,11 +194,8 @@ export default function ChangelogList({ initialEntries }: Props) {
{children}
</strong>
),
code: ({ children, ...props }) => (
<code
className='rounded bg-[var(--landing-bg-elevated)] px-1 py-0.5 font-mono text-[var(--landing-text)] text-xs'
{...props}
>
inlineCode: ({ children }) => (
<code className='rounded bg-[var(--landing-bg-elevated)] px-1 py-0.5 font-mono text-[var(--landing-text)] text-xs'>
{children}
</code>
),
@@ -212,7 +211,7 @@ export default function ChangelogList({ initialEntries }: Props) {
}}
>
{cleanMarkdown(entry.content)}
</ReactMarkdown>
</Streamdown>
</div>
</div>
))}

View File

@@ -1,6 +1,6 @@
import React, { type HTMLAttributes, memo, type ReactNode, useMemo } from 'react'
import ReactMarkdown from 'react-markdown'
import remarkGfm from 'remark-gfm'
import { Streamdown } from 'streamdown'
import 'streamdown/styles.css'
import { Tooltip } from '@/components/emcn'
import { CopyCodeButton } from '@/components/ui/copy-code-button'
import { extractTextContent } from '@/lib/core/utils/react-node-text'
@@ -25,8 +25,6 @@ export function LinkWithPreview({ href, children }: { href: string; children: Re
)
}
const REMARK_PLUGINS = [remarkGfm]
function createCustomComponents(LinkComponent: typeof LinkWithPreview) {
return {
p: ({ children }: React.HTMLAttributes<HTMLParagraphElement>) => (
@@ -72,11 +70,7 @@ function createCustomComponents(LinkComponent: typeof LinkWithPreview) {
{children}
</ol>
),
li: ({
children,
ordered,
...props
}: React.LiHTMLAttributes<HTMLLIElement> & { ordered?: boolean }) => (
li: ({ children }: React.LiHTMLAttributes<HTMLLIElement>) => (
<li className='font-sans text-gray-800 dark:text-gray-200' style={{ display: 'list-item' }}>
{children}
</li>
@@ -116,28 +110,11 @@ function createCustomComponents(LinkComponent: typeof LinkWithPreview) {
)
},
code: ({
inline,
className,
children,
...props
}: React.HTMLAttributes<HTMLElement> & { className?: string; inline?: boolean }) => {
if (inline) {
return (
<code
className='rounded bg-gray-200 px-1 py-0.5 font-mono text-[0.9em] text-gray-800 dark:bg-gray-700 dark:text-gray-200'
{...props}
>
{children}
</code>
)
}
return (
<code className={className} {...props}>
{children}
</code>
)
},
inlineCode: ({ children }: { children?: React.ReactNode }) => (
<code className='rounded bg-gray-200 px-1 py-0.5 font-mono text-gray-800 text-inherit dark:bg-gray-700 dark:text-gray-200'>
{children}
</code>
),
blockquote: ({ children }: React.HTMLAttributes<HTMLQuoteElement>) => (
<blockquote className='my-4 border-gray-300 border-l-4 py-1 pl-4 font-sans text-gray-700 italic dark:border-gray-600 dark:text-gray-300'>
@@ -215,9 +192,9 @@ const MarkdownRenderer = memo(function MarkdownRenderer({
return (
<div className='space-y-4 break-words font-sans text-[var(--landing-text)] text-base leading-relaxed'>
<ReactMarkdown remarkPlugins={REMARK_PLUGINS} components={components}>
<Streamdown mode='static' components={components}>
{processedContent}
</ReactMarkdown>
</Streamdown>
</div>
)
})

View File

@@ -14,7 +14,8 @@ import {
User,
} from 'lucide-react'
import { useParams, useRouter, useSearchParams } from 'next/navigation'
import ReactMarkdown from 'react-markdown'
import { Streamdown } from 'streamdown'
import 'streamdown/styles.css'
import {
Breadcrumb,
Button,
@@ -875,7 +876,8 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
About this Workflow
</h3>
<div className='max-w-none space-y-2'>
<ReactMarkdown
<Streamdown
mode='static'
components={{
p: ({ children }) => (
<p className='mb-2 font-sans text-muted-foreground text-sm leading-[1.4rem] last:mb-0'>
@@ -913,16 +915,16 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
</ol>
),
li: ({ children }) => <li className='leading-[1.4rem]'>{children}</li>,
code: ({ inline, children }: any) =>
inline ? (
<code className='rounded bg-muted px-1.5 py-0.5 font-mono text-[var(--caution)] text-xs'>
{children}
</code>
) : (
<code className='my-2 block overflow-x-auto rounded-md bg-muted p-3 font-mono text-foreground text-xs'>
{children}
</code>
),
inlineCode: ({ children }) => (
<code className='rounded bg-muted px-1.5 py-0.5 font-mono text-[var(--caution)] text-xs'>
{children}
</code>
),
code: ({ children }) => (
<code className='my-2 block overflow-x-auto rounded-md bg-muted p-3 font-mono text-foreground text-xs'>
{children}
</code>
),
a: ({ href, children }) => (
<a
href={href}
@@ -942,7 +944,7 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
}}
>
{template.details.about}
</ReactMarkdown>
</Streamdown>
</div>
</div>
)}
@@ -1056,7 +1058,8 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
{/* Creator bio */}
{template.creator.details?.about && (
<div className='max-w-none'>
<ReactMarkdown
<Streamdown
mode='static'
components={{
p: ({ children }) => (
<p className='mb-2 font-sans text-muted-foreground text-sm leading-[1.4rem] last:mb-0'>
@@ -1081,7 +1084,7 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
}}
>
{template.creator.details.about}
</ReactMarkdown>
</Streamdown>
</div>
)}
</div>

View File

@@ -10,6 +10,7 @@ import {
ModalContent,
ModalFooter,
ModalHeader,
TagIcon,
Textarea,
ThumbsDown,
ThumbsUp,
@@ -46,13 +47,16 @@ interface MessageActionsProps {
content: string
chatId?: string
userQuery?: string
requestId?: string
}
export function MessageActions({ content, chatId, userQuery }: MessageActionsProps) {
export function MessageActions({ content, chatId, userQuery, requestId }: MessageActionsProps) {
const [copied, setCopied] = useState(false)
const [copiedRequestId, setCopiedRequestId] = useState(false)
const [pendingFeedback, setPendingFeedback] = useState<'up' | 'down' | null>(null)
const [feedbackText, setFeedbackText] = useState('')
const resetTimeoutRef = useRef<number | null>(null)
const requestIdTimeoutRef = useRef<number | null>(null)
const submitFeedback = useSubmitCopilotFeedback()
useEffect(() => {
@@ -60,6 +64,9 @@ export function MessageActions({ content, chatId, userQuery }: MessageActionsPro
if (resetTimeoutRef.current !== null) {
window.clearTimeout(resetTimeoutRef.current)
}
if (requestIdTimeoutRef.current !== null) {
window.clearTimeout(requestIdTimeoutRef.current)
}
}
}, [])
@@ -79,6 +86,20 @@ export function MessageActions({ content, chatId, userQuery }: MessageActionsPro
}
}, [content])
const copyRequestId = useCallback(async () => {
if (!requestId) return
try {
await navigator.clipboard.writeText(requestId)
setCopiedRequestId(true)
if (requestIdTimeoutRef.current !== null) {
window.clearTimeout(requestIdTimeoutRef.current)
}
requestIdTimeoutRef.current = window.setTimeout(() => setCopiedRequestId(false), 1500)
} catch {
/* clipboard unavailable */
}
}, [requestId])
const handleFeedbackClick = useCallback(
(type: 'up' | 'down') => {
if (chatId && userQuery) {
@@ -144,6 +165,21 @@ export function MessageActions({ content, chatId, userQuery }: MessageActionsPro
>
<ThumbsDown className={ICON_CLASS} />
</button>
{requestId && (
<button
type='button'
aria-label='Copy request ID'
onClick={copyRequestId}
className={BUTTON_CLASS}
title={copiedRequestId ? 'Copied!' : 'Copy request ID'}
>
{copiedRequestId ? (
<Check className={ICON_CLASS} />
) : (
<TagIcon className={ICON_CLASS} />
)}
</button>
)}
</div>
<Modal open={pendingFeedback !== null} onOpenChange={handleModalClose}>

View File

@@ -17,7 +17,7 @@ export function FileViewer() {
return null
}
const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace`
const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace&t=${file.size}`
return (
<div className='fixed inset-0 z-50 bg-[var(--bg)]'>

View File

@@ -1,9 +1,24 @@
'use client'
import { memo, useCallback, useEffect, useRef, useState } from 'react'
import { memo, type ReactElement, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import Editor from 'react-simple-code-editor'
import 'prismjs/components/prism-bash'
import 'prismjs/components/prism-css'
import 'prismjs/components/prism-markup'
import 'prismjs/components/prism-sql'
import 'prismjs/components/prism-typescript'
import 'prismjs/components/prism-yaml'
import { createLogger } from '@sim/logger'
import { ZoomIn, ZoomOut } from 'lucide-react'
import { Skeleton } from '@/components/emcn'
import {
CODE_LINE_HEIGHT_PX,
Code as CodeEditor,
calculateGutterWidth,
getCodeEditorProps,
highlight,
languages,
Skeleton,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace'
import { getFileExtension } from '@/lib/uploads/utils/file-utils'
@@ -14,7 +29,6 @@ import {
useWorkspaceFileContent,
} from '@/hooks/queries/workspace-files'
import { useAutosave } from '@/hooks/use-autosave'
import { useStreamingText } from '@/hooks/use-streaming-text'
import { DataTable } from './data-table'
import { PreviewPanel, resolvePreviewType } from './preview-panel'
@@ -57,7 +71,7 @@ const TEXT_EDITABLE_EXTENSIONS = new Set([
...SUPPORTED_CODE_EXTENSIONS,
])
const IFRAME_PREVIEWABLE_MIME_TYPES = new Set(['application/pdf'])
const IFRAME_PREVIEWABLE_MIME_TYPES = new Set(['application/pdf', 'text/x-pdflibjs'])
const IFRAME_PREVIEWABLE_EXTENSIONS = new Set(['pdf'])
const IMAGE_PREVIEWABLE_MIME_TYPES = new Set(['image/png', 'image/jpeg', 'image/gif', 'image/webp'])
@@ -65,11 +79,13 @@ const IMAGE_PREVIEWABLE_EXTENSIONS = new Set(['png', 'jpg', 'jpeg', 'gif', 'webp
const PPTX_PREVIEWABLE_MIME_TYPES = new Set([
'application/vnd.openxmlformats-officedocument.presentationml.presentation',
'text/x-pptxgenjs',
])
const PPTX_PREVIEWABLE_EXTENSIONS = new Set(['pptx'])
const DOCX_PREVIEWABLE_MIME_TYPES = new Set([
'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
'text/x-docxjs',
])
const DOCX_PREVIEWABLE_EXTENSIONS = new Set(['docx'])
@@ -87,12 +103,65 @@ type FileCategory =
| 'xlsx-previewable'
| 'unsupported'
type CodeEditorLanguage =
| 'javascript'
| 'json'
| 'python'
| 'typescript'
| 'bash'
| 'css'
| 'markup'
| 'sql'
| 'yaml'
const CODE_EDITOR_LANGUAGE_BY_EXTENSION: Partial<Record<string, CodeEditorLanguage>> = {
js: 'javascript',
jsx: 'javascript',
ts: 'typescript',
tsx: 'typescript',
py: 'python',
json: 'json',
sh: 'bash',
bash: 'bash',
zsh: 'bash',
fish: 'bash',
css: 'css',
scss: 'css',
less: 'css',
html: 'markup',
htm: 'markup',
xml: 'markup',
svg: 'markup',
sql: 'sql',
yaml: 'yaml',
yml: 'yaml',
}
const CODE_EDITOR_LANGUAGE_BY_MIME: Partial<Record<string, CodeEditorLanguage>> = {
'text/javascript': 'javascript',
'application/javascript': 'javascript',
'text/typescript': 'typescript',
'application/typescript': 'typescript',
'text/x-python': 'python',
'application/json': 'json',
'text/x-shellscript': 'bash',
'text/css': 'css',
'text/html': 'markup',
'text/xml': 'markup',
'application/xml': 'markup',
'image/svg+xml': 'markup',
'text/x-sql': 'sql',
'application/x-yaml': 'yaml',
}
const CODE_EDITOR_LINE_HEIGHT_PX = CODE_LINE_HEIGHT_PX
function resolveFileCategory(mimeType: string | null, filename: string): FileCategory {
if (mimeType && TEXT_EDITABLE_MIME_TYPES.has(mimeType)) return 'text-editable'
if (mimeType && IFRAME_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'iframe-previewable'
if (mimeType && IMAGE_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'image-previewable'
if (mimeType && PPTX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'pptx-previewable'
if (mimeType && DOCX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'docx-previewable'
if (mimeType && PPTX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'pptx-previewable'
if (mimeType && XLSX_PREVIEWABLE_MIME_TYPES.has(mimeType)) return 'xlsx-previewable'
const ext = getFileExtension(filename)
@@ -100,8 +169,8 @@ function resolveFileCategory(mimeType: string | null, filename: string): FileCat
if (TEXT_EDITABLE_EXTENSIONS.has(nameKey)) return 'text-editable'
if (IFRAME_PREVIEWABLE_EXTENSIONS.has(ext)) return 'iframe-previewable'
if (IMAGE_PREVIEWABLE_EXTENSIONS.has(ext)) return 'image-previewable'
if (PPTX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'pptx-previewable'
if (DOCX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'docx-previewable'
if (PPTX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'pptx-previewable'
if (XLSX_PREVIEWABLE_EXTENSIONS.has(ext)) return 'xlsx-previewable'
return 'unsupported'
@@ -128,6 +197,29 @@ interface FileViewerProps {
onSaveStatusChange?: (status: 'idle' | 'saving' | 'saved' | 'error') => void
saveRef?: React.MutableRefObject<(() => Promise<void>) | null>
streamingContent?: string
streamingMode?: 'append' | 'replace'
disableStreamingAutoScroll?: boolean
useCodeRendererForCodeFiles?: boolean
}
function isCodeFile(file: { type: string; name: string }): boolean {
const ext = getFileExtension(file.name)
return (
SUPPORTED_CODE_EXTENSIONS.includes(ext as (typeof SUPPORTED_CODE_EXTENSIONS)[number]) ||
ext === 'html' ||
ext === 'htm' ||
ext === 'xml' ||
ext === 'svg'
)
}
function resolveCodeEditorLanguage(file: { type: string; name: string }): CodeEditorLanguage {
const ext = getFileExtension(file.name)
return (
CODE_EDITOR_LANGUAGE_BY_EXTENSION[ext] ??
CODE_EDITOR_LANGUAGE_BY_MIME[file.type] ??
(ext === 'json' ? 'json' : 'javascript')
)
}
export function FileViewer({
@@ -141,6 +233,9 @@ export function FileViewer({
onSaveStatusChange,
saveRef,
streamingContent,
streamingMode,
disableStreamingAutoScroll = false,
useCodeRendererForCodeFiles = false,
}: FileViewerProps) {
const category = resolveFileCategory(file.type, file.name)
@@ -149,33 +244,36 @@ export function FileViewer({
<TextEditor
file={file}
workspaceId={workspaceId}
canEdit={streamingContent !== undefined ? false : canEdit}
canEdit={canEdit}
previewMode={previewMode ?? (showPreview ? 'preview' : 'editor')}
autoFocus={autoFocus}
onDirtyChange={onDirtyChange}
onSaveStatusChange={onSaveStatusChange}
saveRef={saveRef}
streamingContent={streamingContent}
streamingMode={streamingMode}
disableStreamingAutoScroll={disableStreamingAutoScroll}
useCodeRendererForCodeFiles={useCodeRendererForCodeFiles}
/>
)
}
if (category === 'iframe-previewable') {
return <IframePreview file={file} />
return <IframePreview file={file} workspaceId={workspaceId} />
}
if (category === 'image-previewable') {
return <ImagePreview file={file} />
}
if (category === 'pptx-previewable') {
return <PptxPreview file={file} workspaceId={workspaceId} streamingContent={streamingContent} />
}
if (category === 'docx-previewable') {
return <DocxPreview file={file} workspaceId={workspaceId} />
}
if (category === 'pptx-previewable') {
return <PptxPreview file={file} workspaceId={workspaceId} streamingContent={streamingContent} />
}
if (category === 'xlsx-previewable') {
return <XlsxPreview file={file} workspaceId={workspaceId} />
}
@@ -193,6 +291,9 @@ interface TextEditorProps {
onSaveStatusChange?: (status: 'idle' | 'saving' | 'saved' | 'error') => void
saveRef?: React.MutableRefObject<(() => Promise<void>) | null>
streamingContent?: string
streamingMode?: 'append' | 'replace'
disableStreamingAutoScroll: boolean
useCodeRendererForCodeFiles?: boolean
}
function TextEditor({
@@ -205,21 +306,35 @@ function TextEditor({
onSaveStatusChange,
saveRef,
streamingContent,
streamingMode = 'append',
disableStreamingAutoScroll,
useCodeRendererForCodeFiles = false,
}: TextEditorProps) {
const initializedRef = useRef(false)
const contentRef = useRef('')
const textareaRef = useRef<HTMLTextAreaElement>(null)
const containerRef = useRef<HTMLDivElement>(null)
const codeEditorRef = useRef<HTMLDivElement>(null)
const codeScrollRef = useRef<HTMLDivElement>(null)
const [splitPct, setSplitPct] = useState(SPLIT_DEFAULT_PCT)
const [isResizing, setIsResizing] = useState(false)
const [visualLineHeights, setVisualLineHeights] = useState<number[]>([])
const [activeLineNumber, setActiveLineNumber] = useState(1)
const {
data: fetchedContent,
isLoading,
error,
dataUpdatedAt,
} = useWorkspaceFileContent(workspaceId, file.id, file.key, file.type === 'text/x-pptxgenjs')
} = useWorkspaceFileContent(
workspaceId,
file.id,
file.key,
file.type === 'text/x-pptxgenjs' ||
file.type === 'text/x-docxjs' ||
file.type === 'text/x-pdflibjs'
)
const updateContent = useUpdateWorkspaceFileContent()
const updateContentRef = useRef(updateContent)
@@ -228,15 +343,82 @@ function TextEditor({
const [content, setContent] = useState('')
const [savedContent, setSavedContent] = useState('')
const savedContentRef = useRef('')
const [isStreamInteractionLocked, setIsStreamInteractionLocked] = useState(false)
const wasStreamingRef = useRef(false)
const pendingStreamReconcileRef = useRef(false)
const lastStreamedContentRef = useRef<string | null>(null)
const shouldUseCodeRenderer = useCodeRendererForCodeFiles && isCodeFile(file)
const codeLanguage = useMemo(() => resolveCodeEditorLanguage(file), [file])
useEffect(() => {
const lastStreamed = lastStreamedContentRef.current
const hasPendingReconcile = wasStreamingRef.current && pendingStreamReconcileRef.current
const hasFetchedAdvanced =
fetchedContent !== undefined && fetchedContent !== savedContentRef.current
const fetchedMatchesLastStream =
fetchedContent !== undefined && lastStreamed !== null && fetchedContent === lastStreamed
if (streamingContent !== undefined) {
setContent(streamingContent)
contentRef.current = streamingContent
const nextContent =
streamingMode === 'replace' || fetchedContent === undefined
? streamingContent
: fetchedContent.endsWith(streamingContent) ||
fetchedContent.endsWith(`\n${streamingContent}`)
? fetchedContent
: `${fetchedContent}\n${streamingContent}`
const fetchedMatchesNextStream =
fetchedContent !== undefined && fetchedContent === nextContent
if (
hasPendingReconcile &&
(hasFetchedAdvanced || fetchedMatchesLastStream || fetchedMatchesNextStream)
) {
pendingStreamReconcileRef.current = false
wasStreamingRef.current = false
lastStreamedContentRef.current = null
setIsStreamInteractionLocked(false)
setContent(fetchedContent)
contentRef.current = fetchedContent
setSavedContent(fetchedContent)
savedContentRef.current = fetchedContent
initializedRef.current = true
return
}
wasStreamingRef.current = true
setIsStreamInteractionLocked(true)
if (nextContent === contentRef.current) {
pendingStreamReconcileRef.current = true
lastStreamedContentRef.current = nextContent
initializedRef.current = true
return
}
pendingStreamReconcileRef.current = true
lastStreamedContentRef.current = nextContent
setContent(nextContent)
contentRef.current = nextContent
initializedRef.current = true
return
}
if (hasPendingReconcile) {
if (hasFetchedAdvanced || fetchedMatchesLastStream) {
pendingStreamReconcileRef.current = false
wasStreamingRef.current = false
lastStreamedContentRef.current = null
setIsStreamInteractionLocked(false)
setContent(fetchedContent)
contentRef.current = fetchedContent
setSavedContent(fetchedContent)
savedContentRef.current = fetchedContent
return
}
}
if (streamingContent === undefined) {
setIsStreamInteractionLocked(false)
}
if (fetchedContent === undefined) return
if (!initializedRef.current) {
@@ -247,7 +429,14 @@ function TextEditor({
initializedRef.current = true
if (autoFocus) {
requestAnimationFrame(() => textareaRef.current?.focus())
requestAnimationFrame(() => {
const editorTextarea = codeEditorRef.current?.querySelector('textarea')
if (editorTextarea instanceof HTMLTextAreaElement) {
editorTextarea.focus()
return
}
textareaRef.current?.focus()
})
}
return
}
@@ -260,7 +449,7 @@ function TextEditor({
savedContentRef.current = fetchedContent
contentRef.current = fetchedContent
}
}, [streamingContent, fetchedContent, dataUpdatedAt, autoFocus])
}, [streamingContent, fetchedContent, streamingMode, dataUpdatedAt, autoFocus])
const handleContentChange = useCallback((value: string) => {
setContent(value)
@@ -284,7 +473,7 @@ function TextEditor({
content,
savedContent,
onSave,
enabled: canEdit && initializedRef.current,
enabled: canEdit && initializedRef.current && streamingContent === undefined,
})
useEffect(() => {
@@ -339,20 +528,195 @@ function TextEditor({
[handleContentChange]
)
const isStreaming = streamingContent !== undefined
const revealedContent = useStreamingText(content, isStreaming)
const isStreaming = isStreamInteractionLocked
const isEditorReadOnly = isStreamInteractionLocked || !canEdit
const renderedContent = content
const gutterWidthPx = useMemo(() => {
const lineCount = renderedContent.split('\n').length
return calculateGutterWidth(lineCount)
}, [renderedContent])
const sharedCodeEditorProps = useMemo(
() =>
getCodeEditorProps({
disabled: !canEdit,
}),
[canEdit]
)
const highlightCode = useMemo(() => {
return (value: string) => {
const grammar = languages[codeLanguage] || languages.javascript
return highlight(value, grammar, codeLanguage)
}
}, [codeLanguage])
const handleCodeContentChange = useCallback(
(value: string) => {
if (isEditorReadOnly) return
handleContentChange(value)
},
[handleContentChange, isEditorReadOnly]
)
const textareaStuckRef = useRef(true)
useEffect(() => {
if (!shouldUseCodeRenderer || !codeEditorRef.current) return
const setCodeEditorReadOnly = () => {
const textarea = codeEditorRef.current?.querySelector('textarea')
if (!(textarea instanceof HTMLTextAreaElement)) return
textarea.readOnly = isEditorReadOnly
textarea.spellcheck = false
}
setCodeEditorReadOnly()
const timeoutId = setTimeout(setCodeEditorReadOnly, 0)
const observer = new MutationObserver(setCodeEditorReadOnly)
observer.observe(codeEditorRef.current, {
childList: true,
subtree: true,
})
return () => {
clearTimeout(timeoutId)
observer.disconnect()
}
}, [isEditorReadOnly, shouldUseCodeRenderer])
useEffect(() => {
if (!shouldUseCodeRenderer) return
const textarea = codeEditorRef.current?.querySelector('textarea')
if (!(textarea instanceof HTMLTextAreaElement)) return
const updateActiveLineNumber = () => {
const pos = textarea.selectionStart
const textBeforeCursor = renderedContent.substring(0, pos)
setActiveLineNumber(textBeforeCursor.split('\n').length)
}
updateActiveLineNumber()
textarea.addEventListener('click', updateActiveLineNumber)
textarea.addEventListener('keyup', updateActiveLineNumber)
textarea.addEventListener('focus', updateActiveLineNumber)
return () => {
textarea.removeEventListener('click', updateActiveLineNumber)
textarea.removeEventListener('keyup', updateActiveLineNumber)
textarea.removeEventListener('focus', updateActiveLineNumber)
}
}, [renderedContent, shouldUseCodeRenderer])
useEffect(() => {
if (!shouldUseCodeRenderer || !codeEditorRef.current) return
const calculateVisualLines = () => {
const preElement = codeEditorRef.current?.querySelector('pre')
if (!(preElement instanceof HTMLElement)) return
const lines = renderedContent.split('\n')
const newVisualLineHeights: number[] = []
const tempContainer = document.createElement('div')
tempContainer.style.cssText = `
position: absolute;
visibility: hidden;
height: auto;
width: ${preElement.clientWidth}px;
font-family: ${window.getComputedStyle(preElement).fontFamily};
font-size: ${window.getComputedStyle(preElement).fontSize};
line-height: ${CODE_EDITOR_LINE_HEIGHT_PX}px;
padding: 8px;
white-space: pre-wrap;
word-break: break-word;
box-sizing: border-box;
`
document.body.appendChild(tempContainer)
lines.forEach((line) => {
const lineDiv = document.createElement('div')
lineDiv.textContent = line || ' '
tempContainer.appendChild(lineDiv)
const actualHeight = lineDiv.getBoundingClientRect().height
const lineUnits = Math.max(1, Math.ceil(actualHeight / CODE_EDITOR_LINE_HEIGHT_PX))
newVisualLineHeights.push(lineUnits)
tempContainer.removeChild(lineDiv)
})
document.body.removeChild(tempContainer)
setVisualLineHeights(newVisualLineHeights)
}
const timeoutId = setTimeout(calculateVisualLines, 50)
const resizeObserver = new ResizeObserver(calculateVisualLines)
resizeObserver.observe(codeEditorRef.current)
return () => {
clearTimeout(timeoutId)
resizeObserver.disconnect()
}
}, [renderedContent, shouldUseCodeRenderer])
const renderCodeLineNumbers = useCallback((): ReactElement[] => {
const numbers: ReactElement[] = []
let lineNumber = 1
visualLineHeights.forEach((height) => {
const isActive = lineNumber === activeLineNumber
numbers.push(
<div
key={`${lineNumber}-0`}
className={cn(
'text-right text-xs tabular-nums leading-[21px]',
isActive
? 'text-[var(--text-primary)] dark:text-[var(--code-foreground)]'
: 'text-[var(--text-muted)] dark:text-[var(--code-line-number)]'
)}
>
{lineNumber}
</div>
)
for (let i = 1; i < height; i++) {
numbers.push(
<div
key={`${lineNumber}-${i}`}
className='invisible text-right text-xs tabular-nums leading-[21px]'
>
{lineNumber}
</div>
)
}
lineNumber++
})
if (numbers.length === 0) {
numbers.push(
<div
key='1-0'
className='text-right text-[var(--text-muted)] text-xs tabular-nums leading-[21px] dark:text-[var(--code-line-number)]'
>
1
</div>
)
}
return numbers
}, [activeLineNumber, visualLineHeights])
useEffect(() => {
if (!isStreaming) return
if (disableStreamingAutoScroll) {
textareaStuckRef.current = false
return
}
textareaStuckRef.current = true
const el = textareaRef.current
const el = (shouldUseCodeRenderer ? codeScrollRef.current : textareaRef.current) ?? null
if (!el) return
const onWheel = (e: WheelEvent) => {
if (e.deltaY < 0) textareaStuckRef.current = false
const onWheel = (e: Event) => {
if ((e as WheelEvent).deltaY < 0) textareaStuckRef.current = false
}
const onScroll = () => {
@@ -367,14 +731,14 @@ function TextEditor({
el.removeEventListener('wheel', onWheel)
el.removeEventListener('scroll', onScroll)
}
}, [isStreaming])
}, [disableStreamingAutoScroll, isStreaming, shouldUseCodeRenderer])
useEffect(() => {
if (!isStreaming || !textareaStuckRef.current) return
const el = textareaRef.current
if (!isStreaming || !textareaStuckRef.current || disableStreamingAutoScroll) return
const el = (shouldUseCodeRenderer ? codeScrollRef.current : textareaRef.current) ?? null
if (!el) return
el.scrollTop = el.scrollHeight
}, [isStreaming, revealedContent])
}, [disableStreamingAutoScroll, isStreaming, renderedContent, shouldUseCodeRenderer])
if (streamingContent === undefined) {
if (isLoading) return DOCUMENT_SKELETON
@@ -396,21 +760,49 @@ function TextEditor({
return (
<div ref={containerRef} className='relative flex flex-1 overflow-hidden'>
{showEditor && (
<textarea
ref={textareaRef}
value={isStreaming ? revealedContent : content}
onChange={(e) => handleContentChange(e.target.value)}
readOnly={!canEdit}
spellCheck={false}
style={showPreviewPane ? { width: `${splitPct}%`, flexShrink: 0 } : undefined}
className={cn(
'h-full resize-none border-0 bg-transparent p-[24px] font-mono text-[14px] text-[var(--text-body)] outline-none placeholder:text-[var(--text-subtle)]',
!showPreviewPane && 'w-full',
isResizing && 'pointer-events-none'
)}
/>
)}
{showEditor &&
(shouldUseCodeRenderer ? (
<div
style={showPreviewPane ? { width: `${splitPct}%`, flexShrink: 0 } : undefined}
className={cn(
'min-w-0',
!showPreviewPane && 'w-full',
isResizing && 'pointer-events-none'
)}
>
<div ref={codeScrollRef} className='h-full overflow-auto'>
<CodeEditor.Container className='min-h-full min-w-full overflow-visible rounded-none border-0 bg-transparent'>
<CodeEditor.Gutter width={gutterWidthPx}>
{renderCodeLineNumbers()}
</CodeEditor.Gutter>
<CodeEditor.Content paddingLeft={`${gutterWidthPx}px`} editorRef={codeEditorRef}>
<Editor
value={renderedContent}
onValueChange={handleCodeContentChange}
highlight={highlightCode}
padding={sharedCodeEditorProps.padding}
className={cn(sharedCodeEditorProps.className, 'min-h-full')}
textareaClassName={cn(sharedCodeEditorProps.textareaClassName, 'min-h-full')}
/>
</CodeEditor.Content>
</CodeEditor.Container>
</div>
</div>
) : (
<textarea
ref={textareaRef}
value={renderedContent}
onChange={(e) => handleContentChange(e.target.value)}
readOnly={isEditorReadOnly}
spellCheck={false}
style={showPreviewPane ? { width: `${splitPct}%`, flexShrink: 0 } : undefined}
className={cn(
'h-full resize-none border-0 bg-transparent p-[24px] font-mono text-[14px] text-[var(--text-body)] outline-none placeholder:text-[var(--text-subtle)]',
!showPreviewPane && 'w-full',
isResizing && 'pointer-events-none'
)}
/>
))}
{showPreviewPane && (
<>
{showEditor && (
@@ -432,7 +824,7 @@ function TextEditor({
className={cn('min-w-0 flex-1 overflow-hidden', isResizing && 'pointer-events-none')}
>
<PreviewPanel
content={isStreaming ? revealedContent : content}
content={renderedContent}
mimeType={file.type}
filename={file.name}
isStreaming={isStreaming}
@@ -445,13 +837,36 @@ function TextEditor({
)
}
const IframePreview = memo(function IframePreview({ file }: { file: WorkspaceFileRecord }) {
const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace`
const IframePreview = memo(function IframePreview({
file,
workspaceId,
}: {
file: WorkspaceFileRecord
workspaceId: string
}) {
const { data: fileData, isLoading } = useWorkspaceFileBinary(workspaceId, file.id, file.key)
const [blobUrl, setBlobUrl] = useState<string | null>(null)
useEffect(() => {
if (!fileData) return
const blob = new Blob([fileData], { type: 'application/pdf' })
const url = URL.createObjectURL(blob)
setBlobUrl(url)
return () => URL.revokeObjectURL(url)
}, [fileData])
if (isLoading || !blobUrl) {
return (
<div className='flex h-full items-center justify-center'>
<Skeleton className='h-[200px] w-[80%]' />
</div>
)
}
return (
<div className='flex flex-1 overflow-hidden'>
<iframe
src={serveUrl}
src={blobUrl}
className='h-full w-full border-0'
title={file.name}
onError={() => {
@@ -470,7 +885,7 @@ const ZOOM_BUTTON_FACTOR = 1.2
const clampZoom = (z: number) => Math.min(Math.max(z, ZOOM_MIN), ZOOM_MAX)
const ImagePreview = memo(function ImagePreview({ file }: { file: WorkspaceFileRecord }) {
const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace`
const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace&t=${file.size}`
const [zoom, setZoom] = useState(1)
const [offset, setOffset] = useState({ x: 0, y: 0 })
const isDragging = useRef(false)
@@ -603,6 +1018,143 @@ const DOCUMENT_SKELETON = (
</div>
)
const DocxPreview = memo(function DocxPreview({
file,
workspaceId,
}: {
file: WorkspaceFileRecord
workspaceId: string
}) {
const viewportRef = useRef<HTMLDivElement>(null)
const containerRef = useRef<HTMLDivElement>(null)
const {
data: fileData,
isLoading,
error: fetchError,
} = useWorkspaceFileBinary(workspaceId, file.id, file.key)
const [renderError, setRenderError] = useState<string | null>(null)
const [docxScale, setDocxScale] = useState(1)
const [scaledSize, setScaledSize] = useState<{ width: number; height: number } | null>(null)
const updateDocxScale = useCallback(() => {
const viewport = viewportRef.current
const container = containerRef.current
if (!viewport || !container) return
const intrinsicWidth = container.scrollWidth
const intrinsicHeight = container.scrollHeight
if (intrinsicWidth === 0 || intrinsicHeight === 0) return
const viewportStyle = window.getComputedStyle(viewport)
const paddingX =
Number.parseFloat(viewportStyle.paddingLeft) + Number.parseFloat(viewportStyle.paddingRight)
const availableWidth = Math.max(viewport.clientWidth - paddingX, 0)
const nextScale = availableWidth > 0 ? Math.min(1, availableWidth / intrinsicWidth) : 1
setDocxScale((prev) => (Math.abs(prev - nextScale) < 0.001 ? prev : nextScale))
setScaledSize((prev) => {
const next = {
width: intrinsicWidth * nextScale,
height: intrinsicHeight * nextScale,
}
if (
prev &&
Math.abs(prev.width - next.width) < 1 &&
Math.abs(prev.height - next.height) < 1
) {
return prev
}
return next
})
}, [])
useEffect(() => {
if (!containerRef.current || !fileData) return
let cancelled = false
async function render() {
try {
const { renderAsync } = await import('docx-preview')
if (cancelled || !containerRef.current) return
setRenderError(null)
setDocxScale(1)
setScaledSize(null)
containerRef.current.innerHTML = ''
await renderAsync(fileData, containerRef.current, undefined, {
inWrapper: true,
ignoreWidth: false,
ignoreHeight: false,
})
if (!cancelled) {
requestAnimationFrame(updateDocxScale)
}
} catch (err) {
if (!cancelled) {
const msg = err instanceof Error ? err.message : 'Failed to render document'
logger.error('DOCX render failed', { error: msg })
setRenderError(msg)
}
}
}
render()
return () => {
cancelled = true
}
}, [fileData, updateDocxScale])
useEffect(() => {
const viewport = viewportRef.current
const container = containerRef.current
if (!viewport || !container) return
updateDocxScale()
const resizeObserver = new ResizeObserver(() => {
updateDocxScale()
})
resizeObserver.observe(viewport)
resizeObserver.observe(container)
return () => {
resizeObserver.disconnect()
}
}, [fileData, updateDocxScale])
const error = resolvePreviewError(fetchError, renderError)
if (error) return <PreviewError label='document' error={error} />
if (isLoading) return DOCUMENT_SKELETON
return (
<div ref={viewportRef} className='h-full overflow-auto bg-[var(--surface-1)] p-4 sm:p-6'>
<div className='flex min-h-full justify-center'>
<div
className='shrink-0'
style={
scaledSize
? {
width: scaledSize.width,
minHeight: scaledSize.height,
}
: undefined
}
>
<div
ref={containerRef}
className='origin-top [&>.docx-wrapper]:bg-transparent'
style={{
transform: `scale(${docxScale})`,
transformOrigin: 'top center',
}}
/>
</div>
</div>
</div>
)
})
const pptxSlideCache = new Map<string, string[]>()
function pptxCacheKey(fileId: string, dataUpdatedAt: number, byteLength: number): string {
@@ -715,6 +1267,15 @@ function PptxPreview({
const [rendering, setRendering] = useState(false)
const [renderError, setRenderError] = useState<string | null>(null)
const shouldSuppressStreamingPptxError = (message: string): boolean => {
return (
message.includes('SyntaxError: Invalid or unexpected token') ||
message.includes('PPTX generation cancelled') ||
message.includes('Preview failed') ||
message.includes('AbortError')
)
}
// Streaming preview: only re-triggers when the streaming source code or
// workspace changes. Isolated from fileData/dataUpdatedAt so that file-list
// refreshes don't abort the in-flight compilation request.
@@ -756,8 +1317,12 @@ function PptxPreview({
} catch (err) {
if (!cancelled && !(err instanceof DOMException && err.name === 'AbortError')) {
const msg = err instanceof Error ? err.message : 'Failed to render presentation'
logger.error('PPTX render failed', { error: msg })
setRenderError(msg)
if (shouldSuppressStreamingPptxError(msg)) {
logger.info('Suppressing transient PPTX streaming preview error', { error: msg })
} else {
logger.error('PPTX render failed', { error: msg })
setRenderError(msg)
}
}
} finally {
if (!cancelled) setRendering(false)
@@ -865,77 +1430,6 @@ function toggleMarkdownCheckbox(markdown: string, targetIndex: number, checked:
})
}
const DocxPreview = memo(function DocxPreview({
file,
workspaceId,
}: {
file: WorkspaceFileRecord
workspaceId: string
}) {
const {
data: fileData,
isLoading,
error: fetchError,
} = useWorkspaceFileBinary(workspaceId, file.id, file.key)
const [html, setHtml] = useState<string | null>(null)
const [renderError, setRenderError] = useState<string | null>(null)
useEffect(() => {
if (!fileData) return
const data = fileData
let cancelled = false
async function convert() {
try {
setRenderError(null)
const mammoth = await import('mammoth')
const result = await mammoth.convertToHtml({ arrayBuffer: data })
if (!cancelled) setHtml(result.value)
} catch (err) {
if (!cancelled) {
const msg = err instanceof Error ? err.message : 'Failed to render document'
logger.error('DOCX render failed', { error: msg })
setRenderError(msg)
}
}
}
convert()
return () => {
cancelled = true
}
}, [fileData])
const error = resolvePreviewError(fetchError, renderError)
if (error) return <PreviewError label='document' error={error} />
if (isLoading || html === null) return DOCUMENT_SKELETON
return (
<div className='flex flex-1 overflow-hidden'>
<iframe
srcDoc={buildDocxPreviewHtml(html)}
sandbox=''
title={file.name}
className='h-full w-full border-0'
/>
</div>
)
})
/** Wraps mammoth HTML output with base styles. Uses raw hex colors because iframes cannot inherit CSS variables from the parent document. */
function buildDocxPreviewHtml(html: string): string {
return `<!DOCTYPE html><html><head><style>
body { margin: 0; padding: 24px; font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; font-size: 14px; line-height: 1.6; color: #e4e4e7; background: transparent; }
table { border-collapse: collapse; width: 100%; margin: 12px 0; }
td, th { border: 1px solid #3f3f46; padding: 6px 10px; }
img { max-width: 100%; height: auto; }
p { margin: 0 0 8px; }
h1, h2, h3, h4, h5, h6 { margin: 16px 0 8px; }
</style></head><body>${html}</body></html>`
}
const XLSX_MAX_ROWS = 1_000
interface XlsxSheet {

View File

@@ -2,18 +2,21 @@
import { createContext, memo, useCallback, useContext, useEffect, useMemo, useRef } from 'react'
import { useRouter } from 'next/navigation'
import type { Components, ExtraProps } from 'react-markdown'
import ReactMarkdown from 'react-markdown'
import rehypeSlug from 'rehype-slug'
import remarkBreaks from 'remark-breaks'
import remarkGfm from 'remark-gfm'
import { Streamdown } from 'streamdown'
import 'streamdown/styles.css'
import { Checkbox } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { getFileExtension } from '@/lib/uploads/utils/file-utils'
import { useAutoScroll } from '@/hooks/use-auto-scroll'
import { useStreamingReveal } from '@/hooks/use-streaming-reveal'
import { DataTable } from './data-table'
interface HastNode {
position?: { start?: { offset?: number } }
}
type PreviewType = 'markdown' | 'html' | 'csv' | 'svg' | null
const PREVIEWABLE_MIME_TYPES: Record<string, PreviewType> = {
@@ -127,34 +130,11 @@ const STATIC_MARKDOWN_COMPONENTS = {
{children}
</h4>
),
code: ({
className,
children,
node: _node,
...props
}: React.HTMLAttributes<HTMLElement> & ExtraProps) => {
const isInline = !className?.includes('language-')
if (isInline) {
return (
<code
{...props}
className='whitespace-normal rounded bg-[var(--surface-5)] px-1.5 py-0.5 font-mono text-[13px] text-[var(--caution)]'
>
{children}
</code>
)
}
return (
<code
{...props}
className='my-3 block whitespace-pre-wrap break-words rounded-md bg-[var(--surface-5)] p-4 font-mono text-[13px] text-[var(--text-primary)]'
>
{children}
</code>
)
},
inlineCode: ({ children }: { children?: React.ReactNode }) => (
<code className='whitespace-normal rounded bg-[var(--surface-5)] px-1.5 py-0.5 font-mono text-[13px] text-[var(--caution)]'>
{children}
</code>
),
pre: ({ children }: { children?: React.ReactNode }) => <>{children}</>,
strong: ({ children }: { children?: React.ReactNode }) => (
<strong className='break-words font-semibold text-[var(--text-primary)]'>{children}</strong>
@@ -168,8 +148,13 @@ const STATIC_MARKDOWN_COMPONENTS = {
</blockquote>
),
hr: () => <hr className='my-6 border-[var(--border)]' />,
img: ({ src, alt, node: _node }: React.ComponentPropsWithoutRef<'img'> & ExtraProps) => (
<img src={src} alt={alt ?? ''} className='my-3 max-w-full rounded-md' loading='lazy' />
img: ({ src, alt }: React.ImgHTMLAttributes<HTMLImageElement>) => (
<img
src={src as string}
alt={alt ?? ''}
className='my-3 max-w-full rounded-md'
loading='lazy'
/>
),
table: ({ children }: { children?: React.ReactNode }) => (
<div className='my-4 max-w-full overflow-x-auto'>
@@ -193,7 +178,7 @@ const STATIC_MARKDOWN_COMPONENTS = {
),
}
function UlRenderer({ className, children }: React.ComponentPropsWithoutRef<'ul'> & ExtraProps) {
function UlRenderer({ className, children }: { className?: string; children?: React.ReactNode }) {
const isTaskList = typeof className === 'string' && className.includes('contains-task-list')
return (
<ul
@@ -207,7 +192,7 @@ function UlRenderer({ className, children }: React.ComponentPropsWithoutRef<'ul'
)
}
function OlRenderer({ className, children }: React.ComponentPropsWithoutRef<'ol'> & ExtraProps) {
function OlRenderer({ className, children }: { className?: string; children?: React.ReactNode }) {
const isTaskList = typeof className === 'string' && className.includes('contains-task-list')
return (
<ol
@@ -225,7 +210,11 @@ function LiRenderer({
className,
children,
node,
}: React.ComponentPropsWithoutRef<'li'> & ExtraProps) {
}: {
className?: string
children?: React.ReactNode
node?: HastNode
}) {
const ctx = useContext(MarkdownCheckboxCtx)
const isTaskItem = typeof className === 'string' && className.includes('task-list-item')
@@ -249,12 +238,7 @@ function LiRenderer({
return <li className='break-words leading-[1.6]'>{children}</li>
}
function InputRenderer({
type,
checked,
node: _node,
...props
}: React.ComponentPropsWithoutRef<'input'> & ExtraProps) {
function InputRenderer({ type, checked, ...props }: React.ComponentPropsWithoutRef<'input'>) {
const ctx = useContext(MarkdownCheckboxCtx)
const index = useContext(CheckboxIndexCtx)
@@ -348,7 +332,7 @@ const MARKDOWN_COMPONENTS = {
ol: OlRenderer,
li: LiRenderer,
input: InputRenderer,
} satisfies Components
}
const MarkdownPreview = memo(function MarkdownPreview({
content,
@@ -361,7 +345,6 @@ const MarkdownPreview = memo(function MarkdownPreview({
}) {
const { push: navigate } = useRouter()
const { ref: scrollRef } = useAutoScroll(isStreaming)
const { committed, incoming, generation } = useStreamingReveal(content, isStreaming)
const contentRef = useRef(content)
contentRef.current = content
@@ -387,32 +370,19 @@ const MarkdownPreview = memo(function MarkdownPreview({
}
}, [content])
const committedMarkdown = useMemo(
() =>
committed ? (
<ReactMarkdown
remarkPlugins={REMARK_PLUGINS}
rehypePlugins={REHYPE_PLUGINS}
components={MARKDOWN_COMPONENTS}
>
{committed}
</ReactMarkdown>
) : null,
[committed]
)
if (onCheckboxToggle) {
return (
<NavigateCtx.Provider value={navigate}>
<MarkdownCheckboxCtx.Provider value={ctxValue}>
<div ref={scrollRef} className='h-full overflow-auto p-6'>
<ReactMarkdown
<Streamdown
mode='static'
remarkPlugins={REMARK_PLUGINS}
rehypePlugins={REHYPE_PLUGINS}
components={MARKDOWN_COMPONENTS}
>
{content}
</ReactMarkdown>
</Streamdown>
</div>
</MarkdownCheckboxCtx.Provider>
</NavigateCtx.Provider>
@@ -422,32 +392,93 @@ const MarkdownPreview = memo(function MarkdownPreview({
return (
<NavigateCtx.Provider value={navigate}>
<div ref={scrollRef} className='h-full overflow-auto p-6'>
{committedMarkdown}
{incoming && (
<div
key={generation}
className={cn(isStreaming && 'animate-stream-fade-in', '[&>:first-child]:mt-0')}
>
<ReactMarkdown
remarkPlugins={REMARK_PLUGINS}
rehypePlugins={REHYPE_PLUGINS}
components={MARKDOWN_COMPONENTS}
>
{incoming}
</ReactMarkdown>
</div>
)}
<Streamdown
mode='static'
remarkPlugins={REMARK_PLUGINS}
rehypePlugins={REHYPE_PLUGINS}
components={MARKDOWN_COMPONENTS}
>
{content}
</Streamdown>
</div>
</NavigateCtx.Provider>
)
})
const HTML_PREVIEW_BASE_URL = 'about:srcdoc'
const HTML_PREVIEW_CSP = [
"default-src 'none'",
"script-src 'unsafe-inline'",
"style-src 'unsafe-inline'",
'img-src data: blob:',
'font-src data:',
'media-src data: blob:',
"connect-src 'none'",
"form-action 'none'",
"frame-src 'none'",
"child-src 'none'",
"object-src 'none'",
].join('; ')
const HTML_PREVIEW_BOOTSTRAP = `<script>
(() => {
const allowHref = (href) => href.startsWith('#') || /^\\s*javascript:/i.test(href)
document.addEventListener(
'click',
(event) => {
if (!(event.target instanceof Element)) return
const anchor = event.target.closest('a[href]')
if (!(anchor instanceof HTMLAnchorElement)) return
const href = anchor.getAttribute('href') || ''
if (allowHref(href)) return
event.preventDefault()
},
true
)
document.addEventListener(
'submit',
(event) => {
event.preventDefault()
},
true
)
})()
</script>`
function buildHtmlPreviewDocument(content: string): string {
const headInjection = [
'<meta charset="utf-8">',
`<base href="${HTML_PREVIEW_BASE_URL}">`,
`<meta http-equiv="Content-Security-Policy" content="${HTML_PREVIEW_CSP}">`,
HTML_PREVIEW_BOOTSTRAP,
].join('')
if (/<head[\s>]/i.test(content)) {
return content.replace(/<head(\s[^>]*)?>/i, (match) => `${match}${headInjection}`)
}
if (/<html[\s>]/i.test(content)) {
return content.replace(/<html(\s[^>]*)?>/i, (match) => `${match}<head>${headInjection}</head>`)
}
return `<!DOCTYPE html><html><head>${headInjection}</head><body>${content}</body></html>`
}
const HtmlPreview = memo(function HtmlPreview({ content }: { content: string }) {
// Run inline HTML/JS in an isolated iframe while blocking any navigation
// that would replace the preview with another document.
const wrappedContent = useMemo(() => buildHtmlPreviewDocument(content), [content])
return (
<div className='h-full overflow-hidden'>
<iframe
srcDoc={content}
sandbox='allow-same-origin'
srcDoc={wrappedContent}
sandbox='allow-scripts'
referrerPolicy='no-referrer'
title='HTML Preview'
className='h-full w-full border-0 bg-white'
/>

View File

@@ -111,6 +111,7 @@ export function AgentGroup({
toolName={item.data.toolName}
displayTitle={item.data.displayTitle}
status={item.data.status}
streamingArgs={item.data.streamingArgs}
/>
) : (
<span

View File

@@ -1,4 +1,6 @@
import { useMemo } from 'react'
import { PillsRing } from '@/components/emcn'
import { FunctionExecute, WorkspaceFile } from '@/lib/copilot/generated/tool-catalog-v1'
import type { ToolCallStatus } from '../../../../types'
import { getToolIcon } from '../../utils'
@@ -54,19 +56,95 @@ function StatusIcon({ status, toolName }: { status: ToolCallStatus; toolName: st
return <CircleCheck className='h-[15px] w-[15px] text-[var(--text-tertiary)]' />
}
const LANG_ALIASES: Record<string, string> = {
javascript: 'javascript',
python: 'python',
shell: 'bash',
bash: 'bash',
}
function extractFunctionExecutePreview(raw: string): { code: string; lang: string } | null {
if (!raw) return null
const langMatch = raw.match(/"language"\s*:\s*"(\w+)"/)
const lang = langMatch ? (LANG_ALIASES[langMatch[1]] ?? langMatch[1]) : 'javascript'
const codeStart = raw.indexOf('"code"')
if (codeStart === -1) return null
const colonIdx = raw.indexOf(':', codeStart + 6)
if (colonIdx === -1) return null
const quoteIdx = raw.indexOf('"', colonIdx + 1)
if (quoteIdx === -1) return null
let value = raw.slice(quoteIdx + 1)
if (value.endsWith('"}') || value.endsWith('"\n}')) {
value = value.replace(/"\s*\}?\s*$/, '')
}
if (value.endsWith('"')) {
value = value.slice(0, -1)
}
const code = value
.replace(/\\n/g, '\n')
.replace(/\\t/g, '\t')
.replace(/\\"/g, '"')
.replace(/\\\\/g, '\\')
return code.length > 0 ? { code, lang } : null
}
interface ToolCallItemProps {
toolName: string
displayTitle: string
status: ToolCallStatus
streamingArgs?: string
}
export function ToolCallItem({ toolName, displayTitle, status }: ToolCallItemProps) {
export function ToolCallItem({ toolName, displayTitle, status, streamingArgs }: ToolCallItemProps) {
const liveWorkspaceFileTitle = useMemo(() => {
if (toolName !== WorkspaceFile.id || !streamingArgs) return null
const titleMatch = streamingArgs.match(/"title"\s*:\s*"([^"]+)"/)
if (!titleMatch?.[1]) return null
const opMatch = streamingArgs.match(/"operation"\s*:\s*"(\w+)"/)
const op = opMatch?.[1] ?? ''
const verb =
op === 'create'
? 'Creating'
: op === 'append'
? 'Adding'
: op === 'patch'
? 'Editing'
: op === 'update'
? 'Writing'
: op === 'rename'
? 'Renaming'
: op === 'delete'
? 'Deleting'
: 'Writing'
const unescaped = titleMatch[1]
.replace(/\\u([0-9a-fA-F]{4})/g, (_, hex: string) =>
String.fromCharCode(Number.parseInt(hex, 16))
)
.replace(/\\"/g, '"')
.replace(/\\\\/g, '\\')
return `${verb} ${unescaped}`
}, [toolName, streamingArgs])
const extracted = useMemo(() => {
if (toolName !== FunctionExecute.id || !streamingArgs) return null
return extractFunctionExecutePreview(streamingArgs)
}, [toolName, streamingArgs])
const markdown = useMemo(
() => (extracted ? `\`\`\`${extracted.lang}\n${extracted.code}\n\`\`\`` : null),
[extracted]
)
return (
<div className='flex items-center gap-[8px] pl-[24px]'>
<div className='flex h-[16px] w-[16px] flex-shrink-0 items-center justify-center'>
<StatusIcon status={status} toolName={toolName} />
</div>
<span className='font-base text-[13px] text-[var(--text-secondary)]'>{displayTitle}</span>
<span className='font-base text-[13px] text-[var(--text-secondary)]'>
{liveWorkspaceFileTitle || displayTitle}
</span>
</div>
)
}

View File

@@ -1,8 +1,8 @@
'use client'
import { Children, type ComponentPropsWithoutRef, isValidElement, useMemo } from 'react'
import ReactMarkdown from 'react-markdown'
import remarkGfm from 'remark-gfm'
import { type ComponentPropsWithoutRef, useEffect, useMemo, useRef } from 'react'
import { Streamdown } from 'streamdown'
import 'streamdown/styles.css'
import 'prismjs/components/prism-typescript'
import 'prismjs/components/prism-bash'
import 'prismjs/components/prism-css'
@@ -13,15 +13,14 @@ import { CopyCodeButton } from '@/components/ui/copy-code-button'
import { cn } from '@/lib/core/utils/cn'
import { extractTextContent } from '@/lib/core/utils/react-node-text'
import {
type ContentSegment,
PendingTagIndicator,
parseSpecialTags,
SpecialTags,
} from '@/app/workspace/[workspaceId]/home/components/message-content/components/special-tags'
import { useStreamingReveal } from '@/hooks/use-streaming-reveal'
import type { MothershipResource } from '@/app/workspace/[workspaceId]/home/types'
import { useStreamingText } from '@/hooks/use-streaming-text'
const REMARK_PLUGINS = [remarkGfm]
const LANG_ALIASES: Record<string, string> = {
js: 'javascript',
ts: 'typescript',
@@ -46,8 +45,6 @@ const PROSE_CLASSES = cn(
'prose-ul:my-4 prose-ol:my-4',
'prose-strong:font-[600] prose-strong:text-[var(--text-primary)]',
'prose-a:text-[var(--text-primary)] prose-a:underline prose-a:decoration-dashed prose-a:underline-offset-4',
'prose-code:rounded prose-code:bg-[var(--surface-5)] prose-code:px-1.5 prose-code:py-0.5 prose-code:text-small prose-code:font-mono prose-code:font-[400] prose-code:text-[var(--text-primary)]',
'prose-code:before:content-none prose-code:after:content-none',
'prose-hr:border-[var(--divider)] prose-hr:my-6',
'prose-table:my-0'
)
@@ -55,8 +52,8 @@ const PROSE_CLASSES = cn(
type TdProps = ComponentPropsWithoutRef<'td'>
type ThProps = ComponentPropsWithoutRef<'th'>
const MARKDOWN_COMPONENTS: React.ComponentProps<typeof ReactMarkdown>['components'] = {
table({ children }) {
const MARKDOWN_COMPONENTS = {
table({ children }: { children?: React.ReactNode }) {
return (
<div className='not-prose my-4 w-full overflow-x-auto [&_strong]:font-[600]'>
<table className='min-w-full border-collapse [&_tbody_tr:last-child_td]:border-b-0'>
@@ -65,7 +62,7 @@ const MARKDOWN_COMPONENTS: React.ComponentProps<typeof ReactMarkdown>['component
</div>
)
},
thead({ children }) {
thead({ children }: { children?: React.ReactNode }) {
return <thead>{children}</thead>
},
th({ children, style }: ThProps) {
@@ -88,25 +85,15 @@ const MARKDOWN_COMPONENTS: React.ComponentProps<typeof ReactMarkdown>['component
</td>
)
},
pre({ children }) {
let codeString = ''
let language = ''
for (const child of Children.toArray(children)) {
if (isValidElement(child) && child.type === 'code') {
const props = child.props as { className?: string; children?: React.ReactNode }
codeString = extractTextContent(props.children)
if (props.className?.startsWith('language-')) {
language = props.className.slice(9)
}
break
}
}
code({ children, className }: { children?: React.ReactNode; className?: string }) {
const langMatch = className?.match(/language-(\w+)/)
const language = langMatch ? langMatch[1] : ''
const codeString = extractTextContent(children)
if (!codeString) {
return (
<pre className='not-prose my-6 overflow-x-auto rounded-lg bg-[var(--surface-5)] p-4 font-[430] font-mono text-[var(--text-primary)] text-small leading-[21px] dark:bg-[var(--code-bg)]'>
{children}
<code>{children}</code>
</pre>
)
}
@@ -133,7 +120,29 @@ const MARKDOWN_COMPONENTS: React.ComponentProps<typeof ReactMarkdown>['component
</div>
)
},
a({ children, href }) {
a({ children, href }: { children?: React.ReactNode; href?: string }) {
if (href?.startsWith('#wsres-')) {
return (
<a
href={href}
className='text-[var(--text-primary)] underline decoration-dashed underline-offset-4'
onClick={(e) => {
e.preventDefault()
const match = href.match(/^#wsres-(\w+)-(.+)$/)
if (match) {
const linkText = e.currentTarget.textContent || match[2]
window.dispatchEvent(
new CustomEvent('wsres-click', {
detail: { type: match[1], id: match[2], title: linkText },
})
)
}
}}
>
{children}
</a>
)
}
return (
<a
href={href}
@@ -145,16 +154,16 @@ const MARKDOWN_COMPONENTS: React.ComponentProps<typeof ReactMarkdown>['component
</a>
)
},
ul({ children, className }) {
ul({ children, className }: { children?: React.ReactNode; className?: string }) {
if (className?.includes('contains-task-list')) {
return <ul className='my-4 list-none space-y-2 pl-0'>{children}</ul>
}
return <ul className='my-4 list-disc pl-5 marker:text-[var(--text-primary)]'>{children}</ul>
},
ol({ children }) {
ol({ children }: { children?: React.ReactNode }) {
return <ol className='my-4 list-decimal pl-5 marker:text-[var(--text-primary)]'>{children}</ol>
},
li({ children, className }) {
li({ children, className }: { children?: React.ReactNode; className?: string }) {
if (className?.includes('task-list-item')) {
return (
<li className='flex list-none items-start gap-2 text-[var(--text-primary)] text-base leading-[25px] [&>p:only-child]:inline [&>p]:my-0'>
@@ -168,7 +177,14 @@ const MARKDOWN_COMPONENTS: React.ComponentProps<typeof ReactMarkdown>['component
</li>
)
},
input({ type, checked }) {
inlineCode({ children }: { children?: React.ReactNode }) {
return (
<code className='rounded bg-[var(--surface-5)] px-1.5 py-0.5 font-[400] font-mono text-[var(--text-primary)] text-small before:content-none after:content-none'>
{children}
</code>
)
},
input({ type, checked }: { type?: string; checked?: boolean }) {
if (type === 'checkbox') {
return <Checkbox checked={checked || false} disabled size='sm' className='mt-1.5 shrink-0' />
}
@@ -180,61 +196,105 @@ interface ChatContentProps {
content: string
isStreaming?: boolean
onOptionSelect?: (id: string) => void
onWorkspaceResourceSelect?: (resource: MothershipResource) => void
smoothStreaming?: boolean
}
function MarkdownChunk({
export function ChatContent({
content,
animate = false,
trimTop = true,
trimBottom = true,
}: {
content: string
animate?: boolean
trimTop?: boolean
trimBottom?: boolean
}) {
return (
<div
className={cn(
PROSE_CLASSES,
trimTop && '[&>:first-child]:mt-0',
trimBottom && '[&>:last-child]:mb-0',
animate && 'animate-stream-fade-in'
)}
>
<ReactMarkdown remarkPlugins={REMARK_PLUGINS} components={MARKDOWN_COMPONENTS}>
{content}
</ReactMarkdown>
</div>
)
}
isStreaming = false,
onOptionSelect,
onWorkspaceResourceSelect,
smoothStreaming = true,
}: ChatContentProps) {
const hydratedStreamingRef = useRef(isStreaming && content.trim().length > 0)
const previousIsStreamingRef = useRef(isStreaming)
export function ChatContent({ content, isStreaming = false, onOptionSelect }: ChatContentProps) {
const rendered = useStreamingText(content, isStreaming)
useEffect(() => {
if (!previousIsStreamingRef.current && isStreaming && content.trim().length > 0) {
hydratedStreamingRef.current = true
} else if (!isStreaming) {
hydratedStreamingRef.current = false
}
previousIsStreamingRef.current = isStreaming
}, [content, isStreaming])
const onWorkspaceResourceSelectRef = useRef(onWorkspaceResourceSelect)
onWorkspaceResourceSelectRef.current = onWorkspaceResourceSelect
useEffect(() => {
const handler = (e: Event) => {
const { type, id, title } = (e as CustomEvent).detail
const RESOURCE_TYPE_MAP: Record<string, string> = {}
onWorkspaceResourceSelectRef.current?.({
type: RESOURCE_TYPE_MAP[type] || type,
id,
title: title || id,
})
}
window.addEventListener('wsres-click', handler)
return () => window.removeEventListener('wsres-click', handler)
}, [])
const rendered = useStreamingText(content, isStreaming && smoothStreaming)
const parsed = useMemo(() => parseSpecialTags(rendered, isStreaming), [rendered, isStreaming])
const hasSpecialContent = parsed.hasPendingTag || parsed.segments.some((s) => s.type !== 'text')
const plainText = hasSpecialContent ? '' : rendered
const { committed, incoming, generation } = useStreamingReveal(
plainText,
!hasSpecialContent && isStreaming
)
const committedMarkdown = useMemo(
() => (committed ? <MarkdownChunk content={committed} trimTop trimBottom={!incoming} /> : null),
[committed, incoming]
)
if (hasSpecialContent) {
type BlockSegment = Exclude<
ContentSegment,
{ type: 'text' } | { type: 'thinking' } | { type: 'workspace_resource' }
>
type RenderGroup =
| { kind: 'inline'; markdown: string }
| { kind: 'block'; segment: BlockSegment; index: number }
const groups: RenderGroup[] = []
let pendingMarkdown = ''
const flushMarkdown = () => {
if (pendingMarkdown.trim()) {
groups.push({ kind: 'inline', markdown: pendingMarkdown })
}
pendingMarkdown = ''
}
for (let i = 0; i < parsed.segments.length; i++) {
const s = parsed.segments[i]
if (s.type === 'workspace_resource') {
const label = s.data.title || s.data.id
pendingMarkdown += `[${label}](#wsres-${s.data.type}-${s.data.id})`
} else if (s.type === 'text' || s.type === 'thinking') {
pendingMarkdown += s.content
} else {
flushMarkdown()
groups.push({ kind: 'block', segment: s, index: i })
}
}
flushMarkdown()
return (
<div className='space-y-3'>
{parsed.segments.map((segment, i) => {
if (segment.type === 'text' || segment.type === 'thinking') {
return <MarkdownChunk key={`${segment.type}-${i}`} content={segment.content} />
{groups.map((group, i) => {
if (group.kind === 'inline') {
return (
<div
key={`inline-${i}`}
className={cn(PROSE_CLASSES, '[&>:first-child]:mt-0 [&>:last-child]:mb-0')}
>
<Streamdown mode='static' components={MARKDOWN_COMPONENTS}>
{group.markdown}
</Streamdown>
</div>
)
}
return (
<SpecialTags key={`special-${i}`} segment={segment} onOptionSelect={onOptionSelect} />
<SpecialTags
key={`special-${group.index}`}
segment={group.segment}
onOptionSelect={onOptionSelect}
/>
)
})}
{parsed.hasPendingTag && isStreaming && <PendingTagIndicator />}
@@ -243,17 +303,15 @@ export function ChatContent({ content, isStreaming = false, onOptionSelect }: Ch
}
return (
<div>
{committedMarkdown}
{incoming && (
<MarkdownChunk
key={generation}
content={incoming}
trimTop
trimBottom
animate={isStreaming}
/>
)}
<div className={cn(PROSE_CLASSES, '[&>:first-child]:mt-0 [&>:last-child]:mb-0')}>
<Streamdown
mode={isStreaming ? undefined : 'static'}
isAnimating={isStreaming}
animated={isStreaming && !hydratedStreamingRef.current}
components={MARKDOWN_COMPONENTS}
>
{rendered}
</Streamdown>
</div>
)
}

View File

@@ -9,6 +9,8 @@ export type {
RuntimeSpecialTagName,
UsageUpgradeAction,
UsageUpgradeTagData,
WorkspaceResourceTagData,
WorkspaceResourceTagType,
} from './special-tags'
export {
CREDENTIAL_TAG_TYPES,
@@ -20,4 +22,6 @@ export {
parseTextTagBody,
SpecialTags,
USAGE_UPGRADE_ACTIONS,
WORKSPACE_RESOURCE_TAG_TYPES,
WorkspaceResourceDisplay,
} from './special-tags'

View File

@@ -1,10 +1,19 @@
'use client'
import { createElement, useState } from 'react'
import { createElement, useMemo, useState } from 'react'
import { useParams } from 'next/navigation'
import { ArrowRight, ChevronDown, Expandable, ExpandableContent } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { OAUTH_PROVIDERS } from '@/lib/oauth/oauth'
import { ContextMentionIcon } from '@/app/workspace/[workspaceId]/home/components/context-mention-icon'
import type {
ChatMessageContext,
MothershipResource,
} from '@/app/workspace/[workspaceId]/home/types'
import { useKnowledgeBasesQuery } from '@/hooks/queries/kb/knowledge'
import { useTablesList } from '@/hooks/queries/tables'
import { useWorkflows } from '@/hooks/queries/workflows'
import { useWorkspaceFiles } from '@/hooks/queries/workspace-files'
export interface OptionsItemData {
title: string
@@ -55,6 +64,16 @@ export interface FileTagData {
content: string
}
export const WORKSPACE_RESOURCE_TAG_TYPES = ['workflow', 'table', 'file'] as const
export type WorkspaceResourceTagType = (typeof WORKSPACE_RESOURCE_TAG_TYPES)[number]
export interface WorkspaceResourceTagData {
type: WorkspaceResourceTagType
id: string
title?: string
}
export type ContentSegment =
| { type: 'text'; content: string }
| { type: 'thinking'; content: string }
@@ -62,6 +81,7 @@ export type ContentSegment =
| { type: 'usage_upgrade'; data: UsageUpgradeTagData }
| { type: 'credential'; data: CredentialTagData }
| { type: 'mothership-error'; data: MothershipErrorTagData }
| { type: 'workspace_resource'; data: WorkspaceResourceTagData }
export type RuntimeSpecialTagName =
| 'thinking'
@@ -69,6 +89,7 @@ export type RuntimeSpecialTagName =
| 'credential'
| 'mothership-error'
| 'file'
| 'workspace_resource'
export interface ParsedSpecialContent {
segments: ContentSegment[]
@@ -81,6 +102,7 @@ const RUNTIME_SPECIAL_TAG_NAMES = [
'credential',
'mothership-error',
'file',
'workspace_resource',
] as const
const SPECIAL_TAG_NAMES = [
@@ -89,6 +111,7 @@ const SPECIAL_TAG_NAMES = [
'usage_upgrade',
'credential',
'mothership-error',
'workspace_resource',
] as const
function isRecord(value: unknown): value is Record<string, unknown> {
@@ -134,6 +157,16 @@ function isMothershipErrorTagData(value: unknown): value is MothershipErrorTagDa
)
}
function isWorkspaceResourceTagData(value: unknown): value is WorkspaceResourceTagData {
if (!isRecord(value)) return false
return (
typeof value.type === 'string' &&
(WORKSPACE_RESOURCE_TAG_TYPES as readonly string[]).includes(value.type) &&
typeof value.id === 'string' &&
value.id.trim().length > 0
)
}
export function parseJsonTagBody<T>(
body: string,
isExpectedShape: (value: unknown) => value is T
@@ -181,6 +214,7 @@ function parseSpecialTagData(
| { type: 'usage_upgrade'; data: UsageUpgradeTagData }
| { type: 'credential'; data: CredentialTagData }
| { type: 'mothership-error'; data: MothershipErrorTagData }
| { type: 'workspace_resource'; data: WorkspaceResourceTagData }
| null {
if (tagName === 'thinking') {
const content = parseTextTagBody(body)
@@ -207,11 +241,16 @@ function parseSpecialTagData(
return data ? { type: 'mothership-error', data } : null
}
if (tagName === 'workspace_resource') {
const data = parseJsonTagBody(body, isWorkspaceResourceTagData)
return data ? { type: 'workspace_resource', data } : null
}
return null
}
/**
* Parses inline special tags (`<options>`, `<usage_upgrade>`) from streamed
* Parses inline special tags (`<options>`, `<usage_upgrade>`, `<workspace_resource>`) from streamed
* text content. Complete tags are extracted into typed segments; incomplete
* tags (still streaming) are suppressed from display and flagged via
* `hasPendingTag` so the caller can show a loading indicator.
@@ -307,12 +346,18 @@ const THINKING_BLOCKS = [
interface SpecialTagsProps {
segment: Exclude<ContentSegment, { type: 'text' }>
onOptionSelect?: (id: string) => void
onWorkspaceResourceSelect?: (resource: MothershipResource) => void
}
/**
* Unified renderer for inline special tags: `<options>`, `<usage_upgrade>`, and `<credential>`.
* Unified renderer for inline special tags: `<options>`, `<usage_upgrade>`, `<credential>`,
* and `<workspace_resource>`.
*/
export function SpecialTags({ segment, onOptionSelect }: SpecialTagsProps) {
export function SpecialTags({
segment,
onOptionSelect,
onWorkspaceResourceSelect,
}: SpecialTagsProps) {
switch (segment.type) {
case 'thinking':
return null
@@ -324,6 +369,8 @@ export function SpecialTags({ segment, onOptionSelect }: SpecialTagsProps) {
return <CredentialDisplay data={segment.data} />
case 'mothership-error':
return <MothershipErrorDisplay data={segment.data} />
case 'workspace_resource':
return <WorkspaceResourceDisplay data={segment.data} onSelect={onWorkspaceResourceSelect} />
default:
return null
}
@@ -413,6 +460,102 @@ function OptionsDisplay({ data, onSelect }: OptionsDisplayProps) {
)
}
/**
 * Default chip label for a workspace resource whose real name has not been
 * resolved from the workspace caches yet.
 */
function fallbackWorkspaceResourceTitle(type: WorkspaceResourceTagType): string {
  switch (type) {
    case 'table':
      return 'Table'
    case 'file':
      return 'File'
    case 'workflow':
      return 'Workflow'
  }
}
/**
 * Workspace resource tag types are a subset of Mothership resource types, so the
 * value passes through unchanged; the wrapper exists purely as a type-level bridge.
 */
function toMothershipResourceType(type: WorkspaceResourceTagType): MothershipResource['type'] {
  const resourceType: MothershipResource['type'] = type
  return resourceType
}
/**
 * Converts a parsed `<workspace_resource>` tag plus its resolved display label
 * into the mention-context shape used by chat messages, mapping the tag id onto
 * the kind-specific id field.
 */
function toChatMessageContext(data: WorkspaceResourceTagData, label: string): ChatMessageContext {
  const { type, id } = data
  switch (type) {
    case 'workflow':
      return { kind: 'workflow', label, workflowId: id }
    case 'table':
      return { kind: 'table', label, tableId: id }
    case 'file':
      return { kind: 'file', label, fileId: id }
  }
}
/**
 * Renders an inline `<workspace_resource>` tag from a streamed assistant message
 * as a mention-style chip (type icon + resolved title). When `onSelect` is
 * provided the chip becomes a clickable button that opens the referenced resource;
 * otherwise it renders as a plain inline span.
 *
 * Titles are resolved client-side by looking the tag id up in the workspace's
 * cached workflow/table/file/knowledge-base lists; when the id is not found
 * (yet), a generic per-type fallback label is shown instead.
 */
export function WorkspaceResourceDisplay({
  data,
  onSelect,
}: {
  data: WorkspaceResourceTagData
  onSelect?: (resource: MothershipResource) => void
}) {
  const { workspaceId } = useParams<{ workspaceId: string }>()
  const { data: workflows = [] } = useWorkflows(workspaceId)
  const { data: tables = [] } = useTablesList(workspaceId)
  const { data: files = [] } = useWorkspaceFiles(workspaceId)
  const { data: knowledgeBases = [] } = useKnowledgeBasesQuery(workspaceId)

  // Resolve the display title from the matching workspace cache for the tag type.
  // NOTE(review): the final branch falls through to knowledge bases, but
  // fallbackWorkspaceResourceTitle only covers workflow/table/file — confirm the
  // tag type union actually includes a knowledge-base variant.
  const resource = useMemo<MothershipResource>(() => {
    const title =
      data.type === 'workflow'
        ? (workflows.find((workflow) => workflow.id === data.id)?.name ??
          fallbackWorkspaceResourceTitle(data.type))
        : data.type === 'table'
          ? (tables.find((table) => table.id === data.id)?.name ??
            fallbackWorkspaceResourceTitle(data.type))
          : data.type === 'file'
            ? (files.find((file) => file.id === data.id)?.name ??
              fallbackWorkspaceResourceTitle(data.type))
            : (knowledgeBases.find((knowledgeBase) => knowledgeBase.id === data.id)?.name ??
              fallbackWorkspaceResourceTitle(data.type))
    return {
      type: toMothershipResourceType(data.type),
      id: data.id,
      title,
    }
  }, [data.id, data.type, files, knowledgeBases, tables, workflows])

  const context = useMemo(() => toChatMessageContext(data, resource.title), [data, resource.title])

  // Workflows carry a user-chosen color that tints the mention icon; other types don't.
  const workflowColor = useMemo(() => {
    if (data.type !== 'workflow') return null
    return workflows.find((workflow) => workflow.id === data.id)?.color ?? null
  }, [data.id, data.type, workflows])

  const mentionContent = (
    <>
      <ContextMentionIcon
        context={context}
        workflowColor={workflowColor}
        className='relative top-0.5 h-[12px] w-[12px] flex-shrink-0 text-[var(--text-icon)]'
      />
      {resource.title}
    </>
  )

  // Baseline-aligned and font-inheriting so the chip sits naturally inside flowing text.
  const classes =
    'inline-flex items-baseline gap-1 rounded-[5px] bg-[var(--surface-5)] px-[5px] align-baseline font-[inherit] text-[inherit] leading-[inherit]'

  // Read-only rendering when no selection handler is wired up.
  if (!onSelect) {
    return <span className={classes}>{mentionContent}</span>
  }

  return (
    <button
      type='button'
      onClick={() => onSelect(resource)}
      className={cn(classes, 'cursor-pointer transition-colors hover-hover:bg-[var(--surface-6)]')}
    >
      {mentionContent}
    </button>
  )
}
function getCredentialIcon(provider: string): React.ComponentType<{ className?: string }> | null {
const lower = provider.toLowerCase()

View File

@@ -1,21 +1,17 @@
'use client'
import type { AgentGroupItem } from '@/app/workspace/[workspaceId]/home/components/message-content/components'
import {
AgentGroup,
ChatContent,
CircleStop,
Options,
PendingTagIndicator,
} from '@/app/workspace/[workspaceId]/home/components/message-content/components'
import type {
ContentBlock,
MothershipToolName,
OptionItem,
SubagentName,
ToolCallData,
} from '@/app/workspace/[workspaceId]/home/types'
import { SUBAGENT_LABELS, TOOL_UI_METADATA } from '@/app/workspace/[workspaceId]/home/types'
File as FileTool,
Read as ReadTool,
ToolSearchToolRegex,
WorkspaceFile,
} from '@/lib/copilot/generated/tool-catalog-v1'
import { resolveToolDisplay } from '@/lib/copilot/tools/client/store-utils'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import type { ContentBlock, MothershipResource, OptionItem, ToolCallData } from '../../types'
import { SUBAGENT_LABELS, TOOL_UI_METADATA } from '../../types'
import type { AgentGroupItem } from './components'
import { AgentGroup, ChatContent, CircleStop, Options, PendingTagIndicator } from './components'
interface TextSegment {
type: 'text'
@@ -47,16 +43,29 @@ const SUBAGENT_KEYS = new Set(Object.keys(SUBAGENT_LABELS))
/**
* Maps subagent names to the Mothership tool that dispatches them when the
* tool name differs from the subagent name (e.g. `workspace_file` → `file_write`).
* tool name differs from the subagent name (e.g. `workspace_file` → `file`).
* When a `subagent` block arrives, any trailing dispatch tool in the previous
* group is absorbed so it doesn't render as a separate Mothership entry.
*/
const SUBAGENT_DISPATCH_TOOLS: Record<string, string> = {
file_write: 'workspace_file',
[FileTool.id]: WorkspaceFile.id,
}
/** True when a read tool call targets an internal tool-result artifact (hidden from the UI). */
function isToolResultRead(params?: Record<string, unknown>): boolean {
  if (!params) return false
  const candidate = params.path
  if (typeof candidate !== 'string') return false
  return candidate.startsWith('internal/tool-results/')
}
/** "tool_search_v2" -> "Tool Search": drops a trailing version suffix and title-cases each word. */
function formatToolName(name: string): string {
  const withoutVersion = name.replace(/_v\d+$/, '')
  const words: string[] = []
  for (const word of withoutVersion.split('_')) {
    words.push(word.charAt(0).toUpperCase() + word.slice(1))
  }
  return words.join(' ')
}
function resolveAgentLabel(key: string): string {
return SUBAGENT_LABELS[key as SubagentName] ?? key
return SUBAGENT_LABELS[key] ?? formatToolName(key)
}
function isToolDone(status: ToolCallData['status']): boolean {
@@ -67,12 +76,41 @@ function isDelegatingTool(tc: NonNullable<ContentBlock['toolCall']>): boolean {
return tc.status === 'executing'
}
/**
 * Translates a content-block tool status into the client tool-call display state.
 * Any non-terminal status (including unknown values) renders as "executing".
 */
function mapToolStatusToClientState(
  status: ContentBlock['toolCall'] extends { status: infer T } ? T : string
) {
  if (status === 'success') return ClientToolCallState.success
  if (status === 'error') return ClientToolCallState.error
  if (status === 'cancelled') return ClientToolCallState.cancelled
  return ClientToolCallState.executing
}
/**
 * Title override for tools whose label comes from the client display registry
 * (read tool and respond-style tools). Returns undefined for everything else so
 * the default title resolution can run.
 */
function getOverrideDisplayTitle(tc: NonNullable<ContentBlock['toolCall']>): string | undefined {
  const usesRegistryTitle =
    tc.name === ReadTool.id || tc.name === 'respond' || tc.name.endsWith('_respond')
  if (!usesRegistryTitle) return undefined
  const display = resolveToolDisplay(tc.name, mapToolStatusToClientState(tc.status), tc.id, tc.params)
  return display?.text
}
function toToolData(tc: NonNullable<ContentBlock['toolCall']>): ToolCallData {
const overrideDisplayTitle = getOverrideDisplayTitle(tc)
const displayTitle =
overrideDisplayTitle ||
tc.displayTitle ||
TOOL_UI_METADATA[tc.name as keyof typeof TOOL_UI_METADATA]?.title ||
formatToolName(tc.name)
return {
id: tc.id,
toolName: tc.name,
displayTitle:
tc.displayTitle ?? TOOL_UI_METADATA[tc.name as MothershipToolName]?.title ?? tc.name,
displayTitle,
status: tc.status,
params: tc.params,
result: tc.result,
@@ -172,7 +210,8 @@ function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
if (block.type === 'tool_call') {
if (!block.toolCall) continue
const tc = block.toolCall
if (tc.name === 'tool_search_tool_regex') continue
if (tc.name === ToolSearchToolRegex.id) continue
if (tc.name === ReadTool.id && isToolResultRead(tc.params)) continue
const isDispatch = SUBAGENT_KEYS.has(tc.name) && !tc.calledBy
if (isDispatch) {
@@ -292,6 +331,7 @@ interface MessageContentProps {
fallbackContent: string
isStreaming: boolean
onOptionSelect?: (id: string) => void
onWorkspaceResourceSelect?: (resource: MothershipResource) => void
}
export function MessageContent({
@@ -299,6 +339,7 @@ export function MessageContent({
fallbackContent,
isStreaming = false,
onOptionSelect,
onWorkspaceResourceSelect,
}: MessageContentProps) {
const parsed = blocks.length > 0 ? parseBlocks(blocks) : []
@@ -312,7 +353,7 @@ export function MessageContent({
if (segments.length === 0) {
if (isStreaming) {
return (
<div className='space-y-2.5'>
<div className='space-y-[10px]'>
<PendingTagIndicator />
</div>
)
@@ -333,6 +374,7 @@ export function MessageContent({
const hasSubagentEnded = blocks.some((b) => b.type === 'subagent_end')
const showTrailingThinking =
isStreaming && !hasTrailingContent && (hasSubagentEnded || allLastGroupToolsDone)
const hasStructuredSegments = segments.some((segment) => segment.type !== 'text')
const lastOpenSubagentGroupId = [...segments]
.reverse()
.find(
@@ -341,7 +383,7 @@ export function MessageContent({
)?.id
return (
<div className='space-y-2.5'>
<div className='space-y-[10px]'>
{segments.map((segment, i) => {
switch (segment.type) {
case 'text':
@@ -351,6 +393,8 @@ export function MessageContent({
content={segment.content}
isStreaming={isStreaming}
onOptionSelect={onOptionSelect}
onWorkspaceResourceSelect={onWorkspaceResourceSelect}
smoothStreaming={!hasStructuredSegments}
/>
)
case 'agent_group': {
@@ -384,9 +428,11 @@ export function MessageContent({
)
case 'stopped':
return (
<div key={`stopped-${i}`} className='flex items-center gap-2'>
<div key={`stopped-${i}`} className='flex items-center gap-[8px]'>
<CircleStop className='h-[16px] w-[16px] flex-shrink-0 text-[var(--text-icon)]' />
<span className='font-base text-[var(--text-body)] text-sm'>Stopped by user</span>
<span className='font-base text-[14px] text-[var(--text-body)]'>
Stopped by user
</span>
</div>
)
}

View File

@@ -2,9 +2,7 @@ import type { ComponentType, SVGProps } from 'react'
import {
Asterisk,
Blimp,
Bug,
Calendar,
ClipboardList,
Database,
Eye,
File,
@@ -23,106 +21,42 @@ import {
} from '@/components/emcn'
import { Table as TableIcon } from '@/components/emcn/icons'
import { AgentIcon } from '@/components/icons'
import type { MothershipToolName, SubagentName } from '@/app/workspace/[workspaceId]/home/types'
export type IconComponent = ComponentType<SVGProps<SVGSVGElement>>
const TOOL_ICONS: Record<MothershipToolName | SubagentName | 'mothership', IconComponent> = {
const TOOL_ICONS: Record<string, IconComponent> = {
mothership: Blimp,
// Workspace
glob: FolderCode,
grep: Search,
read: File,
// Search
search_online: Search,
scrape_page: Search,
get_page_contents: Search,
search_library_docs: Library,
crawl_website: Search,
// Execution
function_execute: TerminalWindow,
superagent: Blimp,
run_workflow: PlayOutline,
run_block: PlayOutline,
run_from_block: PlayOutline,
run_workflow_until_block: PlayOutline,
complete_job: PlayOutline,
get_execution_summary: ClipboardList,
get_job_logs: ClipboardList,
get_workflow_logs: ClipboardList,
get_workflow_data: Layout,
get_block_outputs: ClipboardList,
get_block_upstream_references: ClipboardList,
get_deployed_workflow_state: Rocket,
check_deployment_status: Rocket,
// Workflows & folders
create_workflow: Layout,
delete_workflow: Layout,
edit_workflow: Pencil,
rename_workflow: Pencil,
move_workflow: Layout,
create_folder: FolderCode,
delete_folder: FolderCode,
move_folder: FolderCode,
list_folders: FolderCode,
list_user_workspaces: Layout,
revert_to_version: Rocket,
get_deployment_version: Rocket,
open_resource: Eye,
// Files
workspace_file: File,
download_to_workspace_file: File,
materialize_file: File,
generate_image: File,
generate_visualization: File,
// Tables & knowledge
user_table: TableIcon,
knowledge_base: Database,
// Jobs
create_job: Calendar,
manage_job: Calendar,
update_job_history: Calendar,
job_respond: Calendar,
// Management
manage_mcp_tool: Settings,
manage_skill: Asterisk,
manage_credential: Integration,
manage_custom_tool: Wrench,
update_workspace_mcp_server: Settings,
delete_workspace_mcp_server: Settings,
create_workspace_mcp_server: Settings,
list_workspace_mcp_servers: Settings,
oauth_get_auth_link: Integration,
oauth_request_access: Integration,
set_environment_variables: Settings,
set_global_workflow_variables: Settings,
get_platform_actions: Settings,
search_documentation: Library,
search_patterns: Search,
deploy_api: Rocket,
deploy_chat: Rocket,
deploy_mcp: Rocket,
redeploy: Rocket,
generate_api_key: Asterisk,
user_memory: Database,
context_write: Pencil,
context_compaction: Asterisk,
// Subagents
build: Hammer,
function_execute: TerminalWindow,
superagent: Blimp,
user_table: TableIcon,
workspace_file: File,
edit_content: File,
create_workflow: Layout,
edit_workflow: Pencil,
workflow: Hammer,
run: PlayOutline,
deploy: Rocket,
auth: Integration,
knowledge: Database,
knowledge_base: Database,
table: TableIcon,
job: Calendar,
agent: AgentIcon,
custom_tool: Wrench,
research: Search,
plan: ClipboardList,
debug: Bug,
edit: Pencil,
fast_edit: Pencil,
file_write: File,
context_compaction: Asterisk,
open_resource: Eye,
file: File,
}
export function getAgentIcon(name: string): IconComponent {

View File

@@ -15,6 +15,7 @@ import { UserMessageContent } from '@/app/workspace/[workspaceId]/home/component
import type {
ChatMessage,
FileAttachmentForApi,
MothershipResource,
QueuedMessage,
} from '@/app/workspace/[workspaceId]/home/types'
import { useAutoScroll } from '@/hooks/use-auto-scroll'
@@ -38,6 +39,7 @@ interface MothershipChatProps {
chatId?: string
onContextAdd?: (context: ChatContext) => void
onContextRemove?: (context: ChatContext) => void
onWorkspaceResourceSelect?: (resource: MothershipResource) => void
editValue?: string
onEditValueConsumed?: () => void
layout?: 'mothership-view' | 'copilot-view'
@@ -85,6 +87,7 @@ export function MothershipChat({
chatId,
onContextAdd,
onContextRemove,
onWorkspaceResourceSelect,
editValue,
onEditValueConsumed,
layout = 'mothership-view',
@@ -175,6 +178,7 @@ export function MothershipChat({
fallbackContent={msg.content}
isStreaming={isThisStreaming}
onOptionSelect={isLastMessage ? onSubmit : undefined}
onWorkspaceResourceSelect={onWorkspaceResourceSelect}
/>
{!isThisStreaming && (msg.content || msg.contentBlocks?.length) && (
<div className='mt-2.5'>
@@ -182,6 +186,7 @@ export function MothershipChat({
content={msg.content}
chatId={chatId}
userQuery={precedingUserMsg?.content}
requestId={msg.requestId}
/>
</div>
)}

View File

@@ -12,11 +12,12 @@ import {
SquareArrowUpRight,
WorkflowX,
} from '@/components/emcn/icons'
import type { FilePreviewSession } from '@/lib/copilot/request/session'
import {
cancelRunToolExecution,
markRunToolManuallyStopped,
reportManualRunToolStop,
} from '@/lib/copilot/client-sse/run-tool-execution'
} from '@/lib/copilot/tools/client/run-tool-execution'
import {
downloadWorkspaceFile,
getFileExtension,
@@ -65,7 +66,7 @@ interface ResourceContentProps {
workspaceId: string
resource: MothershipResource
previewMode?: PreviewMode
streamingFile?: { fileName: string; content: string } | null
previewSession?: FilePreviewSession | null
genericResourceData?: GenericResourceData
}
@@ -80,18 +81,18 @@ export const ResourceContent = memo(function ResourceContent({
workspaceId,
resource,
previewMode,
streamingFile,
previewSession,
genericResourceData,
}: ResourceContentProps) {
const streamFileName = streamingFile?.fileName || 'file.md'
const streamingExtractedContent = useMemo(() => {
if (!streamingFile) return undefined
const extracted = extractFileContent(streamingFile.content)
return extracted.length > 0 ? extracted : undefined
}, [streamingFile])
const streamFileName = previewSession?.fileName || 'file.md'
const syntheticFile = useMemo(() => {
const ext = getFileExtension(streamFileName)
const type = ext === 'pptx' ? 'text/x-pptxgenjs' : getMimeTypeFromExtension(ext)
const SOURCE_MIME_MAP: Record<string, string> = {
pptx: 'text/x-pptxgenjs',
docx: 'text/x-docxjs',
pdf: 'text/x-pdflibjs',
}
const type = SOURCE_MIME_MAP[ext] ?? getMimeTypeFromExtension(ext)
return {
id: 'streaming-file',
workspaceId,
@@ -105,16 +106,25 @@ export const ResourceContent = memo(function ResourceContent({
}
}, [workspaceId, streamFileName])
if (streamingFile && resource.id === 'streaming-file') {
const streamingFileMode: 'append' | 'replace' = 'replace'
const disableStreamingAutoScroll = previewSession?.operation === 'patch'
const rawPreviewText = previewSession?.previewText
const streamingPreviewText =
typeof rawPreviewText === 'string' && rawPreviewText.length > 0 ? rawPreviewText : undefined
if (previewSession && resource.id === 'streaming-file') {
return (
<div className='flex h-full flex-col overflow-hidden'>
{streamingExtractedContent !== undefined ? (
{streamingPreviewText !== undefined ? (
<FileViewer
file={syntheticFile}
workspaceId={workspaceId}
canEdit={false}
previewMode={previewMode ?? 'preview'}
streamingContent={streamingExtractedContent}
streamingContent={streamingPreviewText}
streamingMode={streamingFileMode}
disableStreamingAutoScroll={disableStreamingAutoScroll}
useCodeRendererForCodeFiles
/>
) : (
<div className='flex h-full items-center justify-center'>
@@ -136,7 +146,11 @@ export const ResourceContent = memo(function ResourceContent({
workspaceId={workspaceId}
fileId={resource.id}
previewMode={previewMode}
streamingContent={streamingExtractedContent}
streamingContent={
previewSession?.fileId === resource.id ? streamingPreviewText : undefined
}
streamingMode={streamingFileMode}
disableStreamingAutoScroll={disableStreamingAutoScroll}
/>
)
@@ -425,9 +439,18 @@ interface EmbeddedFileProps {
fileId: string
previewMode?: PreviewMode
streamingContent?: string
streamingMode?: 'append' | 'replace'
disableStreamingAutoScroll?: boolean
}
function EmbeddedFile({ workspaceId, fileId, previewMode, streamingContent }: EmbeddedFileProps) {
function EmbeddedFile({
workspaceId,
fileId,
previewMode,
streamingContent,
streamingMode,
disableStreamingAutoScroll = false,
}: EmbeddedFileProps) {
const { canEdit } = useUserPermissionsContext()
const { data: files = [], isLoading, isFetching } = useWorkspaceFiles(workspaceId)
const file = useMemo(() => files.find((f) => f.id === fileId), [files, fileId])
@@ -455,8 +478,11 @@ function EmbeddedFile({ workspaceId, fileId, previewMode, streamingContent }: Em
file={file}
workspaceId={workspaceId}
canEdit={canEdit}
streamingMode={streamingMode}
previewMode={previewMode}
streamingContent={streamingContent}
disableStreamingAutoScroll={disableStreamingAutoScroll}
useCodeRendererForCodeFiles
/>
</div>
)
@@ -527,16 +553,3 @@ function EmbeddedFolder({ workspaceId, folderId }: EmbeddedFolderProps) {
</div>
)
}
/**
 * Best-effort extraction of the streamed `"content"` field from a partial JSON
 * tool-call payload, unescaping common JSON escapes so the text can be previewed
 * while the stream is still in flight.
 *
 * Intentionally lenient: any payload after the content string (e.g. a trailing
 * `"}` once the JSON completes) is left in place because the document may still
 * be incomplete. Returns '' when no content field has arrived yet.
 */
function extractFileContent(raw: string): string {
  const marker = '"content":'
  const idx = raw.indexOf(marker)
  if (idx === -1) return ''
  let rest = raw.slice(idx + marker.length).trimStart()
  if (rest.startsWith('"')) rest = rest.slice(1)
  // Unescape in a single left-to-right pass so an escaped backslash followed by
  // 'n' (raw `\\n`) stays a literal backslash + 'n'. The previous sequential
  // replaces handled `\n` before `\\`, turning that sequence into a newline.
  return rest.replace(/\\(["\\nt])/g, (_match, escaped: string) => {
    if (escaped === 'n') return '\n'
    if (escaped === 't') return '\t'
    // '"' and '\' unescape to themselves.
    return escaped
  })
}

View File

@@ -9,8 +9,8 @@ import {
} from 'react'
import { Button, Tooltip } from '@/components/emcn'
import { Columns3, Eye, PanelLeft, Pencil } from '@/components/emcn/icons'
import { isEphemeralResource } from '@/lib/copilot/resource-extraction'
import { SIM_RESOURCE_DRAG_TYPE } from '@/lib/copilot/resource-types'
import { isEphemeralResource } from '@/lib/copilot/resources/types'
import { cn } from '@/lib/core/utils/cn'
import type { PreviewMode } from '@/app/workspace/[workspaceId]/files/components/file-viewer'
import { AddResourceDropdown } from '@/app/workspace/[workspaceId]/home/components/mothership-view/components/add-resource-dropdown'

View File

@@ -1,6 +1,7 @@
'use client'
import { forwardRef, memo, useCallback, useState } from 'react'
import { forwardRef, memo, useCallback, useEffect, useState } from 'react'
import type { FilePreviewSession } from '@/lib/copilot/request/session'
import { cn } from '@/lib/core/utils/cn'
import { getFileExtension } from '@/lib/uploads/utils/file-utils'
import type { PreviewMode } from '@/app/workspace/[workspaceId]/files/components/file-viewer'
@@ -19,37 +20,19 @@ const PREVIEW_CYCLE: Record<PreviewMode, PreviewMode> = {
preview: 'editor',
} as const
/** Last path segment of a streamed file name; tolerates Windows separators and trailing slashes. */
function streamFileBasename(name: string): string {
  const normalized = name.replace(/\\/g, '/').trim()
  const segments = normalized.split('/').filter((segment) => segment.length > 0)
  if (segments.length === 0) return normalized
  return segments[segments.length - 1]!
}
/** Compares two file names by basename only, ignoring directories and separator style. */
function fileTitlesEquivalent(streamFileName: string, resourceTitle: string): boolean {
  const left = streamFileBasename(streamFileName)
  const right = streamFileBasename(resourceTitle)
  return left === right
}
/**
* Whether the active resource should show the in-progress file_write stream.
* The synthetic `streaming-file` tab always shows it; a real file tab shows it when
* the streamed `fileName` matches that resource (so users who stay on the open file see live text).
* Whether the active resource should show the in-progress file stream.
* The synthetic `streaming-file` tab always shows it; a real file tab only shows it
* when the streamed fileId matches that exact resource.
*/
/**
 * True when the raw streamed tool-call text already names the given file id —
 * i.e. it contains a `"fileId": "<id>"` JSON pair (id regex-escaped before matching).
 */
function streamReferencesFileId(raw: string, fileId: string): boolean {
  if (fileId === '') return false
  const escapedId = fileId.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
  const pattern = new RegExp(`"fileId"\\s*:\\s*"${escapedId}"`)
  return pattern.test(raw)
}
function shouldShowStreamingFilePanel(
streamingFile: { fileName: string; content: string } | null | undefined,
previewSession: FilePreviewSession | null | undefined,
active: MothershipResource | null
): boolean {
if (!streamingFile || !active) return false
if (!previewSession || previewSession.status === 'complete' || !active) return false
if (active.id === 'streaming-file') return true
if (active.type !== 'file') return false
const fn = streamingFile.fileName.trim()
if (fn && fileTitlesEquivalent(fn, active.title)) return true
if (active.id && streamReferencesFileId(streamingFile.content, active.id)) return true
if (active.id && previewSession.fileId === active.id) return true
return false
}
@@ -65,7 +48,7 @@ interface MothershipViewProps {
onCollapse: () => void
isCollapsed: boolean
className?: string
streamingFile?: { fileName: string; content: string } | null
previewSession?: FilePreviewSession | null
genericResourceData?: GenericResourceData
}
@@ -83,7 +66,7 @@ export const MothershipView = memo(
onCollapse,
isCollapsed,
className,
streamingFile,
previewSession,
genericResourceData,
}: MothershipViewProps,
ref
@@ -91,20 +74,21 @@ export const MothershipView = memo(
const active = resources.find((r) => r.id === activeResourceId) ?? resources[0] ?? null
const { canEdit } = useUserPermissionsContext()
const streamingForActive =
streamingFile && active && shouldShowStreamingFilePanel(streamingFile, active)
? streamingFile
const previewForActive =
previewSession && active && shouldShowStreamingFilePanel(previewSession, active)
? previewSession
: undefined
const [previewMode, setPreviewMode] = useState<PreviewMode>('preview')
const [prevActiveId, setPrevActiveId] = useState<string | null | undefined>(active?.id)
const handleCyclePreview = useCallback(() => setPreviewMode((m) => PREVIEW_CYCLE[m]), [])
// Reset preview mode to default when the active resource changes (guarded render-phase update)
if (active?.id !== prevActiveId) {
setPrevActiveId(active?.id)
setPreviewMode('preview')
}
useEffect(() => {
if (active?.id !== prevActiveId) {
setPrevActiveId(active?.id)
setPreviewMode('preview')
}
}, [active?.id, prevActiveId])
const isActivePreviewable =
canEdit &&
@@ -143,7 +127,7 @@ export const MothershipView = memo(
workspaceId={workspaceId}
resource={active}
previewMode={isActivePreviewable ? previewMode : undefined}
streamingFile={streamingForActive}
previewSession={previewForActive}
genericResourceData={active.type === 'generic' ? genericResourceData : undefined}
/>
) : (

View File

@@ -65,14 +65,16 @@ function MentionHighlight({ context }: { context: ChatMessageContext }) {
}
export function UserMessageContent({ content, contexts }: UserMessageContentProps) {
const trimmed = content.trim()
if (!contexts || contexts.length === 0) {
return <p className={USER_MESSAGE_CLASSES}>{content}</p>
return <p className={USER_MESSAGE_CLASSES}>{trimmed}</p>
}
const ranges = computeMentionRanges(content, contexts)
if (ranges.length === 0) {
return <p className={USER_MESSAGE_CLASSES}>{content}</p>
return <p className={USER_MESSAGE_CLASSES}>{trimmed}</p>
}
const elements: React.ReactNode[] = []

View File

@@ -17,7 +17,7 @@ import { useChatHistory, useMarkTaskRead } from '@/hooks/queries/tasks'
import type { ChatContext } from '@/stores/panel'
import { MothershipChat, MothershipView, TemplatePrompts, UserInput } from './components'
import { getMothershipUseChatOptions, useChat, useMothershipResize } from './hooks'
import type { FileAttachmentForApi, MothershipResourceType } from './types'
import type { FileAttachmentForApi, MothershipResource, MothershipResourceType } from './types'
const logger = createLogger('Home')
@@ -157,7 +157,7 @@ export function Home({ chatId }: HomeProps = {}) {
removeFromQueue,
sendNow,
editQueuedMessage,
streamingFile,
previewSession,
genericResourceData,
} = useChat(
workspaceId,
@@ -299,6 +299,17 @@ export function Home({ chatId }: HomeProps = {}) {
[resolveResourceFromContext, removeResource]
)
const handleWorkspaceResourceSelect = useCallback(
(resource: MothershipResource) => {
const wasAdded = addResource(resource)
if (!wasAdded) {
setActiveResourceId(resource.id)
}
handleResourceEvent()
},
[addResource, handleResourceEvent, setActiveResourceId]
)
const hasMessages = messages.length > 0
useEffect(() => {
@@ -368,6 +379,7 @@ export function Home({ chatId }: HomeProps = {}) {
chatId={resolvedChatId}
onContextAdd={handleContextAdd}
onContextRemove={handleContextRemove}
onWorkspaceResourceSelect={handleWorkspaceResourceSelect}
editValue={editingInputValue}
onEditValueConsumed={clearEditingValue}
animateInput={isInputEntering}
@@ -401,8 +413,8 @@ export function Home({ chatId }: HomeProps = {}) {
onReorderResources={reorderResources}
onCollapse={collapseResource}
isCollapsed={isResourceCollapsed}
streamingFile={streamingFile}
genericResourceData={genericResourceData}
previewSession={previewSession}
genericResourceData={genericResourceData ?? undefined}
className={skipResourceTransition ? '!transition-none' : undefined}
/>

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,182 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
import type { FilePreviewSession } from '@/lib/copilot/request/session'
import {
buildCompletedPreviewSessions,
INITIAL_FILE_PREVIEW_SESSIONS_STATE,
reduceFilePreviewSessions,
} from '@/app/workspace/[workspaceId]/home/hooks/use-file-preview-sessions'
/**
 * Test factory for FilePreviewSession fixtures: fills sensible defaults and only
 * includes optional keys when the override supplies a truthy value, matching real
 * payloads where absent optional fields are omitted rather than set to undefined.
 */
function createSession(
  overrides: Partial<FilePreviewSession> & Pick<FilePreviewSession, 'id' | 'toolCallId'>
): FilePreviewSession {
  const session: FilePreviewSession = {
    schemaVersion: 1,
    id: overrides.id,
    streamId: overrides.streamId ?? 'stream-1',
    toolCallId: overrides.toolCallId,
    status: overrides.status ?? 'streaming',
    fileName: overrides.fileName ?? `${overrides.id}.md`,
    previewText: overrides.previewText ?? '',
    previewVersion: overrides.previewVersion ?? 1,
    updatedAt: overrides.updatedAt ?? '2026-04-10T00:00:00.000Z',
  }
  if (overrides.fileId) session.fileId = overrides.fileId
  if (overrides.targetKind) session.targetKind = overrides.targetKind
  if (overrides.operation) session.operation = overrides.operation
  if (overrides.edit) session.edit = overrides.edit
  if (overrides.completedAt) session.completedAt = overrides.completedAt
  return session
}
// Unit tests for the file-preview-session reducer and its helpers. Each case
// drives the pure reducer with hand-built sessions from createSession above.
describe('reduceFilePreviewSessions', () => {
  // buildCompletedPreviewSessions: only non-complete sessions are snapshotted,
  // and each snapshot is stamped with the shared completion timestamp.
  it('builds complete sessions for terminal stream reconciliation', () => {
    const completedAt = '2026-04-10T00:00:10.000Z'
    const nextSessions = buildCompletedPreviewSessions(
      {
        'preview-1': createSession({
          id: 'preview-1',
          toolCallId: 'preview-1',
          status: 'pending',
          previewText: 'draft',
        }),
        'preview-2': createSession({
          id: 'preview-2',
          toolCallId: 'preview-2',
          status: 'streaming',
          previewText: 'partial',
        }),
        'preview-3': createSession({
          id: 'preview-3',
          toolCallId: 'preview-3',
          status: 'complete',
          previewText: 'done',
          completedAt: '2026-04-10T00:00:03.000Z',
        }),
      },
      completedAt
    )
    // preview-3 was already complete, so only the two active sessions are returned.
    expect(nextSessions).toHaveLength(2)
    expect(nextSessions.map((session) => session.id)).toEqual(['preview-1', 'preview-2'])
    expect(nextSessions.every((session) => session.status === 'complete')).toBe(true)
    expect(nextSessions.every((session) => session.updatedAt === completedAt)).toBe(true)
    expect(nextSessions.every((session) => session.completedAt === completedAt)).toBe(true)
  })

  // hydrate: the freshest non-complete session (highest previewVersion) becomes active.
  it('hydrates the latest active preview session', () => {
    const state = reduceFilePreviewSessions(INITIAL_FILE_PREVIEW_SESSIONS_STATE, {
      type: 'hydrate',
      sessions: [
        createSession({
          id: 'preview-1',
          toolCallId: 'preview-1',
          previewVersion: 1,
          updatedAt: '2026-04-10T00:00:00.000Z',
        }),
        createSession({
          id: 'preview-2',
          toolCallId: 'preview-2',
          previewVersion: 2,
          updatedAt: '2026-04-10T00:00:01.000Z',
          previewText: 'latest',
        }),
      ],
    })
    expect(state.activeSessionId).toBe('preview-2')
    expect(state.sessions['preview-2']?.previewText).toBe('latest')
  })

  // complete: finishing the active session promotes the remaining active one.
  it('drops the active session when it completes and promotes the next active session', () => {
    const hydratedState = reduceFilePreviewSessions(INITIAL_FILE_PREVIEW_SESSIONS_STATE, {
      type: 'hydrate',
      sessions: [
        createSession({
          id: 'preview-1',
          toolCallId: 'preview-1',
          previewVersion: 1,
          updatedAt: '2026-04-10T00:00:00.000Z',
        }),
        createSession({
          id: 'preview-2',
          toolCallId: 'preview-2',
          previewVersion: 2,
          updatedAt: '2026-04-10T00:00:01.000Z',
        }),
      ],
    })
    const completedState = reduceFilePreviewSessions(hydratedState, {
      type: 'complete',
      session: createSession({
        id: 'preview-2',
        toolCallId: 'preview-2',
        status: 'complete',
        previewVersion: 3,
        updatedAt: '2026-04-10T00:00:02.000Z',
        completedAt: '2026-04-10T00:00:02.000Z',
      }),
    })
    expect(completedState.activeSessionId).toBe('preview-1')
    expect(completedState.sessions['preview-1']?.id).toBe('preview-1')
  })

  // complete: with no other active sessions, the active pointer clears but the
  // completed session is retained in the map.
  it('clears active session when the only session completes', () => {
    const onlyStreaming = reduceFilePreviewSessions(INITIAL_FILE_PREVIEW_SESSIONS_STATE, {
      type: 'upsert',
      session: createSession({
        id: 'preview-1',
        toolCallId: 'preview-1',
        previewVersion: 2,
        updatedAt: '2026-04-10T00:00:01.000Z',
        previewText: 'final',
      }),
    })
    const completed = reduceFilePreviewSessions(onlyStreaming, {
      type: 'complete',
      session: createSession({
        id: 'preview-1',
        toolCallId: 'preview-1',
        status: 'complete',
        previewVersion: 3,
        updatedAt: '2026-04-10T00:00:02.000Z',
        completedAt: '2026-04-10T00:00:02.000Z',
        previewText: 'final',
      }),
    })
    expect(completed.activeSessionId).toBeNull()
    expect(completed.sessions['preview-1']?.status).toBe('complete')
  })

  // complete with a lower previewVersion than the stored session must be a no-op
  // (out-of-order delivery protection).
  it('ignores stale complete events for a newer active session', () => {
    const activeState = reduceFilePreviewSessions(INITIAL_FILE_PREVIEW_SESSIONS_STATE, {
      type: 'upsert',
      session: createSession({
        id: 'preview-1',
        toolCallId: 'preview-1',
        previewVersion: 3,
        updatedAt: '2026-04-10T00:00:03.000Z',
      }),
    })
    const staleCompleteState = reduceFilePreviewSessions(activeState, {
      type: 'complete',
      session: createSession({
        id: 'preview-1',
        toolCallId: 'preview-1',
        status: 'complete',
        previewVersion: 2,
        updatedAt: '2026-04-10T00:00:02.000Z',
        completedAt: '2026-04-10T00:00:02.000Z',
      }),
    })
    expect(staleCompleteState.activeSessionId).toBe('preview-1')
    expect(staleCompleteState.sessions['preview-1']?.status).toBe('streaming')
    expect(staleCompleteState.sessions['preview-1']?.previewVersion).toBe(3)
  })
})

View File

@@ -0,0 +1,201 @@
import { useCallback, useMemo, useReducer } from 'react'
import type { FilePreviewSession } from '@/lib/copilot/request/session'
/**
 * Reducer state for file-preview streaming: every known preview session keyed
 * by its id, plus the id of the session the panel should currently display
 * (null when no session is active).
 */
export interface FilePreviewSessionsState {
  activeSessionId: string | null
  sessions: Record<string, FilePreviewSession>
}

/**
 * Actions accepted by the file-preview-sessions reducer:
 * - `hydrate`: merge a batch of persisted sessions (e.g. on reconnect).
 * - `upsert`: insert/update one session; `activate` controls focus behavior.
 * - `complete`: mark a session finished (subject to staleness checks).
 * - `remove`: drop a session by id.
 * - `reset`: return to the initial empty state.
 */
export type FilePreviewSessionsAction =
  | { type: 'hydrate'; sessions: FilePreviewSession[] }
  | { type: 'upsert'; session: FilePreviewSession; activate?: boolean }
  | { type: 'complete'; session: FilePreviewSession }
  | { type: 'remove'; sessionId: string }
  | { type: 'reset' }

/** Empty starting state: no sessions tracked, nothing active. */
export const INITIAL_FILE_PREVIEW_SESSIONS_STATE: FilePreviewSessionsState = {
  activeSessionId: null,
  sessions: {},
}
/**
 * Ordering predicate for session updates: the candidate wins when there is no
 * current session, when its previewVersion is strictly newer, or — on a version
 * tie — when its updatedAt timestamp is not older (ISO strings compare lexically).
 */
export function shouldReplaceSession(
  current: FilePreviewSession | undefined,
  next: FilePreviewSession
): boolean {
  if (current === undefined) return true
  if (next.previewVersion === current.previewVersion) {
    return next.updatedAt >= current.updatedAt
  }
  return next.previewVersion > current.previewVersion
}
/**
 * Chooses which session should be active.
 *
 * Honors `preferredId` when that session exists and is still in progress;
 * otherwise scans for the most recent non-complete session (per
 * {@link shouldReplaceSession} ordering). Returns null when every session is
 * complete or the map is empty.
 */
export function pickActiveSessionId(
  sessions: Record<string, FilePreviewSession>,
  preferredId?: string | null
): string | null {
  if (preferredId) {
    const preferred = sessions[preferredId]
    // Fix: only honor the preference when the session actually exists. The
    // previous check (`sessions[preferredId]?.status !== 'complete'`) returned
    // a dangling id for a missing session, leaving activeSessionId pointing at
    // nothing while an in-progress session could have been selected instead.
    if (preferred && preferred.status !== 'complete') {
      return preferredId
    }
  }
  let latestActive: FilePreviewSession | null = null
  for (const session of Object.values(sessions)) {
    if (session.status === 'complete') continue
    if (!latestActive || shouldReplaceSession(latestActive, session)) {
      latestActive = session
    }
  }
  return latestActive?.id ?? null
}
/**
 * Returns a copy of every in-flight session marked complete and stamped with
 * `completedAt` (also used as the new `updatedAt`). Sessions that were already
 * complete are excluded, and the input map is not mutated.
 */
export function buildCompletedPreviewSessions(
  sessions: Record<string, FilePreviewSession>,
  completedAt: string
): FilePreviewSession[] {
  const completed: FilePreviewSession[] = []
  for (const session of Object.values(sessions)) {
    if (session.status === 'complete') continue
    completed.push({
      ...session,
      status: 'complete' as const,
      updatedAt: completedAt,
      completedAt,
    })
  }
  return completed
}
/**
 * Pure reducer for file-preview session state.
 *
 * Staleness is enforced through {@link shouldReplaceSession}: an incoming
 * session that is older (by version, then timestamp) than the stored one is
 * ignored, so out-of-order stream events cannot regress state. Returns the
 * input state unchanged (same reference) whenever an action is a no-op.
 */
export function reduceFilePreviewSessions(
  state: FilePreviewSessionsState,
  action: FilePreviewSessionsAction
): FilePreviewSessionsState {
  switch (action.type) {
    case 'hydrate': {
      if (action.sessions.length === 0) {
        return state
      }
      // Merge each hydrated session, keeping whichever copy is newer.
      const merged = { ...state.sessions }
      for (const incoming of action.sessions) {
        if (shouldReplaceSession(merged[incoming.id], incoming)) {
          merged[incoming.id] = incoming
        }
      }
      return {
        sessions: merged,
        activeSessionId: pickActiveSessionId(merged, state.activeSessionId),
      }
    }
    case 'upsert': {
      const incoming = action.session
      if (!shouldReplaceSession(state.sessions[incoming.id], incoming)) {
        return state
      }
      const merged = { ...state.sessions, [incoming.id]: incoming }
      // Promote the upserted session unless the caller opted out or the
      // session arrived already complete.
      const promote = action.activate !== false && incoming.status !== 'complete'
      return {
        sessions: merged,
        activeSessionId: promote
          ? incoming.id
          : pickActiveSessionId(merged, state.activeSessionId),
      }
    }
    case 'complete': {
      const incoming = action.session
      if (!shouldReplaceSession(state.sessions[incoming.id], incoming)) {
        return state
      }
      const merged = { ...state.sessions, [incoming.id]: incoming }
      // Only re-pick the active session when the completed one was active.
      return {
        sessions: merged,
        activeSessionId:
          state.activeSessionId === incoming.id
            ? pickActiveSessionId(merged, null)
            : state.activeSessionId,
      }
    }
    case 'remove': {
      if (!state.sessions[action.sessionId]) {
        return state
      }
      // Drop the entry via rest-destructuring instead of delete-on-copy.
      const { [action.sessionId]: _removed, ...remaining } = state.sessions
      return {
        sessions: remaining,
        activeSessionId:
          state.activeSessionId === action.sessionId
            ? pickActiveSessionId(remaining, null)
            : state.activeSessionId,
      }
    }
    case 'reset':
      return INITIAL_FILE_PREVIEW_SESSIONS_STATE
    default:
      return state
  }
}
/**
 * React hook exposing file-preview session state plus dispatch helpers.
 *
 * State lives in a useReducer over {@link reduceFilePreviewSessions}; every
 * returned callback has a stable identity (empty dependency lists) so callers
 * can safely pass them to effects and memoized children.
 */
export function useFilePreviewSessions() {
  const [state, dispatch] = useReducer(
    reduceFilePreviewSessions,
    INITIAL_FILE_PREVIEW_SESSIONS_STATE
  )
  // Resolve the active session object; null when nothing is active or the
  // id has no matching entry.
  const previewSession = useMemo(() => {
    if (!state.activeSessionId) return null
    return state.sessions[state.activeSessionId] ?? null
  }, [state.activeSessionId, state.sessions])
  const hydratePreviewSessions = useCallback((sessions: FilePreviewSession[]) => {
    dispatch({ type: 'hydrate', sessions })
  }, [])
  const upsertPreviewSession = useCallback(
    (session: FilePreviewSession, options?: { activate?: boolean }) => {
      // Only forward `activate` when the caller explicitly opted out of activation.
      if (options?.activate === false) {
        dispatch({ type: 'upsert', session, activate: false })
      } else {
        dispatch({ type: 'upsert', session })
      }
    },
    []
  )
  const completePreviewSession = useCallback((session: FilePreviewSession) => {
    dispatch({ type: 'complete', session })
  }, [])
  const removePreviewSession = useCallback((sessionId: string) => {
    dispatch({ type: 'remove', sessionId })
  }, [])
  const resetPreviewSessions = useCallback(() => {
    dispatch({ type: 'reset' })
  }, [])
  return {
    previewSession,
    previewSessionsById: state.sessions,
    activePreviewSessionId: state.activeSessionId,
    hydratePreviewSessions,
    upsertPreviewSession,
    completePreviewSession,
    removePreviewSession,
    resetPreviewSessions,
  }
}

View File

@@ -1,10 +1,39 @@
import type { MothershipResourceType } from '@/lib/copilot/resource-types'
import {
Agent,
Auth,
CreateWorkflow,
Deploy,
EditContent,
EditWorkflow,
FunctionExecute,
GetPageContents,
Glob,
Grep,
Job,
Knowledge,
KnowledgeBase,
ManageMcpTool,
ManageSkill,
OpenResource,
Read as ReadTool,
Research,
Run,
ScrapePage,
SearchLibraryDocs,
SearchOnline,
Superagent,
Table,
UserMemory,
UserTable,
Workflow,
WorkspaceFile,
} from '@/lib/copilot/generated/tool-catalog-v1'
import type { ChatContext } from '@/stores/panel'
export type {
MothershipResource,
MothershipResourceType,
} from '@/lib/copilot/resource-types'
} from '@/lib/copilot/resources/types'
/** Union of all valid context kind strings, derived from {@link ChatContext}. */
export type ChatContextKind = ChatContext['kind']
@@ -24,169 +53,34 @@ export interface QueuedMessage {
contexts?: ChatContext[]
}
/**
* SSE event types emitted by the Go orchestrator backend.
*
* @example
* ```json
* { "type": "content", "data": "Hello world" }
* { "type": "tool_call", "state": "executing", "toolCallId": "toolu_...", "toolName": "glob", "ui": { "title": "..." } }
* { "type": "subagent_start", "subagent": "build" }
* ```
*/
export type SSEEventType =
| 'chat_id'
| 'request_id'
| 'title_updated'
| 'content'
| 'reasoning' // openai reasoning - render as thinking text
| 'tool_call' // tool call name
| 'tool_call_delta' // chunk of tool call
| 'tool_generating' // start a tool call
| 'tool_result' // tool call result
| 'tool_error' // tool call error
| 'resource_added' // add a resource to the chat
| 'resource_deleted' // delete a resource from the chat
| 'subagent_start' // start a subagent
| 'subagent_end' // end a subagent
| 'structured_result' // structured result from a tool call
| 'subagent_result' // result from a subagent
| 'done' // end of the chat
| 'context_compaction_start' // context compaction started
| 'context_compaction' // conversation context was compacted
| 'error' // error in the chat
| 'start' // start of the chat
/**
* All tool names observed in the mothership SSE stream, grouped by phase.
*
* @example
* ```json
* { "type": "tool_generating", "toolName": "glob" }
* { "type": "tool_call", "toolName": "function_execute", "ui": { "title": "Running code", "icon": "code" } }
* { "type": "tool", "phase": "call", "toolName": "glob" }
* { "type": "tool", "phase": "call", "toolName": "function_execute", "ui": { "title": "Running code", "icon": "code" } }
* ```
* Stream `type` is `MothershipStreamV1EventType.tool` (`mothership-stream-v1`) with `phase: 'call'`.
*/
export type MothershipToolName =
| 'glob'
| 'grep'
| 'read'
| 'search_online'
| 'scrape_page'
| 'get_page_contents'
| 'search_library_docs'
| 'manage_mcp_tool'
| 'manage_skill'
| 'manage_credential'
| 'manage_custom_tool'
| 'manage_job'
| 'user_memory'
| 'function_execute'
| 'superagent'
| 'user_table'
| 'workspace_file'
| 'create_workflow'
| 'delete_workflow'
| 'edit_workflow'
| 'rename_workflow'
| 'move_workflow'
| 'run_workflow'
| 'run_block'
| 'run_from_block'
| 'run_workflow_until_block'
| 'create_folder'
| 'delete_folder'
| 'move_folder'
| 'list_folders'
| 'list_user_workspaces'
| 'create_job'
| 'complete_job'
| 'update_job_history'
| 'job_respond'
| 'download_to_workspace_file'
| 'materialize_file'
| 'context_write'
| 'generate_image'
| 'generate_visualization'
| 'crawl_website'
| 'get_execution_summary'
| 'get_job_logs'
| 'get_deployment_version'
| 'revert_to_version'
| 'check_deployment_status'
| 'get_deployed_workflow_state'
| 'get_workflow_data'
| 'get_workflow_logs'
| 'get_block_outputs'
| 'get_block_upstream_references'
| 'set_global_workflow_variables'
| 'set_environment_variables'
| 'get_platform_actions'
| 'search_documentation'
| 'search_patterns'
| 'update_workspace_mcp_server'
| 'delete_workspace_mcp_server'
| 'create_workspace_mcp_server'
| 'list_workspace_mcp_servers'
| 'deploy_api'
| 'deploy_chat'
| 'deploy_mcp'
| 'redeploy'
| 'generate_api_key'
| 'oauth_get_auth_link'
| 'oauth_request_access'
| 'build'
| 'run'
| 'deploy'
| 'auth'
| 'knowledge'
| 'knowledge_base'
| 'table'
| 'job'
| 'agent'
| 'custom_tool'
| 'research'
| 'plan'
| 'debug'
| 'edit'
| 'fast_edit'
| 'open_resource'
| 'context_compaction'
/**
* Subagent identifiers dispatched via `subagent_start` SSE events.
*
* @example
* ```json
* { "type": "subagent_start", "subagent": "build" }
* ```
*/
export type SubagentName =
| 'build'
| 'deploy'
| 'auth'
| 'research'
| 'knowledge'
| 'table'
| 'custom_tool'
| 'superagent'
| 'plan'
| 'debug'
| 'edit'
| 'fast_edit'
| 'run'
| 'agent'
| 'job'
| 'file_write'
export const ToolPhase = {
workspace: 'workspace',
search: 'search',
management: 'management',
execution: 'execution',
resource: 'resource',
subagent: 'subagent',
} as const
export type ToolPhase = (typeof ToolPhase)[keyof typeof ToolPhase]
export type ToolPhase =
| 'workspace'
| 'search'
| 'management'
| 'execution'
| 'resource'
| 'subagent'
export type ToolCallStatus = 'executing' | 'success' | 'error' | 'cancelled'
export const ToolCallStatus = {
executing: 'executing',
success: 'success',
error: 'error',
cancelled: 'cancelled',
} as const
export type ToolCallStatus = (typeof ToolCallStatus)[keyof typeof ToolCallStatus]
export interface ToolCallResult {
success: boolean
@@ -194,7 +88,6 @@ export interface ToolCallResult {
error?: string
}
/** A single tool call result entry in the generic Results resource tab. */
export interface GenericResourceEntry {
toolCallId: string
toolName: string
@@ -205,7 +98,6 @@ export interface GenericResourceEntry {
result?: ToolCallResult
}
/** Accumulated feed of tool call results shown in the generic Results tab. */
export interface GenericResourceData {
entries: GenericResourceEntry[]
}
@@ -228,7 +120,7 @@ export interface ToolCallInfo {
phaseLabel?: string
params?: Record<string, unknown>
calledBy?: string
result?: { success: boolean; output?: unknown; error?: string }
result?: ToolCallResult
streamingArgs?: string
}
@@ -237,14 +129,17 @@ export interface OptionItem {
label: string
}
export type ContentBlockType =
| 'text'
| 'tool_call'
| 'subagent'
| 'subagent_end'
| 'subagent_text'
| 'options'
| 'stopped'
export const ContentBlockType = {
text: 'text',
tool_call: 'tool_call',
subagent: 'subagent',
subagent_end: 'subagent_end',
subagent_text: 'subagent_text',
subagent_thinking: 'subagent_thinking',
options: 'options',
stopped: 'stopped',
} as const
export type ContentBlockType = (typeof ContentBlockType)[keyof typeof ContentBlockType]
export interface ContentBlock {
type: ContentBlockType
@@ -283,23 +178,19 @@ export interface ChatMessage {
requestId?: string
}
export const SUBAGENT_LABELS: Record<SubagentName, string> = {
build: 'Build agent',
deploy: 'Deploy agent',
auth: 'Integration agent',
research: 'Research agent',
knowledge: 'Knowledge agent',
table: 'Table agent',
custom_tool: 'Custom Tool agent',
export const SUBAGENT_LABELS: Record<string, string> = {
workflow: 'Workflow Agent',
deploy: 'Deploy Agent',
auth: 'Auth Agent',
research: 'Research Agent',
knowledge: 'Knowledge Agent',
table: 'Table Agent',
custom_tool: 'Custom Tool Agent',
superagent: 'Superagent',
plan: 'Plan agent',
debug: 'Debug agent',
edit: 'Edit agent',
fast_edit: 'Build agent',
run: 'Run agent',
agent: 'Agent manager',
job: 'Job agent',
file_write: 'File Write',
run: 'Run Agent',
agent: 'Tools Agent',
job: 'Job Agent',
file: 'File Agent',
} as const
export interface ToolUIMetadata {
@@ -309,206 +200,127 @@ export interface ToolUIMetadata {
}
/**
* Primary UI metadata for tools observed in the SSE stream.
* Maps tool IDs to human-readable display names shown in the chat.
* This is the single source of truth — server-sent `ui.title` values are not used.
* Default UI metadata for tools observed in the SSE stream.
* The backend may send `ui` on some `MothershipStreamV1EventType.tool` payloads (`phase: 'call'`);
* this map provides fallback metadata when `ui` is absent.
*/
export const TOOL_UI_METADATA: Record<MothershipToolName, ToolUIMetadata> = {
// Workspace
glob: { title: 'Searching workspace', phaseLabel: 'Workspace', phase: 'workspace' },
grep: { title: 'Searching workspace', phaseLabel: 'Workspace', phase: 'workspace' },
read: { title: 'Reading file', phaseLabel: 'Workspace', phase: 'workspace' },
// Search
search_online: { title: 'Searching online', phaseLabel: 'Search', phase: 'search' },
scrape_page: { title: 'Reading webpage', phaseLabel: 'Search', phase: 'search' },
get_page_contents: { title: 'Reading page', phaseLabel: 'Search', phase: 'search' },
search_library_docs: { title: 'Searching docs', phaseLabel: 'Search', phase: 'search' },
crawl_website: { title: 'Browsing website', phaseLabel: 'Search', phase: 'search' },
// Execution
function_execute: { title: 'Running code', phaseLabel: 'Code', phase: 'execution' },
superagent: { title: 'Taking action', phaseLabel: 'Action', phase: 'execution' },
run_workflow: { title: 'Running workflow', phaseLabel: 'Execution', phase: 'execution' },
run_block: { title: 'Running block', phaseLabel: 'Execution', phase: 'execution' },
run_from_block: { title: 'Running from block', phaseLabel: 'Execution', phase: 'execution' },
run_workflow_until_block: {
title: 'Running partial workflow',
phaseLabel: 'Execution',
export const TOOL_UI_METADATA: Record<string, ToolUIMetadata> = {
[Glob.id]: {
title: 'Finding files',
phaseLabel: 'Workspace',
phase: 'workspace',
},
[Grep.id]: {
title: 'Searching',
phaseLabel: 'Workspace',
phase: 'workspace',
},
[ReadTool.id]: { title: 'Reading file', phaseLabel: 'Workspace', phase: 'workspace' },
[SearchOnline.id]: {
title: 'Searching online',
phaseLabel: 'Search',
phase: 'search',
},
[ScrapePage.id]: {
title: 'Scraping page',
phaseLabel: 'Search',
phase: 'search',
},
[GetPageContents.id]: {
title: 'Getting page contents',
phaseLabel: 'Search',
phase: 'search',
},
[SearchLibraryDocs.id]: {
title: 'Searching library docs',
phaseLabel: 'Search',
phase: 'search',
},
[ManageMcpTool.id]: {
title: 'MCP server action',
phaseLabel: 'Management',
phase: 'management',
},
[ManageSkill.id]: {
title: 'Skill action',
phaseLabel: 'Management',
phase: 'management',
},
[UserMemory.id]: {
title: 'Accessing memory',
phaseLabel: 'Management',
phase: 'management',
},
[FunctionExecute.id]: {
title: 'Running code',
phaseLabel: 'Code',
phase: 'execution',
},
complete_job: { title: 'Completing job', phaseLabel: 'Execution', phase: 'execution' },
get_execution_summary: { title: 'Checking results', phaseLabel: 'Execution', phase: 'execution' },
get_job_logs: { title: 'Checking logs', phaseLabel: 'Execution', phase: 'execution' },
get_workflow_logs: { title: 'Checking logs', phaseLabel: 'Execution', phase: 'execution' },
get_workflow_data: { title: 'Loading workflow', phaseLabel: 'Execution', phase: 'execution' },
get_block_outputs: {
title: 'Checking block outputs',
phaseLabel: 'Execution',
[Superagent.id]: {
title: 'Executing action',
phaseLabel: 'Action',
phase: 'execution',
},
get_block_upstream_references: {
title: 'Checking references',
phaseLabel: 'Execution',
phase: 'execution',
},
get_deployed_workflow_state: {
title: 'Checking deployment',
phaseLabel: 'Execution',
phase: 'execution',
},
check_deployment_status: {
title: 'Checking deployment',
phaseLabel: 'Execution',
phase: 'execution',
},
// Workflows & folders
create_workflow: { title: 'Creating workflow', phaseLabel: 'Resource', phase: 'resource' },
delete_workflow: { title: 'Deleting workflow', phaseLabel: 'Resource', phase: 'resource' },
edit_workflow: { title: 'Editing workflow', phaseLabel: 'Resource', phase: 'resource' },
rename_workflow: { title: 'Renaming workflow', phaseLabel: 'Resource', phase: 'resource' },
move_workflow: { title: 'Moving workflow', phaseLabel: 'Resource', phase: 'resource' },
create_folder: { title: 'Creating folder', phaseLabel: 'Resource', phase: 'resource' },
delete_folder: { title: 'Deleting folder', phaseLabel: 'Resource', phase: 'resource' },
move_folder: { title: 'Moving folder', phaseLabel: 'Resource', phase: 'resource' },
list_folders: { title: 'Browsing folders', phaseLabel: 'Resource', phase: 'resource' },
list_user_workspaces: { title: 'Browsing workspaces', phaseLabel: 'Resource', phase: 'resource' },
revert_to_version: { title: 'Restoring version', phaseLabel: 'Resource', phase: 'resource' },
get_deployment_version: {
title: 'Checking deployment',
[UserTable.id]: {
title: 'Managing table',
phaseLabel: 'Resource',
phase: 'resource',
},
open_resource: { title: 'Opening resource', phaseLabel: 'Resource', phase: 'resource' },
// Files
workspace_file: { title: 'Working with files', phaseLabel: 'Resource', phase: 'resource' },
download_to_workspace_file: {
title: 'Downloading file',
[WorkspaceFile.id]: {
title: 'Editing file',
phaseLabel: 'Resource',
phase: 'resource',
},
materialize_file: { title: 'Saving file', phaseLabel: 'Resource', phase: 'resource' },
generate_image: { title: 'Generating image', phaseLabel: 'Resource', phase: 'resource' },
generate_visualization: {
title: 'Generating visualization',
[EditContent.id]: {
title: 'Applying file content',
phaseLabel: 'Resource',
phase: 'resource',
},
// Tables & knowledge
user_table: { title: 'Editing table', phaseLabel: 'Resource', phase: 'resource' },
knowledge_base: { title: 'Updating knowledge base', phaseLabel: 'Resource', phase: 'resource' },
// Jobs
create_job: { title: 'Creating job', phaseLabel: 'Resource', phase: 'resource' },
manage_job: { title: 'Updating job', phaseLabel: 'Management', phase: 'management' },
update_job_history: { title: 'Updating job', phaseLabel: 'Management', phase: 'management' },
job_respond: { title: 'Explaining job scheduled', phaseLabel: 'Execution', phase: 'execution' },
// Management
manage_mcp_tool: { title: 'Updating integration', phaseLabel: 'Management', phase: 'management' },
manage_skill: { title: 'Updating skill', phaseLabel: 'Management', phase: 'management' },
manage_credential: { title: 'Connecting account', phaseLabel: 'Management', phase: 'management' },
manage_custom_tool: { title: 'Updating tool', phaseLabel: 'Management', phase: 'management' },
update_workspace_mcp_server: {
title: 'Updating MCP server',
phaseLabel: 'Management',
phase: 'management',
[CreateWorkflow.id]: {
title: 'Creating workflow',
phaseLabel: 'Resource',
phase: 'resource',
},
delete_workspace_mcp_server: {
title: 'Removing MCP server',
phaseLabel: 'Management',
phase: 'management',
[EditWorkflow.id]: {
title: 'Editing workflow',
phaseLabel: 'Resource',
phase: 'resource',
},
create_workspace_mcp_server: {
title: 'Creating MCP server',
phaseLabel: 'Management',
phase: 'management',
[Workflow.id]: { title: 'Workflow Agent', phaseLabel: 'Workflow', phase: 'subagent' },
[Run.id]: { title: 'Run Agent', phaseLabel: 'Run', phase: 'subagent' },
[Deploy.id]: { title: 'Deploy Agent', phaseLabel: 'Deploy', phase: 'subagent' },
[Auth.id]: {
title: 'Auth Agent',
phaseLabel: 'Auth',
phase: 'subagent',
},
list_workspace_mcp_servers: {
title: 'Browsing MCP servers',
phaseLabel: 'Management',
phase: 'management',
[Knowledge.id]: {
title: 'Knowledge Agent',
phaseLabel: 'Knowledge',
phase: 'subagent',
},
oauth_get_auth_link: {
title: 'Connecting account',
phaseLabel: 'Management',
phase: 'management',
[KnowledgeBase.id]: {
title: 'Managing knowledge base',
phaseLabel: 'Resource',
phase: 'resource',
},
oauth_request_access: {
title: 'Connecting account',
phaseLabel: 'Management',
phase: 'management',
[Table.id]: { title: 'Table Agent', phaseLabel: 'Table', phase: 'subagent' },
[Job.id]: { title: 'Job Agent', phaseLabel: 'Job', phase: 'subagent' },
[Agent.id]: { title: 'Tools Agent', phaseLabel: 'Agent', phase: 'subagent' },
custom_tool: {
title: 'Creating tool',
phaseLabel: 'Tool',
phase: 'subagent',
},
set_environment_variables: {
title: 'Updating environment',
phaseLabel: 'Management',
phase: 'management',
[Research.id]: { title: 'Research Agent', phaseLabel: 'Research', phase: 'subagent' },
[OpenResource.id]: {
title: 'Opening resource',
phaseLabel: 'Resource',
phase: 'resource',
},
set_global_workflow_variables: {
title: 'Updating variables',
phaseLabel: 'Management',
phase: 'management',
},
get_platform_actions: { title: 'Loading actions', phaseLabel: 'Management', phase: 'management' },
search_documentation: { title: 'Searching docs', phaseLabel: 'Search', phase: 'search' },
search_patterns: { title: 'Searching patterns', phaseLabel: 'Search', phase: 'search' },
deploy_api: { title: 'Deploying API', phaseLabel: 'Deploy', phase: 'management' },
deploy_chat: { title: 'Deploying chat', phaseLabel: 'Deploy', phase: 'management' },
deploy_mcp: { title: 'Deploying MCP', phaseLabel: 'Deploy', phase: 'management' },
redeploy: { title: 'Redeploying', phaseLabel: 'Deploy', phase: 'management' },
generate_api_key: { title: 'Generating API key', phaseLabel: 'Deploy', phase: 'management' },
user_memory: { title: 'Updating memory', phaseLabel: 'Management', phase: 'management' },
context_write: { title: 'Writing notes', phaseLabel: 'Management', phase: 'management' },
context_compaction: {
title: 'Optimizing context',
phaseLabel: 'Management',
title: 'Compacted context',
phaseLabel: 'Context',
phase: 'management',
},
// Subagents
build: { title: 'Building', phaseLabel: 'Build', phase: 'subagent' },
run: { title: 'Running', phaseLabel: 'Run', phase: 'subagent' },
deploy: { title: 'Deploying', phaseLabel: 'Deploy', phase: 'subagent' },
auth: { title: 'Connecting integration', phaseLabel: 'Auth', phase: 'subagent' },
knowledge: { title: 'Working with knowledge', phaseLabel: 'Knowledge', phase: 'subagent' },
table: { title: 'Working with tables', phaseLabel: 'Table', phase: 'subagent' },
job: { title: 'Working with jobs', phaseLabel: 'Job', phase: 'subagent' },
agent: { title: 'Taking action', phaseLabel: 'Agent', phase: 'subagent' },
custom_tool: { title: 'Creating tool', phaseLabel: 'Tool', phase: 'subagent' },
research: { title: 'Researching', phaseLabel: 'Research', phase: 'subagent' },
plan: { title: 'Planning', phaseLabel: 'Plan', phase: 'subagent' },
debug: { title: 'Debugging', phaseLabel: 'Debug', phase: 'subagent' },
edit: { title: 'Editing workflow', phaseLabel: 'Edit', phase: 'subagent' },
fast_edit: { title: 'Editing workflow', phaseLabel: 'Edit', phase: 'subagent' },
}
/** Optional display hints the backend may attach to a tool event. */
export interface SSEPayloadUI {
// NOTE(review): presumably suppresses rendering of the tool call — confirm against the chat renderer.
hidden?: boolean
title?: string
phaseLabel?: string
icon?: string
internal?: boolean
clientExecutable?: boolean
}
/** Structured variant of the `data` field carried by some SSE payloads. */
export interface SSEPayloadData {
name?: string
ui?: SSEPayloadUI
id?: string
agent?: string
// NOTE(review): looks like it marks a partial/streaming chunk — verify against the stream producer.
partial?: boolean
arguments?: Record<string, unknown>
input?: Record<string, unknown>
result?: unknown
error?: string
}
/**
 * One SSE event from the orchestrator stream. Every field other than `type`
 * is optional; which fields are populated depends on the event type.
 */
export interface SSEPayload {
// `(string & {})` keeps literal-union completion for known event types while still accepting unknown strings.
type: SSEEventType | (string & {})
chatId?: string
// Either a plain string or a structured object, depending on the event type.
data?: string | SSEPayloadData
content?: string
toolCallId?: string
toolName?: string
ui?: SSEPayloadUI
success?: boolean
result?: unknown
error?: string
subagent?: string
resource?: { type: MothershipResourceType; id: string; title: string }
}

View File

@@ -142,6 +142,13 @@ const Admin = dynamic(
import('@/app/workspace/[workspaceId]/settings/components/admin/admin').then((m) => m.Admin),
{ loading: () => <AdminSkeleton /> }
)
// Code-split, lazily loaded Mothership settings section (admin-only); shows
// the shared section skeleton while the chunk loads.
const Mothership = dynamic(
() =>
import('@/app/workspace/[workspaceId]/settings/components/mothership/mothership').then(
(m) => m.Mothership
),
{ loading: () => <SettingsSectionSkeleton /> }
)
const RecentlyDeleted = dynamic(
() =>
import(
@@ -182,7 +189,9 @@ export function SettingsPage({ section }: SettingsPageProps) {
? 'general'
: section === 'admin' && !sessionLoading && !isAdminRole
? 'general'
: section
: section === 'mothership' && !sessionLoading && !isAdminRole
? 'general'
: section
const label =
allNavigationItems.find((item) => item.id === effectiveSection)?.label ?? effectiveSection
@@ -215,6 +224,7 @@ export function SettingsPage({ section }: SettingsPageProps) {
{effectiveSection === 'inbox' && <Inbox />}
{effectiveSection === 'recently-deleted' && <RecentlyDeleted />}
{effectiveSection === 'admin' && <Admin />}
{effectiveSection === 'mothership' && <Mothership />}
</div>
)
}

View File

@@ -0,0 +1,908 @@
'use client'
import { useCallback, useMemo, useState } from 'react'
import { Badge, Button, Input as EmcnInput, Label, Skeleton } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import {
type MothershipEnv,
useGenerateLicense,
useMothershipEnterpriseStats,
useMothershipLicenses,
useMothershipRequests,
useMothershipTrace,
useMothershipUserBreakdown,
} from '@/hooks/queries/mothership-admin'
/** Identifiers of the tabs rendered in the Mothership admin panel. */
type Tab = 'overview' | 'licenses' | 'enterprise' | 'traces'
/** Tab-bar entries, in display order. */
const TABS: { id: Tab; label: string }[] = [
{ id: 'overview', label: 'Overview' },
{ id: 'licenses', label: 'Licenses' },
{ id: 'enterprise', label: 'Enterprise' },
{ id: 'traces', label: 'Traces' },
]
/** Backend environments selectable for every query in this panel. */
const ENV_OPTIONS: { id: MothershipEnv; label: string }[] = [
{ id: 'dev', label: 'Dev' },
{ id: 'staging', label: 'Staging' },
{ id: 'prod', label: 'Prod' },
]
/**
 * Default query window: the last 7 days, formatted as `YYYY-MM-DDTHH:MM`
 * strings for `<input type="datetime-local">`.
 *
 * Formats in *local* time. The previous `toISOString().slice(0, 16)` produced
 * UTC wall-clock strings, but datetime-local values are interpreted as local
 * time (and `toRFC3339` below re-parses them as local), which shifted the
 * default window by the machine's UTC offset.
 */
function defaultTimeRange() {
  // Render a Date as the local "YYYY-MM-DDTHH:MM" string datetime-local expects.
  const toLocalInput = (d: Date) => {
    const pad = (v: number) => String(v).padStart(2, '0')
    return `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())}T${pad(d.getHours())}:${pad(d.getMinutes())}`
  }
  const end = new Date()
  const start = new Date(end)
  start.setDate(start.getDate() - 7)
  return {
    start: toLocalInput(start),
    end: toLocalInput(end),
  }
}
/**
 * Converts a datetime-local input value (local wall-clock "YYYY-MM-DDTHH:MM")
 * into an RFC 3339 / ISO-8601 UTC timestamp for the API.
 *
 * Returns '' for empty or unparseable input instead of letting
 * `toISOString()` throw a RangeError on an Invalid Date (a user can type a
 * partial value into the input).
 */
function toRFC3339(local: string) {
  if (!local) return ''
  const parsed = new Date(local)
  return Number.isNaN(parsed.getTime()) ? '' : parsed.toISOString()
}
/** Formats a dollar amount with four decimal places, e.g. 0.1 -> "$0.1000". */
function formatCost(cost: number) {
  const fixed = cost.toFixed(4)
  return `$${fixed}`
}
/** Renders a timestamp via the browser locale, or an em dash when absent. */
function formatDate(d: string | null | undefined) {
  if (d === null || d === undefined || d === '') return '—'
  return new Date(d).toLocaleString()
}
/** Thin horizontal rule separating panel sections. */
function Divider() {
return <div className='h-px bg-[var(--border-secondary)]' />
}
/** Small emphasized heading for a panel section. */
function SectionLabel({ children }: { children: React.ReactNode }) {
return <p className='font-medium text-[var(--text-primary)] text-sm'>{children}</p>
}
/**
 * Admin "Mothership" settings panel: an environment selector, a tab bar, a
 * shared time-range picker, and exactly one tab body (Overview / Licenses /
 * Enterprise / Traces) rendered at a time. The datetime-local values are
 * converted to RFC 3339 before being handed to the query tabs.
 */
export function Mothership() {
const [activeTab, setActiveTab] = useState<Tab>('overview')
const [environment, setEnvironment] = useState<MothershipEnv>('dev')
// Compute the default 7-day window once per mount so re-renders don't move it.
const defaults = useMemo(() => defaultTimeRange(), [])
const [start, setStart] = useState(defaults.start)
const [end, setEnd] = useState(defaults.end)
return (
<div className='flex h-full flex-col gap-5'>
{/* Environment selector */}
<div className='flex items-center gap-2'>
<Label className='text-[var(--text-secondary)] text-sm'>Environment</Label>
<div className='flex gap-1'>
{ENV_OPTIONS.map((opt) => (
<button
key={opt.id}
type='button'
onClick={() => setEnvironment(opt.id)}
className={cn(
'rounded-md px-3 py-1 font-medium text-sm transition-colors',
environment === opt.id
? 'bg-[var(--surface-hover)] text-[var(--text-primary)]'
: 'text-[var(--text-tertiary)] hover-hover:hover:text-[var(--text-secondary)]'
)}
>
{opt.label}
</button>
))}
</div>
</div>
{/* Tab bar */}
<div className='flex gap-1 border-[var(--border-secondary)] border-b pb-px'>
{TABS.map((tab) => (
<button
key={tab.id}
type='button'
onClick={() => setActiveTab(tab.id)}
className={cn(
'relative px-3 py-2 font-medium text-sm transition-colors',
activeTab === tab.id
? 'text-[var(--text-primary)]'
: 'text-[var(--text-tertiary)] hover-hover:hover:text-[var(--text-secondary)]'
)}
>
{tab.label}
{activeTab === tab.id && (
<span className='absolute right-0 bottom-0 left-0 h-[2px] bg-[var(--text-primary)]' />
)}
</button>
))}
</div>
{/* Time range (shared across tabs) */}
<div className='flex items-center gap-3'>
<div className='flex items-center gap-2'>
<Label className='text-[var(--text-secondary)] text-caption'>From</Label>
<EmcnInput
type='datetime-local'
value={start}
onChange={(e) => setStart(e.target.value)}
className='h-[30px] text-caption'
/>
</div>
<div className='flex items-center gap-2'>
<Label className='text-[var(--text-secondary)] text-caption'>To</Label>
<EmcnInput
type='datetime-local'
value={end}
onChange={(e) => setEnd(e.target.value)}
className='h-[30px] text-caption'
/>
</div>
</div>
<Divider />
{/* Tab bodies: the datetime-local strings are converted to RFC 3339 here */}
{activeTab === 'overview' && (
<OverviewTab environment={environment} start={toRFC3339(start)} end={toRFC3339(end)} />
)}
{activeTab === 'licenses' && <LicensesTab environment={environment} />}
{activeTab === 'enterprise' && (
<EnterpriseTab environment={environment} start={toRFC3339(start)} end={toRFC3339(end)} />
)}
{activeTab === 'traces' && <TracesTab environment={environment} />}
</div>
)
}
/* ─── Overview Tab ─── */
/**
 * Overview tab: summary stat cards, a per-user breakdown table, and the 100
 * most recent requests, all scoped to the selected environment and time range
 * (RFC 3339 strings).
 *
 * NOTE(review): StatCard is defined elsewhere in this file — assumed to render
 * a label/value pair with a loading skeleton; confirm its contract there.
 */
function OverviewTab({
environment,
start,
end,
}: {
environment: MothershipEnv
start: string
end: string
}) {
const { data: breakdown, isLoading: breakdownLoading } = useMothershipUserBreakdown(
environment,
start,
end
)
const { data: requests, isLoading: requestsLoading } = useMothershipRequests(
environment,
start,
end
)
return (
<div className='flex flex-col gap-5'>
{/* Summary cards */}
<div className='grid grid-cols-4 gap-3'>
<StatCard
label='Total Requests'
value={breakdown?.total_requests}
loading={breakdownLoading}
/>
<StatCard label='Unique Users' value={breakdown?.total_users} loading={breakdownLoading} />
{/* Total cost = sum of every user's total_cost */}
<StatCard
label='Total Cost'
value={
breakdown?.users
? formatCost(
breakdown.users.reduce(
(s: number, u: { total_cost: number }) => s + u.total_cost,
0
)
)
: undefined
}
loading={breakdownLoading}
/>
{/* Average = summed cost divided by total request count */}
<StatCard
label='Avg Cost/Request'
value={
breakdown?.total_requests && breakdown.users
? formatCost(
breakdown.users.reduce(
(s: number, u: { total_cost: number }) => s + u.total_cost,
0
) / breakdown.total_requests
)
: undefined
}
loading={breakdownLoading}
/>
</div>
{/* User breakdown */}
<SectionLabel>User Breakdown</SectionLabel>
{breakdownLoading && (
<div className='flex flex-col gap-2'>
{Array.from({ length: 5 }).map((_, i) => (
<Skeleton key={i} className='h-[36px] w-full rounded-md' />
))}
</div>
)}
{breakdown?.users && (
<div className='flex flex-col gap-0.5'>
<div className='flex items-center gap-3 border-[var(--border-secondary)] border-b px-3 py-2 text-[var(--text-tertiary)] text-caption'>
<span className='flex-1'>User ID</span>
<span className='w-[100px] text-right'>Requests</span>
<span className='w-[100px] text-right'>Cost</span>
<span className='w-[160px] text-right'>Last Request</span>
</div>
{breakdown.users.map(
(u: {
user_id: string
request_count: number
total_cost: number
last_request: string
}) => (
<div
key={u.user_id}
className='flex items-center gap-3 border-[var(--border-secondary)] border-b px-3 py-2 text-small last:border-b-0'
>
<span className='flex-1 truncate font-mono text-[12px] text-[var(--text-primary)]'>
{u.user_id}
</span>
<span className='w-[100px] text-right text-[var(--text-secondary)]'>
{u.request_count}
</span>
<span className='w-[100px] text-right text-[var(--text-secondary)]'>
{formatCost(u.total_cost)}
</span>
<span className='w-[160px] text-right text-[var(--text-tertiary)] text-caption'>
{formatDate(u.last_request)}
</span>
</div>
)
)}
</div>
)}
{/* Recent requests */}
<Divider />
<SectionLabel>Recent Requests ({requests?.count ?? '…'})</SectionLabel>
{requestsLoading && (
<div className='flex flex-col gap-2'>
{Array.from({ length: 5 }).map((_, i) => (
<Skeleton key={i} className='h-[36px] w-full rounded-md' />
))}
</div>
)}
{requests?.requests && (
<div className='max-h-[400px] overflow-auto'>
<div className='flex flex-col gap-0.5'>
<div className='sticky top-0 z-10 flex items-center gap-3 border-[var(--border-secondary)] border-b bg-[var(--surface-1)] px-3 py-2 text-[var(--text-tertiary)] text-caption'>
<span className='w-[180px]'>Request ID</span>
<span className='w-[80px]'>Model</span>
<span className='w-[80px] text-right'>Duration</span>
<span className='w-[80px] text-right'>Cost</span>
<span className='w-[60px] text-right'>Tools</span>
<span className='w-[70px] text-right'>Status</span>
<span className='flex-1 text-right'>Time</span>
</div>
{/* Cap the rendered list at 100 rows; the container scrolls */}
{requests.requests
.slice(0, 100)
.map(
(r: {
request_id: string
model: string
duration_ms: number
billed_total_cost: number
tool_call_count: number
error: boolean
aborted: boolean
created_at: string
}) => (
<div
key={r.request_id}
className='flex items-center gap-3 border-[var(--border-secondary)] border-b px-3 py-1.5 text-small last:border-b-0'
>
<span className='w-[180px] truncate font-mono text-[11px] text-[var(--text-primary)]'>
{r.request_id ?? '—'}
</span>
<span className='w-[80px] truncate text-[var(--text-secondary)] text-caption'>
{(r.model ?? '').replace('claude-', '')}
</span>
<span className='w-[80px] text-right text-[var(--text-secondary)] text-caption'>
{r.duration_ms ? `${(r.duration_ms / 1000).toFixed(1)}s` : '—'}
</span>
<span className='w-[80px] text-right text-[var(--text-secondary)] text-caption'>
{formatCost(r.billed_total_cost ?? 0)}
</span>
<span className='w-[60px] text-right text-[var(--text-secondary)] text-caption'>
{r.tool_call_count ?? 0}
</span>
{/* error takes precedence over aborted when both flags are set */}
<span className='w-[70px] text-right'>
{r.error ? (
<Badge variant='red'>Error</Badge>
) : r.aborted ? (
<Badge variant='amber'>Abort</Badge>
) : (
<Badge variant='green'>OK</Badge>
)}
</span>
<span className='flex-1 text-right text-[var(--text-tertiary)] text-caption'>
{formatDate(r.created_at)}
</span>
</div>
)
)}
</div>
</div>
)}
</div>
)
}
/* ─── Licenses Tab ─── */
/**
 * Superuser tab for enterprise license management.
 *
 * Lets an admin generate a new license key (the key is surfaced exactly once,
 * immediately after creation) and lists all existing licenses for the
 * selected environment.
 */
function LicensesTab({ environment }: { environment: MothershipEnv }) {
  const { data, isLoading, refetch } = useMothershipLicenses(environment)
  const generateLicense = useGenerateLicense(environment)
  const [newName, setNewName] = useState('')
  const [newExpiry, setNewExpiry] = useState('')
  // Freshly generated key, shown once; cleared as soon as the name input changes.
  const [generatedKey, setGeneratedKey] = useState<string | null>(null)
  // Fires the generate mutation; expiration is optional and omitted when blank.
  const handleGenerate = useCallback(() => {
    if (!newName.trim()) return
    generateLicense.mutate(
      {
        name: newName.trim(),
        ...(newExpiry ? { expirationDate: newExpiry } : {}),
      },
      {
        onSuccess: (result) => {
          // Surface the one-time key, reset the form, and refresh the list.
          setGeneratedKey(result.license_key)
          setNewName('')
          setNewExpiry('')
          refetch()
        },
      }
    )
  }, [newName, newExpiry, generateLicense, refetch])
  return (
    <div className='flex flex-col gap-5'>
      <SectionLabel>Generate License</SectionLabel>
      <div className='flex items-end gap-2'>
        <div className='flex flex-col gap-1'>
          <Label className='text-[var(--text-secondary)] text-caption'>Enterprise Name</Label>
          <EmcnInput
            value={newName}
            onChange={(e) => {
              setNewName(e.target.value)
              setGeneratedKey(null)
            }}
            placeholder='e.g. Acme Corp'
            className='h-[32px] w-[200px]'
          />
        </div>
        <div className='flex flex-col gap-1'>
          <Label className='text-[var(--text-secondary)] text-caption'>Expiration (optional)</Label>
          <EmcnInput
            type='date'
            value={newExpiry}
            onChange={(e) => setNewExpiry(e.target.value)}
            className='h-[32px] w-[160px]'
          />
        </div>
        <Button
          variant='primary'
          className='h-[32px]'
          onClick={handleGenerate}
          disabled={generateLicense.isPending || !newName.trim()}
        >
          {generateLicense.isPending ? 'Generating...' : 'Generate'}
        </Button>
      </div>
      {/* One-time display of the freshly generated key */}
      {generatedKey && (
        <div className='rounded-md border border-[var(--border-secondary)] bg-[var(--surface-hover)] p-3'>
          <p className='mb-1 text-[var(--text-secondary)] text-caption'>
            License key (only shown once):
          </p>
          <code className='block break-all font-mono text-[12px] text-[var(--text-primary)]'>
            {generatedKey}
          </code>
        </div>
      )}
      {generateLicense.error && (
        <p className='text-[var(--text-error)] text-small'>{generateLicense.error.message}</p>
      )}
      <Divider />
      <SectionLabel>All Licenses</SectionLabel>
      {/* Loading skeletons while the license list is fetched */}
      {isLoading && (
        <div className='flex flex-col gap-2'>
          {Array.from({ length: 3 }).map((_, i) => (
            <Skeleton key={i} className='h-[40px] w-full rounded-md' />
          ))}
        </div>
      )}
      {data?.licenses && (
        <div className='flex flex-col gap-0.5'>
          {/* Table header */}
          <div className='flex items-center gap-3 border-[var(--border-secondary)] border-b px-3 py-2 text-[var(--text-tertiary)] text-caption'>
            <span className='flex-1'>Name</span>
            <span className='w-[100px] text-right'>Validations</span>
            <span className='w-[140px] text-right'>Expiration</span>
            <span className='w-[140px] text-right'>Created</span>
          </div>
          {data.licenses.length === 0 && (
            <div className='py-4 text-center text-[var(--text-tertiary)] text-small'>
              No licenses found.
            </div>
          )}
          {data.licenses.map(
            (lic: {
              id: string
              name: string
              count: number
              expiration_date?: string
              created_at: string
            }) => (
              <div
                key={lic.id}
                className='flex items-center gap-3 border-[var(--border-secondary)] border-b px-3 py-2 text-small last:border-b-0'
              >
                <span className='flex-1 text-[var(--text-primary)]'>{lic.name}</span>
                <span className='w-[100px] text-right text-[var(--text-secondary)]'>
                  {lic.count}
                </span>
                <span className='w-[140px] text-right text-[var(--text-tertiary)] text-caption'>
                  {lic.expiration_date ? formatDate(lic.expiration_date) : 'Never'}
                </span>
                <span className='w-[140px] text-right text-[var(--text-tertiary)] text-caption'>
                  {formatDate(lic.created_at)}
                </span>
              </div>
            )
          )}
        </div>
      )}
    </div>
  )
}
/* ─── Enterprise Tab ─── */
/**
 * Superuser tab showing aggregate usage stats for a single customer type
 * within the given [start, end] window: totals, top models, and a per-user
 * breakdown. The query only runs once a customer type has been submitted.
 */
function EnterpriseTab({
  environment,
  start,
  end,
}: {
  environment: MothershipEnv
  start: string
  end: string
}) {
  // customerType is the committed search term; searchInput is the live field value.
  const [customerType, setCustomerType] = useState('')
  const [searchInput, setSearchInput] = useState('')
  const { data, isLoading, error } = useMothershipEnterpriseStats(
    environment,
    customerType,
    start,
    end
  )
  // Commit the trimmed input, which triggers the stats query.
  const handleSearch = () => {
    setCustomerType(searchInput.trim())
  }
  return (
    <div className='flex flex-col gap-5'>
      <div className='flex items-center gap-2'>
        <EmcnInput
          value={searchInput}
          onChange={(e) => setSearchInput(e.target.value)}
          onKeyDown={(e) => e.key === 'Enter' && handleSearch()}
          placeholder='Enter customer type (e.g. enterprise name)...'
        />
        <Button variant='primary' onClick={handleSearch} disabled={!searchInput.trim()}>
          Search
        </Button>
      </div>
      {error && <p className='text-[var(--text-error)] text-small'>{error.message}</p>}
      {/* Skeletons only once a search has been committed */}
      {isLoading && customerType && (
        <div className='flex flex-col gap-2'>
          {Array.from({ length: 4 }).map((_, i) => (
            <Skeleton key={i} className='h-[60px] w-full rounded-md' />
          ))}
        </div>
      )}
      {data && (
        <>
          {/* Headline totals */}
          <div className='grid grid-cols-4 gap-3'>
            <StatCard label='Total Requests' value={data.total_requests} />
            <StatCard label='Unique Users' value={data.unique_users} />
            <StatCard label='Total Cost' value={formatCost(data.total_cost ?? 0)} />
            <StatCard
              label='Total Tokens'
              value={(
                (data.total_input_tokens ?? 0) + (data.total_output_tokens ?? 0)
              ).toLocaleString()}
            />
          </div>
          {data.top_models && (
            <>
              <Divider />
              <SectionLabel>Top Models</SectionLabel>
              <div className='flex flex-wrap gap-2'>
                {data.top_models.map((m: { model: string; count: number }) => (
                  <Badge key={m.model} variant='gray'>
                    {m.model} ({m.count})
                  </Badge>
                ))}
              </div>
            </>
          )}
          {data.users && (
            <>
              <Divider />
              <SectionLabel>User Breakdown</SectionLabel>
              <div className='flex flex-col gap-0.5'>
                {/* Table header */}
                <div className='flex items-center gap-3 border-[var(--border-secondary)] border-b px-3 py-2 text-[var(--text-tertiary)] text-caption'>
                  <span className='flex-1'>User ID</span>
                  <span className='w-[100px] text-right'>Requests</span>
                  <span className='w-[100px] text-right'>Cost</span>
                  <span className='w-[160px] text-right'>Last Request</span>
                </div>
                {data.users.map(
                  (u: {
                    user_id: string
                    request_count: number
                    total_cost: number
                    last_request: string
                  }) => (
                    <div
                      key={u.user_id}
                      className='flex items-center gap-3 border-[var(--border-secondary)] border-b px-3 py-2 text-small last:border-b-0'
                    >
                      <span className='flex-1 truncate font-mono text-[12px] text-[var(--text-primary)]'>
                        {u.user_id}
                      </span>
                      <span className='w-[100px] text-right text-[var(--text-secondary)]'>
                        {u.request_count}
                      </span>
                      <span className='w-[100px] text-right text-[var(--text-secondary)]'>
                        {formatCost(u.total_cost)}
                      </span>
                      <span className='w-[160px] text-right text-[var(--text-tertiary)] text-caption'>
                        {formatDate(u.last_request)}
                      </span>
                    </div>
                  )
                )}
              </div>
            </>
          )}
        </>
      )}
    </div>
  )
}
/* ─── Traces Tab ─── */
/**
 * Superuser tab for looking up a single trace by its request ID.
 * The trace query only runs once an ID has been submitted via the
 * Lookup button or the Enter key.
 */
function TracesTab({ environment }: { environment: MothershipEnv }) {
  // idInput is the live field value; lookupId is the committed search term.
  const [idInput, setIdInput] = useState('')
  const [lookupId, setLookupId] = useState('')
  const { data: trace, isLoading, error } = useMothershipTrace(environment, lookupId)

  // Commit the trimmed input, triggering the trace fetch.
  const submitLookup = () => setLookupId(idInput.trim())

  return (
    <div className='flex flex-col gap-5'>
      <div className='flex items-center gap-2'>
        <EmcnInput
          value={idInput}
          onChange={(e) => setIdInput(e.target.value)}
          onKeyDown={(e) => {
            if (e.key === 'Enter') submitLookup()
          }}
          placeholder='Paste a request ID (go_trace_id)...'
          className='font-mono text-[13px]'
        />
        <Button variant='primary' onClick={submitLookup} disabled={!idInput.trim()}>
          Lookup
        </Button>
      </div>
      {error && <p className='text-[var(--text-error)] text-small'>{error.message}</p>}
      {isLoading && lookupId && (
        <div className='flex flex-col gap-2'>
          {Array.from({ length: 4 }).map((_, i) => (
            <Skeleton key={i} className='h-[50px] w-full rounded-md' />
          ))}
        </div>
      )}
      {trace && <TraceDetail trace={trace} />}
    </div>
  )
}
/* ─── Trace Detail ─── */
/**
 * A single span within a trace. Parent/child relationships are expressed by
 * `parentName`; TraceDetail groups children under their parent's span name.
 */
interface TraceSpan {
  /** Span name; also the key by which children reference their parent. */
  name: string
  kind?: string
  /** Start timestamp in ms, on the same clock as the trace's startMs. */
  startMs: number
  endMs?: number
  /** Explicit duration; when absent, derived from endMs - startMs. */
  durationMs?: number
  /** 'ok' | 'error' | 'cancelled' | other — drives the waterfall bar color. */
  status: string
  /** Name of the parent span; absent for root spans. */
  parentName?: string
  /** Origin of the span (e.g. 'go') — presumably the emitting service; confirm upstream. */
  source?: string
  /** Arbitrary key/value metadata; empty-ish values are hidden in the UI. */
  attributes?: Record<string, unknown>
}
/**
 * A full trace as returned by the mothership trace API: identity, timing,
 * outcome, token/cost accounting, and the flat list of spans that
 * TraceDetail assembles into a tree.
 */
interface TraceData {
  id: string
  simRequestId: string
  goTraceId: string
  streamId?: string
  chatId?: string
  userId?: string
  /** Trace start/end in ms; durationMs spans the whole trace. */
  startMs: number
  endMs: number
  durationMs: number
  /** 'success' | 'cancelled' | other — anything else is rendered as an error. */
  outcome: string
  /** Flat span list; tree structure is implied by each span's parentName. */
  spans: TraceSpan[]
  model?: string
  provider?: string
  mode?: string
  source?: string
  message?: string
  inputTokens?: number
  outputTokens?: number
  cacheReadTokens?: number
  cacheWriteTokens?: number
  rawTotalCost?: number
  billedTotalCost?: number
  toolCallCount?: number
  error?: boolean
  aborted?: boolean
  errorMsg?: string
}
/**
 * Renders a single trace: a metadata grid (IDs, outcome, timing, tokens,
 * cost, errors) followed by a collapsible span tree with waterfall bars.
 */
function TraceDetail({ trace }: { trace: TraceData }) {
  // Build the span tree: roots have no parentName; children are grouped under
  // their parent's span *name*.
  // NOTE(review): keying by name means spans sharing a name would merge their
  // child lists — assumes span names are unique within a trace; confirm upstream.
  const rootSpans = trace.spans.filter((s) => !s.parentName)
  const childMap = new Map<string, TraceSpan[]>()
  for (const span of trace.spans) {
    if (span.parentName) {
      const existing = childMap.get(span.parentName) || []
      existing.push(span)
      childMap.set(span.parentName, existing)
    }
  }
  return (
    <div className='flex flex-col gap-4'>
      {/* Trace metadata */}
      <div className='grid grid-cols-2 gap-x-6 gap-y-2 rounded-md border border-[var(--border-secondary)] p-4'>
        <MetaRow label='Go Trace ID' value={trace.goTraceId} mono />
        <MetaRow label='Sim Request ID' value={trace.simRequestId} mono />
        <MetaRow label='Outcome'>
          <Badge
            variant={
              trace.outcome === 'success'
                ? 'green'
                : trace.outcome === 'cancelled'
                  ? 'amber'
                  : 'red'
            }
          >
            {trace.outcome}
          </Badge>
        </MetaRow>
        <MetaRow label='Duration' value={`${(trace.durationMs / 1000).toFixed(2)}s`} />
        <MetaRow label='Model' value={trace.model || '—'} />
        <MetaRow label='Provider' value={trace.provider || '—'} />
        <MetaRow label='Source' value={trace.source || '—'} />
        <MetaRow label='Mode' value={trace.mode || '—'} />
        {trace.userId && <MetaRow label='User ID' value={trace.userId} mono />}
        {trace.chatId && <MetaRow label='Chat ID' value={trace.chatId} mono />}
        <MetaRow
          label='Tokens'
          value={`${(trace.inputTokens ?? 0).toLocaleString()} in / ${(trace.outputTokens ?? 0).toLocaleString()} out`}
        />
        <MetaRow label='Billed Cost' value={formatCost(trace.billedTotalCost ?? 0)} />
        {trace.toolCallCount != null && trace.toolCallCount > 0 && (
          <MetaRow label='Tool Calls' value={String(trace.toolCallCount)} />
        )}
        {trace.message && (
          <div className='col-span-2'>
            <MetaRow label='Message' value={trace.message} />
          </div>
        )}
        {trace.errorMsg && (
          <div className='col-span-2'>
            <MetaRow label='Error'>
              <span className='text-[var(--text-error)]'>{trace.errorMsg}</span>
            </MetaRow>
          </div>
        )}
      </div>
      {/* Span tree, roots ordered by start time */}
      <SectionLabel>Spans ({trace.spans.length})</SectionLabel>
      <div className='flex flex-col gap-1'>
        {rootSpans
          .sort((a, b) => a.startMs - b.startMs)
          .map((span) => (
            <SpanNode
              key={span.name + span.startMs}
              span={span}
              childMap={childMap}
              traceStartMs={trace.startMs}
              traceDurationMs={trace.durationMs}
              depth={0}
            />
          ))}
      </div>
    </div>
  )
}
/**
 * One node in the span tree: a clickable row with a status-colored waterfall
 * bar, an expandable attribute panel, and recursively rendered children.
 *
 * Fix: children are sorted on a *copy* — the original sorted the arrays held
 * in `childMap` in place, mutating props-derived data during render.
 */
function SpanNode({
  span,
  childMap,
  traceStartMs,
  traceDurationMs,
  depth,
}: {
  span: TraceSpan
  childMap: Map<string, TraceSpan[]>
  traceStartMs: number
  traceDurationMs: number
  depth: number
}) {
  // First two tree levels start expanded; deeper spans start collapsed.
  const [expanded, setExpanded] = useState(depth < 2)
  const children = childMap.get(span.name) || []
  const hasChildren = children.length > 0
  // Prefer the reported duration; otherwise derive it from start/end when present.
  const durationMs = span.durationMs ?? (span.endMs ? span.endMs - span.startMs : 0)
  // Waterfall geometry as percentages of the whole trace duration.
  const offsetPct =
    traceDurationMs > 0 ? ((span.startMs - traceStartMs) / traceDurationMs) * 100 : 0
  const widthPct = traceDurationMs > 0 ? (durationMs / traceDurationMs) * 100 : 0
  const statusColor =
    span.status === 'ok'
      ? 'bg-emerald-500/70'
      : span.status === 'error'
        ? 'bg-red-500/70'
        : span.status === 'cancelled'
          ? 'bg-yellow-500/70'
          : 'bg-[var(--text-tertiary)]'
  const attrs = span.attributes || {}
  // Hide attributes with no meaningful value.
  const attrEntries = Object.entries(attrs).filter(
    ([, v]) => v !== null && v !== undefined && v !== ''
  )
  return (
    <div style={{ marginLeft: depth * 16 }}>
      <button
        type='button'
        onClick={() => setExpanded((e) => !e)}
        className='flex w-full items-center gap-2 rounded-md px-2 py-1.5 text-left transition-colors hover-hover:hover:bg-[var(--surface-hover)]'
      >
        {hasChildren ? (
          <span className='w-[14px] text-center text-[10px] text-[var(--text-tertiary)]'>
            {expanded ? '▼' : '▶'}
          </span>
        ) : (
          <span className='w-[14px]' />
        )}
        <span className='min-w-0 flex-1'>
          <span className='block truncate text-[13px] text-[var(--text-primary)]'>{span.name}</span>
          {/* Waterfall bar: offset and width clamped so the fill stays inside the track */}
          <span className='mt-0.5 block h-[4px] w-full rounded-full bg-[var(--border-secondary)]'>
            <span
              className={cn('block h-full rounded-full', statusColor)}
              style={{
                marginLeft: `${Math.max(0, Math.min(offsetPct, 100))}%`,
                width: `${Math.max(0.5, Math.min(widthPct, 100 - offsetPct))}%`,
              }}
            />
          </span>
        </span>
        <Badge variant={span.source === 'go' ? 'blue' : 'gray'} className='shrink-0'>
          {span.source || '?'}
        </Badge>
        <span className='w-[70px] shrink-0 text-right font-mono text-[11px] text-[var(--text-secondary)]'>
          {durationMs >= 1000 ? `${(durationMs / 1000).toFixed(2)}s` : `${durationMs}ms`}
        </span>
      </button>
      {expanded && attrEntries.length > 0 && (
        // 30px base indent aligns the panel under the row text (14px chevron + gap),
        // plus the per-depth tree indent. The inline style is the single source of
        // truth here (the previous static ml-[30px] class was always overridden).
        <div
          className='mb-1 rounded border border-[var(--border-secondary)] bg-[var(--surface-hover)] px-3 py-2'
          style={{ marginLeft: 30 + depth * 16 }}
        >
          {attrEntries.map(([key, val]) => (
            <div key={key} className='flex gap-2 py-0.5 text-[11px]'>
              <span className='shrink-0 text-[var(--text-tertiary)]'>{key}:</span>
              <span className='min-w-0 break-all text-[var(--text-secondary)]'>
                {typeof val === 'object' ? JSON.stringify(val) : String(val)}
              </span>
            </div>
          ))}
        </div>
      )}
      {expanded &&
        [...children]
          .sort((a, b) => a.startMs - b.startMs)
          .map((child) => (
            <SpanNode
              key={child.name + child.startMs}
              span={child}
              childMap={childMap}
              traceStartMs={traceStartMs}
              traceDurationMs={traceDurationMs}
              depth={depth + 1}
            />
          ))}
    </div>
  )
}
/* ─── Shared components ─── */
/**
 * Compact metric tile: a caption label above a single value, with an
 * optional skeleton placeholder while the value is loading. A nullish
 * value renders as an em dash.
 */
function StatCard({
  label,
  value,
  loading,
}: {
  label: string
  value?: string | number
  loading?: boolean
}) {
  const body = loading ? (
    <Skeleton className='mt-1 h-[24px] w-[80px] rounded-sm' />
  ) : (
    <p className='mt-1 font-medium text-[18px] text-[var(--text-primary)]'>{value ?? '—'}</p>
  )
  return (
    <div className='rounded-md border border-[var(--border-secondary)] p-3'>
      <p className='text-[var(--text-tertiary)] text-caption'>{label}</p>
      {body}
    </div>
  )
}
/**
 * Label/value row for the trace metadata grid.
 *
 * Renders `children` when provided; otherwise falls back to a text span
 * showing `value` (monospace when `mono` is set).
 *
 * Fix: uses `??` instead of `||` so a legitimate falsy child node (e.g. the
 * number 0) is still rendered rather than being replaced by the fallback.
 */
function MetaRow({
  label,
  value,
  mono,
  children,
}: {
  label: string
  value?: string
  mono?: boolean
  children?: React.ReactNode
}) {
  return (
    <div className='flex items-baseline gap-2'>
      <span className='shrink-0 text-[var(--text-tertiary)] text-caption'>{label}</span>
      {children ?? (
        <span
          className={cn(
            'min-w-0 break-all text-[13px] text-[var(--text-primary)]',
            mono && 'font-mono text-[12px]'
          )}
        >
          {value}
        </span>
      )}
    </div>
  )
}

View File

@@ -40,6 +40,7 @@ export type SettingsSection =
| 'workflow-mcp-servers'
| 'inbox'
| 'admin'
| 'mothership'
| 'recently-deleted'
export type NavigationSection =
@@ -180,4 +181,11 @@ export const allNavigationItems: NavigationItem[] = [
section: 'superuser',
requiresAdminRole: true,
},
{
id: 'mothership',
label: 'Mothership',
icon: Server,
section: 'superuser',
requiresAdminRole: true,
},
]

View File

@@ -1,8 +1,8 @@
import { memo, useCallback, useMemo } from 'react'
import ReactMarkdown from 'react-markdown'
import type { NodeProps } from 'reactflow'
import remarkBreaks from 'remark-breaks'
import remarkGfm from 'remark-gfm'
import { Streamdown } from 'streamdown'
import 'streamdown/styles.css'
import { cn } from '@/lib/core/utils/cn'
import { BLOCK_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
@@ -303,174 +303,161 @@ function getEmbedInfo(url: string): EmbedInfo | null {
/**
* Compact markdown renderer for note blocks with tight spacing
*/
const NOTE_REMARK_PLUGINS = [remarkBreaks]
const NOTE_COMPONENTS = {
p: ({ children }: { children?: React.ReactNode }) => (
<p className='mb-1 break-words text-[var(--text-primary)] text-sm leading-[1.25rem] last:mb-0'>
{children}
</p>
),
h1: ({ children }: { children?: React.ReactNode }) => (
<h1 className='mt-3 mb-3 break-words font-semibold text-[var(--text-primary)] text-lg first:mt-0'>
{children}
</h1>
),
h2: ({ children }: { children?: React.ReactNode }) => (
<h2 className='mt-2.5 mb-2.5 break-words font-semibold text-[var(--text-primary)] text-base first:mt-0'>
{children}
</h2>
),
h3: ({ children }: { children?: React.ReactNode }) => (
<h3 className='mt-2 mb-2 break-words font-semibold text-[var(--text-primary)] text-sm first:mt-0'>
{children}
</h3>
),
h4: ({ children }: { children?: React.ReactNode }) => (
<h4 className='mt-2 mb-2 break-words font-semibold text-[var(--text-primary)] text-xs first:mt-0'>
{children}
</h4>
),
ul: ({ children }: { children?: React.ReactNode }) => (
<ul className='mt-1 mb-1 list-disc space-y-1 break-words pl-6 text-[var(--text-primary)] text-sm'>
{children}
</ul>
),
ol: ({ children }: { children?: React.ReactNode }) => (
<ol className='mt-1 mb-1 list-decimal space-y-1 break-words pl-6 text-[var(--text-primary)] text-sm'>
{children}
</ol>
),
li: ({ children }: { children?: React.ReactNode }) => <li className='break-words'>{children}</li>,
inlineCode: ({ children }: { children?: React.ReactNode }) => (
<code className='whitespace-normal rounded bg-[var(--surface-5)] px-1 py-0.5 font-mono text-[var(--caution)] text-xs'>
{children}
</code>
),
code: ({ children, className, ...props }: { children?: React.ReactNode; className?: string }) => (
<code
{...props}
className='block whitespace-pre-wrap break-words rounded bg-[var(--surface-5)] p-2 text-[var(--text-primary)] text-xs'
>
{children}
</code>
),
a: ({ href, children }: { href?: string; children?: React.ReactNode }) => {
const embedInfo = href ? getEmbedInfo(href) : null
if (embedInfo) {
return (
<span className='my-2 block w-full'>
<a
href={href}
target='_blank'
rel='noopener noreferrer'
className='mb-1 block break-all text-[var(--brand-secondary)] underline-offset-2 hover-hover:underline'
>
{children}
</a>
<span className='block w-full overflow-hidden rounded-md'>
{embedInfo.type === 'iframe' && (
<span
className='block overflow-hidden'
style={{
width: '100%',
aspectRatio: embedInfo.aspectRatio || '16/9',
}}
>
<iframe
src={embedInfo.url}
title='Media'
allow='accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share'
allowFullScreen
loading='lazy'
className='origin-top-left'
style={{
width: EMBED_INVERSE_SCALE,
height: EMBED_INVERSE_SCALE,
transform: `scale(${EMBED_SCALE})`,
}}
/>
</span>
)}
{embedInfo.type === 'video' && (
<video
src={embedInfo.url}
controls
preload='metadata'
className='aspect-video w-full'
>
<track kind='captions' src='' default />
</video>
)}
{embedInfo.type === 'audio' && (
<audio src={embedInfo.url} controls preload='metadata' className='w-full'>
<track kind='captions' src='' default />
</audio>
)}
</span>
</span>
)
}
return (
<a
href={href}
target='_blank'
rel='noopener noreferrer'
className='break-all text-[var(--brand-secondary)] underline-offset-2 hover-hover:underline'
>
{children}
</a>
)
},
strong: ({ children }: { children?: React.ReactNode }) => (
<strong className='break-words font-semibold text-[var(--text-primary)]'>{children}</strong>
),
em: ({ children }: { children?: React.ReactNode }) => (
<em className='break-words text-[var(--text-tertiary)]'>{children}</em>
),
blockquote: ({ children }: { children?: React.ReactNode }) => (
<blockquote className='my-4 break-words border-[var(--border-1)] border-l-4 py-1 pl-4 text-[var(--text-tertiary)] italic'>
{children}
</blockquote>
),
table: ({ children }: { children?: React.ReactNode }) => (
<div className='my-2 max-w-full overflow-x-auto'>
<table className='w-full border-collapse text-xs'>{children}</table>
</div>
),
thead: ({ children }: { children?: React.ReactNode }) => (
<thead className='border-[var(--border)] border-b'>{children}</thead>
),
tbody: ({ children }: { children?: React.ReactNode }) => <tbody>{children}</tbody>,
tr: ({ children }: { children?: React.ReactNode }) => (
<tr className='border-[var(--border)] border-b last:border-b-0'>{children}</tr>
),
th: ({ children }: { children?: React.ReactNode }) => (
<th className='px-2 py-1 text-left font-semibold text-[var(--text-primary)]'>{children}</th>
),
td: ({ children }: { children?: React.ReactNode }) => (
<td className='px-2 py-1 text-[var(--text-secondary)]'>{children}</td>
),
}
const NoteMarkdown = memo(function NoteMarkdown({ content }: { content: string }) {
return (
<ReactMarkdown
remarkPlugins={[remarkGfm, remarkBreaks]}
components={{
p: ({ children }: any) => (
<p className='mb-1 break-words text-[var(--text-primary)] text-sm leading-[1.25rem] last:mb-0'>
{children}
</p>
),
h1: ({ children }: any) => (
<h1 className='mt-3 mb-3 break-words font-semibold text-[var(--text-primary)] text-lg first:mt-0'>
{children}
</h1>
),
h2: ({ children }: any) => (
<h2 className='mt-2.5 mb-2.5 break-words font-semibold text-[var(--text-primary)] text-base first:mt-0'>
{children}
</h2>
),
h3: ({ children }: any) => (
<h3 className='mt-2 mb-2 break-words font-semibold text-[var(--text-primary)] text-sm first:mt-0'>
{children}
</h3>
),
h4: ({ children }: any) => (
<h4 className='mt-2 mb-2 break-words font-semibold text-[var(--text-primary)] text-xs first:mt-0'>
{children}
</h4>
),
ul: ({ children }: any) => (
<ul className='mt-1 mb-1 list-disc space-y-1 break-words pl-6 text-[var(--text-primary)] text-sm'>
{children}
</ul>
),
ol: ({ children }: any) => (
<ol className='mt-1 mb-1 list-decimal space-y-1 break-words pl-6 text-[var(--text-primary)] text-sm'>
{children}
</ol>
),
li: ({ children }: any) => <li className='break-words'>{children}</li>,
code: ({ inline, className, children, ...props }: any) => {
const isInline = inline || !className?.includes('language-')
if (isInline) {
return (
<code
{...props}
className='whitespace-normal rounded bg-[var(--surface-5)] px-1 py-0.5 font-mono text-[var(--caution)] text-xs'
>
{children}
</code>
)
}
return (
<code
{...props}
className='block whitespace-pre-wrap break-words rounded bg-[var(--surface-5)] p-2 text-[var(--text-primary)] text-xs'
>
{children}
</code>
)
},
a: ({ href, children }: any) => {
const embedInfo = href ? getEmbedInfo(href) : null
if (embedInfo) {
return (
<span className='my-2 block w-full'>
<a
href={href}
target='_blank'
rel='noopener noreferrer'
className='mb-1 block break-all text-[var(--brand-secondary)] underline-offset-2 hover-hover:underline'
>
{children}
</a>
<span className='block w-full overflow-hidden rounded-md'>
{embedInfo.type === 'iframe' && (
<span
className='block overflow-hidden'
style={{
width: '100%',
aspectRatio: embedInfo.aspectRatio || '16/9',
}}
>
<iframe
src={embedInfo.url}
title='Media'
allow='accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share'
allowFullScreen
loading='lazy'
className='origin-top-left'
style={{
width: EMBED_INVERSE_SCALE,
height: EMBED_INVERSE_SCALE,
transform: `scale(${EMBED_SCALE})`,
}}
/>
</span>
)}
{embedInfo.type === 'video' && (
<video
src={embedInfo.url}
controls
preload='metadata'
className='aspect-video w-full'
>
<track kind='captions' src='' default />
</video>
)}
{embedInfo.type === 'audio' && (
<audio src={embedInfo.url} controls preload='metadata' className='w-full'>
<track kind='captions' src='' default />
</audio>
)}
</span>
</span>
)
}
return (
<a
href={href}
target='_blank'
rel='noopener noreferrer'
className='break-all text-[var(--brand-secondary)] underline-offset-2 hover-hover:underline'
>
{children}
</a>
)
},
strong: ({ children }: any) => (
<strong className='break-words font-semibold text-[var(--text-primary)]'>
{children}
</strong>
),
em: ({ children }: any) => (
<em className='break-words text-[var(--text-tertiary)]'>{children}</em>
),
blockquote: ({ children }: any) => (
<blockquote className='my-4 break-words border-[var(--border-1)] border-l-4 py-1 pl-4 text-[var(--text-tertiary)] italic'>
{children}
</blockquote>
),
table: ({ children }: any) => (
<div className='my-2 max-w-full overflow-x-auto'>
<table className='w-full border-collapse text-xs'>{children}</table>
</div>
),
thead: ({ children }: any) => (
<thead className='border-[var(--border)] border-b'>{children}</thead>
),
tbody: ({ children }: any) => <tbody>{children}</tbody>,
tr: ({ children }: any) => (
<tr className='border-[var(--border)] border-b last:border-b-0'>{children}</tr>
),
th: ({ children }: any) => (
<th className='px-2 py-1 text-left font-semibold text-[var(--text-primary)]'>
{children}
</th>
),
td: ({ children }: any) => (
<td className='px-2 py-1 text-[var(--text-secondary)]'>{children}</td>
),
}}
>
<Streamdown mode='static' remarkPlugins={NOTE_REMARK_PLUGINS} components={NOTE_COMPONENTS}>
{content}
</ReactMarkdown>
</Streamdown>
)
})

View File

@@ -14,12 +14,11 @@ import {
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import {
getEffectiveBlockOutputPaths,
getEffectiveBlockOutputType,
getOutputPathsFromSchema,
} from '@/lib/workflows/blocks/block-outputs'
import { getBlockReferenceTags } from '@/lib/workflows/blocks/block-reference-tags'
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
import { TRIGGER_TYPES } from '@/lib/workflows/triggers/triggers'
import { KeyboardNavigationHandler } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/components/keyboard-navigation-handler'
import type {
BlockTagGroup,
@@ -177,17 +176,6 @@ const ensureRootTag = (tags: string[], rootTag: string): string[] => {
return [rootTag, ...tags]
}
/**
* Gets a subblock value from the store.
*
* @param blockId - The block identifier
* @param property - The property name to retrieve
* @returns The value from the subblock store
*/
const getSubBlockValue = (blockId: string, property: string): any => {
return useSubBlockStore.getState().getValue(blockId, property)
}
/**
* Gets the output type for a specific path in a block's outputs.
*
@@ -1055,53 +1043,19 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
return { tags: [], variableInfoMap: emptyVariableInfoMap, blockTagGroups: [] }
}
const blockName = sourceBlock.name || sourceBlock.type
const normalizedBlockName = normalizeName(blockName)
const mergedSubBlocks = getMergedSubBlocks(activeSourceBlockId)
let blockTags: string[]
if (sourceBlock.type === 'variables') {
const variablesValue = getSubBlockValue(activeSourceBlockId, 'variables')
if (variablesValue && Array.isArray(variablesValue) && variablesValue.length > 0) {
const validAssignments = variablesValue.filter((assignment: { variableName?: string }) =>
assignment?.variableName?.trim()
)
blockTags = validAssignments.map(
(assignment: { variableName: string }) =>
`${normalizedBlockName}.${assignment.variableName.trim()}`
)
} else {
blockTags = [normalizedBlockName]
}
} else {
const sourceBlockConfig = getBlock(sourceBlock.type)
const isTriggerCapable = sourceBlockConfig ? hasTriggerCapability(sourceBlockConfig) : false
const effectiveTriggerMode = Boolean(sourceBlock.triggerMode && isTriggerCapable)
const outputPaths = getEffectiveBlockOutputPaths(sourceBlock.type, mergedSubBlocks, {
triggerMode: effectiveTriggerMode,
preferToolOutputs: !effectiveTriggerMode,
})
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
if (sourceBlock.type === 'human_in_the_loop' && activeSourceBlockId === blockId) {
blockTags = allTags.filter(
(tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint')
)
} else if (allTags.length === 0) {
blockTags = [normalizedBlockName]
} else {
blockTags = allTags
}
}
blockTags = ensureRootTag(blockTags, normalizedBlockName)
const shouldShowRootTag =
sourceBlock.type === TRIGGER_TYPES.GENERIC_WEBHOOK || sourceBlock.type === 'start_trigger'
if (!shouldShowRootTag) {
blockTags = blockTags.filter((tag) => tag !== normalizedBlockName)
}
const blockName = sourceBlock.name || sourceBlock.type
const blockTags = getBlockReferenceTags({
block: {
id: activeSourceBlockId,
type: sourceBlock.type,
name: sourceBlock.name,
triggerMode: sourceBlock.triggerMode,
subBlocks: mergedSubBlocks,
},
currentBlockId: blockId,
subBlocks: mergedSubBlocks,
})
const blockTagGroups: BlockTagGroup[] = [
{
@@ -1331,57 +1285,19 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
continue
}
const blockName = accessibleBlock.name || accessibleBlock.type
const normalizedBlockName = normalizeName(blockName)
const mergedSubBlocks = getMergedSubBlocks(accessibleBlockId)
let blockTags: string[]
if (accessibleBlock.type === 'variables') {
const variablesValue = getSubBlockValue(accessibleBlockId, 'variables')
if (variablesValue && Array.isArray(variablesValue) && variablesValue.length > 0) {
const validAssignments = variablesValue.filter((assignment: { variableName?: string }) =>
assignment?.variableName?.trim()
)
blockTags = validAssignments.map(
(assignment: { variableName: string }) =>
`${normalizedBlockName}.${assignment.variableName.trim()}`
)
} else {
blockTags = [normalizedBlockName]
}
} else {
const accessibleBlockConfig = getBlock(accessibleBlock.type)
const isTriggerCapable = accessibleBlockConfig
? hasTriggerCapability(accessibleBlockConfig)
: false
const effectiveTriggerMode = Boolean(accessibleBlock.triggerMode && isTriggerCapable)
const outputPaths = getEffectiveBlockOutputPaths(accessibleBlock.type, mergedSubBlocks, {
triggerMode: effectiveTriggerMode,
preferToolOutputs: !effectiveTriggerMode,
})
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
if (accessibleBlock.type === 'human_in_the_loop' && accessibleBlockId === blockId) {
blockTags = allTags.filter(
(tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint')
)
} else if (allTags.length === 0) {
blockTags = [normalizedBlockName]
} else {
blockTags = allTags
}
}
blockTags = ensureRootTag(blockTags, normalizedBlockName)
const shouldShowRootTag =
accessibleBlock.type === TRIGGER_TYPES.GENERIC_WEBHOOK ||
accessibleBlock.type === 'start_trigger'
if (!shouldShowRootTag) {
blockTags = blockTags.filter((tag) => tag !== normalizedBlockName)
}
const blockName = accessibleBlock.name || accessibleBlock.type
const blockTags = getBlockReferenceTags({
block: {
id: accessibleBlockId,
type: accessibleBlock.type,
name: accessibleBlock.name,
triggerMode: accessibleBlock.triggerMode,
subBlocks: mergedSubBlocks,
},
currentBlockId: blockId,
subBlocks: mergedSubBlocks,
})
blockTagGroups.push({
blockName,

View File

@@ -224,7 +224,7 @@ export const Panel = memo(function Panel({ workspaceId: propWorkspaceId }: Panel
const [copilotChatId, setCopilotChatId] = useState<string | undefined>(undefined)
const [copilotChatTitle, setCopilotChatTitle] = useState<string | null>(null)
const [copilotChatList, setCopilotChatList] = useState<
{ id: string; title: string | null; updatedAt: string; conversationId: string | null }[]
{ id: string; title: string | null; updatedAt: string; activeStreamId: string | null }[]
>([])
const [isCopilotHistoryOpen, setIsCopilotHistoryOpen] = useState(false)
@@ -244,7 +244,7 @@ export const Panel = memo(function Panel({ workspaceId: propWorkspaceId }: Panel
id: string
title: string | null
updatedAt: string
conversationId: string | null
activeStreamId: string | null
}>
setCopilotChatList(filtered)
@@ -813,7 +813,7 @@ export const Panel = memo(function Panel({ workspaceId: propWorkspaceId }: Panel
>
<ConversationListItem
title={chat.title || 'New Chat'}
isActive={Boolean(chat.conversationId)}
isActive={Boolean(chat.activeStreamId)}
titleClassName='text-[13px]'
actions={
<div

View File

@@ -10,7 +10,8 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
import { createMcpToolId } from '@/lib/mcp/shared'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import type { FilterRule, SortRule } from '@/lib/table/types'
import { BLOCK_DIMENSIONS, HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
import { HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
import { calculateWorkflowBlockDimensions } from '@/lib/workflows/blocks/deterministic-dimensions'
import { getConditionRows, getRouterRows } from '@/lib/workflows/dynamic-handle-topology'
import {
buildCanonicalIndex,
@@ -1145,33 +1146,14 @@ export const WorkflowBlock = memo(function WorkflowBlock({
useBlockDimensions({
blockId: id,
calculateDimensions: () => {
const shouldShowDefaultHandles =
config.category !== 'triggers' && type !== 'starter' && !displayTriggerMode
const hasContentBelowHeader = subBlockRows.length > 0 || shouldShowDefaultHandles
const defaultHandlesRow = shouldShowDefaultHandles ? 1 : 0
let rowsCount = 0
if (type === 'condition') {
rowsCount = conditionRows.length + defaultHandlesRow
} else if (type === 'router_v2') {
// +1 for context row, plus route rows
rowsCount = 1 + routerRows.length + defaultHandlesRow
} else {
const subblockRowCount = subBlockRows.reduce((acc, row) => acc + row.length, 0)
rowsCount = subblockRowCount + defaultHandlesRow
}
const contentHeight = hasContentBelowHeader
? BLOCK_DIMENSIONS.WORKFLOW_CONTENT_PADDING +
rowsCount * BLOCK_DIMENSIONS.WORKFLOW_ROW_HEIGHT
: 0
const calculatedHeight = Math.max(
BLOCK_DIMENSIONS.HEADER_HEIGHT + contentHeight,
BLOCK_DIMENSIONS.MIN_HEIGHT
)
return { width: BLOCK_DIMENSIONS.FIXED_WIDTH, height: calculatedHeight }
return calculateWorkflowBlockDimensions({
blockType: type,
category: config.category,
displayTriggerMode,
visibleSubBlockCount: subBlockRows.reduce((acc, row) => acc + row.length, 0),
conditionRowCount: conditionRows.length,
routerRowCount: routerRows.length,
})
},
dependencies: [
type,

View File

@@ -16,15 +16,12 @@ import {
import { checkUsageStatus } from '@/lib/billing/calculations/usage-monitor'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { dollarsToCredits } from '@/lib/billing/credits/conversion'
import { createBullMQJobData, isBullMQEnabled } from '@/lib/core/bullmq'
import { acquireLock } from '@/lib/core/config/redis'
import { RateLimiter } from '@/lib/core/rate-limiter'
import { decryptSecret } from '@/lib/core/security/encryption'
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
import { formatDuration } from '@/lib/core/utils/formatting'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { generateId } from '@/lib/core/utils/uuid'
import { enqueueWorkspaceDispatch } from '@/lib/core/workspace-dispatch'
import type { TraceSpan, WorkflowExecutionLog } from '@/lib/logs/types'
import { sendEmail } from '@/lib/messaging/email/mailer'
import type { AlertConfig } from '@/lib/notifications/alert-rules'
@@ -35,8 +32,6 @@ const logger = createLogger('WorkspaceNotificationDelivery')
const MAX_ATTEMPTS = 5
const RETRY_DELAYS = [5 * 1000, 15 * 1000, 60 * 1000, 3 * 60 * 1000, 10 * 60 * 1000]
const NOTIFICATION_DISPATCH_LOCK_TTL_SECONDS = 3
function getRetryDelayWithJitter(baseDelay: number): number {
const jitter = Math.random() * 0.1 * baseDelay
return Math.floor(baseDelay + jitter)
@@ -534,42 +529,14 @@ async function buildRetryLog(params: NotificationDeliveryParams): Promise<Workfl
}
export async function enqueueNotificationDeliveryDispatch(
params: NotificationDeliveryParams
_params: NotificationDeliveryParams
): Promise<boolean> {
if (!isBullMQEnabled()) {
return false
}
const lockAcquired = await acquireLock(
`workspace-notification-dispatch:${params.deliveryId}`,
params.deliveryId,
NOTIFICATION_DISPATCH_LOCK_TTL_SECONDS
)
if (!lockAcquired) {
return false
}
await enqueueWorkspaceDispatch({
workspaceId: params.workspaceId,
lane: 'lightweight',
queueName: 'workspace-notification-delivery',
bullmqJobName: 'workspace-notification-delivery',
bullmqPayload: createBullMQJobData(params),
metadata: {
workflowId: params.log.workflowId ?? undefined,
},
})
return true
return false
}
const STUCK_IN_PROGRESS_THRESHOLD_MS = 5 * 60 * 1000
export async function sweepPendingNotificationDeliveries(limit = 50): Promise<number> {
if (!isBullMQEnabled()) {
return 0
}
const stuckThreshold = new Date(Date.now() - STUCK_IN_PROGRESS_THRESHOLD_MS)
await db

View File

@@ -177,6 +177,7 @@ export interface ExecutionContext {
userId?: string
isDeployedContext?: boolean
enforceCredentialAccess?: boolean
copilotToolExecution?: boolean
permissionConfig?: PermissionGroupConfig | null
permissionConfigLoaded?: boolean

View File

@@ -0,0 +1,131 @@
import { keepPreviousData, useMutation, useQuery } from '@tanstack/react-query'
/** Deployment environments the mothership admin proxy can target. */
export type MothershipEnv = 'dev' | 'staging' | 'prod'

// All requests are funneled through this Next.js admin proxy route; the real
// target is selected via the `env` and `endpoint` query parameters.
const BASE = '/api/admin/mothership'
/**
 * POSTs a JSON body to the mothership admin proxy for the given environment.
 * Throws an Error derived from the response body (or status text when the
 * body is not JSON) on any non-2xx response.
 */
async function mothershipPost(
  endpoint: string,
  environment: MothershipEnv,
  body?: Record<string, unknown>
) {
  const init: RequestInit = {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
  }
  if (body) {
    init.body = JSON.stringify(body)
  }
  const response = await fetch(`${BASE}?env=${environment}&endpoint=${endpoint}`, init)
  if (response.ok) {
    return response.json()
  }
  const payload = await response.json().catch(() => ({ error: response.statusText }))
  throw new Error(payload.message || payload.error || `Request failed (${response.status})`)
}
/**
 * GETs from the mothership admin proxy, encoding env/endpoint plus any extra
 * params into the query string. Throws on non-2xx responses, preferring the
 * server-provided message over the bare status text.
 */
async function mothershipGet(
  endpoint: string,
  environment: MothershipEnv,
  params?: Record<string, string>
) {
  const query = new URLSearchParams({ env: environment, endpoint, ...params })
  const response = await fetch(`${BASE}?${query.toString()}`, { method: 'GET' })
  if (response.ok) {
    return response.json()
  }
  const payload = await response.json().catch(() => ({ error: response.statusText }))
  throw new Error(payload.message || payload.error || `Request failed (${response.status})`)
}
/**
 * React Query cache keys for mothership admin data. Every key is rooted under
 * the shared `all` namespace so the whole tree can be invalidated at once.
 */
export const mothershipKeys = {
  all: ['mothership-admin'] as const,
  requests: (environment: MothershipEnv, rangeStart: string, rangeEnd: string, user?: string) =>
    [...mothershipKeys.all, 'requests', environment, rangeStart, rangeEnd, user] as const,
  userBreakdown: (environment: MothershipEnv, rangeStart: string, rangeEnd: string) =>
    [...mothershipKeys.all, 'user-breakdown', environment, rangeStart, rangeEnd] as const,
  licenses: (environment: MothershipEnv) =>
    [...mothershipKeys.all, 'licenses', environment] as const,
  licenseDetails: (environment: MothershipEnv, licenseId?: string, licenseName?: string) =>
    [...mothershipKeys.all, 'license-details', environment, licenseId, licenseName] as const,
  enterpriseStats: (
    environment: MothershipEnv,
    customerType: string,
    rangeStart: string,
    rangeEnd: string
  ) => [...mothershipKeys.all, 'enterprise-stats', environment, customerType, rangeStart, rangeEnd] as const,
  trace: (environment: MothershipEnv, requestId: string) =>
    [...mothershipKeys.all, 'trace', environment, requestId] as const,
}
/**
 * Fetches the request log for a time window, optionally filtered to one user.
 * Disabled until both ends of the range are provided; previous data is kept
 * visible while a new range loads.
 */
export function useMothershipRequests(
  environment: MothershipEnv,
  start: string,
  end: string,
  userId?: string
) {
  const payload: Record<string, unknown> = { start, end }
  if (userId) {
    payload.userId = userId
  }
  return useQuery({
    queryKey: mothershipKeys.requests(environment, start, end, userId),
    queryFn: () => mothershipPost('requests', environment, payload),
    enabled: Boolean(start) && Boolean(end),
    placeholderData: keepPreviousData,
  })
}
/** Per-user usage breakdown for the selected environment and date range. */
export function useMothershipUserBreakdown(environment: MothershipEnv, start: string, end: string) {
  const rangeReady = Boolean(start) && Boolean(end)
  return useQuery({
    queryKey: mothershipKeys.userBreakdown(environment, start, end),
    queryFn: () => mothershipPost('user-breakdown', environment, { start, end }),
    enabled: rangeReady,
    placeholderData: keepPreviousData,
  })
}
/** Lists every license known to the selected environment. */
export function useMothershipLicenses(environment: MothershipEnv) {
  return useQuery({
    queryFn: () => mothershipGet('licenses', environment),
    queryKey: mothershipKeys.licenses(environment),
  })
}
/**
 * Detailed record for a single license, looked up by id and/or name.
 * Disabled until at least one identifier is supplied.
 */
export function useMothershipLicenseDetails(
  environment: MothershipEnv,
  id?: string,
  name?: string
) {
  const lookup: Record<string, unknown> = {}
  if (id) {
    lookup.id = id
  }
  if (name) {
    lookup.name = name
  }
  return useQuery({
    queryKey: mothershipKeys.licenseDetails(environment, id, name),
    queryFn: () => mothershipPost('licenses/details', environment, lookup),
    enabled: Boolean(id) || Boolean(name),
  })
}
/** Mutation that provisions a new license in the selected environment. */
export function useGenerateLicense(environment: MothershipEnv) {
  const generate = (params: { name: string; expirationDate?: string }) =>
    mothershipPost('licenses/generate', environment, params)
  return useMutation({ mutationFn: generate })
}
/**
 * Aggregate usage stats for a customer segment over a date range.
 * Disabled until customer type and both range bounds are provided.
 */
export function useMothershipEnterpriseStats(
  environment: MothershipEnv,
  customerType: string,
  start: string,
  end: string
) {
  const ready = Boolean(customerType) && Boolean(start) && Boolean(end)
  return useQuery({
    queryKey: mothershipKeys.enterpriseStats(environment, customerType, start, end),
    queryFn: () => mothershipPost('enterprise-stats', environment, { customerType, start, end }),
    enabled: ready,
    placeholderData: keepPreviousData,
  })
}
/** Full trace for one request id; disabled while the id is empty. */
export function useMothershipTrace(environment: MothershipEnv, requestId: string) {
  return useQuery({
    queryFn: () => mothershipGet('traces', environment, { requestId }),
    queryKey: mothershipKeys.trace(environment, requestId),
    enabled: Boolean(requestId),
  })
}

View File

@@ -1,5 +1,9 @@
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type { ChatContextKind, MothershipResource } from '@/app/workspace/[workspaceId]/home/types'
import type { PersistedMessage } from '@/lib/copilot/chat/persisted-message'
import { normalizeMessage } from '@/lib/copilot/chat/persisted-message'
import type { FilePreviewSession } from '@/lib/copilot/request/session'
import type { StreamBatchEvent } from '@/lib/copilot/request/session/types'
import type { MothershipResource } from '@/app/workspace/[workspaceId]/home/types'
export interface TaskMetadata {
id: string
@@ -9,71 +13,16 @@ export interface TaskMetadata {
isUnread: boolean
}
export interface StreamSnapshot {
events: Array<{ eventId: number; streamId: string; event: Record<string, unknown> }>
status: string
}
export interface TaskChatHistory {
id: string
title: string | null
messages: TaskStoredMessage[]
messages: PersistedMessage[]
activeStreamId: string | null
resources: MothershipResource[]
streamSnapshot?: StreamSnapshot | null
}
export interface TaskStoredToolCall {
id: string
name: string
status: string
params?: Record<string, unknown>
result?: unknown
error?: string
durationMs?: number
}
export interface TaskStoredFileAttachment {
id: string
key: string
filename: string
media_type: string
size: number
}
export interface TaskStoredMessageContext {
kind: ChatContextKind
label: string
workflowId?: string
knowledgeId?: string
tableId?: string
fileId?: string
folderId?: string
chatId?: string
}
export interface TaskStoredMessage {
id: string
role: 'user' | 'assistant'
content: string
requestId?: string
toolCalls?: TaskStoredToolCall[]
contentBlocks?: TaskStoredContentBlock[]
fileAttachments?: TaskStoredFileAttachment[]
contexts?: TaskStoredMessageContext[]
}
export interface TaskStoredContentBlock {
type: string
content?: string
toolCall?: {
id?: string
name?: string
state?: string
params?: Record<string, unknown>
result?: { success: boolean; output?: unknown; error?: string }
display?: { text?: string }
calledBy?: string
streamSnapshot?: {
events: StreamBatchEvent[]
previewSessions: FilePreviewSession[]
status: string
} | null
}
@@ -89,7 +38,7 @@ interface TaskResponse {
id: string
title: string | null
updatedAt: string
conversationId: string | null
activeStreamId: string | null
lastSeenAt: string | null
}
@@ -99,9 +48,9 @@ function mapTask(chat: TaskResponse): TaskMetadata {
id: chat.id,
name: chat.title ?? 'New task',
updatedAt,
isActive: chat.conversationId !== null,
isActive: chat.activeStreamId !== null,
isUnread:
chat.conversationId === null &&
chat.activeStreamId === null &&
(chat.lastSeenAt === null || updatedAt > new Date(chat.lastSeenAt)),
}
}
@@ -142,7 +91,9 @@ export async function fetchChatHistory(
return {
id: chat.id,
title: chat.title,
messages: Array.isArray(chat.messages) ? chat.messages : [],
messages: Array.isArray(chat.messages)
? chat.messages.map((m: Record<string, unknown>) => normalizeMessage(m))
: [],
activeStreamId: chat.conversationId || null,
resources: Array.isArray(chat.resources) ? chat.resources : [],
streamSnapshot: chat.streamSnapshot || null,
@@ -161,8 +112,10 @@ export async function fetchChatHistory(
return {
id: chat.id,
title: chat.title,
messages: Array.isArray(chat.messages) ? chat.messages : [],
activeStreamId: chat.conversationId || null,
messages: Array.isArray(chat.messages)
? chat.messages.map((m: Record<string, unknown>) => normalizeMessage(m))
: [],
activeStreamId: chat.activeStreamId || null,
resources: Array.isArray(chat.resources) ? chat.resources : [],
streamSnapshot: chat.streamSnapshot || null,
}

View File

@@ -1,107 +0,0 @@
'use client'
import { useEffect, useRef, useState } from 'react'
/**
 * Locates the last paragraph break (`\n\n`) that is not inside a fenced code
 * block, and returns the offset just past it — the start of the following
 * paragraph — so the caller can slice cleanly. Returns 0 when no safe break
 * exists.
 */
function findSafeSplitPoint(content: string): number {
  let insideFence = false
  let splitAfter = 0
  for (let pos = 0; pos < content.length; ) {
    if (content.startsWith('```', pos)) {
      // A fence marker flips code-block state; breaks inside fences are unsafe.
      insideFence = !insideFence
      pos += 3
    } else if (!insideFence && content.startsWith('\n\n', pos)) {
      splitAfter = pos + 2
      pos += 2
    } else {
      pos += 1
    }
  }
  return splitAfter
}
interface StreamingRevealResult {
  /** Stable head — paragraphs that have fully arrived. Ideal for memoisation. */
  committed: string
  /** Active tail — the paragraph currently being streamed, animated on mount. */
  incoming: string
  /** Increments each time committed advances; use as React key to retrigger the fade animation. */
  generation: number
}

/**
 * Splits streaming markdown into a stable *committed* head and an animated
 * *incoming* tail. The split always occurs at a paragraph boundary (`\n\n`)
 * that is outside fenced code blocks, so both halves are valid markdown.
 *
 * Committed content changes infrequently (only at paragraph breaks), making
 * it safe to wrap in `useMemo`. The incoming tail is small and re-renders at
 * the caller's throttle rate.
 *
 * The split is preserved after streaming ends to prevent layout shifts caused
 * by DOM restructuring. It only resets when content clears (new message).
 */
export function useStreamingReveal(content: string, isStreaming: boolean): StreamingRevealResult {
  // Offset of the last paragraph boundary promoted into the stable head.
  const [committedEnd, setCommittedEnd] = useState(0)
  const [generation, setGeneration] = useState(0)
  // Mirrors the latest split synchronously so the effect can compare without
  // waiting for a re-render; also read during render once streaming stops.
  const prevSplitRef = useRef(0)

  useEffect(() => {
    if (content.length === 0) {
      // Content cleared → a new message is starting; reset the preserved split.
      prevSplitRef.current = 0
      setCommittedEnd(0)
      return
    }
    if (!isStreaming) return
    const splitPoint = findSafeSplitPoint(content)
    // Only move forward — the committed head must never shrink mid-stream.
    if (splitPoint > prevSplitRef.current) {
      prevSplitRef.current = splitPoint
      setCommittedEnd(splitPoint)
      setGeneration((g) => g + 1)
    }
  }, [content, isStreaming])

  if (!isStreaming) {
    // Streaming finished: keep the final split so the committed/incoming DOM
    // structure stays put and does not cause a layout shift.
    const preservedSplit = prevSplitRef.current
    if (preservedSplit > 0 && preservedSplit < content.length) {
      return {
        committed: content.slice(0, preservedSplit),
        incoming: content.slice(preservedSplit),
        generation,
      }
    }
    return { committed: content, incoming: '', generation }
  }
  if (committedEnd > 0 && committedEnd < content.length) {
    return {
      committed: content.slice(0, committedEnd),
      incoming: content.slice(committedEnd),
      generation,
    }
  }
  // No paragraph split yet: keep the growing markdown in `incoming` only so ReactMarkdown
  // re-parses one tail block (same as the paragraph-tail path). Putting everything in
  // `committed` would re-render the full document every tick and makes tables jump.
  if (committedEnd === 0 && content.length > 0) {
    return { committed: '', incoming: content, generation }
  }
  return { committed: content, incoming: '', generation }
}

View File

@@ -54,6 +54,7 @@ export function useStreamingText(target: string, isStreaming: boolean): string {
useEffect(() => {
if (isStreaming) return
if (revealedRef.current === target) return
revealedRef.current = target
lastTargetChangeAtRef.current = Date.now()
lastTargetLengthRef.current = target.length

View File

@@ -1,146 +0,0 @@
/**
* @vitest-environment node
*/
import { beforeEach, describe, expect, it, vi } from 'vitest'
const {
mockGetHighestPrioritySubscription,
mockGetWorkspaceBilledAccountUserId,
mockFeatureFlags,
mockRedisGet,
mockRedisSet,
mockRedisDel,
mockRedisKeys,
mockGetRedisClient,
} = vi.hoisted(() => ({
mockGetHighestPrioritySubscription: vi.fn(),
mockGetWorkspaceBilledAccountUserId: vi.fn(),
mockFeatureFlags: {
isBillingEnabled: true,
},
mockRedisGet: vi.fn(),
mockRedisSet: vi.fn(),
mockRedisDel: vi.fn(),
mockRedisKeys: vi.fn(),
mockGetRedisClient: vi.fn(),
}))
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
}),
}))
vi.mock('@/lib/billing/core/plan', () => ({
getHighestPrioritySubscription: mockGetHighestPrioritySubscription,
}))
vi.mock('@/lib/workspaces/utils', () => ({
getWorkspaceBilledAccountUserId: mockGetWorkspaceBilledAccountUserId,
}))
vi.mock('@/lib/core/config/redis', () => ({
getRedisClient: mockGetRedisClient,
}))
vi.mock('@/lib/core/config/feature-flags', () => mockFeatureFlags)
import {
getWorkspaceConcurrencyLimit,
resetWorkspaceConcurrencyLimitCache,
} from '@/lib/billing/workspace-concurrency'
// Covers plan→limit resolution plus the Redis caching layer, with all billing
// and Redis dependencies mocked above via vi.hoisted/vi.mock.
describe('workspace concurrency billing', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    // Reset mutable flag and give Redis mocks benign defaults (cache miss).
    mockFeatureFlags.isBillingEnabled = true
    mockRedisGet.mockResolvedValue(null)
    mockRedisSet.mockResolvedValue('OK')
    mockRedisDel.mockResolvedValue(1)
    mockRedisKeys.mockResolvedValue([])
    mockGetRedisClient.mockReturnValue({
      get: mockRedisGet,
      set: mockRedisSet,
      del: mockRedisDel,
      keys: mockRedisKeys,
    })
  })
  it('returns free tier when no billed account exists', async () => {
    mockGetWorkspaceBilledAccountUserId.mockResolvedValue(null)
    await expect(getWorkspaceConcurrencyLimit('workspace-1')).resolves.toBe(5)
  })
  it('returns pro limit for pro billing accounts', async () => {
    mockGetWorkspaceBilledAccountUserId.mockResolvedValue('user-1')
    mockGetHighestPrioritySubscription.mockResolvedValue({
      plan: 'pro_6000',
      metadata: null,
    })
    await expect(getWorkspaceConcurrencyLimit('workspace-1')).resolves.toBe(50)
  })
  it('returns max limit for max plan tiers', async () => {
    mockGetWorkspaceBilledAccountUserId.mockResolvedValue('user-1')
    // pro_25000 crosses the 25k-credit threshold and gets the team-sized limit.
    mockGetHighestPrioritySubscription.mockResolvedValue({
      plan: 'pro_25000',
      metadata: null,
    })
    await expect(getWorkspaceConcurrencyLimit('workspace-1')).resolves.toBe(200)
  })
  it('returns max limit for legacy team plans', async () => {
    mockGetWorkspaceBilledAccountUserId.mockResolvedValue('user-1')
    mockGetHighestPrioritySubscription.mockResolvedValue({
      plan: 'team',
      metadata: null,
    })
    await expect(getWorkspaceConcurrencyLimit('workspace-1')).resolves.toBe(200)
  })
  it('returns enterprise metadata override when present', async () => {
    mockGetWorkspaceBilledAccountUserId.mockResolvedValue('user-1')
    // Numeric-string metadata override should be parsed and honored.
    mockGetHighestPrioritySubscription.mockResolvedValue({
      plan: 'enterprise',
      metadata: {
        workspaceConcurrencyLimit: '350',
      },
    })
    await expect(getWorkspaceConcurrencyLimit('workspace-1')).resolves.toBe(350)
  })
  it('uses free-tier limit when billing is disabled', async () => {
    // Billing disabled must win over any plan or metadata override.
    mockFeatureFlags.isBillingEnabled = false
    mockGetWorkspaceBilledAccountUserId.mockResolvedValue('user-1')
    mockGetHighestPrioritySubscription.mockResolvedValue({
      plan: 'pro_25000',
      metadata: {
        workspaceConcurrencyLimit: 999,
      },
    })
    await expect(getWorkspaceConcurrencyLimit('workspace-1')).resolves.toBe(5)
  })
  it('uses redis cache when available', async () => {
    // A cache hit must short-circuit before any billing lookup happens.
    mockRedisGet.mockResolvedValueOnce('123')
    await expect(getWorkspaceConcurrencyLimit('workspace-1')).resolves.toBe(123)
    expect(mockGetWorkspaceBilledAccountUserId).not.toHaveBeenCalled()
  })
  it('can clear a specific workspace cache entry', async () => {
    await resetWorkspaceConcurrencyLimitCache('workspace-1')
    expect(mockRedisDel).toHaveBeenCalledWith('workspace-concurrency-limit:workspace-1')
  })
})

View File

@@ -1,170 +0,0 @@
import { createLogger } from '@sim/logger'
import { getHighestPrioritySubscription } from '@/lib/billing/core/plan'
import { getPlanTierCredits, isEnterprise, isPro, isTeam } from '@/lib/billing/plan-helpers'
import { parseEnterpriseWorkspaceConcurrencyMetadata } from '@/lib/billing/types'
import { env } from '@/lib/core/config/env'
import { isBillingEnabled } from '@/lib/core/config/feature-flags'
import { getRedisClient } from '@/lib/core/config/redis'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
const logger = createLogger('WorkspaceConcurrencyBilling')

// Resolved limits are cached for one minute: milliseconds for the in-memory
// cache, whole seconds for the Redis `EX` argument.
const CACHE_TTL_MS = 60_000
const CACHE_TTL_SECONDS = Math.floor(CACHE_TTL_MS / 1000)

// Entry for the in-process fallback cache (used only when Redis is absent).
interface CacheEntry {
  // Cached concurrency limit for the workspace.
  value: number
  // Absolute epoch-ms deadline after which the entry is considered stale.
  expiresAt: number
}

// Per-process fallback cache, keyed by workspaceId.
const inMemoryConcurrencyCache = new Map<string, CacheEntry>()
/** Cache key (shared by Redis and the in-memory map) for a workspace's limit. */
function cacheKey(workspaceId: string): string {
  return ['workspace-concurrency-limit', workspaceId].join(':')
}
/**
 * Coerces an unknown cache/metadata value into a positive integer limit.
 * Finite positive numbers are floored; strings are parsed base-10 (leading
 * digits count, matching parseInt). Everything else yields null.
 */
function parsePositiveLimit(value: unknown): number | null {
  switch (typeof value) {
    case 'number':
      return Number.isFinite(value) && value > 0 ? Math.floor(value) : null
    case 'string': {
      const parsed = Number.parseInt(value, 10)
      return Number.isFinite(parsed) && parsed > 0 ? parsed : null
    }
    default:
      return null
  }
}
/** Parses a concurrency env var, falling back when unset, non-numeric, or zero. */
function envLimitOr(raw: string, fallback: number): number {
  return Number.parseInt(raw, 10) || fallback
}

function getFreeConcurrencyLimit(): number {
  return envLimitOr(env.WORKSPACE_CONCURRENCY_FREE, 5)
}

function getProConcurrencyLimit(): number {
  return envLimitOr(env.WORKSPACE_CONCURRENCY_PRO, 50)
}

function getTeamConcurrencyLimit(): number {
  return envLimitOr(env.WORKSPACE_CONCURRENCY_TEAM, 200)
}

function getEnterpriseDefaultConcurrencyLimit(): number {
  return envLimitOr(env.WORKSPACE_CONCURRENCY_ENTERPRISE, 200)
}
/**
 * Enterprise limit: the per-subscription metadata override when present,
 * otherwise the environment-wide enterprise default.
 */
function getEnterpriseConcurrencyLimit(metadata: unknown): number {
  const parsed = parseEnterpriseWorkspaceConcurrencyMetadata(metadata)
  if (parsed?.workspaceConcurrencyLimit != null) {
    return parsed.workspaceConcurrencyLimit
  }
  return getEnterpriseDefaultConcurrencyLimit()
}
function getPlanConcurrencyLimit(plan: string | null | undefined, metadata: unknown): number {
if (!isBillingEnabled) {
return getFreeConcurrencyLimit()
}
if (!plan) {
return getFreeConcurrencyLimit()
}
if (isEnterprise(plan)) {
return getEnterpriseConcurrencyLimit(metadata)
}
if (isTeam(plan)) {
return getTeamConcurrencyLimit()
}
const credits = getPlanTierCredits(plan)
if (credits >= 25_000) {
return getTeamConcurrencyLimit()
}
if (isPro(plan)) {
return getProConcurrencyLimit()
}
return getFreeConcurrencyLimit()
}
/**
 * Persists a freshly-resolved limit: Redis (with TTL) when configured,
 * otherwise the per-process in-memory cache.
 */
async function writeCachedLimit(workspaceId: string, limit: number): Promise<void> {
  const redis = getRedisClient()
  if (redis) {
    await redis.set(cacheKey(workspaceId), String(limit), 'EX', CACHE_TTL_SECONDS)
  } else {
    inMemoryConcurrencyCache.set(workspaceId, {
      value: limit,
      expiresAt: Date.now() + CACHE_TTL_MS,
    })
  }
}

/**
 * Resolves the maximum concurrent executions allowed for a workspace, derived
 * from the billed account's highest-priority subscription plan.
 *
 * Successful resolutions (including the no-billed-account free tier) are
 * cached for CACHE_TTL_MS. On lookup failure the free-tier limit is returned
 * WITHOUT caching, so a transient billing outage does not pin the workspace
 * to the free tier for a full TTL.
 */
export async function getWorkspaceConcurrencyLimit(workspaceId: string): Promise<number> {
  const redis = getRedisClient()
  if (redis) {
    const cachedValue = parsePositiveLimit(await redis.get(cacheKey(workspaceId)))
    if (cachedValue !== null) {
      return cachedValue
    }
  } else {
    const cached = inMemoryConcurrencyCache.get(workspaceId)
    if (cached && cached.expiresAt > Date.now()) {
      return cached.value
    }
  }
  try {
    const billedAccountUserId = await getWorkspaceBilledAccountUserId(workspaceId)
    let limit: number
    if (billedAccountUserId) {
      const subscription = await getHighestPrioritySubscription(billedAccountUserId)
      limit = getPlanConcurrencyLimit(subscription?.plan, subscription?.metadata)
    } else {
      // No billed account → free tier; still cached so repeat lookups stay cheap.
      limit = getFreeConcurrencyLimit()
    }
    await writeCachedLimit(workspaceId, limit)
    return limit
  } catch (error) {
    logger.error('Failed to resolve workspace concurrency limit, using free tier', {
      workspaceId,
      error,
    })
    return getFreeConcurrencyLimit()
  }
}
/**
 * Clears cached concurrency limits — a single workspace when an id is given,
 * otherwise every entry — in both the in-memory map and Redis (when present).
 */
export async function resetWorkspaceConcurrencyLimitCache(workspaceId?: string): Promise<void> {
  if (workspaceId) {
    inMemoryConcurrencyCache.delete(workspaceId)
  } else {
    inMemoryConcurrencyCache.clear()
  }
  const redis = getRedisClient()
  if (!redis) {
    return
  }
  if (workspaceId) {
    await redis.del(cacheKey(workspaceId))
    return
  }
  // Wildcard wipe: delete every cached limit in one call when keys exist.
  const keys = await redis.keys('workspace-concurrency-limit:*')
  if (keys.length > 0) {
    await redis.del(...keys)
  }
}

View File

@@ -1,13 +1,7 @@
import type { CopilotAsyncToolStatus } from '@sim/db/schema'
import { MothershipStreamV1AsyncToolRecordStatus } from '@/lib/copilot/generated/mothership-stream-v1'
export const ASYNC_TOOL_STATUS = {
pending: 'pending',
running: 'running',
completed: 'completed',
failed: 'failed',
cancelled: 'cancelled',
delivered: 'delivered',
} as const
export const ASYNC_TOOL_STATUS = MothershipStreamV1AsyncToolRecordStatus
export type AsyncLifecycleStatus =
| typeof ASYNC_TOOL_STATUS.pending

View File

@@ -1,53 +0,0 @@
import { createLogger } from '@sim/logger'
import { CopilotFiles } from '@/lib/uploads'
import { createFileContent } from '@/lib/uploads/utils/file-utils'
const logger = createLogger('CopilotChatContext')
/**
 * Raw attachment descriptor as received from the caller. Field naming varies
 * by source (name vs filename, mimeType vs media_type), so both spellings are
 * accepted here and normalized downstream.
 */
export interface FileAttachmentInput {
  id: string
  // Storage key used to fetch the underlying bytes.
  key: string
  name?: string
  filename?: string
  mimeType?: string
  media_type?: string
  // Attachment size — presumably bytes; TODO confirm against the upload pipeline.
  size: number
}

/** Provider-ready content part; shape beyond `type` depends on the file kind. */
export interface FileContent {
  type: string
  [key: string]: unknown
}
/**
 * Downloads the user's file attachments and converts them into content parts
 * for the payload. Attachments that cannot be converted are dropped; input
 * and output counts are logged at debug level for diagnosis.
 */
export async function processFileAttachments(
  fileAttachments: FileAttachmentInput[],
  userId: string
): Promise<FileContent[]> {
  if (!Array.isArray(fileAttachments) || fileAttachments.length === 0) return []
  // Correlates all log lines for this batch of attachments.
  const requestId = `copilot-${userId}-${Date.now()}`
  const processedAttachments = await CopilotFiles.processCopilotAttachments(
    fileAttachments as Parameters<typeof CopilotFiles.processCopilotAttachments>[0],
    requestId
  )
  const contents: FileContent[] = []
  for (const { buffer, attachment } of processedAttachments) {
    const converted = createFileContent(buffer, attachment.media_type)
    if (!converted) continue
    contents.push({ ...converted, filename: attachment.filename })
  }
  logger.debug('Processed file attachments for payload', {
    userId,
    inputCount: fileAttachments.length,
    outputCount: contents.length,
  })
  return contents
}

View File

@@ -1,140 +0,0 @@
/**
* @vitest-environment node
*/
import { beforeEach, describe, expect, it, vi } from 'vitest'
const {
orchestrateCopilotStream,
createRunSegment,
updateRunStatus,
resetStreamBuffer,
setStreamMeta,
createStreamEventWriter,
} = vi.hoisted(() => ({
orchestrateCopilotStream: vi.fn(),
createRunSegment: vi.fn(),
updateRunStatus: vi.fn(),
resetStreamBuffer: vi.fn(),
setStreamMeta: vi.fn(),
createStreamEventWriter: vi.fn(),
}))
vi.mock('@/lib/copilot/orchestrator', () => ({
orchestrateCopilotStream,
}))
vi.mock('@/lib/copilot/async-runs/repository', () => ({
createRunSegment,
updateRunStatus,
}))
vi.mock('@/lib/copilot/orchestrator/stream/buffer', () => ({
createStreamEventWriter,
resetStreamBuffer,
setStreamMeta,
}))
vi.mock('@sim/db', () => ({
db: {
update: vi.fn(() => ({
set: vi.fn(() => ({
where: vi.fn(),
})),
})),
},
}))
vi.mock('@/lib/copilot/task-events', () => ({
taskPubSub: null,
}))
import { createSSEStream } from '@/lib/copilot/chat-streaming'
/** Reads a stream to completion, discarding every chunk. */
async function drainStream(stream: ReadableStream) {
  const reader = stream.getReader()
  let finished = false
  while (!finished) {
    const result = await reader.read()
    finished = result.done
  }
}
// Verifies replay durability: a terminal 'error' event must be written to the
// stream buffer BEFORE the writer is closed, for both a failed orchestration
// result and a thrown orchestration error.
describe('createSSEStream terminal error handling', () => {
  // Shared stream-writer spies, re-primed before each test.
  const write = vi.fn().mockResolvedValue({ eventId: 1, streamId: 'stream-1', event: {} })
  const flush = vi.fn().mockResolvedValue(undefined)
  const close = vi.fn().mockResolvedValue(undefined)
  beforeEach(() => {
    vi.clearAllMocks()
    write.mockResolvedValue({ eventId: 1, streamId: 'stream-1', event: {} })
    flush.mockResolvedValue(undefined)
    close.mockResolvedValue(undefined)
    createStreamEventWriter.mockReturnValue({ write, flush, close })
    resetStreamBuffer.mockResolvedValue(undefined)
    setStreamMeta.mockResolvedValue(undefined)
    createRunSegment.mockResolvedValue(null)
    updateRunStatus.mockResolvedValue(null)
  })
  it('writes a terminal error event before close when orchestration returns success=false', async () => {
    // Orchestration completes but reports failure (no throw).
    orchestrateCopilotStream.mockResolvedValue({
      success: false,
      error: 'resume failed',
      content: '',
      contentBlocks: [],
      toolCalls: [],
    })
    const stream = createSSEStream({
      requestPayload: { message: 'hello' },
      userId: 'user-1',
      streamId: 'stream-1',
      executionId: 'exec-1',
      runId: 'run-1',
      currentChat: null,
      isNewChat: false,
      message: 'hello',
      titleModel: 'gpt-5.4',
      requestId: 'req-1',
      orchestrateOptions: {},
    })
    await drainStream(stream)
    expect(write).toHaveBeenCalledWith(
      expect.objectContaining({
        type: 'error',
        error: 'resume failed',
      })
    )
    // Ordering matters: the last write must land before the writer closes.
    expect(write.mock.invocationCallOrder.at(-1)).toBeLessThan(close.mock.invocationCallOrder[0])
  })
  it('writes the thrown terminal error event before close for replay durability', async () => {
    // Orchestration throws outright; the thrown message must still be buffered.
    orchestrateCopilotStream.mockRejectedValue(new Error('kaboom'))
    const stream = createSSEStream({
      requestPayload: { message: 'hello' },
      userId: 'user-1',
      streamId: 'stream-1',
      executionId: 'exec-1',
      runId: 'run-1',
      currentChat: null,
      isNewChat: false,
      message: 'hello',
      titleModel: 'gpt-5.4',
      requestId: 'req-1',
      orchestrateOptions: {},
    })
    await drainStream(stream)
    expect(write).toHaveBeenCalledWith(
      expect.objectContaining({
        type: 'error',
        error: 'kaboom',
      })
    )
    expect(write.mock.invocationCallOrder.at(-1)).toBeLessThan(close.mock.invocationCallOrder[0])
  })
})

View File

@@ -1,579 +0,0 @@
import { db } from '@sim/db'
import { copilotChats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { createRunSegment, updateRunStatus } from '@/lib/copilot/async-runs/repository'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import type { OrchestrateStreamOptions } from '@/lib/copilot/orchestrator'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
import {
createStreamEventWriter,
getStreamMeta,
resetStreamBuffer,
setStreamMeta,
} from '@/lib/copilot/orchestrator/stream/buffer'
import { taskPubSub } from '@/lib/copilot/task-events'
import { env } from '@/lib/core/config/env'
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
const logger = createLogger('CopilotChatStreaming')
const CHAT_STREAM_LOCK_TTL_SECONDS = 2 * 60 * 60
const STREAM_ABORT_TTL_SECONDS = 10 * 60
const STREAM_ABORT_POLL_MS = 1000
interface ActiveStreamEntry {
  // Aborts the underlying orchestration when the stream must be torn down.
  abortController: AbortController
  // Separate controller — presumably signals a user-initiated stop; both are
  // aborted together in abortActiveStream (confirm intended distinction).
  userStopController: AbortController
}

// In-process registry of live SSE streams, keyed by streamId.
const activeStreams = new Map<string, ActiveStreamEntry>()

// Tracks in-flight streams by chatId so that a subsequent request for the
// same chat can force-abort the previous stream and wait for it to settle
// before forwarding to Go.
const pendingChatStreams = new Map<
  string,
  { promise: Promise<void>; resolve: () => void; streamId: string }
>()
/**
 * Records a pending stream for a chat, creating a promise that settles when
 * the stream finishes. Logs a warning (but still overwrites) if an entry for
 * the chat already exists.
 */
function registerPendingChatStream(chatId: string, streamId: string): void {
  if (pendingChatStreams.has(chatId)) {
    logger.warn(`registerPendingChatStream: overwriting existing entry for chatId ${chatId}`)
  }
  let settle: () => void = () => {}
  const settled = new Promise<void>((resolve) => {
    // Assigned synchronously by the Promise executor before set() below runs.
    settle = resolve
  })
  pendingChatStreams.set(chatId, { promise: settled, resolve: settle, streamId })
}
/**
 * Settles and removes the pending entry for a chat — but only when the given
 * streamId still owns it, so a newer stream's entry is never clobbered.
 */
function resolvePendingChatStream(chatId: string, streamId: string): void {
  const entry = pendingChatStreams.get(chatId)
  if (!entry || entry.streamId !== streamId) return
  entry.resolve()
  pendingChatStreams.delete(chatId)
}
/** Redis key guarding "one live stream per chat". */
function getChatStreamLockKey(chatId: string): string {
  return ['copilot', 'chat-stream-lock', chatId].join(':')
}
/** Redis key marking a stream as abort-requested across processes. */
function getStreamAbortKey(streamId: string): string {
  return ['copilot', 'stream-abort', streamId].join(':')
}
/**
 * Wait for any in-flight stream on `chatId` to settle without force-aborting it.
 * Returns true when no stream is active (or it settles in time), false on timeout.
 *
 * Checks both the in-process registry and, when Redis is configured, the
 * distributed chat-stream lock. `expectedStreamId` narrows the wait to one
 * specific stream so an unrelated newer stream does not block the caller.
 */
export async function waitForPendingChatStream(
  chatId: string,
  timeoutMs = 5_000,
  expectedStreamId?: string
): Promise<boolean> {
  const redis = getRedisClient()
  const deadline = Date.now() + timeoutMs
  for (;;) {
    const entry = pendingChatStreams.get(chatId)
    // Locally pending only when the entry matches the stream we care about
    // (or any stream, when no expectedStreamId was given).
    const localPending = !!entry && (!expectedStreamId || entry.streamId === expectedStreamId)
    if (redis) {
      try {
        const ownerStreamId = await redis.get(getChatStreamLockKey(chatId))
        // Lock counts as released when absent, or when held by some other
        // stream than the one being waited on.
        const lockReleased =
          !ownerStreamId || (expectedStreamId !== undefined && ownerStreamId !== expectedStreamId)
        if (!localPending && lockReleased) {
          return true
        }
      } catch (error) {
        // Redis failure: keep polling until the deadline rather than failing.
        logger.warn('Failed to check distributed chat stream lock while waiting', {
          chatId,
          expectedStreamId,
          error: error instanceof Error ? error.message : String(error),
        })
      }
    } else if (!localPending) {
      return true
    }
    if (Date.now() >= deadline) return false
    // Poll every 200ms until settled or timed out.
    await new Promise((resolve) => setTimeout(resolve, 200))
  }
}
/**
 * Releases the chat's distributed stream lock (best-effort — Redis errors are
 * swallowed) and settles the local pending entry so waiters can proceed.
 */
export async function releasePendingChatStream(chatId: string, streamId: string): Promise<void> {
  if (getRedisClient()) {
    await releaseLock(getChatStreamLockKey(chatId), streamId).catch(() => false)
  }
  resolvePendingChatStream(chatId, streamId)
}
/**
 * Acquire the per-chat stream guard for `streamId`, waiting up to `timeoutMs`.
 *
 * With Redis configured this coordinates across instances via a TTL lock
 * (`CHAT_STREAM_LOCK_TTL_SECONDS`). If the current lock owner's stream is
 * already terminal (complete/error/cancelled) its stale lock is released and
 * acquisition is retried immediately. Without Redis, this falls back to
 * waiting on the in-process pending-stream map.
 *
 * Fix vs previous revision: the stale-owner fast-retry used a bare `continue`
 * that skipped both the deadline check and the 200 ms backoff, so a
 * persistently failing `releaseLock` (its errors are swallowed) could spin
 * this loop forever while hammering Redis. The deadline is now honored on
 * every path, and the local fallback budgets the *remaining* time so the
 * overall wait never exceeds `timeoutMs`.
 *
 * @returns true when the guard was acquired and registered locally; false on timeout.
 */
export async function acquirePendingChatStream(
  chatId: string,
  streamId: string,
  timeoutMs = 5_000
): Promise<boolean> {
  const redis = getRedisClient()
  const deadline = Date.now() + timeoutMs
  if (redis) {
    for (;;) {
      try {
        const acquired = await acquireLock(
          getChatStreamLockKey(chatId),
          streamId,
          CHAT_STREAM_LOCK_TTL_SECONDS
        )
        if (acquired) {
          registerPendingChatStream(chatId, streamId)
          return true
        }
        // Lock held elsewhere. If nothing is pending locally, the owner may
        // have finished on another instance without releasing — check its meta.
        if (!pendingChatStreams.has(chatId)) {
          const ownerStreamId = await redis.get(getChatStreamLockKey(chatId))
          if (ownerStreamId) {
            const ownerMeta = await getStreamMeta(ownerStreamId)
            const ownerTerminal =
              ownerMeta?.status === 'complete' ||
              ownerMeta?.status === 'error' ||
              ownerMeta?.status === 'cancelled'
            if (ownerTerminal) {
              await releaseLock(getChatStreamLockKey(chatId), ownerStreamId).catch(() => false)
              // Fast retry without backoff, but still honor the deadline so a
              // release that keeps failing cannot spin this loop forever.
              if (Date.now() >= deadline) return false
              continue
            }
          }
        }
      } catch (error) {
        logger.warn('Distributed chat stream lock failed; retrying distributed coordination', {
          chatId,
          streamId,
          error: error instanceof Error ? error.message : String(error),
        })
      }
      if (Date.now() >= deadline) return false
      await new Promise((resolve) => setTimeout(resolve, 200))
    }
  }
  // Local-only fallback: wait for the in-process pending stream (if any) to
  // settle, then claim the slot. Each wait uses only the remaining budget.
  for (;;) {
    const existing = pendingChatStreams.get(chatId)
    if (!existing) {
      registerPendingChatStream(chatId, streamId)
      return true
    }
    const remaining = deadline - Date.now()
    if (remaining <= 0) return false
    const settled = await Promise.race([
      existing.promise.then(() => true),
      new Promise<boolean>((r) => setTimeout(() => r(false), remaining)),
    ])
    if (!settled) return false
  }
}
/**
 * Request cancellation of a stream by id. Publishes a distributed abort flag
 * in Redis (when configured) so other instances' pollers can see it, and
 * aborts the local controllers if this instance owns the stream.
 *
 * @returns true when the abort reached either mechanism; false when Redis
 *   publish failed and no local stream entry exists.
 */
export async function abortActiveStream(streamId: string): Promise<boolean> {
  let distributed = false
  const redis = getRedisClient()
  if (redis) {
    try {
      await redis.set(getStreamAbortKey(streamId), '1', 'EX', STREAM_ABORT_TTL_SECONDS)
      distributed = true
    } catch (error) {
      logger.warn('Failed to publish distributed stream abort', {
        streamId,
        error: error instanceof Error ? error.message : String(error),
      })
    }
  }
  const local = activeStreams.get(streamId)
  if (local === undefined) return distributed
  local.userStopController.abort()
  local.abortController.abort()
  activeStreams.delete(streamId)
  return true
}
// Event types that trigger an immediate flush of the durable stream-event
// buffer (see pushEvent in createSSEStream), so significant milestones —
// tool activity, subagent boundaries, terminal events — are persisted
// promptly rather than waiting for the writer's normal buffering.
const FLUSH_EVENT_TYPES = new Set([
  'tool_call',
  'tool_result',
  'tool_error',
  'subagent_end',
  'structured_result',
  'subagent_result',
  'done',
  'error',
])
/**
 * Ask the copilot backend to generate a short title for a chat from its first
 * message. Returns the trimmed title, or null when inputs are missing, the
 * request fails, or the backend returns no usable title. Never throws.
 */
export async function requestChatTitle(params: {
  message: string
  model: string
  provider?: string
  messageId?: string
}): Promise<string | null> {
  const { message, model, provider, messageId } = params
  if (!message || !model) return null
  const headers: Record<string, string> = { 'Content-Type': 'application/json' }
  if (env.COPILOT_API_KEY) {
    headers['x-api-key'] = env.COPILOT_API_KEY
  }
  // Only include `provider` when present so the payload stays minimal.
  const requestBody: Record<string, unknown> = { message, model }
  if (provider) requestBody.provider = provider
  try {
    const response = await fetch(`${SIM_AGENT_API_URL}/api/generate-chat-title`, {
      method: 'POST',
      headers,
      body: JSON.stringify(requestBody),
    })
    // Tolerate non-JSON error bodies: fall back to an empty object.
    const payload = await response.json().catch(() => ({}))
    if (!response.ok) {
      logger.withMetadata({ messageId }).warn('Failed to generate chat title via copilot backend', {
        status: response.status,
        error: payload,
      })
      return null
    }
    const rawTitle = typeof payload?.title === 'string' ? payload.title.trim() : ''
    return rawTitle.length > 0 ? rawTitle : null
  } catch (error) {
    logger.withMetadata({ messageId }).error('Error generating chat title', error)
    return null
  }
}
/**
 * Inputs for createSSEStream: everything needed to run one copilot
 * orchestration and surface it as a live SSE stream with durable buffering.
 */
export interface StreamingOrchestrationParams {
  /** Raw payload forwarded to the orchestrator; messageId/workflowId/model/provider are read off it. */
  requestPayload: Record<string, unknown>
  /** Owner of the run; recorded in stream metadata and the run segment. */
  userId: string
  /** Unique id of this live stream; keys the durable buffer, abort flag, and chat lock. */
  streamId: string
  /** Execution id recorded in stream metadata and the run segment. */
  executionId: string
  /** Run segment id; status transitions (complete/error/cancelled) are written against it. */
  runId: string
  /** Chat this run belongs to; when absent, no run segment, chat_id event, or chat lock is involved. */
  chatId?: string
  /** Current chat row — only `.title` is read here. TODO(review): tighten this `any` to a proper type. */
  currentChat: any
  /** True when the chat was just created; combined with a missing title, triggers title generation. */
  isNewChat: boolean
  /** User message used as the title-generation prompt. */
  message: string
  /** Model name passed to requestChatTitle. */
  titleModel: string
  /** Optional provider passed to requestChatTitle. */
  titleProvider?: string
  /** Request id for log correlation; also stored in the run segment's requestContext. */
  requestId: string
  /** Workspace used to publish a 'renamed' status change after a title update. */
  workspaceId?: string
  /** Orchestrator options; `onEvent` is supplied internally by createSSEStream. */
  orchestrateOptions: Omit<OrchestrateStreamOptions, 'onEvent'>
  /** Set when the caller already registered the pending chat stream, to avoid double registration. */
  pendingChatStreamAlreadyRegistered?: boolean
}
/**
 * Build the live SSE ReadableStream for one copilot orchestration run.
 *
 * Responsibilities, in order:
 *  - registers the stream in `activeStreams` (and, for chats, the per-chat
 *    pending map) so abortActiveStream and lock coordination can find it;
 *  - resets the durable event buffer and writes active stream/run metadata;
 *  - mirrors every orchestration event both into the durable buffer (for
 *    resume/replay) and onto the live SSE connection;
 *  - kicks off best-effort chat title generation for new untitled chats;
 *  - polls Redis for distributed abort flags and emits SSE keepalives;
 *  - on settle, closes the buffer, records terminal status, and releases the
 *    per-chat lock in `finally` so an error path cannot leak it.
 *
 * Client disconnect (`cancel`) does NOT stop the orchestration: it keeps
 * running and events continue to be buffered durably.
 */
export function createSSEStream(params: StreamingOrchestrationParams): ReadableStream {
  const {
    requestPayload,
    userId,
    streamId,
    executionId,
    runId,
    chatId,
    currentChat,
    isNewChat,
    message,
    titleModel,
    titleProvider,
    requestId,
    workspaceId,
    orchestrateOptions,
    pendingChatStreamAlreadyRegistered = false,
  } = params
  // Fall back to the streamId when the payload carries no messageId.
  const messageId =
    typeof requestPayload.messageId === 'string' ? requestPayload.messageId : streamId
  const reqLogger = logger.withMetadata({ requestId, messageId })
  let eventWriter: ReturnType<typeof createStreamEventWriter> | null = null
  let clientDisconnected = false
  // Three independent signals: hard abort, user-initiated stop, and
  // client-connection loss (orchestration may outlive the last one).
  const abortController = new AbortController()
  const userStopController = new AbortController()
  const clientDisconnectedController = new AbortController()
  activeStreams.set(streamId, { abortController, userStopController })
  // Callers that acquired the chat lock up front already registered the entry.
  if (chatId && !pendingChatStreamAlreadyRegistered) {
    registerPendingChatStream(chatId, streamId)
  }
  return new ReadableStream({
    async start(controller) {
      const encoder = new TextEncoder()
      // Idempotently flip the disconnect flag and fire the disconnect signal.
      const markClientDisconnected = (reason: string) => {
        if (clientDisconnected) return
        clientDisconnected = true
        if (!clientDisconnectedController.signal.aborted) {
          clientDisconnectedController.abort()
        }
        reqLogger.info('Client disconnected from live SSE stream', {
          streamId,
          runId,
          reason,
        })
      }
      // Start from a clean durable buffer and mark the stream active.
      await resetStreamBuffer(streamId)
      await setStreamMeta(streamId, { status: 'active', userId, executionId, runId })
      if (chatId) {
        // Record a run segment row for this chat; failure is non-fatal.
        await createRunSegment({
          id: runId,
          executionId,
          chatId,
          userId,
          workflowId: (requestPayload.workflowId as string | undefined) || null,
          workspaceId,
          streamId,
          model: (requestPayload.model as string | undefined) || null,
          provider: (requestPayload.provider as string | undefined) || null,
          requestContext: { requestId },
        }).catch((error) => {
          reqLogger.warn('Failed to create copilot run segment', {
            error: error instanceof Error ? error.message : String(error),
          })
        })
      }
      eventWriter = createStreamEventWriter(streamId)
      let localSeq = 0
      let abortPoller: ReturnType<typeof setInterval> | null = null
      const redis = getRedisClient()
      if (redis) {
        // Poll the distributed abort flag set by abortActiveStream on any
        // instance; on sighting, trip both stop signals and clear the flag.
        abortPoller = setInterval(() => {
          void (async () => {
            try {
              const shouldAbort = await redis.get(getStreamAbortKey(streamId))
              if (shouldAbort && !abortController.signal.aborted) {
                userStopController.abort()
                abortController.abort()
                await redis.del(getStreamAbortKey(streamId))
              }
            } catch (error) {
              reqLogger.warn('Failed to poll distributed stream abort', {
                streamId,
                error: error instanceof Error ? error.message : String(error),
              })
            }
          })()
        }, STREAM_ABORT_POLL_MS)
      }
      // Dual-write one event: durable buffer first (flushing on milestone
      // types), then the live SSE connection if the client is still attached.
      const pushEvent = async (event: Record<string, any>) => {
        if (!eventWriter) return
        const eventId = ++localSeq
        try {
          await eventWriter.write(event)
          if (FLUSH_EVENT_TYPES.has(event.type)) {
            await eventWriter.flush()
          }
        } catch (error) {
          reqLogger.error('Failed to persist stream event', {
            eventType: event.type,
            eventId,
            error: error instanceof Error ? error.message : String(error),
          })
          // Keep the live SSE stream going even if durable buffering hiccups.
        }
        try {
          if (!clientDisconnected) {
            controller.enqueue(
              encoder.encode(`data: ${JSON.stringify({ ...event, eventId, streamId })}\n\n`)
            )
          }
        } catch {
          markClientDisconnected('enqueue_failed')
        }
      }
      // Wrapper that guarantees pushEvent can never throw into a caller.
      const pushEventBestEffort = async (event: Record<string, any>) => {
        try {
          await pushEvent(event)
        } catch (error) {
          reqLogger.error('Failed to push event', {
            eventType: event.type,
            error: error instanceof Error ? error.message : String(error),
          })
        }
      }
      if (chatId) {
        // Tell the client which chat this stream belongs to, first thing.
        await pushEvent({ type: 'chat_id', chatId })
      }
      if (chatId && !currentChat?.title && isNewChat) {
        // Fire-and-forget title generation for brand-new untitled chats;
        // on success, persist it, notify the client, and ping the workspace.
        requestChatTitle({ message, model: titleModel, provider: titleProvider, messageId })
          .then(async (title) => {
            if (title) {
              await db.update(copilotChats).set({ title }).where(eq(copilotChats.id, chatId!))
              await pushEvent({ type: 'title_updated', title })
              if (workspaceId) {
                taskPubSub?.publishStatusChanged({ workspaceId, chatId: chatId!, type: 'renamed' })
              }
            }
          })
          .catch((error) => {
            reqLogger.error('Title generation failed', error)
          })
      }
      // SSE comment-line keepalive every 15s; enqueue failure means the
      // client went away.
      const keepaliveInterval = setInterval(() => {
        if (clientDisconnected) return
        try {
          controller.enqueue(encoder.encode(': keepalive\n\n'))
        } catch {
          markClientDisconnected('keepalive_failed')
        }
      }, 15_000)
      try {
        const result = await orchestrateCopilotStream(requestPayload, {
          ...orchestrateOptions,
          executionId,
          runId,
          abortSignal: abortController.signal,
          userStopSignal: userStopController.signal,
          clientDisconnectedSignal: clientDisconnectedController.signal,
          onEvent: async (event) => {
            await pushEvent(event)
          },
        })
        if (abortController.signal.aborted) {
          // Explicit stop: mark cancelled and bail (finally still cleans up).
          reqLogger.info('Stream aborted by explicit stop')
          await eventWriter.close().catch(() => {})
          await setStreamMeta(streamId, { status: 'cancelled', userId, executionId, runId })
          await updateRunStatus(runId, 'cancelled', { completedAt: new Date() }).catch(() => {})
          return
        }
        if (!result.success) {
          const errorMessage =
            result.error ||
            result.errors?.[0] ||
            'An unexpected error occurred while processing the response.'
          if (clientDisconnected) {
            reqLogger.info('Stream failed after client disconnect', {
              error: errorMessage,
            })
          }
          reqLogger.error('Orchestration returned failure', {
            error: errorMessage,
          })
          await pushEventBestEffort({
            type: 'error',
            error: errorMessage,
            data: {
              displayMessage: errorMessage,
            },
          })
          await eventWriter.close()
          await setStreamMeta(streamId, {
            status: 'error',
            userId,
            executionId,
            runId,
            error: errorMessage,
          })
          await updateRunStatus(runId, 'error', {
            completedAt: new Date(),
            error: errorMessage,
          }).catch(() => {})
          return
        }
        // Success: seal the durable buffer and record terminal state.
        await eventWriter.close()
        await setStreamMeta(streamId, { status: 'complete', userId, executionId, runId })
        await updateRunStatus(runId, 'complete', { completedAt: new Date() }).catch(() => {})
        if (clientDisconnected) {
          reqLogger.info('Orchestration completed after client disconnect', {
            streamId,
            runId,
          })
        }
      } catch (error) {
        if (abortController.signal.aborted) {
          // An abort often surfaces as a thrown error; treat it as cancelled.
          reqLogger.info('Stream aborted by explicit stop')
          await eventWriter.close().catch(() => {})
          await setStreamMeta(streamId, { status: 'cancelled', userId, executionId, runId })
          await updateRunStatus(runId, 'cancelled', { completedAt: new Date() }).catch(() => {})
          return
        }
        if (clientDisconnected) {
          reqLogger.info('Stream errored after client disconnect', {
            error: error instanceof Error ? error.message : 'Stream error',
          })
        }
        reqLogger.error('Orchestration error', error)
        const errorMessage = error instanceof Error ? error.message : 'Stream error'
        await pushEventBestEffort({
          type: 'error',
          error: errorMessage,
          data: {
            displayMessage: 'An unexpected error occurred while processing the response.',
          },
        })
        await eventWriter.close()
        await setStreamMeta(streamId, {
          status: 'error',
          userId,
          executionId,
          runId,
          error: errorMessage,
        })
        await updateRunStatus(runId, 'error', {
          completedAt: new Date(),
          error: errorMessage,
        }).catch(() => {})
      } finally {
        // Cleanup runs on every path: timers, registries, distributed lock,
        // abort flag, then the controller itself.
        reqLogger.info('Closing live SSE stream', {
          streamId,
          runId,
          clientDisconnected,
          aborted: abortController.signal.aborted,
        })
        clearInterval(keepaliveInterval)
        if (abortPoller) {
          clearInterval(abortPoller)
        }
        activeStreams.delete(streamId)
        if (chatId) {
          if (redis) {
            await releaseLock(getChatStreamLockKey(chatId), streamId).catch(() => false)
          }
          resolvePendingChatStream(chatId, streamId)
        }
        if (redis) {
          await redis.del(getStreamAbortKey(streamId)).catch(() => {})
        }
        try {
          controller.close()
        } catch {
          // Controller already closed from cancel() — safe to ignore
        }
      }
    },
    cancel() {
      // Client hung up. Do NOT abort the orchestration: flag the disconnect
      // and let the run finish against the durable buffer.
      reqLogger.info('ReadableStream cancel received from client', {
        streamId,
        runId,
      })
      if (!clientDisconnected) {
        clientDisconnected = true
        if (!clientDisconnectedController.signal.aborted) {
          clientDisconnectedController.abort()
        }
      }
      if (eventWriter) {
        // Best-effort flush so buffered events survive the disconnect.
        eventWriter.flush().catch(() => {})
      }
    },
  })
}
// Headers for SSE responses: the shared SSE_HEADERS plus an explicit
// 'Content-Encoding: none'. NOTE(review): 'none' is not a registered content
// coding — presumably intended to stop intermediaries from compressing or
// buffering the event stream; confirm proxies honor it as expected.
export const SSE_RESPONSE_HEADERS = {
  ...SSE_HEADERS,
  'Content-Encoding': 'none',
} as const

Some files were not shown because too many files have changed in this diff Show More