Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-23 13:58:08 -05:00)

Compare commits: improvemen...feat/creds

46 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 4f09d07383 | |
| | 3bb85f2218 | |
| | 1e89d147ed | |
| | 9b72b52b33 | |
| | 1467862488 | |
| | 7f2262857c | |
| | 1b309b50e6 | |
| | f765b83a26 | |
| | aa99db6fdd | |
| | 748793e07d | |
| | 91da7e183a | |
| | ab09a5ad23 | |
| | fcd0240db6 | |
| | 4e4149792a | |
| | 9a8b591257 | |
| | f3ae3f8442 | |
| | 66dfe2c6b2 | |
| | 376f7cb571 | |
| | 42159c23b9 | |
| | 2f0f246002 | |
| | 900d3ef9ea | |
| | f3fcc28f89 | |
| | 7cfdf46724 | |
| | d681451297 | |
| | 5987a6d060 | |
| | e2ccefb2f4 | |
| | 103b31a569 | |
| | 004e058353 | |
| | 5157f0bbb2 | |
| | 8bbcf31b83 | |
| | 9e814315dd | |
| | 0ea0256623 | |
| | fb8868c854 | |
| | ea4964052d | |
| | 268e2f114f | |
| | 5988d0e46f | |
| | 145db9d8c3 | |
| | 294b168ed9 | |
| | 0dc2c1fe0d | |
| | fb90c4e9b1 | |
| | 0af96d06c6 | |
| | 1d450578c8 | |
| | c6d408c65b | |
| | 16716ea26a | |
| | 563098ca0a | |
| | 1f1f015031 | |
@@ -14,7 +14,7 @@
</p>

<p align="center">
<a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a> <a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20Studio%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
<a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a> <a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
</p>

### Build Workflows with Ease

@@ -4093,6 +4093,23 @@ export function SQSIcon(props: SVGProps<SVGSVGElement>) {
)
}

export function TextractIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
viewBox='10 14 60 52'
version='1.1'
xmlns='http://www.w3.org/2000/svg'
xmlnsXlink='http://www.w3.org/1999/xlink'
>
<path
d='M22.0624102,50 C24.3763895,53.603 28.4103535,56 33.0003125,56 C40.1672485,56 45.9991964,50.168 45.9991964,43 C45.9991964,35.832 40.1672485,30 33.0003125,30 C27.6033607,30 22.9664021,33.307 21.0024196,38 L23.2143999,38 C25.0393836,34.444 28.7363506,32 33.0003125,32 C39.0652583,32 43.9992143,36.935 43.9992143,43 C43.9992143,49.065 39.0652583,54 33.0003125,54 C29.5913429,54 26.5413702,52.441 24.5213882,50 L22.0624102,50 Z M37.0002768,45 L37.0002768,43 L41.9992321,43 C41.9992321,38.038 37.9622682,34 33.0003125,34 C28.0373568,34 23.9993929,38.038 23.9993929,43 L28.9993482,43 L28.9993482,45 L24.2313908,45 C25.1443826,49.002 28.7253507,52 33.0003125,52 C35.1362934,52 37.0992759,51.249 38.6442621,50 L34.0003036,50 L34.0003036,48 L40.4782457,48 C41.0812403,47.102 41.5202364,46.087 41.7682342,45 L37.0002768,45 Z M21.0024196,48 L23.2143999,48 C22.4434068,46.498 22.0004107,44.801 22.0004107,43 C22.0004107,41.959 22.1554093,40.955 22.4264069,40 L20.3634253,40 C20.1344274,40.965 19.9994286,41.966 19.9994286,43 C19.9994286,44.771 20.3584254,46.46 21.0024196,48 L21.0024196,48 Z M19.7434309,50 L17.0004554,50 L17.0004554,48 L18.8744386,48 C18.5344417,47.04 18.2894438,46.038 18.1494451,45 L15.4144695,45 L16.707458,46.293 L15.2924706,47.707 L12.2924974,44.707 C11.9025009,44.316 11.9025009,43.684 12.2924974,43.293 L15.2924706,40.293 L16.707458,41.707 L15.4144695,43 L18.0004464,43 C18.0004464,41.973 18.1044455,40.97 18.3024437,40 L17.0004554,40 L17.0004554,38 L18.8744386,38 C20.9404202,32.184 26.4833707,28 33.0003125,28 C37.427273,28 41.4002375,29.939 44.148213,33 L59.0000804,33 L59.0000804,35 L45.6661994,35 C47.1351863,37.318 47.9991786,40.058 47.9991786,43 L59.0000804,43 L59.0000804,45 L47.8501799,45 C46.8681887,52.327 40.5912447,58 33.0003125,58 C27.2563638,58 22.2624084,54.752 19.7434309,50 L19.7434309,50 Z M37.0002768,39 C37.0002768,38.448 36.5522808,38 36.0002857,38 L29.9993482,38 C29.4473442,38 28.9993482,38.448 28.9993482,39 L28.9993482,41 L31.0003304,41 L31.0003304,40 L32.0003214,40 L32.0003214,43 L31.0003304,43 L31.0003304,45 L35.0002946,45 L35.0002946,43 L34.0003036,43 L34.0003036,40 L35.0002946,40 L35.0002946,41 L37.0002768,41 L37.0002768,39 Z M49.0001696,40 L59.0000804,40 L59.0000804,38 L49.0001696,38 L49.0001696,40 Z M49.0001696,50 L59.0000804,50 L59.0000804,48 L49.0001696,48 L49.0001696,50 Z M57.0000982,27 L60.5850662,27 L57.0000982,23.414 L57.0000982,27 Z M63.7070383,27.293 C63.8940367,27.48 64.0000357,27.735 64.0000357,28 L64.0000357,63 C64.0000357,63.552 63.5520397,64 63.0000446,64 L32.0003304,64 C31.4473264,64 31.0003304,63.552 31.0003304,63 L31.0003304,59 L33.0003125,59 L33.0003125,62 L62.0000536,62 L62.0000536,29 L56.0001071,29 C55.4471121,29 55.0001161,28.552 55.0001161,28 L55.0001161,22 L33.0003125,22 L33.0003125,27 L31.0003304,27 L31.0003304,21 C31.0003304,20.448 31.4473264,20 32.0003304,20 L56.0001071,20 C56.2651048,20 56.5191025,20.105 56.7071008,20.293 L63.7070383,27.293 Z M68,24.166 L68,61 C68,61.552 67.552004,62 67.0000089,62 L65.0000268,62 L65.0000268,60 L66.0000179,60 L66.0000179,24.612 L58.6170838,18 L36.0002857,18 L36.0002857,19 L34.0003036,19 L34.0003036,17 C34.0003036,16.448 34.4472996,16 35.0003036,16 L59.0000804,16 C59.2460782,16 59.483076,16.091 59.6660744,16.255 L67.666003,23.42 C67.8780011,23.61 68,23.881 68,24.166 L68,24.166 Z'
fill='currentColor'
/>
</svg>
)
}

export function McpIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
@@ -110,6 +110,7 @@ import {
SupabaseIcon,
TavilyIcon,
TelegramIcon,
TextractIcon,
TinybirdIcon,
TranslateIcon,
TrelloIcon,

@@ -143,7 +144,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
calendly: CalendlyIcon,
circleback: CirclebackIcon,
clay: ClayIcon,
confluence: ConfluenceIcon,
confluence_v2: ConfluenceIcon,
cursor_v2: CursorIcon,
datadog: DatadogIcon,
discord: DiscordIcon,

@@ -153,7 +154,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
elasticsearch: ElasticsearchIcon,
elevenlabs: ElevenLabsIcon,
exa: ExaAIIcon,
file: DocumentIcon,
file_v2: DocumentIcon,
firecrawl: FirecrawlIcon,
fireflies: FirefliesIcon,
github_v2: GithubIcon,

@@ -195,7 +196,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
microsoft_excel_v2: MicrosoftExcelIcon,
microsoft_planner: MicrosoftPlannerIcon,
microsoft_teams: MicrosoftTeamsIcon,
mistral_parse: MistralIcon,
mistral_parse_v2: MistralIcon,
mongodb: MongoDBIcon,
mysql: MySQLIcon,
neo4j: Neo4jIcon,

@@ -237,6 +238,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
supabase: SupabaseIcon,
tavily: TavilyIcon,
telegram: TelegramIcon,
textract: TextractIcon,
tinybird: TinybirdIcon,
translate: TranslateIcon,
trello: TrelloIcon,

@@ -244,7 +246,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
twilio_sms: TwilioIcon,
twilio_voice: TwilioIcon,
typeform: TypeformIcon,
video_generator: VideoIcon,
video_generator_v2: VideoIcon,
vision: EyeIcon,
wealthbox: WealthboxIcon,
webflow: WebflowIcon,
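The hunks above register the new `TextractIcon` and re-key several blocks (`confluence_v2`, `file_v2`, `mistral_parse_v2`, `video_generator_v2`) in `blockTypeToIconMap`. A minimal sketch of how a renderer could resolve an icon from this map; the import paths and the `BlockIcon` wrapper are illustrative assumptions, not code from this PR:

```tsx
import type { SVGProps } from 'react'
// Illustrative paths; the real module locations in the repo may differ.
import { blockTypeToIconMap, DocumentIcon } from '@/components/icons'

// Resolve the icon component for a block type, falling back to a generic icon.
export function BlockIcon({ blockType, ...props }: { blockType: string } & SVGProps<SVGSVGElement>) {
  const Icon = blockTypeToIconMap[blockType] ?? DocumentIcon
  return <Icon {...props} />
}
```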
@@ -6,7 +6,7 @@ description: Interact with Confluence
import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
type="confluence"
type="confluence_v2"
color="#E0E0E0"
/>
@@ -6,7 +6,7 @@ description: Read and parse multiple files
import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
type="file"
type="file_v2"
color="#40916C"
/>

@@ -48,7 +48,7 @@ Parse one or more uploaded files or files from URLs (text, PDF, CSV, images, etc

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `files` | array | Array of parsed files |
| `combinedContent` | string | Combined content of all parsed files |
| `files` | array | Array of parsed files with content, metadata, and file properties |
| `combinedContent` | string | All file contents merged into a single text string |
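The rewritten descriptions document the `file_v2` block's two outputs. A minimal sketch of consuming them in a downstream TypeScript step; the interface and the per-file `content` key are assumptions based on the table, not the block's actual type definitions:

```typescript
// Assumed shape, derived from the output table above.
interface FileParserOutput {
  files: Array<{ content?: string; [key: string]: unknown }>
  combinedContent: string
}

// Prefer the pre-merged text; otherwise join the individual files.
export function toText(output: FileParserOutput): string {
  return output.combinedContent || output.files.map((f) => f.content ?? '').join('\n\n')
}
```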
@@ -106,6 +106,7 @@
"supabase",
"tavily",
"telegram",
"textract",
"tinybird",
"translate",
"trello",
@@ -6,7 +6,7 @@ description: Extract text from PDF documents
import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
type="mistral_parse"
type="mistral_parse_v2"
color="#000000"
/>

@@ -54,18 +54,37 @@ Parse PDF documents using Mistral OCR API

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the PDF was parsed successfully |
| `content` | string | Extracted content in the requested format \(markdown, text, or JSON\) |
| `metadata` | object | Processing metadata including jobId, fileType, pageCount, and usage info |
| ↳ `jobId` | string | Unique job identifier |
| ↳ `fileType` | string | File type \(e.g., pdf\) |
| ↳ `fileName` | string | Original file name |
| ↳ `source` | string | Source type \(url\) |
| ↳ `pageCount` | number | Number of pages processed |
| ↳ `model` | string | Mistral model used |
| ↳ `resultType` | string | Output format \(markdown, text, json\) |
| ↳ `processedAt` | string | Processing timestamp |
| ↳ `sourceUrl` | string | Source URL if applicable |
| ↳ `usageInfo` | object | Usage statistics from OCR processing |
| `pages` | array | Array of page objects from Mistral OCR |
| ↳ `index` | number | Page index \(zero-based\) |
| ↳ `markdown` | string | Extracted markdown content |
| ↳ `images` | array | Images extracted from this page with bounding boxes |
| ↳ `id` | string | Image identifier \(e.g., img-0.jpeg\) |
| ↳ `top_left_x` | number | Top-left X coordinate in pixels |
| ↳ `top_left_y` | number | Top-left Y coordinate in pixels |
| ↳ `bottom_right_x` | number | Bottom-right X coordinate in pixels |
| ↳ `bottom_right_y` | number | Bottom-right Y coordinate in pixels |
| ↳ `image_base64` | string | Base64-encoded image data \(when include_image_base64=true\) |
| ↳ `dimensions` | object | Page dimensions |
| ↳ `dpi` | number | Dots per inch |
| ↳ `height` | number | Page height in pixels |
| ↳ `width` | number | Page width in pixels |
| ↳ `tables` | array | Extracted tables as HTML/markdown \(when table_format is set\). Referenced via placeholders like \[tbl-0.html\] |
| ↳ `hyperlinks` | array | Array of URL strings detected in the page |
| ↳ `header` | string | Page header content \(when extract_header=true\) |
| ↳ `footer` | string | Page footer content \(when extract_footer=true\) |
| `model` | string | Mistral OCR model identifier \(e.g., mistral-ocr-latest\) |
| `usage_info` | object | Usage and processing statistics |
| ↳ `pages_processed` | number | Total number of pages processed |
| ↳ `doc_size_bytes` | number | Document file size in bytes |
| `document_annotation` | string | Structured annotation data as JSON string \(when applicable\) |
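The expanded table documents the new `pages` array. A minimal sketch of stitching per-page markdown back into one string; the interfaces are assumptions reduced from the table above, not the tool's real type definitions:

```typescript
// Only the fields used here, as documented in the output table.
interface MistralOcrPage {
  index: number
  markdown: string
}

interface MistralParseOutput {
  success: boolean
  content: string
  pages?: MistralOcrPage[]
}

// Join per-page markdown in page order, falling back to the pre-rendered `content`.
export function toMarkdown(result: MistralParseOutput): string {
  if (!result.success) return ''
  if (result.pages?.length) {
    return [...result.pages].sort((a, b) => a.index - b.index).map((p) => p.markdown).join('\n\n')
  }
  return result.content
}
```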
@@ -58,6 +58,7 @@ Upload a file to an AWS S3 bucket

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `url` | string | URL of the uploaded S3 object |
| `uri` | string | S3 URI of the uploaded object \(s3://bucket/key\) |
| `metadata` | object | Upload metadata including ETag and location |

### `s3_get_object`

@@ -149,6 +150,7 @@ Copy an object within or between AWS S3 buckets

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `url` | string | URL of the copied S3 object |
| `uri` | string | S3 URI of the copied object \(s3://bucket/key\) |
| `metadata` | object | Copy operation metadata |
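Both S3 tools now expose a `uri` in the standard `s3://bucket/key` form documented above. A small hedged sketch of splitting it for downstream steps (the helper name is hypothetical):

```typescript
// Split an s3://bucket/key URI into its parts; returns null for anything else.
export function parseS3Uri(uri: string): { bucket: string; key: string } | null {
  const match = /^s3:\/\/([^/]+)\/(.+)$/.exec(uri)
  return match ? { bucket: match[1], key: match[2] } : null
}

// parseS3Uri('s3://my-bucket/reports/2024.pdf') -> { bucket: 'my-bucket', key: 'reports/2024.pdf' }
```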
120 apps/docs/content/docs/en/tools/textract.mdx Normal file
@@ -0,0 +1,120 @@
---
title: AWS Textract
description: Extract text, tables, and forms from documents
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
type="textract"
color="linear-gradient(135deg, #055F4E 0%, #56C0A7 100%)"
/>

{/* MANUAL-CONTENT-START:intro */}
[AWS Textract](https://aws.amazon.com/textract/) is a powerful AI service from Amazon Web Services designed to automatically extract printed text, handwriting, tables, forms, key-value pairs, and other structured data from scanned documents and images. Textract leverages advanced optical character recognition (OCR) and document analysis to transform documents into actionable data, enabling automation, analytics, compliance, and more.

With AWS Textract, you can:

- **Extract text from images and documents**: Recognize printed text and handwriting in formats such as PDF, JPEG, PNG, or TIFF
- **Detect and extract tables**: Automatically find tables and output their structured content
- **Parse forms and key-value pairs**: Pull structured data from forms, including fields and their corresponding values
- **Identify signatures and layout features**: Detect signatures, geometric layout, and relationships between document elements
- **Customize extraction with queries**: Extract specific fields and answers using query-based extraction (e.g., "What is the invoice number?")

In Sim, the AWS Textract integration empowers your agents to intelligently process documents as part of their workflows. This unlocks automation scenarios such as data entry from invoices, onboarding documents, contracts, receipts, and more. Your agents can extract relevant data, analyze structured forms, and generate summaries or reports directly from document uploads or URLs. By connecting Sim with AWS Textract, you can reduce manual effort, improve data accuracy, and streamline your business processes with robust document understanding.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Integrate AWS Textract into your workflow to extract text, tables, forms, and key-value pairs from documents. Single-page mode supports JPEG, PNG, and single-page PDF. Multi-page mode supports multi-page PDF and TIFF.

## Tools

### `textract_parser`

Parse documents using AWS Textract OCR and document analysis

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `accessKeyId` | string | Yes | AWS Access Key ID |
| `secretAccessKey` | string | Yes | AWS Secret Access Key |
| `region` | string | Yes | AWS region for Textract service \(e.g., us-east-1\) |
| `processingMode` | string | No | Document type: single-page or multi-page. Defaults to single-page. |
| `filePath` | string | No | URL to a document to be processed \(JPEG, PNG, or single-page PDF\). |
| `s3Uri` | string | No | S3 URI for multi-page processing \(s3://bucket/key\). |
| `fileUpload` | object | No | File upload data from file-upload component |
| `featureTypes` | array | No | Feature types to detect: TABLES, FORMS, QUERIES, SIGNATURES, LAYOUT. If not specified, only text detection is performed. |
| ↳ `items` | string | No | Feature type |
| `queries` | array | No | Custom queries to extract specific information. Only used when featureTypes includes QUERIES. |
| ↳ `items` | object | No | Query configuration |
| ↳ `Text` | string | No | The query text |
| ↳ `Alias` | string | No | Query alias |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `blocks` | array | Array of Block objects containing detected text, tables, forms, and other elements |
| ↳ `BlockType` | string | Type of block \(PAGE, LINE, WORD, TABLE, CELL, KEY_VALUE_SET, etc.\) |
| ↳ `Id` | string | Unique identifier for the block |
| ↳ `Text` | string | Detected text content \(query text for QUERY blocks\) |
| ↳ `TextType` | string | Type of text \(PRINTED or HANDWRITING\) |
| ↳ `Confidence` | number | Confidence score \(0-100\) |
| ↳ `Page` | number | Page number |
| ↳ `Geometry` | object | Location and bounding box information |
| ↳ `BoundingBox` | object | Bounding box position and size \(as ratios of document dimensions\) |
| ↳ `Height` | number | Height as ratio of document height |
| ↳ `Left` | number | Left position as ratio of document width |
| ↳ `Top` | number | Top position as ratio of document height |
| ↳ `Width` | number | Width as ratio of document width |
| ↳ `Polygon` | array | Polygon coordinates |
| ↳ `X` | number | X coordinate |
| ↳ `Y` | number | Y coordinate |
| ↳ `Relationships` | array | Relationships to other blocks |
| ↳ `Type` | string | Relationship type \(CHILD, VALUE, ANSWER, etc.\) |
| ↳ `Ids` | array | IDs of related blocks |
| ↳ `EntityTypes` | array | Entity types for KEY_VALUE_SET \(KEY or VALUE\) |
| ↳ `SelectionStatus` | string | For checkboxes: SELECTED or NOT_SELECTED |
| ↳ `RowIndex` | number | Row index for table cells |
| ↳ `ColumnIndex` | number | Column index for table cells |
| ↳ `RowSpan` | number | Row span for merged cells |
| ↳ `ColumnSpan` | number | Column span for merged cells |
| ↳ `Query` | object | Query information for QUERY blocks |
| ↳ `Text` | string | Query text |
| ↳ `Alias` | string | Query alias |
| ↳ `Pages` | array | Pages to search |
| `documentMetadata` | object | Metadata about the analyzed document |
| ↳ `pages` | number | Number of pages in the document |
| `modelVersion` | string | Version of the Textract model used for processing |
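The `blocks` / `documentMetadata` / `modelVersion` shape above mirrors what AWS returns from `AnalyzeDocument`. A minimal sketch of producing it directly with `@aws-sdk/client-textract` in single-page mode; the wrapper function is an assumption for illustration (the Sim tool presumably performs an equivalent call internally):

```typescript
import { AnalyzeDocumentCommand, TextractClient } from '@aws-sdk/client-textract'

// Single-page analysis with tables, forms, and one query, using the credentials
// documented in the input table above.
export async function analyzeDocument(
  bytes: Uint8Array,
  { accessKeyId, secretAccessKey, region }: { accessKeyId: string; secretAccessKey: string; region: string }
) {
  const client = new TextractClient({ region, credentials: { accessKeyId, secretAccessKey } })
  const response = await client.send(
    new AnalyzeDocumentCommand({
      Document: { Bytes: bytes },
      FeatureTypes: ['TABLES', 'FORMS', 'QUERIES'],
      QueriesConfig: { Queries: [{ Text: 'What is the invoice number?', Alias: 'invoice_number' }] },
    })
  )
  // Matches the documented output: blocks, documentMetadata.pages, modelVersion.
  return {
    blocks: response.Blocks ?? [],
    documentMetadata: { pages: response.DocumentMetadata?.Pages ?? 0 },
    modelVersion: response.AnalyzeDocumentModelVersion,
  }
}
```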
@@ -6,7 +6,7 @@ description: Generate videos from text using AI
import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
type="video_generator"
type="video_generator_v2"
color="#181C1E"
/>
@@ -2,10 +2,9 @@
|
||||
|
||||
import { useEffect, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
|
||||
import { Eye, EyeOff } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useRouter, useSearchParams } from 'next/navigation'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
@@ -22,8 +21,10 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { quickValidateEmail } from '@/lib/messaging/email/validation'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
|
||||
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
|
||||
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
|
||||
|
||||
const logger = createLogger('LoginForm')
|
||||
|
||||
@@ -105,8 +106,7 @@ export default function LoginPage({
|
||||
const [password, setPassword] = useState('')
|
||||
const [passwordErrors, setPasswordErrors] = useState<string[]>([])
|
||||
const [showValidationError, setShowValidationError] = useState(false)
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
const buttonClass = useBrandedButtonClass()
|
||||
|
||||
const [callbackUrl, setCallbackUrl] = useState('/workspace')
|
||||
const [isInviteFlow, setIsInviteFlow] = useState(false)
|
||||
@@ -114,7 +114,6 @@ export default function LoginPage({
|
||||
const [forgotPasswordOpen, setForgotPasswordOpen] = useState(false)
|
||||
const [forgotPasswordEmail, setForgotPasswordEmail] = useState('')
|
||||
const [isSubmittingReset, setIsSubmittingReset] = useState(false)
|
||||
const [isResetButtonHovered, setIsResetButtonHovered] = useState(false)
|
||||
const [resetStatus, setResetStatus] = useState<{
|
||||
type: 'success' | 'error' | null
|
||||
message: string
|
||||
@@ -123,6 +122,7 @@ export default function LoginPage({
|
||||
const [email, setEmail] = useState('')
|
||||
const [emailErrors, setEmailErrors] = useState<string[]>([])
|
||||
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
|
||||
const [resetSuccessMessage, setResetSuccessMessage] = useState<string | null>(null)
|
||||
|
||||
useEffect(() => {
|
||||
setMounted(true)
|
||||
@@ -139,32 +139,12 @@ export default function LoginPage({
|
||||
|
||||
const inviteFlow = searchParams.get('invite_flow') === 'true'
|
||||
setIsInviteFlow(inviteFlow)
|
||||
}
|
||||
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
const resetSuccess = searchParams.get('resetSuccess') === 'true'
|
||||
if (resetSuccess) {
|
||||
setResetSuccessMessage('Password reset successful. Please sign in with your new password.')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [searchParams])
|
||||
|
||||
useEffect(() => {
|
||||
@@ -202,6 +182,13 @@ export default function LoginPage({
|
||||
e.preventDefault()
|
||||
setIsLoading(true)
|
||||
|
||||
const redirectToVerify = (emailToVerify: string) => {
|
||||
if (typeof window !== 'undefined') {
|
||||
sessionStorage.setItem('verificationEmail', emailToVerify)
|
||||
}
|
||||
router.push('/verify')
|
||||
}
|
||||
|
||||
const formData = new FormData(e.currentTarget)
|
||||
const emailRaw = formData.get('email') as string
|
||||
const email = emailRaw.trim().toLowerCase()
|
||||
@@ -221,6 +208,7 @@ export default function LoginPage({
|
||||
|
||||
try {
|
||||
const safeCallbackUrl = validateCallbackUrl(callbackUrl) ? callbackUrl : '/workspace'
|
||||
let errorHandled = false
|
||||
|
||||
const result = await client.signIn.email(
|
||||
{
|
||||
@@ -231,11 +219,16 @@ export default function LoginPage({
|
||||
{
|
||||
onError: (ctx) => {
|
||||
logger.error('Login error:', ctx.error)
|
||||
const errorMessage: string[] = ['Invalid email or password']
|
||||
|
||||
if (ctx.error.code?.includes('EMAIL_NOT_VERIFIED')) {
|
||||
errorHandled = true
|
||||
redirectToVerify(email)
|
||||
return
|
||||
}
|
||||
|
||||
errorHandled = true
|
||||
const errorMessage: string[] = ['Invalid email or password']
|
||||
|
||||
if (
|
||||
ctx.error.code?.includes('BAD_REQUEST') ||
|
||||
ctx.error.message?.includes('Email and password sign in is not enabled')
|
||||
@@ -271,6 +264,7 @@ export default function LoginPage({
|
||||
errorMessage.push('Too many requests. Please wait a moment before trying again.')
|
||||
}
|
||||
|
||||
setResetSuccessMessage(null)
|
||||
setPasswordErrors(errorMessage)
|
||||
setShowValidationError(true)
|
||||
},
|
||||
@@ -278,15 +272,25 @@ export default function LoginPage({
|
||||
)
|
||||
|
||||
if (!result || result.error) {
|
||||
// Show error if not already handled by onError callback
|
||||
if (!errorHandled) {
|
||||
setResetSuccessMessage(null)
|
||||
const errorMessage = result?.error?.message || 'Login failed. Please try again.'
|
||||
setPasswordErrors([errorMessage])
|
||||
setShowValidationError(true)
|
||||
}
|
||||
setIsLoading(false)
|
||||
return
|
||||
}
|
||||
|
||||
// Clear reset success message on successful login
|
||||
setResetSuccessMessage(null)
|
||||
|
||||
// Explicit redirect fallback if better-auth doesn't redirect
|
||||
router.push(safeCallbackUrl)
|
||||
} catch (err: any) {
|
||||
if (err.message?.includes('not verified') || err.code?.includes('EMAIL_NOT_VERIFIED')) {
|
||||
if (typeof window !== 'undefined') {
|
||||
sessionStorage.setItem('verificationEmail', email)
|
||||
}
|
||||
router.push('/verify')
|
||||
redirectToVerify(email)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -400,6 +404,13 @@ export default function LoginPage({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Password reset success message */}
|
||||
{resetSuccessMessage && (
|
||||
<div className={`${inter.className} mt-1 space-y-1 text-[#4CAF50] text-xs`}>
|
||||
<p>{resetSuccessMessage}</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Email/Password Form - show unless explicitly disabled */}
|
||||
{!isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED')) && (
|
||||
<form onSubmit={onSubmit} className={`${inter.className} mt-8 space-y-8`}>
|
||||
@@ -482,24 +493,14 @@ export default function LoginPage({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
<BrandedButton
|
||||
type='submit'
|
||||
onMouseEnter={() => setIsButtonHovered(true)}
|
||||
onMouseLeave={() => setIsButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
disabled={isLoading}
|
||||
loading={isLoading}
|
||||
loadingText='Signing in'
|
||||
>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isLoading ? 'Signing in...' : 'Sign in'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
Sign in
|
||||
</BrandedButton>
|
||||
</form>
|
||||
)}
|
||||
|
||||
@@ -610,25 +611,15 @@ export default function LoginPage({
|
||||
<p>{resetStatus.message}</p>
|
||||
</div>
|
||||
)}
|
||||
<Button
|
||||
<BrandedButton
|
||||
type='button'
|
||||
onClick={handleForgotPassword}
|
||||
onMouseEnter={() => setIsResetButtonHovered(true)}
|
||||
onMouseLeave={() => setIsResetButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
disabled={isSubmittingReset}
|
||||
loading={isSubmittingReset}
|
||||
loadingText='Sending'
|
||||
>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isSubmittingReset ? 'Sending...' : 'Send Reset Link'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isResetButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
Send Reset Link
|
||||
</BrandedButton>
|
||||
</div>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
'use client'
|
||||
|
||||
import { useEffect, useState } from 'react'
|
||||
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { useState } from 'react'
|
||||
import { Eye, EyeOff } from 'lucide-react'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
|
||||
interface RequestResetFormProps {
|
||||
email: string
|
||||
@@ -27,36 +27,6 @@ export function RequestResetForm({
|
||||
statusMessage,
|
||||
className,
|
||||
}: RequestResetFormProps) {
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [])
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
onSubmit(email)
|
||||
@@ -94,24 +64,14 @@ export function RequestResetForm({
|
||||
)}
|
||||
</div>
|
||||
|
||||
<Button
|
||||
<BrandedButton
|
||||
type='submit'
|
||||
disabled={isSubmitting}
|
||||
onMouseEnter={() => setIsButtonHovered(true)}
|
||||
onMouseLeave={() => setIsButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
loading={isSubmitting}
|
||||
loadingText='Sending'
|
||||
>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isSubmitting ? 'Sending...' : 'Send Reset Link'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
Send Reset Link
|
||||
</BrandedButton>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
@@ -138,35 +98,6 @@ export function SetNewPasswordForm({
|
||||
const [validationMessage, setValidationMessage] = useState('')
|
||||
const [showPassword, setShowPassword] = useState(false)
|
||||
const [showConfirmPassword, setShowConfirmPassword] = useState(false)
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
|
||||
useEffect(() => {
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [])
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault()
|
||||
@@ -296,24 +227,14 @@ export function SetNewPasswordForm({
|
||||
)}
|
||||
</div>
|
||||
|
||||
<Button
|
||||
disabled={isSubmitting || !token}
|
||||
<BrandedButton
|
||||
type='submit'
|
||||
onMouseEnter={() => setIsButtonHovered(true)}
|
||||
onMouseLeave={() => setIsButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
disabled={isSubmitting || !token}
|
||||
loading={isSubmitting}
|
||||
loadingText='Resetting'
|
||||
>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isSubmitting ? 'Resetting...' : 'Reset Password'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
Reset Password
|
||||
</BrandedButton>
|
||||
</form>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -2,10 +2,9 @@
|
||||
|
||||
import { Suspense, useEffect, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ArrowRight, ChevronRight, Eye, EyeOff } from 'lucide-react'
|
||||
import { Eye, EyeOff } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useRouter, useSearchParams } from 'next/navigation'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { client, useSession } from '@/lib/auth/auth-client'
|
||||
@@ -14,8 +13,10 @@ import { cn } from '@/lib/core/utils/cn'
|
||||
import { quickValidateEmail } from '@/lib/messaging/email/validation'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
|
||||
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
|
||||
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'
|
||||
|
||||
const logger = createLogger('SignupForm')
|
||||
|
||||
@@ -95,8 +96,7 @@ function SignupFormContent({
|
||||
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
|
||||
const [redirectUrl, setRedirectUrl] = useState('')
|
||||
const [isInviteFlow, setIsInviteFlow] = useState(false)
|
||||
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
|
||||
const [isButtonHovered, setIsButtonHovered] = useState(false)
|
||||
const buttonClass = useBrandedButtonClass()
|
||||
|
||||
const [name, setName] = useState('')
|
||||
const [nameErrors, setNameErrors] = useState<string[]>([])
|
||||
@@ -126,31 +126,6 @@ function SignupFormContent({
|
||||
if (inviteFlowParam === 'true') {
|
||||
setIsInviteFlow(true)
|
||||
}
|
||||
|
||||
const checkCustomBrand = () => {
|
||||
const computedStyle = getComputedStyle(document.documentElement)
|
||||
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()
|
||||
|
||||
if (brandAccent && brandAccent !== '#6f3dfa') {
|
||||
setButtonClass('branded-button-custom')
|
||||
} else {
|
||||
setButtonClass('branded-button-gradient')
|
||||
}
|
||||
}
|
||||
|
||||
checkCustomBrand()
|
||||
|
||||
window.addEventListener('resize', checkCustomBrand)
|
||||
const observer = new MutationObserver(checkCustomBrand)
|
||||
observer.observe(document.documentElement, {
|
||||
attributes: true,
|
||||
attributeFilter: ['style', 'class'],
|
||||
})
|
||||
|
||||
return () => {
|
||||
window.removeEventListener('resize', checkCustomBrand)
|
||||
observer.disconnect()
|
||||
}
|
||||
}, [searchParams])
|
||||
|
||||
const validatePassword = (passwordValue: string): string[] => {
|
||||
@@ -500,24 +475,14 @@ function SignupFormContent({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
<BrandedButton
|
||||
type='submit'
|
||||
onMouseEnter={() => setIsButtonHovered(true)}
|
||||
onMouseLeave={() => setIsButtonHovered(false)}
|
||||
className='group inline-flex w-full items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all'
|
||||
disabled={isLoading}
|
||||
loading={isLoading}
|
||||
loadingText='Creating account'
|
||||
>
|
||||
<span className='flex items-center gap-1'>
|
||||
{isLoading ? 'Creating account' : 'Create account'}
|
||||
<span className='inline-flex transition-transform duration-200 group-hover:translate-x-0.5'>
|
||||
{isButtonHovered ? (
|
||||
<ArrowRight className='h-4 w-4' aria-hidden='true' />
|
||||
) : (
|
||||
<ChevronRight className='h-4 w-4' aria-hidden='true' />
|
||||
)}
|
||||
</span>
|
||||
</span>
|
||||
</Button>
|
||||
Create account
|
||||
</BrandedButton>
|
||||
</form>
|
||||
)}
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ import { cn } from '@/lib/core/utils/cn'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

const logger = createLogger('SSOForm')

@@ -57,7 +58,7 @@ export default function SSOForm() {
const [email, setEmail] = useState('')
const [emailErrors, setEmailErrors] = useState<string[]>([])
const [showEmailValidationError, setShowEmailValidationError] = useState(false)
const [buttonClass, setButtonClass] = useState('branded-button-gradient')
const buttonClass = useBrandedButtonClass()
const [callbackUrl, setCallbackUrl] = useState('/workspace')

useEffect(() => {

@@ -90,31 +91,6 @@ export default function SSOForm() {
setShowEmailValidationError(true)
}
}

const checkCustomBrand = () => {
const computedStyle = getComputedStyle(document.documentElement)
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()

if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
} else {
setButtonClass('branded-button-gradient')
}
}

checkCustomBrand()

window.addEventListener('resize', checkCustomBrand)
const observer = new MutationObserver(checkCustomBrand)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['style', 'class'],
})

return () => {
window.removeEventListener('resize', checkCustomBrand)
observer.disconnect()
}
}, [searchParams])

const handleEmailChange = (e: React.ChangeEvent<HTMLInputElement>) => {
@@ -8,6 +8,7 @@ import { cn } from '@/lib/core/utils/cn'
import { inter } from '@/app/_styles/fonts/inter/inter'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { useVerification } from '@/app/(auth)/verify/use-verification'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

interface VerifyContentProps {
hasEmailService: boolean

@@ -58,34 +59,7 @@ function VerificationForm({
setCountdown(30)
}

const [buttonClass, setButtonClass] = useState('branded-button-gradient')

useEffect(() => {
const checkCustomBrand = () => {
const computedStyle = getComputedStyle(document.documentElement)
const brandAccent = computedStyle.getPropertyValue('--brand-accent-hex').trim()

if (brandAccent && brandAccent !== '#6f3dfa') {
setButtonClass('branded-button-custom')
} else {
setButtonClass('branded-button-gradient')
}
}

checkCustomBrand()

window.addEventListener('resize', checkCustomBrand)
const observer = new MutationObserver(checkCustomBrand)
observer.observe(document.documentElement, {
attributes: true,
attributeFilter: ['style', 'class'],
})

return () => {
window.removeEventListener('resize', checkCustomBrand)
observer.disconnect()
}
}, [])
const buttonClass = useBrandedButtonClass()

return (
<>
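Across these auth forms the duplicated `checkCustomBrand` effect is replaced by the shared `useBrandedButtonClass()` hook. A minimal sketch of what such a hook could encapsulate, reconstructed from the removed logic above (the actual implementation in `@/hooks/use-branded-button-class` may differ):

```typescript
import { useEffect, useState } from 'react'

// Returns the button class, switching to the custom variant when a non-default
// --brand-accent-hex is set; mirrors the effect removed from the individual forms.
export function useBrandedButtonClass(): string {
  const [buttonClass, setButtonClass] = useState('branded-button-gradient')

  useEffect(() => {
    const checkCustomBrand = () => {
      const brandAccent = getComputedStyle(document.documentElement)
        .getPropertyValue('--brand-accent-hex')
        .trim()
      setButtonClass(
        brandAccent && brandAccent !== '#6f3dfa' ? 'branded-button-custom' : 'branded-button-gradient'
      )
    }

    checkCustomBrand()
    window.addEventListener('resize', checkCustomBrand)
    const observer = new MutationObserver(checkCustomBrand)
    observer.observe(document.documentElement, { attributes: true, attributeFilter: ['style', 'class'] })

    return () => {
      window.removeEventListener('resize', checkCustomBrand)
      observer.disconnect()
    }
  }, [])

  return buttonClass
}
```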
@@ -4,7 +4,6 @@ import { useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { X } from 'lucide-react'
import { Textarea } from '@/components/emcn'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import {

@@ -18,6 +17,7 @@ import { isHosted } from '@/lib/core/config/feature-flags'
import { cn } from '@/lib/core/utils/cn'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { BrandedButton } from '@/app/(auth)/components/branded-button'
import Footer from '@/app/(landing)/components/footer/footer'
import Nav from '@/app/(landing)/components/nav/nav'

@@ -493,18 +493,17 @@ export default function CareersPage() {

{/* Submit Button */}
<div className='flex justify-end pt-2'>
<Button
<BrandedButton
type='submit'
disabled={isSubmitting || submitStatus === 'success'}
className='min-w-[200px] rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all duration-300 hover:opacity-90 disabled:opacity-50'
size='lg'
loading={isSubmitting}
loadingText='Submitting'
showArrow={false}
fullWidth={false}
className='min-w-[200px]'
>
{isSubmitting
? 'Submitting...'
: submitStatus === 'success'
? 'Submitted'
: 'Submit Application'}
</Button>
{submitStatus === 'success' ? 'Submitted' : 'Submit Application'}
</BrandedButton>
</div>
</form>
</section>

@@ -11,6 +11,7 @@ import { useBrandConfig } from '@/lib/branding/branding'
import { isHosted } from '@/lib/core/config/feature-flags'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { getFormattedGitHubStars } from '@/app/(landing)/actions/github'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

const logger = createLogger('nav')

@@ -20,11 +21,12 @@ interface NavProps {
}

export default function Nav({ hideAuthButtons = false, variant = 'landing' }: NavProps = {}) {
const [githubStars, setGithubStars] = useState('25.1k')
const [githubStars, setGithubStars] = useState('25.8k')
const [isHovered, setIsHovered] = useState(false)
const [isLoginHovered, setIsLoginHovered] = useState(false)
const router = useRouter()
const brand = useBrandConfig()
const buttonClass = useBrandedButtonClass()

useEffect(() => {
if (variant !== 'landing') return

@@ -183,7 +185,7 @@ export default function Nav({ hideAuthButtons = false, variant = 'landing' }: Na
href='/signup'
onMouseEnter={() => setIsHovered(true)}
onMouseLeave={() => setIsHovered(false)}
className='group inline-flex items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[14px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all sm:text-[16px]'
className={`${buttonClass} group inline-flex items-center justify-center gap-2 rounded-[10px] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white transition-all`}
aria-label='Get started with Sim - Sign up for free'
prefetch={true}
>

27 apps/sim/app/(landing)/studio/[slug]/back-link.tsx Normal file
@@ -0,0 +1,27 @@
'use client'

import { useState } from 'react'
import { ArrowLeft, ChevronLeft } from 'lucide-react'
import Link from 'next/link'

export function BackLink() {
const [isHovered, setIsHovered] = useState(false)

return (
<Link
href='/studio'
className='group flex items-center gap-1 text-gray-600 text-sm hover:text-gray-900'
onMouseEnter={() => setIsHovered(true)}
onMouseLeave={() => setIsHovered(false)}
>
<span className='group-hover:-translate-x-0.5 inline-flex transition-transform duration-200'>
{isHovered ? (
<ArrowLeft className='h-4 w-4' aria-hidden='true' />
) : (
<ChevronLeft className='h-4 w-4' aria-hidden='true' />
)}
</span>
Back to Sim Studio
</Link>
)
}
@@ -5,7 +5,10 @@ import { Avatar, AvatarFallback, AvatarImage } from '@/components/emcn'
import { FAQ } from '@/lib/blog/faq'
import { getAllPostMeta, getPostBySlug, getRelatedPosts } from '@/lib/blog/registry'
import { buildArticleJsonLd, buildBreadcrumbJsonLd, buildPostMetadata } from '@/lib/blog/seo'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { BackLink } from '@/app/(landing)/studio/[slug]/back-link'
import { ShareButton } from '@/app/(landing)/studio/[slug]/share-button'

export async function generateStaticParams() {
const posts = await getAllPostMeta()

@@ -48,9 +51,7 @@ export default async function Page({ params }: { params: Promise<{ slug: string
/>
<header className='mx-auto max-w-[1450px] px-6 pt-8 sm:px-8 sm:pt-12 md:px-12 md:pt-16'>
<div className='mb-6'>
<Link href='/studio' className='text-gray-600 text-sm hover:text-gray-900'>
← Back to Sim Studio
</Link>
<BackLink />
</div>
<div className='flex flex-col gap-8 md:flex-row md:gap-12'>
<div className='w-full flex-shrink-0 md:w-[450px]'>

@@ -75,28 +76,31 @@ export default async function Page({ params }: { params: Promise<{ slug: string
>
{post.title}
</h1>
<div className='mt-4 flex items-center gap-3'>
{(post.authors || [post.author]).map((a, idx) => (
<div key={idx} className='flex items-center gap-2'>
{a?.avatarUrl ? (
<Avatar className='size-6'>
<AvatarImage src={a.avatarUrl} alt={a.name} />
<AvatarFallback>{a.name.slice(0, 2)}</AvatarFallback>
</Avatar>
) : null}
<Link
href={a?.url || '#'}
target='_blank'
rel='noopener noreferrer author'
className='text-[14px] text-gray-600 leading-[1.5] hover:text-gray-900 sm:text-[16px]'
itemProp='author'
itemScope
itemType='https://schema.org/Person'
>
<span itemProp='name'>{a?.name}</span>
</Link>
</div>
))}
<div className='mt-4 flex items-center justify-between'>
<div className='flex items-center gap-3'>
{(post.authors || [post.author]).map((a, idx) => (
<div key={idx} className='flex items-center gap-2'>
{a?.avatarUrl ? (
<Avatar className='size-6'>
<AvatarImage src={a.avatarUrl} alt={a.name} />
<AvatarFallback>{a.name.slice(0, 2)}</AvatarFallback>
</Avatar>
) : null}
<Link
href={a?.url || '#'}
target='_blank'
rel='noopener noreferrer author'
className='text-[14px] text-gray-600 leading-[1.5] hover:text-gray-900 sm:text-[16px]'
itemProp='author'
itemScope
itemType='https://schema.org/Person'
>
<span itemProp='name'>{a?.name}</span>
</Link>
</div>
))}
</div>
<ShareButton url={`${getBaseUrl()}/studio/${slug}`} title={post.title} />
</div>
</div>
</div>
65 apps/sim/app/(landing)/studio/[slug]/share-button.tsx Normal file
@@ -0,0 +1,65 @@
'use client'

import { useState } from 'react'
import { Share2 } from 'lucide-react'
import { Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'

interface ShareButtonProps {
url: string
title: string
}

export function ShareButton({ url, title }: ShareButtonProps) {
const [open, setOpen] = useState(false)
const [copied, setCopied] = useState(false)

const handleCopyLink = async () => {
try {
await navigator.clipboard.writeText(url)
setCopied(true)
setTimeout(() => {
setCopied(false)
setOpen(false)
}, 1000)
} catch {
setOpen(false)
}
}

const handleShareTwitter = () => {
const tweetUrl = `https://twitter.com/intent/tweet?url=${encodeURIComponent(url)}&text=${encodeURIComponent(title)}`
window.open(tweetUrl, '_blank', 'noopener,noreferrer')
setOpen(false)
}

const handleShareLinkedIn = () => {
const linkedInUrl = `https://www.linkedin.com/sharing/share-offsite/?url=${encodeURIComponent(url)}`
window.open(linkedInUrl, '_blank', 'noopener,noreferrer')
setOpen(false)
}

return (
<Popover
open={open}
onOpenChange={setOpen}
variant='secondary'
size='sm'
colorScheme='inverted'
>
<PopoverTrigger asChild>
<button
className='flex items-center gap-1.5 text-gray-600 text-sm hover:text-gray-900'
aria-label='Share this post'
>
<Share2 className='h-4 w-4' />
<span>Share</span>
</button>
</PopoverTrigger>
<PopoverContent align='end' minWidth={140}>
<PopoverItem onClick={handleCopyLink}>{copied ? 'Copied!' : 'Copy link'}</PopoverItem>
<PopoverItem onClick={handleShareTwitter}>Share on X</PopoverItem>
<PopoverItem onClick={handleShareLinkedIn}>Share on LinkedIn</PopoverItem>
</PopoverContent>
</Popover>
)
}
@@ -4,6 +4,11 @@ import { createLogger } from '@sim/logger'
import { and, desc, eq, inArray } from 'drizzle-orm'
import { getSession } from '@/lib/auth'
import { refreshOAuthToken } from '@/lib/oauth'
import {
getMicrosoftRefreshTokenExpiry,
isMicrosoftProvider,
PROACTIVE_REFRESH_THRESHOLD_DAYS,
} from '@/lib/oauth/microsoft'

const logger = createLogger('OAuthUtilsAPI')

@@ -205,15 +210,32 @@ export async function refreshAccessTokenIfNeeded(
}

// Decide if we should refresh: token missing OR expired
const expiresAt = credential.accessTokenExpiresAt
const accessTokenExpiresAt = credential.accessTokenExpiresAt
const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
const now = new Date()
const shouldRefresh =
!!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))

// Check if access token needs refresh (missing or expired)
const accessTokenNeedsRefresh =
!!credential.refreshToken &&
(!credential.accessToken || (accessTokenExpiresAt && accessTokenExpiresAt <= now))

// Check if we should proactively refresh to prevent refresh token expiry
// This applies to Microsoft providers whose refresh tokens expire after 90 days of inactivity
const proactiveRefreshThreshold = new Date(
now.getTime() + PROACTIVE_REFRESH_THRESHOLD_DAYS * 24 * 60 * 60 * 1000
)
const refreshTokenNeedsProactiveRefresh =
!!credential.refreshToken &&
isMicrosoftProvider(credential.providerId) &&
refreshTokenExpiresAt &&
refreshTokenExpiresAt <= proactiveRefreshThreshold

const shouldRefresh = accessTokenNeedsRefresh || refreshTokenNeedsProactiveRefresh

const accessToken = credential.accessToken

if (shouldRefresh) {
logger.info(`[${requestId}] Token expired, attempting to refresh for credential`)
logger.info(`[${requestId}] Refreshing token for credential`)
try {
const refreshedToken = await refreshOAuthToken(
credential.providerId,

@@ -227,11 +249,15 @@ export async function refreshAccessTokenIfNeeded(
userId: credential.userId,
hasRefreshToken: !!credential.refreshToken,
})
if (!accessTokenNeedsRefresh && accessToken) {
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
return accessToken
}
return null
}

// Prepare update data
const updateData: any = {
const updateData: Record<string, unknown> = {
accessToken: refreshedToken.accessToken,
accessTokenExpiresAt: new Date(Date.now() + refreshedToken.expiresIn * 1000),
updatedAt: new Date(),

@@ -243,6 +269,10 @@ export async function refreshAccessTokenIfNeeded(
updateData.refreshToken = refreshedToken.refreshToken
}

if (isMicrosoftProvider(credential.providerId)) {
updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
}

// Update the token in the database
await db.update(account).set(updateData).where(eq(account.id, credentialId))
@@ -256,6 +286,10 @@ export async function refreshAccessTokenIfNeeded(
|
||||
credentialId,
|
||||
userId: credential.userId,
|
||||
})
|
||||
if (!accessTokenNeedsRefresh && accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return accessToken
|
||||
}
|
||||
return null
|
||||
}
|
||||
} else if (!accessToken) {
|
||||
@@ -277,10 +311,27 @@ export async function refreshTokenIfNeeded(
|
||||
credentialId: string
|
||||
): Promise<{ accessToken: string; refreshed: boolean }> {
|
||||
// Decide if we should refresh: token missing OR expired
|
||||
const expiresAt = credential.accessTokenExpiresAt
|
||||
const accessTokenExpiresAt = credential.accessTokenExpiresAt
|
||||
const refreshTokenExpiresAt = credential.refreshTokenExpiresAt
|
||||
const now = new Date()
|
||||
const shouldRefresh =
|
||||
!!credential.refreshToken && (!credential.accessToken || (expiresAt && expiresAt <= now))
|
||||
|
||||
// Check if access token needs refresh (missing or expired)
|
||||
const accessTokenNeedsRefresh =
|
||||
!!credential.refreshToken &&
|
||||
(!credential.accessToken || (accessTokenExpiresAt && accessTokenExpiresAt <= now))
|
||||
|
||||
// Check if we should proactively refresh to prevent refresh token expiry
|
||||
// This applies to Microsoft providers whose refresh tokens expire after 90 days of inactivity
|
||||
const proactiveRefreshThreshold = new Date(
|
||||
now.getTime() + PROACTIVE_REFRESH_THRESHOLD_DAYS * 24 * 60 * 60 * 1000
|
||||
)
|
||||
const refreshTokenNeedsProactiveRefresh =
|
||||
!!credential.refreshToken &&
|
||||
isMicrosoftProvider(credential.providerId) &&
|
||||
refreshTokenExpiresAt &&
|
||||
refreshTokenExpiresAt <= proactiveRefreshThreshold
|
||||
|
||||
const shouldRefresh = accessTokenNeedsRefresh || refreshTokenNeedsProactiveRefresh
|
||||
|
||||
// If token appears valid and present, return it directly
|
||||
if (!shouldRefresh) {
|
||||
@@ -293,13 +344,17 @@ export async function refreshTokenIfNeeded(
|
||||
|
||||
if (!refreshResult) {
|
||||
logger.error(`[${requestId}] Failed to refresh token for credential`)
|
||||
if (!accessTokenNeedsRefresh && credential.accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return { accessToken: credential.accessToken, refreshed: false }
|
||||
}
|
||||
throw new Error('Failed to refresh token')
|
||||
}
|
||||
|
||||
const { accessToken: refreshedToken, expiresIn, refreshToken: newRefreshToken } = refreshResult
|
||||
|
||||
// Prepare update data
|
||||
const updateData: any = {
|
||||
const updateData: Record<string, unknown> = {
|
||||
accessToken: refreshedToken,
|
||||
accessTokenExpiresAt: new Date(Date.now() + expiresIn * 1000), // Use provider's expiry
|
||||
updatedAt: new Date(),
|
||||
@@ -311,6 +366,10 @@ export async function refreshTokenIfNeeded(
|
||||
updateData.refreshToken = newRefreshToken
|
||||
}
|
||||
|
||||
if (isMicrosoftProvider(credential.providerId)) {
|
||||
updateData.refreshTokenExpiresAt = getMicrosoftRefreshTokenExpiry()
|
||||
}
|
||||
|
||||
await db.update(account).set(updateData).where(eq(account.id, credentialId))
|
||||
|
||||
logger.info(`[${requestId}] Successfully refreshed access token`)
|
||||
@@ -331,6 +390,11 @@ export async function refreshTokenIfNeeded(
|
||||
}
|
||||
}
|
||||
|
||||
if (!accessTokenNeedsRefresh && credential.accessToken) {
|
||||
logger.info(`[${requestId}] Proactive refresh failed but access token still valid`)
|
||||
return { accessToken: credential.accessToken, refreshed: false }
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Refresh failed and no valid token found in DB`, error)
|
||||
throw error
|
||||
}
|
||||
|
||||
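The Microsoft-specific helpers imported in this file are not part of the diff above. A minimal sketch of what `@/lib/oauth/microsoft` plausibly exports, assuming the sliding 90-day refresh-token lifetime described in the comments; the threshold value and the provider-id check below are assumptions, not project code:

```typescript
// Hypothetical sketch of '@/lib/oauth/microsoft' — only the three exported names are
// confirmed by this diff; every value and implementation detail below is assumed.
export const PROACTIVE_REFRESH_THRESHOLD_DAYS = 7 // assumed look-ahead window before expiry

const REFRESH_TOKEN_LIFETIME_DAYS = 90 // per the diff comment: expires after 90 days of inactivity

export function isMicrosoftProvider(providerId: string): boolean {
  // Assumed naming convention for Microsoft-backed providers.
  return ['microsoft', 'outlook', 'onedrive', 'sharepoint', 'teams'].some((p) =>
    providerId.startsWith(p)
  )
}

export function getMicrosoftRefreshTokenExpiry(): Date {
  // A successful refresh restarts the 90-day inactivity window.
  return new Date(Date.now() + REFRESH_TOKEN_LIFETIME_DAYS * 24 * 60 * 60 * 1000)
}
```

With helpers shaped like this, any credential whose refresh token would lapse within the threshold gets refreshed on the next access, even while its access token is still valid.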
@@ -15,7 +15,8 @@ const resetPasswordSchema = z.object({
    .max(100, 'Password must not exceed 100 characters')
    .regex(/[A-Z]/, 'Password must contain at least one uppercase letter')
    .regex(/[a-z]/, 'Password must contain at least one lowercase letter')
    .regex(/[0-9]/, 'Password must contain at least one number'),
    .regex(/[0-9]/, 'Password must contain at least one number')
    .regex(/[^A-Za-z0-9]/, 'Password must contain at least one special character'),
})

export async function POST(request: NextRequest) {

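For reference, the tightened rule behaves as follows; a minimal zod sketch mirroring the regexes above (the unchanged minimum-length rule from the rest of the file is omitted):

```typescript
import { z } from 'zod'

const password = z
  .string()
  .max(100, 'Password must not exceed 100 characters')
  .regex(/[A-Z]/, 'Password must contain at least one uppercase letter')
  .regex(/[a-z]/, 'Password must contain at least one lowercase letter')
  .regex(/[0-9]/, 'Password must contain at least one number')
  .regex(/[^A-Za-z0-9]/, 'Password must contain at least one special character')

password.safeParse('Password1').success  // false — no special character
password.safeParse('Password1!').success // true
```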
@@ -224,7 +224,7 @@ export async function POST(req: NextRequest) {
      hasApiKey: !!executionParams.apiKey,
    })

    const result = await executeTool(resolvedToolName, executionParams, true)
    const result = await executeTool(resolvedToolName, executionParams)

    logger.info(`[${tracker.requestId}] Tool execution complete`, {
      toolName,

@@ -6,9 +6,10 @@ import { createLogger } from '@sim/logger'
|
||||
import binaryExtensionsList from 'binary-extensions'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
|
||||
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
|
||||
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
|
||||
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
|
||||
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
|
||||
import { UPLOAD_DIR_SERVER } from '@/lib/uploads/core/setup.server'
|
||||
import { getFileMetadataByKey } from '@/lib/uploads/server/metadata'
|
||||
import {
|
||||
@@ -21,6 +22,7 @@ import {
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
import type { UserFile } from '@/executor/types'
|
||||
import '@/lib/uploads/core/setup.server'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -30,6 +32,12 @@ const logger = createLogger('FilesParseAPI')
|
||||
const MAX_DOWNLOAD_SIZE_BYTES = 100 * 1024 * 1024 // 100 MB
|
||||
const DOWNLOAD_TIMEOUT_MS = 30000 // 30 seconds
|
||||
|
||||
interface ExecutionContext {
|
||||
workspaceId: string
|
||||
workflowId: string
|
||||
executionId: string
|
||||
}
|
||||
|
||||
interface ParseResult {
|
||||
success: boolean
|
||||
content?: string
|
||||
@@ -37,6 +45,7 @@ interface ParseResult {
|
||||
filePath: string
|
||||
originalName?: string // Original filename from database (for workspace files)
|
||||
viewerUrl?: string | null // Viewer URL for the file if available
|
||||
userFile?: UserFile // UserFile object for the raw file
|
||||
metadata?: {
|
||||
fileType: string
|
||||
size: number
|
||||
@@ -70,27 +79,45 @@ export async function POST(request: NextRequest) {

    const userId = authResult.userId
    const requestData = await request.json()
    const { filePath, fileType, workspaceId } = requestData
    const { filePath, fileType, workspaceId, workflowId, executionId } = requestData

    if (!filePath || (typeof filePath === 'string' && filePath.trim() === '')) {
      return NextResponse.json({ success: false, error: 'No file path provided' }, { status: 400 })
    }

    logger.info('File parse request received:', { filePath, fileType, workspaceId, userId })
    // Build execution context if all required fields are present
    const executionContext: ExecutionContext | undefined =
      workspaceId && workflowId && executionId
        ? { workspaceId, workflowId, executionId }
        : undefined

    logger.info('File parse request received:', {
      filePath,
      fileType,
      workspaceId,
      userId,
      hasExecutionContext: !!executionContext,
    })

    if (Array.isArray(filePath)) {
      const results = []
      for (const path of filePath) {
        if (!path || (typeof path === 'string' && path.trim() === '')) {
      for (const singlePath of filePath) {
        if (!singlePath || (typeof singlePath === 'string' && singlePath.trim() === '')) {
          results.push({
            success: false,
            error: 'Empty file path in array',
            filePath: path || '',
            filePath: singlePath || '',
          })
          continue
        }

        const result = await parseFileSingle(path, fileType, workspaceId, userId)
        const result = await parseFileSingle(
          singlePath,
          fileType,
          workspaceId,
          userId,
          executionContext
        )
        if (result.metadata) {
          result.metadata.processingTime = Date.now() - startTime
        }
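A request that opts into the new execution-context handling would look roughly like this; the endpoint path and ID values are illustrative, only the field names come from the destructuring in the hunk above:

```typescript
// Hypothetical client call — workspaceId, workflowId and executionId must all be present
// for the route to build an ExecutionContext; otherwise parsing proceeds without one.
const res = await fetch('/api/files/parse', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    filePath: 'https://example.com/report.pdf',
    fileType: 'application/pdf',
    workspaceId: 'ws_123', // placeholder IDs
    workflowId: 'wf_456',
    executionId: 'exec_789',
  }),
})
```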
@@ -106,6 +133,7 @@ export async function POST(request: NextRequest) {
|
||||
fileType: result.metadata?.fileType || 'application/octet-stream',
|
||||
size: result.metadata?.size || 0,
|
||||
binary: false,
|
||||
file: result.userFile,
|
||||
},
|
||||
filePath: result.filePath,
|
||||
viewerUrl: result.viewerUrl,
|
||||
@@ -121,7 +149,7 @@ export async function POST(request: NextRequest) {
|
||||
})
|
||||
}
|
||||
|
||||
const result = await parseFileSingle(filePath, fileType, workspaceId, userId)
|
||||
const result = await parseFileSingle(filePath, fileType, workspaceId, userId, executionContext)
|
||||
|
||||
if (result.metadata) {
|
||||
result.metadata.processingTime = Date.now() - startTime
|
||||
@@ -137,6 +165,7 @@ export async function POST(request: NextRequest) {
|
||||
fileType: result.metadata?.fileType || 'application/octet-stream',
|
||||
size: result.metadata?.size || 0,
|
||||
binary: false,
|
||||
file: result.userFile,
|
||||
},
|
||||
filePath: result.filePath,
|
||||
viewerUrl: result.viewerUrl,
|
||||
@@ -164,7 +193,8 @@ async function parseFileSingle(
|
||||
filePath: string,
|
||||
fileType: string,
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
userId: string,
|
||||
executionContext?: ExecutionContext
|
||||
): Promise<ParseResult> {
|
||||
logger.info('Parsing file:', filePath)
|
||||
|
||||
@@ -186,18 +216,18 @@ async function parseFileSingle(
|
||||
}
|
||||
|
||||
if (filePath.includes('/api/files/serve/')) {
|
||||
return handleCloudFile(filePath, fileType, undefined, userId)
|
||||
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
|
||||
}
|
||||
|
||||
if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
|
||||
return handleExternalUrl(filePath, fileType, workspaceId, userId)
|
||||
return handleExternalUrl(filePath, fileType, workspaceId, userId, executionContext)
|
||||
}
|
||||
|
||||
if (isUsingCloudStorage()) {
|
||||
return handleCloudFile(filePath, fileType, undefined, userId)
|
||||
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
|
||||
}
|
||||
|
||||
return handleLocalFile(filePath, fileType, userId)
|
||||
return handleLocalFile(filePath, fileType, userId, executionContext)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -230,12 +260,14 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
|
||||
/**
|
||||
* Handle external URL
|
||||
* If workspaceId is provided, checks if file already exists and saves to workspace if not
|
||||
* If executionContext is provided, also stores the file in execution storage and returns UserFile
|
||||
*/
|
||||
async function handleExternalUrl(
|
||||
url: string,
|
||||
fileType: string,
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
userId: string,
|
||||
executionContext?: ExecutionContext
|
||||
): Promise<ParseResult> {
|
||||
try {
|
||||
logger.info('Fetching external URL:', url)
|
||||
@@ -312,17 +344,13 @@ async function handleExternalUrl(
|
||||
|
||||
if (existingFile) {
|
||||
const storageFilePath = `/api/files/serve/${existingFile.key}`
|
||||
return handleCloudFile(storageFilePath, fileType, 'workspace', userId)
|
||||
return handleCloudFile(storageFilePath, fileType, 'workspace', userId, executionContext)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!)
|
||||
const response = await fetch(pinnedUrl, {
|
||||
signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS),
|
||||
headers: {
|
||||
Host: urlValidation.originalHostname!,
|
||||
},
|
||||
const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
|
||||
timeout: DOWNLOAD_TIMEOUT_MS,
|
||||
})
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`)
|
||||
@@ -341,6 +369,19 @@ async function handleExternalUrl(
|
||||
|
||||
logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)
|
||||
|
||||
let userFile: UserFile | undefined
|
||||
const mimeType = response.headers.get('content-type') || getMimeTypeFromExtension(extension)
|
||||
|
||||
if (executionContext) {
|
||||
try {
|
||||
userFile = await uploadExecutionFile(executionContext, buffer, filename, mimeType, userId)
|
||||
logger.info(`Stored file in execution storage: ${filename}`, { key: userFile.key })
|
||||
} catch (uploadError) {
|
||||
logger.warn(`Failed to store file in execution storage:`, uploadError)
|
||||
// Continue without userFile - parsing can still work
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldCheckWorkspace) {
|
||||
try {
|
||||
const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
|
||||
@@ -353,8 +394,6 @@ async function handleExternalUrl(
|
||||
})
|
||||
} else {
|
||||
const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
|
||||
const mimeType =
|
||||
response.headers.get('content-type') || getMimeTypeFromExtension(extension)
|
||||
await uploadWorkspaceFile(workspaceId, userId, buffer, filename, mimeType)
|
||||
logger.info(`Saved URL file to workspace storage: ${filename}`)
|
||||
}
|
||||
@@ -363,17 +402,23 @@ async function handleExternalUrl(
|
||||
}
|
||||
}
|
||||
|
||||
let parseResult: ParseResult
|
||||
if (extension === 'pdf') {
|
||||
return await handlePdfBuffer(buffer, filename, fileType, url)
|
||||
}
|
||||
if (extension === 'csv') {
|
||||
return await handleCsvBuffer(buffer, filename, fileType, url)
|
||||
}
|
||||
if (isSupportedFileType(extension)) {
|
||||
return await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
|
||||
parseResult = await handlePdfBuffer(buffer, filename, fileType, url)
|
||||
} else if (extension === 'csv') {
|
||||
parseResult = await handleCsvBuffer(buffer, filename, fileType, url)
|
||||
} else if (isSupportedFileType(extension)) {
|
||||
parseResult = await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
|
||||
} else {
|
||||
parseResult = handleGenericBuffer(buffer, filename, extension, fileType)
|
||||
}
|
||||
|
||||
return handleGenericBuffer(buffer, filename, extension, fileType)
|
||||
// Attach userFile to the result
|
||||
if (userFile) {
|
||||
parseResult.userFile = userFile
|
||||
}
|
||||
|
||||
return parseResult
|
||||
} catch (error) {
|
||||
logger.error(`Error handling external URL ${url}:`, error)
|
||||
return {
|
||||
@@ -386,12 +431,15 @@ async function handleExternalUrl(
|
||||
|
||||
/**
|
||||
* Handle file stored in cloud storage
|
||||
* If executionContext is provided and file is not already from execution storage,
|
||||
* copies the file to execution storage and returns UserFile
|
||||
*/
|
||||
async function handleCloudFile(
|
||||
filePath: string,
|
||||
fileType: string,
|
||||
explicitContext: string | undefined,
|
||||
userId: string
|
||||
userId: string,
|
||||
executionContext?: ExecutionContext
|
||||
): Promise<ParseResult> {
|
||||
try {
|
||||
const cloudKey = extractStorageKey(filePath)
|
||||
@@ -438,6 +486,7 @@ async function handleCloudFile(
|
||||
|
||||
const filename = originalFilename || cloudKey.split('/').pop() || cloudKey
|
||||
const extension = path.extname(filename).toLowerCase().substring(1)
|
||||
const mimeType = getMimeTypeFromExtension(extension)
|
||||
|
||||
const normalizedFilePath = `/api/files/serve/${encodeURIComponent(cloudKey)}?context=${context}`
|
||||
let workspaceIdFromKey: string | undefined
|
||||
@@ -453,6 +502,39 @@ async function handleCloudFile(
|
||||
|
||||
const viewerUrl = getViewerUrl(cloudKey, workspaceIdFromKey)
|
||||
|
||||
// Store file in execution storage if executionContext is provided
|
||||
let userFile: UserFile | undefined
|
||||
|
||||
if (executionContext) {
|
||||
// If file is already from execution context, create UserFile reference without re-uploading
|
||||
if (context === 'execution') {
|
||||
userFile = {
|
||||
id: `file_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`,
|
||||
name: filename,
|
||||
url: normalizedFilePath,
|
||||
size: fileBuffer.length,
|
||||
type: mimeType,
|
||||
key: cloudKey,
|
||||
context: 'execution',
|
||||
}
|
||||
logger.info(`Created UserFile reference for existing execution file: ${filename}`)
|
||||
} else {
|
||||
// Copy from workspace/other storage to execution storage
|
||||
try {
|
||||
userFile = await uploadExecutionFile(
|
||||
executionContext,
|
||||
fileBuffer,
|
||||
filename,
|
||||
mimeType,
|
||||
userId
|
||||
)
|
||||
logger.info(`Copied file to execution storage: ${filename}`, { key: userFile.key })
|
||||
} catch (uploadError) {
|
||||
logger.warn(`Failed to copy file to execution storage:`, uploadError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let parseResult: ParseResult
|
||||
if (extension === 'pdf') {
|
||||
parseResult = await handlePdfBuffer(fileBuffer, filename, fileType, normalizedFilePath)
|
||||
@@ -477,6 +559,11 @@ async function handleCloudFile(
|
||||
|
||||
parseResult.viewerUrl = viewerUrl
|
||||
|
||||
// Attach userFile to the result
|
||||
if (userFile) {
|
||||
parseResult.userFile = userFile
|
||||
}
|
||||
|
||||
return parseResult
|
||||
} catch (error) {
|
||||
logger.error(`Error handling cloud file ${filePath}:`, error)
|
||||
@@ -500,7 +587,8 @@ async function handleCloudFile(
|
||||
async function handleLocalFile(
|
||||
filePath: string,
|
||||
fileType: string,
|
||||
userId: string
|
||||
userId: string,
|
||||
executionContext?: ExecutionContext
|
||||
): Promise<ParseResult> {
|
||||
try {
|
||||
const filename = filePath.split('/').pop() || filePath
|
||||
@@ -540,13 +628,32 @@ async function handleLocalFile(
|
||||
const hash = createHash('md5').update(fileBuffer).digest('hex')
|
||||
|
||||
const extension = path.extname(filename).toLowerCase().substring(1)
|
||||
const mimeType = fileType || getMimeTypeFromExtension(extension)
|
||||
|
||||
// Store file in execution storage if executionContext is provided
|
||||
let userFile: UserFile | undefined
|
||||
if (executionContext) {
|
||||
try {
|
||||
userFile = await uploadExecutionFile(
|
||||
executionContext,
|
||||
fileBuffer,
|
||||
filename,
|
||||
mimeType,
|
||||
userId
|
||||
)
|
||||
logger.info(`Stored local file in execution storage: ${filename}`, { key: userFile.key })
|
||||
} catch (uploadError) {
|
||||
logger.warn(`Failed to store local file in execution storage:`, uploadError)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
content: result.content,
|
||||
filePath,
|
||||
userFile,
|
||||
metadata: {
|
||||
fileType: fileType || getMimeTypeFromExtension(extension),
|
||||
fileType: mimeType,
|
||||
size: stats.size,
|
||||
hash,
|
||||
processingTime: 0,
|
||||
|
||||
@@ -11,7 +11,7 @@ import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { setFormAuthCookie, validateFormAuth } from '@/app/api/form/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'

@@ -36,7 +36,7 @@ async function getWorkflowInputSchema(workflowId: string): Promise<any[]> {
    .from(workflowBlocks)
    .where(eq(workflowBlocks.workflowId, workflowId))

  const startBlock = blocks.find((block) => isValidStartBlockType(block.type))
  const startBlock = blocks.find((block) => isInputDefinitionTrigger(block.type))

  if (!startBlock) {
    return []

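The new predicate comes from a module that is not shown in this compare; a hypothetical sketch of its intent, offered purely as an assumption about what it matches:

```typescript
// Hypothetical sketch — the real '@/lib/workflows/triggers/input-definition-triggers'
// is not part of this diff. The idea: any trigger block that defines an input format
// (not only the classic start block) can supply the workflow input schema.
const INPUT_DEFINITION_TRIGGER_TYPES = ['starter', 'input_trigger', 'api_trigger', 'form'] // assumed list

export function isInputDefinitionTrigger(blockType: string): boolean {
  return INPUT_DEFINITION_TRIGGER_TYPES.includes(blockType)
}
```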
@@ -276,8 +276,11 @@ describe('Function Execute API Route', () => {
    it.concurrent('should resolve tag variables with <tag_name> syntax', async () => {
      const req = createMockRequest('POST', {
        code: 'return <email>',
        params: {
          email: { id: '123', subject: 'Test Email' },
        blockData: {
          'block-123': { id: '123', subject: 'Test Email' },
        },
        blockNameMapping: {
          email: 'block-123',
        },
      })

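The updated tests spell out the resolution contract: a `<tag>` in the code is looked up through `blockNameMapping` (normalized block name to block id) and its value is read from `blockData`, instead of being passed loosely in `params`. A compact illustration of the request body these tests construct:

```typescript
// Shape implied by the test above — <email> resolves via blockNameMapping into blockData.
const body = {
  code: 'return <email>',
  blockData: { 'block-123': { id: '123', subject: 'Test Email' } },
  blockNameMapping: { email: 'block-123' },
}
```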
@@ -305,9 +308,13 @@ describe('Function Execute API Route', () => {
|
||||
it.concurrent('should only match valid variable names in angle brackets', async () => {
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return <validVar> + "<invalid@email.com>" + <another_valid>',
|
||||
params: {
|
||||
validVar: 'hello',
|
||||
another_valid: 'world',
|
||||
blockData: {
|
||||
'block-1': 'hello',
|
||||
'block-2': 'world',
|
||||
},
|
||||
blockNameMapping: {
|
||||
validvar: 'block-1',
|
||||
another_valid: 'block-2',
|
||||
},
|
||||
})
|
||||
|
||||
@@ -321,28 +328,22 @@ describe('Function Execute API Route', () => {
|
||||
it.concurrent(
|
||||
'should handle Gmail webhook data with email addresses containing angle brackets',
|
||||
async () => {
|
||||
const gmailData = {
|
||||
email: {
|
||||
id: '123',
|
||||
from: 'Waleed Latif <waleed@sim.ai>',
|
||||
to: 'User <user@example.com>',
|
||||
subject: 'Test Email',
|
||||
bodyText: 'Hello world',
|
||||
},
|
||||
rawEmail: {
|
||||
id: '123',
|
||||
payload: {
|
||||
headers: [
|
||||
{ name: 'From', value: 'Waleed Latif <waleed@sim.ai>' },
|
||||
{ name: 'To', value: 'User <user@example.com>' },
|
||||
],
|
||||
},
|
||||
},
|
||||
const emailData = {
|
||||
id: '123',
|
||||
from: 'Waleed Latif <waleed@sim.ai>',
|
||||
to: 'User <user@example.com>',
|
||||
subject: 'Test Email',
|
||||
bodyText: 'Hello world',
|
||||
}
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return <email>',
|
||||
params: gmailData,
|
||||
blockData: {
|
||||
'block-email': emailData,
|
||||
},
|
||||
blockNameMapping: {
|
||||
email: 'block-email',
|
||||
},
|
||||
})
|
||||
|
||||
const response = await POST(req)
|
||||
@@ -356,17 +357,20 @@ describe('Function Execute API Route', () => {
|
||||
it.concurrent(
|
||||
'should properly serialize complex email objects with special characters',
|
||||
async () => {
|
||||
const complexEmailData = {
|
||||
email: {
|
||||
from: 'Test User <test@example.com>',
|
||||
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
|
||||
bodyText: 'Text with\nnewlines\tand\ttabs',
|
||||
},
|
||||
const emailData = {
|
||||
from: 'Test User <test@example.com>',
|
||||
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
|
||||
bodyText: 'Text with\nnewlines\tand\ttabs',
|
||||
}
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return <email>',
|
||||
params: complexEmailData,
|
||||
blockData: {
|
||||
'block-email': emailData,
|
||||
},
|
||||
blockNameMapping: {
|
||||
email: 'block-email',
|
||||
},
|
||||
})
|
||||
|
||||
const response = await POST(req)
|
||||
@@ -519,18 +523,23 @@ describe('Function Execute API Route', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should handle JSON serialization edge cases', async () => {
|
||||
const complexData = {
|
||||
special: 'chars"with\'quotes',
|
||||
unicode: '🎉 Unicode content',
|
||||
nested: {
|
||||
deep: {
|
||||
value: 'test',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
code: 'return <complexData>',
|
||||
params: {
|
||||
complexData: {
|
||||
special: 'chars"with\'quotes',
|
||||
unicode: '🎉 Unicode content',
|
||||
nested: {
|
||||
deep: {
|
||||
value: 'test',
|
||||
},
|
||||
},
|
||||
},
|
||||
blockData: {
|
||||
'block-complex': complexData,
|
||||
},
|
||||
blockNameMapping: {
|
||||
complexdata: 'block-complex',
|
||||
},
|
||||
})
|
||||
|
||||
|
||||
@@ -6,10 +6,10 @@ import { executeInE2B } from '@/lib/execution/e2b'
|
||||
import { executeInIsolatedVM } from '@/lib/execution/isolated-vm'
|
||||
import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
|
||||
import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
|
||||
import { type OutputSchema, resolveBlockReference } from '@/executor/utils/block-reference'
|
||||
import {
|
||||
createEnvVarPattern,
|
||||
createWorkflowVariablePattern,
|
||||
resolveEnvVarReferences,
|
||||
} from '@/executor/utils/reference-validation'
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
@@ -18,8 +18,8 @@ export const MAX_DURATION = 210
|
||||
|
||||
const logger = createLogger('FunctionExecuteAPI')
|
||||
|
||||
const E2B_JS_WRAPPER_LINES = 3 // Lines before user code: ';(async () => {', ' try {', ' const __sim_result = await (async () => {'
|
||||
const E2B_PYTHON_WRAPPER_LINES = 1 // Lines before user code: 'def __sim_main__():'
|
||||
const E2B_JS_WRAPPER_LINES = 3
|
||||
const E2B_PYTHON_WRAPPER_LINES = 1
|
||||
|
||||
type TypeScriptModule = typeof import('typescript')
|
||||
|
||||
@@ -134,33 +134,21 @@ function extractEnhancedError(
|
||||
if (error.stack) {
|
||||
enhanced.stack = error.stack
|
||||
|
||||
// Parse stack trace to extract line and column information
|
||||
// Handle both compilation errors and runtime errors
|
||||
const stackLines: string[] = error.stack.split('\n')
|
||||
|
||||
for (const line of stackLines) {
|
||||
// Pattern 1: Compilation errors - "user-function.js:6"
|
||||
let match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
|
||||
// Pattern 2: Runtime errors - "at user-function.js:5:12"
|
||||
if (!match) {
|
||||
match = line.match(/at\s+user-function\.js:(\d+):(\d+)/)
|
||||
}
|
||||
|
||||
// Pattern 3: Generic patterns for any line containing our filename
|
||||
if (!match) {
|
||||
match = line.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
}
|
||||
|
||||
if (match) {
|
||||
const stackLine = Number.parseInt(match[1], 10)
|
||||
const stackColumn = match[2] ? Number.parseInt(match[2], 10) : undefined
|
||||
|
||||
// Adjust line number to account for wrapper code
|
||||
// The user code starts at a specific line in our wrapper
|
||||
const adjustedLine = stackLine - userCodeStartLine + 1
|
||||
|
||||
// Check if this is a syntax error in wrapper code caused by incomplete user code
|
||||
const isWrapperSyntaxError =
|
||||
stackLine > userCodeStartLine &&
|
||||
error.name === 'SyntaxError' &&
|
||||
@@ -168,7 +156,6 @@ function extractEnhancedError(
|
||||
error.message.includes('Unexpected end of input'))
|
||||
|
||||
if (isWrapperSyntaxError && userCode) {
|
||||
// Map wrapper syntax errors to the last line of user code
|
||||
const codeLines = userCode.split('\n')
|
||||
const lastUserLine = codeLines.length
|
||||
enhanced.line = lastUserLine
|
||||
@@ -181,7 +168,6 @@ function extractEnhancedError(
|
||||
enhanced.line = adjustedLine
|
||||
enhanced.column = stackColumn
|
||||
|
||||
// Extract the actual line content from user code
|
||||
if (userCode) {
|
||||
const codeLines = userCode.split('\n')
|
||||
if (adjustedLine <= codeLines.length) {
|
||||
@@ -192,7 +178,6 @@ function extractEnhancedError(
|
||||
}
|
||||
|
||||
if (stackLine <= userCodeStartLine) {
|
||||
// Error is in wrapper code itself
|
||||
enhanced.line = stackLine
|
||||
enhanced.column = stackColumn
|
||||
break
|
||||
@@ -200,7 +185,6 @@ function extractEnhancedError(
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up stack trace to show user-relevant information
|
||||
const cleanedStackLines: string[] = stackLines
|
||||
.filter(
|
||||
(line: string) =>
|
||||
@@ -214,9 +198,6 @@ function extractEnhancedError(
|
||||
}
|
||||
}
|
||||
|
||||
// Keep original message without adding error type prefix
|
||||
// The error type will be added later in createUserFriendlyErrorMessage
|
||||
|
||||
return enhanced
|
||||
}
|
||||
|
||||
@@ -231,7 +212,6 @@ function formatE2BError(
|
||||
userCode: string,
|
||||
prologueLineCount: number
|
||||
): { formattedError: string; cleanedOutput: string } {
|
||||
// Calculate line offset based on language and prologue
|
||||
const wrapperLines =
|
||||
language === CodeLanguage.Python ? E2B_PYTHON_WRAPPER_LINES : E2B_JS_WRAPPER_LINES
|
||||
const totalOffset = prologueLineCount + wrapperLines
|
||||
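The offset arithmetic in formatE2BError is easiest to see with numbers; a small worked example using the constants above (the concrete counts are illustrative):

```typescript
// If the generated prologue added 4 lines and the JavaScript wrapper adds
// E2B_JS_WRAPPER_LINES = 3 lines before user code, an error E2B reports at
// line 11 maps back to user code line 11 - (4 + 3) = 4.
const E2B_JS_WRAPPER_LINES = 3
const prologueLineCount = 4 // illustrative value
const reportedLine = 11     // line number in the combined E2B source

const totalOffset = prologueLineCount + E2B_JS_WRAPPER_LINES
const userLine = reportedLine - totalOffset // 4
```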
@@ -241,27 +221,20 @@ function formatE2BError(
|
||||
let cleanErrorMsg = ''
|
||||
|
||||
if (language === CodeLanguage.Python) {
|
||||
// Python error format: "Cell In[X], line Y" followed by error details
|
||||
// Extract line number from the Cell reference
|
||||
const cellMatch = errorOutput.match(/Cell In\[\d+\], line (\d+)/)
|
||||
if (cellMatch) {
|
||||
const originalLine = Number.parseInt(cellMatch[1], 10)
|
||||
userLine = originalLine - totalOffset
|
||||
}
|
||||
|
||||
// Extract clean error message from the error string
|
||||
// Remove file references like "(detected at line X) (file.py, line Y)"
|
||||
cleanErrorMsg = errorMessage
|
||||
.replace(/\s*\(detected at line \d+\)/g, '')
|
||||
.replace(/\s*\([^)]+\.py, line \d+\)/g, '')
|
||||
.trim()
|
||||
} else if (language === CodeLanguage.JavaScript) {
|
||||
// JavaScript error format from E2B: "SyntaxError: /path/file.ts: Message. (line:col)\n\n 9 | ..."
|
||||
// First, extract the error type and message from the first line
|
||||
const firstLineEnd = errorMessage.indexOf('\n')
|
||||
const firstLine = firstLineEnd > 0 ? errorMessage.substring(0, firstLineEnd) : errorMessage
|
||||
|
||||
// Parse: "SyntaxError: /home/user/index.ts: Missing semicolon. (11:9)"
|
||||
const jsErrorMatch = firstLine.match(/^(\w+Error):\s*[^:]+:\s*([^(]+)\.\s*\((\d+):(\d+)\)/)
|
||||
if (jsErrorMatch) {
|
||||
cleanErrorType = jsErrorMatch[1]
|
||||
@@ -269,13 +242,11 @@ function formatE2BError(
|
||||
const originalLine = Number.parseInt(jsErrorMatch[3], 10)
|
||||
userLine = originalLine - totalOffset
|
||||
} else {
|
||||
// Fallback: look for line number in the arrow pointer line (> 11 |)
|
||||
const arrowMatch = errorMessage.match(/^>\s*(\d+)\s*\|/m)
|
||||
if (arrowMatch) {
|
||||
const originalLine = Number.parseInt(arrowMatch[1], 10)
|
||||
userLine = originalLine - totalOffset
|
||||
}
|
||||
// Try to extract error type and message
|
||||
const errorMatch = firstLine.match(/^(\w+Error):\s*(.+)/)
|
||||
if (errorMatch) {
|
||||
cleanErrorType = errorMatch[1]
|
||||
@@ -289,13 +260,11 @@ function formatE2BError(
|
||||
}
|
||||
}
|
||||
|
||||
// Build the final clean error message
|
||||
const finalErrorMsg =
|
||||
cleanErrorType && cleanErrorMsg
|
||||
? `${cleanErrorType}: ${cleanErrorMsg}`
|
||||
: cleanErrorMsg || errorMessage
|
||||
|
||||
// Format with line number if available
|
||||
let formattedError = finalErrorMsg
|
||||
if (userLine && userLine > 0) {
|
||||
const codeLines = userCode.split('\n')
|
||||
@@ -311,7 +280,6 @@ function formatE2BError(
|
||||
}
|
||||
}
|
||||
|
||||
// For stdout, just return the clean error message without the full traceback
|
||||
const cleanedOutput = finalErrorMsg
|
||||
|
||||
return { formattedError, cleanedOutput }
|
||||
@@ -327,7 +295,6 @@ function createUserFriendlyErrorMessage(
|
||||
): string {
|
||||
let errorMessage = enhanced.message
|
||||
|
||||
// Add line information if available
|
||||
if (enhanced.line !== undefined) {
|
||||
let lineInfo = `Line ${enhanced.line}`
|
||||
|
||||
@@ -338,18 +305,14 @@ function createUserFriendlyErrorMessage(
|
||||
|
||||
errorMessage = `${lineInfo} - ${errorMessage}`
|
||||
} else {
|
||||
// If no line number, try to extract it from stack trace for display
|
||||
if (enhanced.stack) {
|
||||
const stackMatch = enhanced.stack.match(/user-function\.js:(\d+)(?::(\d+))?/)
|
||||
if (stackMatch) {
|
||||
const line = Number.parseInt(stackMatch[1], 10)
|
||||
let lineInfo = `Line ${line}`
|
||||
|
||||
// Try to get line content if we have userCode
|
||||
if (userCode) {
|
||||
const codeLines = userCode.split('\n')
|
||||
// Note: stackMatch gives us VM line number, need to adjust
|
||||
// This is a fallback case, so we might not have perfect line mapping
|
||||
if (line <= codeLines.length) {
|
||||
const lineContent = codeLines[line - 1]?.trim()
|
||||
if (lineContent) {
|
||||
@@ -363,7 +326,6 @@ function createUserFriendlyErrorMessage(
|
||||
}
|
||||
}
|
||||
|
||||
// Add error type prefix with consistent naming
|
||||
if (enhanced.name !== 'Error') {
|
||||
const errorTypePrefix =
|
||||
enhanced.name === 'SyntaxError'
|
||||
@@ -374,7 +336,6 @@ function createUserFriendlyErrorMessage(
|
||||
? 'Reference Error'
|
||||
: enhanced.name
|
||||
|
||||
// Only add prefix if not already present
|
||||
if (!errorMessage.toLowerCase().includes(errorTypePrefix.toLowerCase())) {
|
||||
errorMessage = `${errorTypePrefix}: ${errorMessage}`
|
||||
}
|
||||
@@ -383,9 +344,6 @@ function createUserFriendlyErrorMessage(
|
||||
return errorMessage
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves workflow variables with <variable.name> syntax
|
||||
*/
|
||||
function resolveWorkflowVariables(
|
||||
code: string,
|
||||
workflowVariables: Record<string, any>,
|
||||
@@ -405,39 +363,35 @@ function resolveWorkflowVariables(
|
||||
while ((match = regex.exec(code)) !== null) {
|
||||
const variableName = match[1].trim()
|
||||
|
||||
// Find the variable by name (workflowVariables is indexed by ID, values are variable objects)
|
||||
const foundVariable = Object.entries(workflowVariables).find(
|
||||
([_, variable]) => normalizeName(variable.name || '') === variableName
|
||||
)
|
||||
|
||||
let variableValue: unknown = ''
|
||||
if (foundVariable) {
|
||||
const variable = foundVariable[1]
|
||||
variableValue = variable.value
|
||||
if (!foundVariable) {
|
||||
const availableVars = Object.values(workflowVariables)
|
||||
.map((v) => v.name)
|
||||
.filter(Boolean)
|
||||
throw new Error(
|
||||
`Variable "${variableName}" doesn't exist.` +
|
||||
(availableVars.length > 0 ? ` Available: ${availableVars.join(', ')}` : '')
|
||||
)
|
||||
}
|
||||
|
||||
if (variable.value !== undefined && variable.value !== null) {
|
||||
const variable = foundVariable[1]
|
||||
let variableValue: unknown = variable.value
|
||||
|
||||
if (variable.value !== undefined && variable.value !== null) {
|
||||
const type = variable.type === 'string' ? 'plain' : variable.type
|
||||
|
||||
if (type === 'number') {
|
||||
variableValue = Number(variableValue)
|
||||
} else if (type === 'boolean') {
|
||||
variableValue = variableValue === 'true' || variableValue === true
|
||||
} else if (type === 'json' && typeof variableValue === 'string') {
|
||||
try {
|
||||
// Handle 'string' type the same as 'plain' for backward compatibility
|
||||
const type = variable.type === 'string' ? 'plain' : variable.type
|
||||
|
||||
// For plain text, use exactly what's entered without modifications
|
||||
if (type === 'plain' && typeof variableValue === 'string') {
|
||||
// Use as-is for plain text
|
||||
} else if (type === 'number') {
|
||||
variableValue = Number(variableValue)
|
||||
} else if (type === 'boolean') {
|
||||
variableValue = variableValue === 'true' || variableValue === true
|
||||
} else if (type === 'json') {
|
||||
try {
|
||||
variableValue =
|
||||
typeof variableValue === 'string' ? JSON.parse(variableValue) : variableValue
|
||||
} catch {
|
||||
// Keep original value if JSON parsing fails
|
||||
}
|
||||
}
|
||||
variableValue = JSON.parse(variableValue)
|
||||
} catch {
|
||||
// Fallback to original value on error
|
||||
variableValue = variable.value
|
||||
// Keep as-is
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -450,11 +404,9 @@ function resolveWorkflowVariables(
|
||||
})
|
||||
}
|
||||
|
||||
// Process replacements in reverse order to maintain correct indices
|
||||
for (let i = replacements.length - 1; i >= 0; i--) {
|
||||
const { match: matchStr, index, variableName, variableValue } = replacements[i]
|
||||
|
||||
// Use variable reference approach
|
||||
const safeVarName = `__variable_${variableName.replace(/[^a-zA-Z0-9_]/g, '_')}`
|
||||
contextVariables[safeVarName] = variableValue
|
||||
resolvedCode =
|
||||
@@ -464,9 +416,6 @@ function resolveWorkflowVariables(
|
||||
return resolvedCode
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves environment variables with {{var_name}} syntax
|
||||
*/
|
||||
function resolveEnvironmentVariables(
|
||||
code: string,
|
||||
params: Record<string, any>,
|
||||
@@ -482,32 +431,28 @@

  const resolverVars: Record<string, string> = {}
  Object.entries(params).forEach(([key, value]) => {
    if (value) {
    if (value !== undefined && value !== null) {
      resolverVars[key] = String(value)
    }
  })
  Object.entries(envVars).forEach(([key, value]) => {
    if (value) {
    if (value !== undefined && value !== null) {
      resolverVars[key] = value
    }
  })

  while ((match = regex.exec(code)) !== null) {
    const varName = match[1].trim()
    const resolved = resolveEnvVarReferences(match[0], resolverVars, {
      allowEmbedded: true,
      resolveExactMatch: true,
      trimKeys: true,
      onMissing: 'empty',
      deep: false,
    })
    const varValue =
      typeof resolved === 'string' ? resolved : resolved == null ? '' : String(resolved)

    if (!(varName in resolverVars)) {
      continue
    }

    replacements.push({
      match: match[0],
      index: match.index,
      varName,
      varValue: String(varValue),
      varValue: resolverVars[varName],
    })
  }
|
||||
|
||||
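The switch from a truthiness check to an explicit null/undefined check matters for falsy-but-valid values; a short illustration of the difference:

```typescript
// With `if (value)`, these params were silently dropped and their {{refs}} resolved to ''.
// With `if (value !== undefined && value !== null)`, they are kept.
const params = { retries: 0, verbose: false, suffix: '' }
const resolverVars: Record<string, string> = {}

Object.entries(params).forEach(([key, value]) => {
  if (value !== undefined && value !== null) {
    resolverVars[key] = String(value) // retries -> '0', verbose -> 'false', suffix -> ''
  }
})
```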
@@ -523,64 +468,59 @@ function resolveEnvironmentVariables(
|
||||
return resolvedCode
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves tags with <tag_name> syntax (including nested paths like <block.response.data>)
|
||||
*/
|
||||
function resolveTagVariables(
|
||||
code: string,
|
||||
params: Record<string, any>,
|
||||
blockData: Record<string, any>,
|
||||
blockData: Record<string, unknown>,
|
||||
blockNameMapping: Record<string, string>,
|
||||
contextVariables: Record<string, any>
|
||||
blockOutputSchemas: Record<string, OutputSchema>,
|
||||
contextVariables: Record<string, unknown>,
|
||||
language = 'javascript'
|
||||
): string {
|
||||
let resolvedCode = code
|
||||
const undefinedLiteral = language === 'python' ? 'None' : 'undefined'
|
||||
|
||||
const tagPattern = new RegExp(
|
||||
`${REFERENCE.START}([a-zA-Z_][a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])${REFERENCE.END}`,
|
||||
`${REFERENCE.START}([a-zA-Z_](?:[a-zA-Z0-9_${REFERENCE.PATH_DELIMITER}]*[a-zA-Z0-9_])?)${REFERENCE.END}`,
|
||||
'g'
|
||||
)
|
||||
const tagMatches = resolvedCode.match(tagPattern) || []
|
||||
|
||||
for (const match of tagMatches) {
|
||||
const tagName = match.slice(REFERENCE.START.length, -REFERENCE.END.length).trim()
|
||||
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
|
||||
const blockName = pathParts[0]
|
||||
const fieldPath = pathParts.slice(1)
|
||||
|
||||
// Handle nested paths like "getrecord.response.data" or "function1.response.result"
|
||||
// First try params, then blockData directly, then try with block name mapping
|
||||
let tagValue = getNestedValue(params, tagName) || getNestedValue(blockData, tagName) || ''
|
||||
const result = resolveBlockReference(blockName, fieldPath, {
|
||||
blockNameMapping,
|
||||
blockData,
|
||||
blockOutputSchemas,
|
||||
})
|
||||
|
||||
// If not found and the path starts with a block name, try mapping the block name to ID
|
||||
if (!tagValue && tagName.includes(REFERENCE.PATH_DELIMITER)) {
|
||||
const pathParts = tagName.split(REFERENCE.PATH_DELIMITER)
|
||||
const normalizedBlockName = pathParts[0] // This should already be normalized like "function1"
|
||||
if (!result) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Direct lookup using normalized block name
|
||||
const blockId = blockNameMapping[normalizedBlockName] ?? null
|
||||
let tagValue = result.value
|
||||
|
||||
if (blockId) {
|
||||
const remainingPath = pathParts.slice(1).join('.')
|
||||
const fullPath = `${blockId}.${remainingPath}`
|
||||
tagValue = getNestedValue(blockData, fullPath) || ''
|
||||
if (tagValue === undefined) {
|
||||
resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), undefinedLiteral)
|
||||
continue
|
||||
}
|
||||
|
||||
if (typeof tagValue === 'string') {
|
||||
const trimmed = tagValue.trimStart()
|
||||
if (trimmed.startsWith('{') || trimmed.startsWith('[')) {
|
||||
try {
|
||||
tagValue = JSON.parse(tagValue)
|
||||
} catch {
|
||||
// Keep as string if not valid JSON
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If the value is a stringified JSON, parse it back to object
|
||||
if (
|
||||
typeof tagValue === 'string' &&
|
||||
tagValue.length > 100 &&
|
||||
(tagValue.startsWith('{') || tagValue.startsWith('['))
|
||||
) {
|
||||
try {
|
||||
tagValue = JSON.parse(tagValue)
|
||||
} catch (e) {
|
||||
// Keep as string if parsing fails
|
||||
}
|
||||
}
|
||||
|
||||
    // Instead of injecting large JSON directly, create a variable reference
    const safeVarName = `__tag_${tagName.replace(/[^a-zA-Z0-9_]/g, '_')}`
    const safeVarName = `__tag_${tagName.replace(/_/g, '_1').replace(/\./g, '_0')}`
    contextVariables[safeVarName] = tagValue

    // Replace the template with a variable reference
    resolvedCode = resolvedCode.replace(new RegExp(escapeRegExp(match), 'g'), safeVarName)
  }
|
||||
|
||||
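The new tag-variable name encoding is collision-free because it escapes underscores before mapping dots; a quick check:

```typescript
// Illustration of why the encoding changed (not project code).
const encode = (tagName: string) => `__tag_${tagName.replace(/_/g, '_1').replace(/\./g, '_0')}`

encode('email.subject') // '__tag_email_0subject'
encode('email_subject') // '__tag_email_1subject'

// The previous scheme, tagName.replace(/[^a-zA-Z0-9_]/g, '_'), mapped both references
// to '__tag_email_subject', so distinct tags could overwrite each other's values.
```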
@@ -596,44 +536,31 @@ function resolveTagVariables(
|
||||
*/
|
||||
function resolveCodeVariables(
|
||||
code: string,
|
||||
params: Record<string, any>,
|
||||
params: Record<string, unknown>,
|
||||
envVars: Record<string, string> = {},
|
||||
blockData: Record<string, any> = {},
|
||||
blockData: Record<string, unknown> = {},
|
||||
blockNameMapping: Record<string, string> = {},
|
||||
workflowVariables: Record<string, any> = {}
|
||||
): { resolvedCode: string; contextVariables: Record<string, any> } {
|
||||
blockOutputSchemas: Record<string, OutputSchema> = {},
|
||||
workflowVariables: Record<string, unknown> = {},
|
||||
language = 'javascript'
|
||||
): { resolvedCode: string; contextVariables: Record<string, unknown> } {
|
||||
let resolvedCode = code
|
||||
const contextVariables: Record<string, any> = {}
|
||||
const contextVariables: Record<string, unknown> = {}
|
||||
|
||||
// Resolve workflow variables with <variable.name> syntax first
|
||||
resolvedCode = resolveWorkflowVariables(resolvedCode, workflowVariables, contextVariables)
|
||||
|
||||
// Resolve environment variables with {{var_name}} syntax
|
||||
resolvedCode = resolveEnvironmentVariables(resolvedCode, params, envVars, contextVariables)
|
||||
|
||||
// Resolve tags with <tag_name> syntax (including nested paths like <block.response.data>)
|
||||
resolvedCode = resolveTagVariables(
|
||||
resolvedCode,
|
||||
params,
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
contextVariables
|
||||
blockOutputSchemas,
|
||||
contextVariables,
|
||||
language
|
||||
)
|
||||
|
||||
return { resolvedCode, contextVariables }
|
||||
}
|
||||
|
||||
/**
|
||||
* Get nested value from object using dot notation path
|
||||
*/
|
||||
function getNestedValue(obj: any, path: string): any {
|
||||
if (!obj || !path) return undefined
|
||||
|
||||
return path.split('.').reduce((current, key) => {
|
||||
return current && typeof current === 'object' ? current[key] : undefined
|
||||
}, obj)
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove one trailing newline from stdout
|
||||
* This handles the common case where print() or console.log() adds a trailing \n
|
||||
@@ -666,12 +593,12 @@ export async function POST(req: NextRequest) {
|
||||
envVars = {},
|
||||
blockData = {},
|
||||
blockNameMapping = {},
|
||||
blockOutputSchemas = {},
|
||||
workflowVariables = {},
|
||||
workflowId,
|
||||
isCustomTool = false,
|
||||
} = body
|
||||
|
||||
// Extract internal parameters that shouldn't be passed to the execution context
|
||||
const executionParams = { ...params }
|
||||
executionParams._context = undefined
|
||||
|
||||
@@ -683,21 +610,21 @@ export async function POST(req: NextRequest) {
|
||||
isCustomTool,
|
||||
})
|
||||
|
||||
// Resolve variables in the code with workflow environment variables
|
||||
const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
|
||||
|
||||
const codeResolution = resolveCodeVariables(
|
||||
code,
|
||||
executionParams,
|
||||
envVars,
|
||||
blockData,
|
||||
blockNameMapping,
|
||||
workflowVariables
|
||||
blockOutputSchemas,
|
||||
workflowVariables,
|
||||
lang
|
||||
)
|
||||
resolvedCode = codeResolution.resolvedCode
|
||||
const contextVariables = codeResolution.contextVariables
|
||||
|
||||
const lang = isValidCodeLanguage(language) ? language : DEFAULT_CODE_LANGUAGE
|
||||
|
||||
// Extract imports once for JavaScript code (reuse later to avoid double extraction)
|
||||
let jsImports = ''
|
||||
let jsRemainingCode = resolvedCode
|
||||
let hasImports = false
|
||||
@@ -707,31 +634,22 @@ export async function POST(req: NextRequest) {
|
||||
jsImports = extractionResult.imports
|
||||
jsRemainingCode = extractionResult.remainingCode
|
||||
|
||||
// Check for ES6 imports or CommonJS require statements
|
||||
// ES6 imports are extracted by the TypeScript parser
|
||||
// Also check for require() calls which indicate external dependencies
|
||||
const hasRequireStatements = /require\s*\(\s*['"`]/.test(resolvedCode)
|
||||
hasImports = jsImports.trim().length > 0 || hasRequireStatements
|
||||
}
|
||||
|
||||
// Python always requires E2B
|
||||
if (lang === CodeLanguage.Python && !isE2bEnabled) {
|
||||
throw new Error(
|
||||
'Python execution requires E2B to be enabled. Please contact your administrator to enable E2B, or use JavaScript instead.'
|
||||
)
|
||||
}
|
||||
|
||||
// JavaScript with imports requires E2B
|
||||
if (lang === CodeLanguage.JavaScript && hasImports && !isE2bEnabled) {
|
||||
throw new Error(
|
||||
'JavaScript code with import statements requires E2B to be enabled. Please remove the import statements, or contact your administrator to enable E2B.'
|
||||
)
|
||||
}
|
||||
|
||||
// Use E2B if:
|
||||
// - E2B is enabled AND
|
||||
// - Not a custom tool AND
|
||||
// - (Python OR JavaScript with imports)
|
||||
const useE2B =
|
||||
isE2bEnabled &&
|
||||
!isCustomTool &&
|
||||
@@ -744,13 +662,10 @@ export async function POST(req: NextRequest) {
|
||||
language: lang,
|
||||
})
|
||||
let prologue = ''
|
||||
const epilogue = ''
|
||||
|
||||
if (lang === CodeLanguage.JavaScript) {
|
||||
// Track prologue lines for error adjustment
|
||||
let prologueLineCount = 0
|
||||
|
||||
// Reuse the imports we already extracted earlier
|
||||
const imports = jsImports
|
||||
const remainingCode = jsRemainingCode
|
||||
|
||||
@@ -765,7 +680,11 @@ export async function POST(req: NextRequest) {
      prologue += `const environmentVariables = JSON.parse(${JSON.stringify(JSON.stringify(envVars))});\n`
      prologueLineCount++
      for (const [k, v] of Object.entries(contextVariables)) {
        prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
        if (v === undefined) {
          prologue += `const ${k} = undefined;\n`
        } else {
          prologue += `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
        }
        prologueLineCount++
      }
|
||||
|
||||
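Concretely, the special-casing above changes what gets emitted into the prologue when a context variable is undefined; an illustration of the generated line:

```typescript
// What the loop above emits for one entry (illustration).
const k = '__tag_email'
const v: unknown = { subject: 'Hi' }

const line =
  v === undefined
    ? `const ${k} = undefined;\n`
    : `const ${k} = JSON.parse(${JSON.stringify(JSON.stringify(v))});\n`
// line === 'const __tag_email = JSON.parse("{\"subject\":\"Hi\"}");\n'

// Previously an undefined value produced `const __tag_email = JSON.parse(undefined);`
// (JSON.stringify(undefined) is undefined), which throws as soon as the wrapper runs.
```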
@@ -782,7 +701,7 @@ export async function POST(req: NextRequest) {
|
||||
' }',
|
||||
'})();',
|
||||
].join('\n')
|
||||
const codeForE2B = importSection + prologue + wrapped + epilogue
|
||||
const codeForE2B = importSection + prologue + wrapped
|
||||
|
||||
const execStart = Date.now()
|
||||
const {
|
||||
@@ -804,7 +723,6 @@ export async function POST(req: NextRequest) {
|
||||
error: e2bError,
|
||||
})
|
||||
|
||||
// If there was an execution error, format it properly
|
||||
if (e2bError) {
|
||||
const { formattedError, cleanedOutput } = formatE2BError(
|
||||
e2bError,
|
||||
@@ -828,7 +746,7 @@ export async function POST(req: NextRequest) {
|
||||
output: { result: e2bResult ?? null, stdout: cleanStdout(stdout), executionTime },
|
||||
})
|
||||
}
|
||||
// Track prologue lines for error adjustment
|
||||
|
||||
let prologueLineCount = 0
|
||||
prologue += 'import json\n'
|
||||
prologueLineCount++
|
||||
@@ -837,7 +755,11 @@ export async function POST(req: NextRequest) {
|
||||
prologue += `environmentVariables = json.loads(${JSON.stringify(JSON.stringify(envVars))})\n`
|
||||
prologueLineCount++
|
||||
for (const [k, v] of Object.entries(contextVariables)) {
|
||||
prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
|
||||
if (v === undefined) {
|
||||
prologue += `${k} = None\n`
|
||||
} else {
|
||||
prologue += `${k} = json.loads(${JSON.stringify(JSON.stringify(v))})\n`
|
||||
}
|
||||
prologueLineCount++
|
||||
}
|
||||
const wrapped = [
|
||||
@@ -846,7 +768,7 @@ export async function POST(req: NextRequest) {
|
||||
'__sim_result__ = __sim_main__()',
|
||||
"print('__SIM_RESULT__=' + json.dumps(__sim_result__))",
|
||||
].join('\n')
|
||||
const codeForE2B = prologue + wrapped + epilogue
|
||||
const codeForE2B = prologue + wrapped
|
||||
|
||||
const execStart = Date.now()
|
||||
const {
|
||||
@@ -868,7 +790,6 @@ export async function POST(req: NextRequest) {
|
||||
error: e2bError,
|
||||
})
|
||||
|
||||
// If there was an execution error, format it properly
|
||||
if (e2bError) {
|
||||
const { formattedError, cleanedOutput } = formatE2BError(
|
||||
e2bError,
|
||||
@@ -897,7 +818,6 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
const wrapperLines = ['(async () => {', ' try {']
|
||||
if (isCustomTool) {
|
||||
wrapperLines.push(' // For custom tools, make parameters directly accessible')
|
||||
Object.keys(executionParams).forEach((key) => {
|
||||
wrapperLines.push(` const ${key} = params.${key};`)
|
||||
})
|
||||
@@ -931,12 +851,10 @@ export async function POST(req: NextRequest) {
|
||||
})
|
||||
|
||||
const ivmError = isolatedResult.error
|
||||
// Adjust line number for prepended param destructuring in custom tools
|
||||
let adjustedLine = ivmError.line
|
||||
let adjustedLineContent = ivmError.lineContent
|
||||
if (prependedLineCount > 0 && ivmError.line !== undefined) {
|
||||
adjustedLine = Math.max(1, ivmError.line - prependedLineCount)
|
||||
// Get line content from original user code, not the prepended code
|
||||
const codeLines = resolvedCode.split('\n')
|
||||
if (adjustedLine <= codeLines.length) {
|
||||
adjustedLineContent = codeLines[adjustedLine - 1]?.trim()
|
||||
|
||||
@@ -157,7 +157,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
|
||||
'kb-123',
|
||||
{
|
||||
includeDisabled: false,
|
||||
enabledFilter: undefined,
|
||||
search: undefined,
|
||||
limit: 50,
|
||||
offset: 0,
|
||||
@@ -166,7 +166,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('should filter disabled documents by default', async () => {
|
||||
it('should return documents with default filter', async () => {
|
||||
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
|
||||
const { getDocuments } = await import('@/lib/knowledge/documents/service')
|
||||
|
||||
@@ -194,7 +194,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
|
||||
'kb-123',
|
||||
{
|
||||
includeDisabled: false,
|
||||
enabledFilter: undefined,
|
||||
search: undefined,
|
||||
limit: 50,
|
||||
offset: 0,
|
||||
@@ -203,7 +203,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('should include disabled documents when requested', async () => {
|
||||
it('should filter documents by enabled status when requested', async () => {
|
||||
const { checkKnowledgeBaseAccess } = await import('@/app/api/knowledge/utils')
|
||||
const { getDocuments } = await import('@/lib/knowledge/documents/service')
|
||||
|
||||
@@ -223,7 +223,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
},
|
||||
})
|
||||
|
||||
const url = 'http://localhost:3000/api/knowledge/kb-123/documents?includeDisabled=true'
|
||||
const url = 'http://localhost:3000/api/knowledge/kb-123/documents?enabledFilter=disabled'
|
||||
const req = new Request(url, { method: 'GET' }) as any
|
||||
|
||||
const { GET } = await import('@/app/api/knowledge/[id]/documents/route')
|
||||
@@ -233,7 +233,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(getDocuments)).toHaveBeenCalledWith(
|
||||
'kb-123',
|
||||
{
|
||||
includeDisabled: true,
|
||||
enabledFilter: 'disabled',
|
||||
search: undefined,
|
||||
limit: 50,
|
||||
offset: 0,
|
||||
@@ -361,8 +361,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(createSingleDocument)).toHaveBeenCalledWith(
|
||||
validDocumentData,
|
||||
'kb-123',
|
||||
expect.any(String),
|
||||
'user-123'
|
||||
expect.any(String)
|
||||
)
|
||||
})
|
||||
|
||||
@@ -470,8 +469,7 @@ describe('Knowledge Base Documents API Route', () => {
|
||||
expect(vi.mocked(createDocumentRecords)).toHaveBeenCalledWith(
|
||||
validBulkData.documents,
|
||||
'kb-123',
|
||||
expect.any(String),
|
||||
'user-123'
|
||||
expect.any(String)
|
||||
)
|
||||
expect(vi.mocked(processDocumentsWithQueue)).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
@@ -5,6 +5,7 @@ import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import {
|
||||
bulkDocumentOperation,
|
||||
bulkDocumentOperationByFilter,
|
||||
createDocumentRecords,
|
||||
createSingleDocument,
|
||||
getDocuments,
|
||||
@@ -57,13 +58,20 @@ const BulkCreateDocumentsSchema = z.object({
|
||||
bulk: z.literal(true),
|
||||
})
|
||||
|
||||
const BulkUpdateDocumentsSchema = z.object({
|
||||
operation: z.enum(['enable', 'disable', 'delete']),
|
||||
documentIds: z
|
||||
.array(z.string())
|
||||
.min(1, 'At least one document ID is required')
|
||||
.max(100, 'Cannot operate on more than 100 documents at once'),
|
||||
})
|
||||
const BulkUpdateDocumentsSchema = z
|
||||
.object({
|
||||
operation: z.enum(['enable', 'disable', 'delete']),
|
||||
documentIds: z
|
||||
.array(z.string())
|
||||
.min(1, 'At least one document ID is required')
|
||||
.max(100, 'Cannot operate on more than 100 documents at once')
|
||||
.optional(),
|
||||
selectAll: z.boolean().optional(),
|
||||
enabledFilter: z.enum(['all', 'enabled', 'disabled']).optional(),
|
||||
})
|
||||
.refine((data) => data.selectAll || (data.documentIds && data.documentIds.length > 0), {
|
||||
message: 'Either selectAll must be true or documentIds must be provided',
|
||||
})
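For reference, a quick sketch of request bodies that the refined schema above accepts and rejects; the operation, IDs, and filter values are placeholders.

```ts
// Hypothetical PATCH payloads for the refined BulkUpdateDocumentsSchema above.
const byIds = { operation: 'disable', documentIds: ['doc-1', 'doc-2'] }
const byFilter = { operation: 'delete', selectAll: true, enabledFilter: 'disabled' }

BulkUpdateDocumentsSchema.parse(byIds) // passes: documentIds is a non-empty array
BulkUpdateDocumentsSchema.parse(byFilter) // passes: selectAll satisfies the refine check
BulkUpdateDocumentsSchema.parse({ operation: 'enable' }) // throws: neither selectAll nor documentIds
```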
|
||||
|
||||
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
@@ -90,14 +98,17 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
}
|
||||
|
||||
const url = new URL(req.url)
|
||||
const includeDisabled = url.searchParams.get('includeDisabled') === 'true'
|
||||
const enabledFilter = url.searchParams.get('enabledFilter') as
|
||||
| 'all'
|
||||
| 'enabled'
|
||||
| 'disabled'
|
||||
| null
|
||||
const search = url.searchParams.get('search') || undefined
|
||||
const limit = Number.parseInt(url.searchParams.get('limit') || '50')
|
||||
const offset = Number.parseInt(url.searchParams.get('offset') || '0')
|
||||
const sortByParam = url.searchParams.get('sortBy')
|
||||
const sortOrderParam = url.searchParams.get('sortOrder')
|
||||
|
||||
// Validate sort parameters
|
||||
const validSortFields: DocumentSortField[] = [
|
||||
'filename',
|
||||
'fileSize',
|
||||
@@ -105,6 +116,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
'chunkCount',
|
||||
'uploadedAt',
|
||||
'processingStatus',
|
||||
'enabled',
|
||||
]
|
||||
const validSortOrders: SortOrder[] = ['asc', 'desc']
|
||||
|
||||
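As a rough illustration of the new query contract (the knowledge base ID and other values are placeholders), a client now names the filter explicitly rather than passing includeDisabled:

```ts
// Sketch of a client call using the new enabledFilter query parameter.
const res = await fetch(
  '/api/knowledge/kb-123/documents?enabledFilter=disabled&search=invoice&limit=50&offset=0'
)
const body = await res.json()
```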
@@ -120,7 +132,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
const result = await getDocuments(
|
||||
knowledgeBaseId,
|
||||
{
|
||||
includeDisabled,
|
||||
enabledFilter: enabledFilter || undefined,
|
||||
search,
|
||||
limit,
|
||||
offset,
|
||||
@@ -190,8 +202,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
const createdDocuments = await createDocumentRecords(
|
||||
validatedData.documents,
|
||||
knowledgeBaseId,
|
||||
requestId,
|
||||
userId
|
||||
requestId
|
||||
)
|
||||
|
||||
logger.info(
|
||||
@@ -250,16 +261,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
throw validationError
|
||||
}
|
||||
} else {
|
||||
// Handle single document creation
|
||||
try {
|
||||
const validatedData = CreateDocumentSchema.parse(body)
|
||||
|
||||
const newDocument = await createSingleDocument(
|
||||
validatedData,
|
||||
knowledgeBaseId,
|
||||
requestId,
|
||||
userId
|
||||
)
|
||||
const newDocument = await createSingleDocument(validatedData, knowledgeBaseId, requestId)
|
||||
|
||||
try {
|
||||
const { PlatformEvents } = await import('@/lib/core/telemetry')
|
||||
@@ -294,7 +299,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error creating document`, error)
|
||||
|
||||
// Check if it's a storage limit error
|
||||
const errorMessage = error instanceof Error ? error.message : 'Failed to create document'
|
||||
const isStorageLimitError =
|
||||
errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit')
|
||||
@@ -331,16 +335,22 @@ export async function PATCH(req: NextRequest, { params }: { params: Promise<{ id
|
||||
|
||||
try {
|
||||
const validatedData = BulkUpdateDocumentsSchema.parse(body)
|
||||
const { operation, documentIds } = validatedData
|
||||
const { operation, documentIds, selectAll, enabledFilter } = validatedData
|
||||
|
||||
try {
|
||||
const result = await bulkDocumentOperation(
|
||||
knowledgeBaseId,
|
||||
operation,
|
||||
documentIds,
|
||||
requestId,
|
||||
session.user.id
|
||||
)
|
||||
let result
|
||||
if (selectAll) {
|
||||
result = await bulkDocumentOperationByFilter(
|
||||
knowledgeBaseId,
|
||||
operation,
|
||||
enabledFilter,
|
||||
requestId
|
||||
)
|
||||
} else if (documentIds && documentIds.length > 0) {
|
||||
result = await bulkDocumentOperation(knowledgeBaseId, operation, documentIds, requestId)
|
||||
} else {
|
||||
return NextResponse.json({ error: 'No documents specified' }, { status: 400 })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
|
||||
@@ -1,395 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateInternalToken } from '@/lib/auth/internal'
|
||||
import { isDev } from '@/lib/core/config/feature-flags'
|
||||
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { executeTool } from '@/tools'
|
||||
import { getTool, validateRequiredParametersAfterMerge } from '@/tools/utils'
|
||||
|
||||
const logger = createLogger('ProxyAPI')
|
||||
|
||||
const proxyPostSchema = z.object({
|
||||
toolId: z.string().min(1, 'toolId is required'),
|
||||
params: z.record(z.any()).optional().default({}),
|
||||
executionContext: z
|
||||
.object({
|
||||
workflowId: z.string().optional(),
|
||||
workspaceId: z.string().optional(),
|
||||
executionId: z.string().optional(),
|
||||
userId: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
* Creates a minimal set of default headers for proxy requests
|
||||
* @returns Record of HTTP headers
|
||||
*/
|
||||
const getProxyHeaders = (): Record<string, string> => {
|
||||
return {
|
||||
'User-Agent':
|
||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36',
|
||||
Accept: '*/*',
|
||||
'Accept-Encoding': 'gzip, deflate, br',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a response with CORS headers
|
||||
* @param responseData Response data object
|
||||
* @param status HTTP status code
|
||||
* @returns NextResponse with CORS headers
|
||||
*/
|
||||
const formatResponse = (responseData: any, status = 200) => {
|
||||
return NextResponse.json(responseData, {
|
||||
status,
|
||||
headers: {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
|
||||
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an error response with consistent formatting
|
||||
* @param error Error object or message
|
||||
* @param status HTTP status code
|
||||
* @param additionalData Additional data to include in the response
|
||||
* @returns Formatted error response
|
||||
*/
|
||||
const createErrorResponse = (error: any, status = 500, additionalData = {}) => {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
const errorStack = error instanceof Error ? error.stack : undefined
|
||||
|
||||
logger.error('Creating error response', {
|
||||
errorMessage,
|
||||
status,
|
||||
stack: isDev ? errorStack : undefined,
|
||||
})
|
||||
|
||||
return formatResponse(
|
||||
{
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
stack: isDev ? errorStack : undefined,
|
||||
...additionalData,
|
||||
},
|
||||
status
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* GET handler for direct external URL proxying
|
||||
* This allows for GET requests to external APIs
|
||||
*/
|
||||
export async function GET(request: Request) {
|
||||
const url = new URL(request.url)
|
||||
const targetUrl = url.searchParams.get('url')
|
||||
const requestId = generateRequestId()
|
||||
|
||||
// Vault download proxy: /api/proxy?vaultDownload=1&bucket=...&object=...&credentialId=...
|
||||
const vaultDownload = url.searchParams.get('vaultDownload')
|
||||
if (vaultDownload === '1') {
|
||||
try {
|
||||
const bucket = url.searchParams.get('bucket')
|
||||
const objectParam = url.searchParams.get('object')
|
||||
const credentialId = url.searchParams.get('credentialId')
|
||||
|
||||
if (!bucket || !objectParam || !credentialId) {
|
||||
return createErrorResponse('Missing bucket, object, or credentialId', 400)
|
||||
}
|
||||
|
||||
// Fetch access token using existing token API
|
||||
const baseUrl = new URL(getBaseUrl())
|
||||
const tokenUrl = new URL('/api/auth/oauth/token', baseUrl)
|
||||
|
||||
// Build headers: forward session cookies if present; include internal auth for server-side
|
||||
const tokenHeaders: Record<string, string> = { 'Content-Type': 'application/json' }
|
||||
const incomingCookie = request.headers.get('cookie')
|
||||
if (incomingCookie) tokenHeaders.Cookie = incomingCookie
|
||||
try {
|
||||
const internalToken = await generateInternalToken()
|
||||
tokenHeaders.Authorization = `Bearer ${internalToken}`
|
||||
} catch (_e) {
|
||||
// best-effort internal auth
|
||||
}
|
||||
|
||||
// Optional workflow context for collaboration auth
|
||||
const workflowId = url.searchParams.get('workflowId') || undefined
|
||||
|
||||
const tokenRes = await fetch(tokenUrl.toString(), {
|
||||
method: 'POST',
|
||||
headers: tokenHeaders,
|
||||
body: JSON.stringify({ credentialId, workflowId }),
|
||||
})
|
||||
|
||||
if (!tokenRes.ok) {
|
||||
const err = await tokenRes.text()
|
||||
return createErrorResponse(`Failed to fetch access token: ${err}`, 401)
|
||||
}
|
||||
|
||||
const tokenJson = await tokenRes.json()
|
||||
const accessToken = tokenJson.accessToken
|
||||
if (!accessToken) {
|
||||
return createErrorResponse('No access token available', 401)
|
||||
}
|
||||
|
||||
// Avoid double-encoding: incoming object may already be percent-encoded
|
||||
const objectDecoded = decodeURIComponent(objectParam)
|
||||
const gcsUrl = `https://storage.googleapis.com/storage/v1/b/${encodeURIComponent(
|
||||
bucket
|
||||
)}/o/${encodeURIComponent(objectDecoded)}?alt=media`
|
||||
|
||||
const fileRes = await fetch(gcsUrl, {
|
||||
headers: { Authorization: `Bearer ${accessToken}` },
|
||||
})
|
||||
|
||||
if (!fileRes.ok) {
|
||||
const errText = await fileRes.text()
|
||||
return createErrorResponse(errText || 'Failed to download file', fileRes.status)
|
||||
}
|
||||
|
||||
const headers = new Headers()
|
||||
fileRes.headers.forEach((v, k) => headers.set(k, v))
|
||||
return new NextResponse(fileRes.body, { status: 200, headers })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Vault download proxy failed`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return createErrorResponse('Vault download failed', 500)
|
||||
}
|
||||
}
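A hedged sketch of how a caller could exercise the vault-download branch described above; the bucket, object, and credentialId values are placeholders, not real identifiers.

```ts
// Hypothetical client call for the vault download proxy (placeholder values).
const qs = new URLSearchParams({
  vaultDownload: '1',
  bucket: 'my-bucket',
  object: 'exports/report.pdf',
  credentialId: 'cred-123',
})
const fileRes = await fetch(`/api/proxy?${qs.toString()}`)
const blob = await fileRes.blob()
```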
|
||||
|
||||
if (!targetUrl) {
|
||||
logger.error(`[${requestId}] Missing 'url' parameter`)
|
||||
return createErrorResponse("Missing 'url' parameter", 400)
|
||||
}
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(targetUrl)
|
||||
if (!urlValidation.isValid) {
|
||||
logger.warn(`[${requestId}] Blocked proxy request`, {
|
||||
url: targetUrl.substring(0, 100),
|
||||
error: urlValidation.error,
|
||||
})
|
||||
return createErrorResponse(urlValidation.error || 'Invalid URL', 403)
|
||||
}
|
||||
|
||||
const method = url.searchParams.get('method') || 'GET'
|
||||
|
||||
const bodyParam = url.searchParams.get('body')
|
||||
let body: string | undefined
|
||||
|
||||
if (bodyParam && ['POST', 'PUT', 'PATCH'].includes(method.toUpperCase())) {
|
||||
try {
|
||||
body = decodeURIComponent(bodyParam)
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Failed to decode body parameter`, error)
|
||||
}
|
||||
}
|
||||
|
||||
const customHeaders: Record<string, string> = {}
|
||||
|
||||
for (const [key, value] of url.searchParams.entries()) {
|
||||
if (key.startsWith('header.')) {
|
||||
const headerName = key.substring(7)
|
||||
customHeaders[headerName] = value
|
||||
}
|
||||
}
|
||||
|
||||
if (body && !customHeaders['Content-Type']) {
|
||||
customHeaders['Content-Type'] = 'application/json'
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Proxying ${method} request to: ${targetUrl}`)
|
||||
|
||||
try {
|
||||
const pinnedUrl = createPinnedUrl(targetUrl, urlValidation.resolvedIP!)
|
||||
const response = await fetch(pinnedUrl, {
|
||||
method: method,
|
||||
headers: {
|
||||
...getProxyHeaders(),
|
||||
...customHeaders,
|
||||
Host: urlValidation.originalHostname!,
|
||||
},
|
||||
body: body || undefined,
|
||||
})
|
||||
|
||||
const contentType = response.headers.get('content-type') || ''
|
||||
let data
|
||||
|
||||
if (contentType.includes('application/json')) {
|
||||
data = await response.json()
|
||||
} else {
|
||||
data = await response.text()
|
||||
}
|
||||
|
||||
const errorMessage = !response.ok
|
||||
? data && typeof data === 'object' && data.error
|
||||
? `${data.error.message || JSON.stringify(data.error)}`
|
||||
: response.statusText || `HTTP error ${response.status}`
|
||||
: undefined
|
||||
|
||||
if (!response.ok) {
|
||||
logger.error(`[${requestId}] External API error: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
return formatResponse({
|
||||
success: response.ok,
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
headers: Object.fromEntries(response.headers.entries()),
|
||||
data,
|
||||
error: errorMessage,
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Proxy GET request failed`, {
|
||||
url: targetUrl,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return createErrorResponse(error)
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
const startTime = new Date()
|
||||
const startTimeISO = startTime.toISOString()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!authResult.success) {
|
||||
logger.error(`[${requestId}] Authentication failed for proxy:`, authResult.error)
|
||||
return createErrorResponse('Unauthorized', 401)
|
||||
}
|
||||
|
||||
let requestBody
|
||||
try {
|
||||
requestBody = await request.json()
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse request body`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
})
|
||||
throw new Error('Invalid JSON in request body')
|
||||
}
|
||||
|
||||
const validationResult = proxyPostSchema.safeParse(requestBody)
|
||||
if (!validationResult.success) {
|
||||
logger.error(`[${requestId}] Request validation failed`, {
|
||||
errors: validationResult.error.errors,
|
||||
})
|
||||
const errorMessages = validationResult.error.errors
|
||||
.map((err) => `${err.path.join('.')}: ${err.message}`)
|
||||
.join(', ')
|
||||
throw new Error(`Validation failed: ${errorMessages}`)
|
||||
}
|
||||
|
||||
const { toolId, params } = validationResult.data
|
||||
|
||||
logger.info(`[${requestId}] Processing tool: ${toolId}`)
|
||||
|
||||
const tool = getTool(toolId)
|
||||
|
||||
if (!tool) {
|
||||
logger.error(`[${requestId}] Tool not found: ${toolId}`)
|
||||
throw new Error(`Tool not found: ${toolId}`)
|
||||
}
|
||||
|
||||
try {
|
||||
validateRequiredParametersAfterMerge(toolId, tool, params)
|
||||
} catch (validationError) {
|
||||
logger.warn(`[${requestId}] Tool validation failed for ${toolId}`, {
|
||||
error: validationError instanceof Error ? validationError.message : String(validationError),
|
||||
})
|
||||
|
||||
const endTime = new Date()
|
||||
const endTimeISO = endTime.toISOString()
|
||||
const duration = endTime.getTime() - startTime.getTime()
|
||||
|
||||
return createErrorResponse(validationError, 400, {
|
||||
startTime: startTimeISO,
|
||||
endTime: endTimeISO,
|
||||
duration,
|
||||
})
|
||||
}
|
||||
|
||||
const hasFileOutputs =
|
||||
tool.outputs &&
|
||||
Object.values(tool.outputs).some(
|
||||
(output) => output.type === 'file' || output.type === 'file[]'
|
||||
)
|
||||
|
||||
const result = await executeTool(
|
||||
toolId,
|
||||
params,
|
||||
true, // skipProxy (we're already in the proxy)
|
||||
!hasFileOutputs, // skipPostProcess (don't skip if tool has file outputs)
|
||||
undefined // execution context is not available in proxy context
|
||||
)
|
||||
|
||||
if (!result.success) {
|
||||
logger.warn(`[${requestId}] Tool execution failed for ${toolId}`, {
|
||||
error: result.error || 'Unknown error',
|
||||
})
|
||||
|
||||
throw new Error(result.error || 'Tool execution failed')
|
||||
}
|
||||
|
||||
const endTime = new Date()
|
||||
const endTimeISO = endTime.toISOString()
|
||||
const duration = endTime.getTime() - startTime.getTime()
|
||||
|
||||
const responseWithTimingData = {
|
||||
...result,
|
||||
startTime: startTimeISO,
|
||||
endTime: endTimeISO,
|
||||
duration,
|
||||
timing: {
|
||||
startTime: startTimeISO,
|
||||
endTime: endTimeISO,
|
||||
duration,
|
||||
},
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Tool executed successfully: ${toolId} (${duration}ms)`)
|
||||
|
||||
return formatResponse(responseWithTimingData)
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Proxy request failed`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
name: error instanceof Error ? error.name : undefined,
|
||||
})
|
||||
|
||||
const endTime = new Date()
|
||||
const endTimeISO = endTime.toISOString()
|
||||
const duration = endTime.getTime() - startTime.getTime()
|
||||
|
||||
return createErrorResponse(error, 500, {
|
||||
startTime: startTimeISO,
|
||||
endTime: endTimeISO,
|
||||
duration,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export async function OPTIONS() {
|
||||
return new NextResponse(null, {
|
||||
status: 204,
|
||||
headers: {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
|
||||
'Access-Control-Allow-Headers': 'Content-Type, Authorization',
|
||||
'Access-Control-Max-Age': '86400',
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { StorageService } from '@/lib/uploads'
|
||||
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
|
||||
import {
|
||||
extractStorageKey,
|
||||
inferContextFromKey,
|
||||
isInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -47,13 +51,13 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info(`[${requestId}] Mistral parse request`, {
|
||||
filePath: validatedData.filePath,
|
||||
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
|
||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
||||
userId,
|
||||
})
|
||||
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
if (validatedData.filePath?.includes('/api/files/serve/')) {
|
||||
if (isInternalFileUrl(validatedData.filePath)) {
|
||||
try {
|
||||
const storageKey = extractStorageKey(validatedData.filePath)
|
||||
|
||||
|
||||
@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { StorageService } from '@/lib/uploads'
|
||||
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
|
||||
import {
|
||||
extractStorageKey,
|
||||
inferContextFromKey,
|
||||
isInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -48,13 +52,13 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info(`[${requestId}] Pulse parse request`, {
|
||||
filePath: validatedData.filePath,
|
||||
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
|
||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
||||
userId,
|
||||
})
|
||||
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
if (validatedData.filePath?.includes('/api/files/serve/')) {
|
||||
if (isInternalFileUrl(validatedData.filePath)) {
|
||||
try {
|
||||
const storageKey = extractStorageKey(validatedData.filePath)
|
||||
const context = inferContextFromKey(storageKey)
|
||||
|
||||
@@ -5,7 +5,11 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { StorageService } from '@/lib/uploads'
|
||||
import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
|
||||
import {
|
||||
extractStorageKey,
|
||||
inferContextFromKey,
|
||||
isInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -44,13 +48,13 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info(`[${requestId}] Reducto parse request`, {
|
||||
filePath: validatedData.filePath,
|
||||
isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
|
||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
||||
userId,
|
||||
})
|
||||
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
if (validatedData.filePath?.includes('/api/files/serve/')) {
|
||||
if (isInternalFileUrl(validatedData.filePath)) {
|
||||
try {
|
||||
const storageKey = extractStorageKey(validatedData.filePath)
|
||||
const context = inferContextFromKey(storageKey)
|
||||
|
||||
@@ -79,11 +79,13 @@ export async function POST(request: NextRequest) {
|
||||
// Generate public URL for destination (properly encode the destination key)
|
||||
const encodedDestKey = validatedData.destinationKey.split('/').map(encodeURIComponent).join('/')
|
||||
const url = `https://${validatedData.destinationBucket}.s3.${validatedData.region}.amazonaws.com/${encodedDestKey}`
|
||||
const uri = `s3://${validatedData.destinationBucket}/${validatedData.destinationKey}`
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
url,
|
||||
uri,
|
||||
copySourceVersionId: result.CopySourceVersionId,
|
||||
versionId: result.VersionId,
|
||||
etag: result.CopyObjectResult?.ETag,
|
||||
|
||||
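For illustration only, reproducing the url/uri construction above with placeholder bucket, region, and key values:

```ts
// Same construction as above, with placeholder inputs.
const destinationBucket = 'my-bucket'
const region = 'us-east-1'
const destinationKey = 'reports/Q1 2024.pdf'
const encodedDestKey = destinationKey.split('/').map(encodeURIComponent).join('/')
const url = `https://${destinationBucket}.s3.${region}.amazonaws.com/${encodedDestKey}`
// -> 'https://my-bucket.s3.us-east-1.amazonaws.com/reports/Q1%202024.pdf'
const uri = `s3://${destinationBucket}/${destinationKey}`
// -> 's3://my-bucket/reports/Q1 2024.pdf' (key left unencoded in the uri form)
```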
@@ -117,11 +117,13 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
const encodedKey = validatedData.objectKey.split('/').map(encodeURIComponent).join('/')
|
||||
const url = `https://${validatedData.bucketName}.s3.${validatedData.region}.amazonaws.com/${encodedKey}`
|
||||
const uri = `s3://${validatedData.bucketName}/${validatedData.objectKey}`
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
url,
|
||||
uri,
|
||||
etag: result.ETag,
|
||||
location: url,
|
||||
key: validatedData.objectKey,
|
||||
|
||||
apps/sim/app/api/tools/textract/parse/route.ts (Normal file, 637 lines)
@@ -0,0 +1,637 @@
|
||||
import crypto from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
validateAwsRegion,
|
||||
validateExternalUrl,
|
||||
validateS3BucketName,
|
||||
} from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { StorageService } from '@/lib/uploads'
|
||||
import {
|
||||
extractStorageKey,
|
||||
inferContextFromKey,
|
||||
isInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
|
||||
|
||||
const logger = createLogger('TextractParseAPI')
|
||||
|
||||
const QuerySchema = z.object({
|
||||
Text: z.string().min(1),
|
||||
Alias: z.string().optional(),
|
||||
Pages: z.array(z.string()).optional(),
|
||||
})
|
||||
|
||||
const TextractParseSchema = z
|
||||
.object({
|
||||
accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
|
||||
secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
|
||||
region: z.string().min(1, 'AWS region is required'),
|
||||
processingMode: z.enum(['sync', 'async']).optional().default('sync'),
|
||||
filePath: z.string().optional(),
|
||||
s3Uri: z.string().optional(),
|
||||
featureTypes: z
|
||||
.array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
|
||||
.optional(),
|
||||
queries: z.array(QuerySchema).optional(),
|
||||
})
|
||||
.superRefine((data, ctx) => {
|
||||
const regionValidation = validateAwsRegion(data.region, 'AWS region')
|
||||
if (!regionValidation.isValid) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: regionValidation.error,
|
||||
path: ['region'],
|
||||
})
|
||||
}
|
||||
})
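Two illustrative request bodies matching the schema above, one per processing mode; the credentials and document locations are placeholders.

```ts
// Single-page (sync) mode: the document is fetched from filePath and sent inline as bytes.
const singlePage = {
  accessKeyId: 'AKIA-placeholder',
  secretAccessKey: 'placeholder-secret',
  region: 'us-east-1',
  processingMode: 'sync',
  filePath: 'https://example.com/scan.png',
  featureTypes: ['FORMS'],
}

// Multi-page (async) mode: the document must already live in S3.
const multiPage = {
  accessKeyId: 'AKIA-placeholder',
  secretAccessKey: 'placeholder-secret',
  region: 'us-east-1',
  processingMode: 'async',
  s3Uri: 's3://my-bucket/docs/report.pdf',
  featureTypes: ['TABLES', 'QUERIES'],
  queries: [{ Text: 'What is the invoice total?' }],
}
```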
|
||||
|
||||
function getSignatureKey(
|
||||
key: string,
|
||||
dateStamp: string,
|
||||
regionName: string,
|
||||
serviceName: string
|
||||
): Buffer {
|
||||
const kDate = crypto.createHmac('sha256', `AWS4${key}`).update(dateStamp).digest()
|
||||
const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest()
|
||||
const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest()
|
||||
const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest()
|
||||
return kSigning
|
||||
}
|
||||
|
||||
function signAwsRequest(
|
||||
method: string,
|
||||
host: string,
|
||||
uri: string,
|
||||
body: string,
|
||||
accessKeyId: string,
|
||||
secretAccessKey: string,
|
||||
region: string,
|
||||
service: string,
|
||||
amzTarget: string
|
||||
): Record<string, string> {
|
||||
const date = new Date()
|
||||
const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '')
|
||||
const dateStamp = amzDate.slice(0, 8)
|
||||
|
||||
const payloadHash = crypto.createHash('sha256').update(body).digest('hex')
|
||||
|
||||
const canonicalHeaders =
|
||||
`content-type:application/x-amz-json-1.1\n` +
|
||||
`host:${host}\n` +
|
||||
`x-amz-date:${amzDate}\n` +
|
||||
`x-amz-target:${amzTarget}\n`
|
||||
|
||||
const signedHeaders = 'content-type;host;x-amz-date;x-amz-target'
|
||||
|
||||
const canonicalRequest = `${method}\n${uri}\n\n${canonicalHeaders}\n${signedHeaders}\n${payloadHash}`
|
||||
|
||||
const algorithm = 'AWS4-HMAC-SHA256'
|
||||
const credentialScope = `${dateStamp}/${region}/${service}/aws4_request`
|
||||
const stringToSign = `${algorithm}\n${amzDate}\n${credentialScope}\n${crypto.createHash('sha256').update(canonicalRequest).digest('hex')}`
|
||||
|
||||
const signingKey = getSignatureKey(secretAccessKey, dateStamp, region, service)
|
||||
const signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex')
|
||||
|
||||
const authorizationHeader = `${algorithm} Credential=${accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`
|
||||
|
||||
return {
|
||||
'Content-Type': 'application/x-amz-json-1.1',
|
||||
Host: host,
|
||||
'X-Amz-Date': amzDate,
|
||||
'X-Amz-Target': amzTarget,
|
||||
Authorization: authorizationHeader,
|
||||
}
|
||||
}
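A minimal sketch of how the signing helper above is used for a synchronous DetectDocumentText call, mirroring the handler further down; the credentials and region are placeholders.

```ts
// Sketch: signing and sending a DetectDocumentText request (placeholder credentials/region).
declare const base64DocumentBytes: string // base64-encoded document, see fetchDocumentBytes below

const host = 'textract.us-east-1.amazonaws.com'
const requestBody = JSON.stringify({ Document: { Bytes: base64DocumentBytes } })
const signedHeaders = signAwsRequest(
  'POST',
  host,
  '/',
  requestBody,
  'AKIA-placeholder',
  'placeholder-secret',
  'us-east-1',
  'textract',
  'Textract.DetectDocumentText'
)
const response = await fetch(`https://${host}/`, {
  method: 'POST',
  headers: signedHeaders,
  body: requestBody,
})
```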
|
||||
|
||||
async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
|
||||
const response = await fetch(url)
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const arrayBuffer = await response.arrayBuffer()
|
||||
const bytes = Buffer.from(arrayBuffer).toString('base64')
|
||||
const contentType = response.headers.get('content-type') || 'application/octet-stream'
|
||||
|
||||
return { bytes, contentType }
|
||||
}
|
||||
|
||||
function parseS3Uri(s3Uri: string): { bucket: string; key: string } {
|
||||
const match = s3Uri.match(/^s3:\/\/([^/]+)\/(.+)$/)
|
||||
if (!match) {
|
||||
throw new Error(
|
||||
`Invalid S3 URI format: ${s3Uri}. Expected format: s3://bucket-name/path/to/object`
|
||||
)
|
||||
}
|
||||
|
||||
const bucket = match[1]
|
||||
const key = match[2]
|
||||
|
||||
const bucketValidation = validateS3BucketName(bucket, 'S3 bucket name')
|
||||
if (!bucketValidation.isValid) {
|
||||
throw new Error(bucketValidation.error)
|
||||
}
|
||||
|
||||
if (key.includes('..') || key.startsWith('/')) {
|
||||
throw new Error('S3 key contains invalid path traversal sequences')
|
||||
}
|
||||
|
||||
return { bucket, key }
|
||||
}
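For example (bucket and key values are placeholders):

```ts
// Accepted format: s3://bucket-name/path/to/object
const { bucket, key } = parseS3Uri('s3://my-textract-bucket/invoices/2024/scan.pdf')
// bucket -> 'my-textract-bucket', key -> 'invoices/2024/scan.pdf'

parseS3Uri('https://example.com/file.pdf') // throws: not an s3:// URI
```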
|
||||
|
||||
function sleep(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms))
|
||||
}
|
||||
|
||||
async function callTextractAsync(
|
||||
host: string,
|
||||
amzTarget: string,
|
||||
body: Record<string, unknown>,
|
||||
accessKeyId: string,
|
||||
secretAccessKey: string,
|
||||
region: string
|
||||
): Promise<Record<string, unknown>> {
|
||||
const bodyString = JSON.stringify(body)
|
||||
const headers = signAwsRequest(
|
||||
'POST',
|
||||
host,
|
||||
'/',
|
||||
bodyString,
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
region,
|
||||
'textract',
|
||||
amzTarget
|
||||
)
|
||||
|
||||
const response = await fetch(`https://${host}/`, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: bodyString,
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
let errorMessage = `Textract API error: ${response.statusText}`
|
||||
try {
|
||||
const errorJson = JSON.parse(errorText)
|
||||
if (errorJson.Message) {
|
||||
errorMessage = errorJson.Message
|
||||
} else if (errorJson.__type) {
|
||||
errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}`
|
||||
}
|
||||
} catch {
|
||||
// Use default error message
|
||||
}
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
return response.json()
|
||||
}
|
||||
|
||||
async function pollForJobCompletion(
|
||||
host: string,
|
||||
jobId: string,
|
||||
accessKeyId: string,
|
||||
secretAccessKey: string,
|
||||
region: string,
|
||||
useAnalyzeDocument: boolean,
|
||||
requestId: string
|
||||
): Promise<Record<string, unknown>> {
|
||||
const pollIntervalMs = 5000 // 5 seconds between polls
|
||||
const maxPollTimeMs = 180000 // 3 minutes maximum polling time
|
||||
const maxAttempts = Math.ceil(maxPollTimeMs / pollIntervalMs)
|
||||
|
||||
const getTarget = useAnalyzeDocument
|
||||
? 'Textract.GetDocumentAnalysis'
|
||||
: 'Textract.GetDocumentTextDetection'
|
||||
|
||||
for (let attempt = 0; attempt < maxAttempts; attempt++) {
|
||||
const result = await callTextractAsync(
|
||||
host,
|
||||
getTarget,
|
||||
{ JobId: jobId },
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
region
|
||||
)
|
||||
|
||||
const jobStatus = result.JobStatus as string
|
||||
|
||||
if (jobStatus === 'SUCCEEDED') {
|
||||
logger.info(`[${requestId}] Async job completed successfully after ${attempt + 1} polls`)
|
||||
|
||||
let allBlocks = (result.Blocks as unknown[]) || []
|
||||
let nextToken = result.NextToken as string | undefined
|
||||
|
||||
while (nextToken) {
|
||||
const nextResult = await callTextractAsync(
|
||||
host,
|
||||
getTarget,
|
||||
{ JobId: jobId, NextToken: nextToken },
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
region
|
||||
)
|
||||
allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || [])
|
||||
nextToken = nextResult.NextToken as string | undefined
|
||||
}
|
||||
|
||||
return {
|
||||
...result,
|
||||
Blocks: allBlocks,
|
||||
}
|
||||
}
|
||||
|
||||
if (jobStatus === 'FAILED') {
|
||||
throw new Error(`Textract job failed: ${result.StatusMessage || 'Unknown error'}`)
|
||||
}
|
||||
|
||||
if (jobStatus === 'PARTIAL_SUCCESS') {
|
||||
logger.warn(`[${requestId}] Job completed with partial success: ${result.StatusMessage}`)
|
||||
|
||||
let allBlocks = (result.Blocks as unknown[]) || []
|
||||
let nextToken = result.NextToken as string | undefined
|
||||
|
||||
while (nextToken) {
|
||||
const nextResult = await callTextractAsync(
|
||||
host,
|
||||
getTarget,
|
||||
{ JobId: jobId, NextToken: nextToken },
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
region
|
||||
)
|
||||
allBlocks = allBlocks.concat((nextResult.Blocks as unknown[]) || [])
|
||||
nextToken = nextResult.NextToken as string | undefined
|
||||
}
|
||||
|
||||
return {
|
||||
...result,
|
||||
Blocks: allBlocks,
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Job status: ${jobStatus}, attempt ${attempt + 1}/${maxAttempts}`)
|
||||
await sleep(pollIntervalMs)
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`Timeout waiting for Textract job to complete (max ${maxPollTimeMs / 1000} seconds)`
|
||||
)
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized Textract parse attempt`, {
|
||||
error: authResult.error || 'Missing userId',
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Unauthorized',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const userId = authResult.userId
|
||||
const body = await request.json()
|
||||
const validatedData = TextractParseSchema.parse(body)
|
||||
|
||||
const processingMode = validatedData.processingMode || 'sync'
|
||||
const featureTypes = validatedData.featureTypes ?? []
|
||||
const useAnalyzeDocument = featureTypes.length > 0
|
||||
const host = `textract.${validatedData.region}.amazonaws.com`
|
||||
|
||||
logger.info(`[${requestId}] Textract parse request`, {
|
||||
processingMode,
|
||||
filePath: validatedData.filePath?.substring(0, 50),
|
||||
s3Uri: validatedData.s3Uri?.substring(0, 50),
|
||||
featureTypes,
|
||||
userId,
|
||||
})
|
||||
|
||||
if (processingMode === 'async') {
|
||||
if (!validatedData.s3Uri) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'S3 URI is required for multi-page processing (s3://bucket/key)',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { bucket: s3Bucket, key: s3Key } = parseS3Uri(validatedData.s3Uri)
|
||||
|
||||
logger.info(`[${requestId}] Starting async Textract job`, { s3Bucket, s3Key })
|
||||
|
||||
const startTarget = useAnalyzeDocument
|
||||
? 'Textract.StartDocumentAnalysis'
|
||||
: 'Textract.StartDocumentTextDetection'
|
||||
|
||||
const startBody: Record<string, unknown> = {
|
||||
DocumentLocation: {
|
||||
S3Object: {
|
||||
Bucket: s3Bucket,
|
||||
Name: s3Key,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
if (useAnalyzeDocument) {
|
||||
startBody.FeatureTypes = featureTypes
|
||||
|
||||
if (
|
||||
validatedData.queries &&
|
||||
validatedData.queries.length > 0 &&
|
||||
featureTypes.includes('QUERIES')
|
||||
) {
|
||||
startBody.QueriesConfig = {
|
||||
Queries: validatedData.queries.map((q) => ({
|
||||
Text: q.Text,
|
||||
Alias: q.Alias,
|
||||
Pages: q.Pages,
|
||||
})),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const startResult = await callTextractAsync(
|
||||
host,
|
||||
startTarget,
|
||||
startBody,
|
||||
validatedData.accessKeyId,
|
||||
validatedData.secretAccessKey,
|
||||
validatedData.region
|
||||
)
|
||||
|
||||
const jobId = startResult.JobId as string
|
||||
if (!jobId) {
|
||||
throw new Error('Failed to start Textract job: No JobId returned')
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Async job started`, { jobId })
|
||||
|
||||
const textractData = await pollForJobCompletion(
|
||||
host,
|
||||
jobId,
|
||||
validatedData.accessKeyId,
|
||||
validatedData.secretAccessKey,
|
||||
validatedData.region,
|
||||
useAnalyzeDocument,
|
||||
requestId
|
||||
)
|
||||
|
||||
logger.info(`[${requestId}] Textract async parse successful`, {
|
||||
pageCount: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
|
||||
blockCount: (textractData.Blocks as unknown[])?.length ?? 0,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
blocks: textractData.Blocks ?? [],
|
||||
documentMetadata: {
|
||||
pages: (textractData.DocumentMetadata as { Pages?: number })?.Pages ?? 0,
|
||||
},
|
||||
modelVersion: (textractData.AnalyzeDocumentModelVersion ??
|
||||
textractData.DetectDocumentTextModelVersion) as string | undefined,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
if (!validatedData.filePath) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'File path is required for single-page processing',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)
|
||||
|
||||
if (isInternalFilePath) {
|
||||
try {
|
||||
const storageKey = extractStorageKey(validatedData.filePath)
|
||||
const context = inferContextFromKey(storageKey)
|
||||
|
||||
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
|
||||
|
||||
if (!hasAccess) {
|
||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
||||
userId,
|
||||
key: storageKey,
|
||||
context,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'File not found',
|
||||
},
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
|
||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Failed to generate file access URL',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
} else if (validatedData.filePath?.startsWith('/')) {
|
||||
// Reject arbitrary absolute paths that don't contain /api/files/serve/
|
||||
logger.warn(`[${requestId}] Invalid internal path`, {
|
||||
userId,
|
||||
path: validatedData.filePath.substring(0, 50),
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
} else {
|
||||
const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
|
||||
if (!urlValidation.isValid) {
|
||||
logger.warn(`[${requestId}] SSRF attempt blocked`, {
|
||||
userId,
|
||||
url: fileUrl.substring(0, 100),
|
||||
error: urlValidation.error,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: urlValidation.error,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const { bytes, contentType } = await fetchDocumentBytes(fileUrl)
|
||||
|
||||
// Track if this is a PDF for better error messaging
|
||||
const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
|
||||
|
||||
const uri = '/'
|
||||
|
||||
let textractBody: Record<string, unknown>
|
||||
let amzTarget: string
|
||||
|
||||
if (useAnalyzeDocument) {
|
||||
amzTarget = 'Textract.AnalyzeDocument'
|
||||
textractBody = {
|
||||
Document: {
|
||||
Bytes: bytes,
|
||||
},
|
||||
FeatureTypes: featureTypes,
|
||||
}
|
||||
|
||||
if (
|
||||
validatedData.queries &&
|
||||
validatedData.queries.length > 0 &&
|
||||
featureTypes.includes('QUERIES')
|
||||
) {
|
||||
textractBody.QueriesConfig = {
|
||||
Queries: validatedData.queries.map((q) => ({
|
||||
Text: q.Text,
|
||||
Alias: q.Alias,
|
||||
Pages: q.Pages,
|
||||
})),
|
||||
}
|
||||
}
|
||||
} else {
|
||||
amzTarget = 'Textract.DetectDocumentText'
|
||||
textractBody = {
|
||||
Document: {
|
||||
Bytes: bytes,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const bodyString = JSON.stringify(textractBody)
|
||||
|
||||
const headers = signAwsRequest(
|
||||
'POST',
|
||||
host,
|
||||
uri,
|
||||
bodyString,
|
||||
validatedData.accessKeyId,
|
||||
validatedData.secretAccessKey,
|
||||
validatedData.region,
|
||||
'textract',
|
||||
amzTarget
|
||||
)
|
||||
|
||||
const textractResponse = await fetch(`https://${host}${uri}`, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: bodyString,
|
||||
})
|
||||
|
||||
if (!textractResponse.ok) {
|
||||
const errorText = await textractResponse.text()
|
||||
logger.error(`[${requestId}] Textract API error:`, errorText)
|
||||
|
||||
let errorMessage = `Textract API error: ${textractResponse.statusText}`
|
||||
let isUnsupportedFormat = false
|
||||
try {
|
||||
const errorJson = JSON.parse(errorText)
|
||||
if (errorJson.Message) {
|
||||
errorMessage = errorJson.Message
|
||||
} else if (errorJson.__type) {
|
||||
errorMessage = `${errorJson.__type}: ${errorJson.message || errorText}`
|
||||
}
|
||||
// Check for unsupported document format error
|
||||
isUnsupportedFormat =
|
||||
errorJson.__type === 'UnsupportedDocumentException' ||
|
||||
errorJson.Message?.toLowerCase().includes('unsupported document') ||
|
||||
errorText.toLowerCase().includes('unsupported document')
|
||||
} catch {
|
||||
isUnsupportedFormat = errorText.toLowerCase().includes('unsupported document')
|
||||
}
|
||||
|
||||
// Provide helpful message for unsupported format (likely multi-page PDF)
|
||||
if (isUnsupportedFormat && isPdf) {
|
||||
errorMessage =
|
||||
'This document format is not supported in Single Page mode. If this is a multi-page PDF, please use "Multi-Page (PDF, TIFF via S3)" mode instead, which requires uploading your document to S3 first. Single Page mode only supports JPEG, PNG, and single-page PDF files.'
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
},
|
||||
{ status: textractResponse.status }
|
||||
)
|
||||
}
|
||||
|
||||
const textractData = await textractResponse.json()
|
||||
|
||||
logger.info(`[${requestId}] Textract parse successful`, {
|
||||
pageCount: textractData.DocumentMetadata?.Pages ?? 0,
|
||||
blockCount: textractData.Blocks?.length ?? 0,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
blocks: textractData.Blocks ?? [],
|
||||
documentMetadata: {
|
||||
pages: textractData.DocumentMetadata?.Pages ?? 0,
|
||||
},
|
||||
modelVersion:
|
||||
textractData.AnalyzeDocumentModelVersion ??
|
||||
textractData.DetectDocumentTextModelVersion ??
|
||||
undefined,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid request data',
|
||||
details: error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Error in Textract parse:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Internal server error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
apps/sim/app/api/v1/admin/credits/route.ts (Normal file, 203 lines)
@@ -0,0 +1,203 @@
|
||||
/**
|
||||
* POST /api/v1/admin/credits
|
||||
*
|
||||
* Issue credits to a user by user ID or email.
|
||||
*
|
||||
* Body:
|
||||
* - userId?: string - The user ID to issue credits to
|
||||
* - email?: string - The user email to issue credits to (alternative to userId)
|
||||
* - amount: number - The amount of credits to issue (in dollars)
|
||||
* - reason?: string - Reason for issuing credits (for audit logging)
|
||||
*
|
||||
* Response: AdminSingleResponse<{
|
||||
* success: true,
|
||||
* entityType: 'user' | 'organization',
|
||||
* entityId: string,
|
||||
* amount: number,
|
||||
* newCreditBalance: number,
|
||||
* newUsageLimit: number,
|
||||
* }>
|
||||
*
|
||||
* For Pro users: credits are added to user_stats.credit_balance
|
||||
* For Team users: credits are added to organization.credit_balance
|
||||
* Usage limits are updated accordingly to allow spending the credits.
|
||||
*/
|
||||
|
||||
import { db } from '@sim/db'
|
||||
import { organization, subscription, user, userStats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { addCredits } from '@/lib/billing/credits/balance'
|
||||
import { setUsageLimitForCredits } from '@/lib/billing/credits/purchase'
|
||||
import { getEffectiveSeats } from '@/lib/billing/subscriptions/utils'
|
||||
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
|
||||
import {
|
||||
badRequestResponse,
|
||||
internalErrorResponse,
|
||||
notFoundResponse,
|
||||
singleResponse,
|
||||
} from '@/app/api/v1/admin/responses'
|
||||
|
||||
const logger = createLogger('AdminCreditsAPI')
|
||||
|
||||
export const POST = withAdminAuth(async (request) => {
|
||||
try {
|
||||
const body = await request.json()
|
||||
const { userId, email, amount, reason } = body
|
||||
|
||||
if (!userId && !email) {
|
||||
return badRequestResponse('Either userId or email is required')
|
||||
}
|
||||
|
||||
if (typeof amount !== 'number' || !Number.isFinite(amount) || amount <= 0) {
|
||||
return badRequestResponse('amount must be a positive number')
|
||||
}
|
||||
|
||||
let resolvedUserId: string
|
||||
let userEmail: string | null = null
|
||||
|
||||
if (userId) {
|
||||
const [userData] = await db
|
||||
.select({ id: user.id, email: user.email })
|
||||
.from(user)
|
||||
.where(eq(user.id, userId))
|
||||
.limit(1)
|
||||
|
||||
if (!userData) {
|
||||
return notFoundResponse('User')
|
||||
}
|
||||
resolvedUserId = userData.id
|
||||
userEmail = userData.email
|
||||
} else {
|
||||
const normalizedEmail = email.toLowerCase().trim()
|
||||
const [userData] = await db
|
||||
.select({ id: user.id, email: user.email })
|
||||
.from(user)
|
||||
.where(eq(user.email, normalizedEmail))
|
||||
.limit(1)
|
||||
|
||||
if (!userData) {
|
||||
return notFoundResponse('User with email')
|
||||
}
|
||||
resolvedUserId = userData.id
|
||||
userEmail = userData.email
|
||||
}
|
||||
|
||||
const userSubscription = await getHighestPrioritySubscription(resolvedUserId)
|
||||
|
||||
if (!userSubscription || !['pro', 'team', 'enterprise'].includes(userSubscription.plan)) {
|
||||
return badRequestResponse(
|
||||
'User must have an active Pro, Team, or Enterprise subscription to receive credits'
|
||||
)
|
||||
}
|
||||
|
||||
let entityType: 'user' | 'organization'
|
||||
let entityId: string
|
||||
const plan = userSubscription.plan
|
||||
let seats: number | null = null
|
||||
|
||||
if (plan === 'team' || plan === 'enterprise') {
|
||||
entityType = 'organization'
|
||||
entityId = userSubscription.referenceId
|
||||
|
||||
const [orgExists] = await db
|
||||
.select({ id: organization.id })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, entityId))
|
||||
.limit(1)
|
||||
|
||||
if (!orgExists) {
|
||||
return notFoundResponse('Organization')
|
||||
}
|
||||
|
||||
const [subData] = await db
|
||||
.select()
|
||||
.from(subscription)
|
||||
.where(and(eq(subscription.referenceId, entityId), eq(subscription.status, 'active')))
|
||||
.limit(1)
|
||||
|
||||
seats = getEffectiveSeats(subData)
|
||||
} else {
|
||||
entityType = 'user'
|
||||
entityId = resolvedUserId
|
||||
|
||||
const [existingStats] = await db
|
||||
.select({ id: userStats.id })
|
||||
.from(userStats)
|
||||
.where(eq(userStats.userId, entityId))
|
||||
.limit(1)
|
||||
|
||||
if (!existingStats) {
|
||||
await db.insert(userStats).values({
|
||||
id: nanoid(),
|
||||
userId: entityId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
await addCredits(entityType, entityId, amount)
|
||||
|
||||
let newCreditBalance: number
|
||||
if (entityType === 'organization') {
|
||||
const [orgData] = await db
|
||||
.select({ creditBalance: organization.creditBalance })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, entityId))
|
||||
.limit(1)
|
||||
newCreditBalance = Number.parseFloat(orgData?.creditBalance || '0')
|
||||
} else {
|
||||
const [stats] = await db
|
||||
.select({ creditBalance: userStats.creditBalance })
|
||||
.from(userStats)
|
||||
.where(eq(userStats.userId, entityId))
|
||||
.limit(1)
|
||||
newCreditBalance = Number.parseFloat(stats?.creditBalance || '0')
|
||||
}
|
||||
|
||||
await setUsageLimitForCredits(entityType, entityId, plan, seats, newCreditBalance)
|
||||
|
||||
let newUsageLimit: number
|
||||
if (entityType === 'organization') {
|
||||
const [orgData] = await db
|
||||
.select({ orgUsageLimit: organization.orgUsageLimit })
|
||||
.from(organization)
|
||||
.where(eq(organization.id, entityId))
|
||||
.limit(1)
|
||||
newUsageLimit = Number.parseFloat(orgData?.orgUsageLimit || '0')
|
||||
} else {
|
||||
const [stats] = await db
|
||||
.select({ currentUsageLimit: userStats.currentUsageLimit })
|
||||
.from(userStats)
|
||||
.where(eq(userStats.userId, entityId))
|
||||
.limit(1)
|
||||
newUsageLimit = Number.parseFloat(stats?.currentUsageLimit || '0')
|
||||
}
|
||||
|
||||
logger.info('Admin API: Issued credits', {
|
||||
resolvedUserId,
|
||||
userEmail,
|
||||
entityType,
|
||||
entityId,
|
||||
amount,
|
||||
newCreditBalance,
|
||||
newUsageLimit,
|
||||
reason: reason || 'No reason provided',
|
||||
})
|
||||
|
||||
return singleResponse({
|
||||
success: true,
|
||||
userId: resolvedUserId,
|
||||
userEmail,
|
||||
entityType,
|
||||
entityId,
|
||||
amount,
|
||||
newCreditBalance,
|
||||
newUsageLimit,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Admin API: Failed to issue credits', { error })
|
||||
return internalErrorResponse('Failed to issue credits')
|
||||
}
|
||||
})
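A hypothetical request against the endpoint documented above; whatever authentication withAdminAuth expects is omitted here, and all values are placeholders.

```ts
// Hypothetical admin request issuing $25 of credits by email (auth headers omitted).
const res = await fetch('/api/v1/admin/credits', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    email: 'user@example.com',
    amount: 25,
    reason: 'Support goodwill credit',
  }),
})
const result = await res.json()
```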
|
||||
@@ -63,6 +63,9 @@
|
||||
* GET /api/v1/admin/subscriptions/:id - Get subscription details
|
||||
* DELETE /api/v1/admin/subscriptions/:id - Cancel subscription (?atPeriodEnd=true for scheduled)
|
||||
*
|
||||
* Credits:
|
||||
* POST /api/v1/admin/credits - Issue credits to user (by userId or email)
|
||||
*
|
||||
* Access Control (Permission Groups):
|
||||
* GET /api/v1/admin/access-control - List permission groups (?organizationId=X)
|
||||
* DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X)
|
||||
|
||||
@@ -12,6 +12,10 @@ import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||
import { processInputFileFields } from '@/lib/execution/files'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import {
|
||||
cleanupExecutionBase64Cache,
|
||||
hydrateUserFilesWithBase64,
|
||||
} from '@/lib/uploads/utils/user-file-base64.server'
|
||||
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
||||
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
|
||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||
@@ -25,7 +29,7 @@ import type { WorkflowExecutionPayload } from '@/background/workflow-execution'
|
||||
import { normalizeName } from '@/executor/constants'
|
||||
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types'
|
||||
import type { StreamingExecution } from '@/executor/types'
|
||||
import type { NormalizedBlockOutput, StreamingExecution } from '@/executor/types'
|
||||
import { Serializer } from '@/serializer'
|
||||
import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types'
|
||||
|
||||
@@ -38,6 +42,8 @@ const ExecuteWorkflowSchema = z.object({
|
||||
useDraftState: z.boolean().optional(),
|
||||
input: z.any().optional(),
|
||||
isClientSession: z.boolean().optional(),
|
||||
includeFileBase64: z.boolean().optional().default(true),
|
||||
base64MaxBytes: z.number().int().positive().optional(),
|
||||
workflowStateOverride: z
|
||||
.object({
|
||||
blocks: z.record(z.any()),
|
||||
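An illustrative request body exercising the two new fields added to the schema above; the input payload and the size cap are placeholder choices.

```ts
// Sketch of an execute-workflow request body using the new base64 options.
const executeBody = {
  input: { query: 'summarize the attached file' },
  includeFileBase64: true, // defaults to true per the schema; set false to skip inlining file contents
  base64MaxBytes: 5 * 1024 * 1024, // assumed cap on how large a file gets hydrated to base64
}
```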
@@ -214,6 +220,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
useDraftState,
|
||||
input: validatedInput,
|
||||
isClientSession = false,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
workflowStateOverride,
|
||||
} = validation.data
|
||||
|
||||
@@ -227,6 +235,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
triggerType,
|
||||
stream,
|
||||
useDraftState,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
workflowStateOverride,
|
||||
workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
|
||||
...rest
|
||||
@@ -427,16 +437,31 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
snapshot,
|
||||
callbacks: {},
|
||||
loggingSession,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
})
|
||||
|
||||
const hasResponseBlock = workflowHasResponseBlock(result)
|
||||
const outputWithBase64 = includeFileBase64
|
||||
? ((await hydrateUserFilesWithBase64(result.output, {
|
||||
requestId,
|
||||
executionId,
|
||||
maxBytes: base64MaxBytes,
|
||||
})) as NormalizedBlockOutput)
|
||||
: result.output
|
||||
|
||||
const resultWithBase64 = { ...result, output: outputWithBase64 }
|
||||
|
||||
// Cleanup base64 cache for this execution
|
||||
await cleanupExecutionBase64Cache(executionId)
|
||||
|
||||
const hasResponseBlock = workflowHasResponseBlock(resultWithBase64)
|
||||
if (hasResponseBlock) {
|
||||
return createHttpResponseFromBlock(result)
|
||||
return createHttpResponseFromBlock(resultWithBase64)
|
||||
}
|
||||
|
||||
const filteredResult = {
|
||||
success: result.success,
|
||||
output: result.output,
|
||||
output: outputWithBase64,
|
||||
error: result.error,
|
||||
metadata: result.metadata
|
||||
? {
|
||||
@@ -498,6 +523,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
selectedOutputs: resolvedSelectedOutputs,
|
||||
isSecureMode: false,
|
||||
workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
},
|
||||
executionId,
|
||||
})
|
||||
@@ -698,6 +725,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
},
|
||||
loggingSession,
|
||||
abortSignal: abortController.signal,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
})
|
||||
|
||||
if (result.status === 'paused') {
|
||||
@@ -750,12 +779,21 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
workflowId,
|
||||
data: {
|
||||
success: result.success,
|
||||
output: result.output,
|
||||
output: includeFileBase64
|
||||
? await hydrateUserFilesWithBase64(result.output, {
|
||||
requestId,
|
||||
executionId,
|
||||
maxBytes: base64MaxBytes,
|
||||
})
|
||||
: result.output,
|
||||
duration: result.metadata?.duration || 0,
|
||||
startTime: result.metadata?.startTime || startTime.toISOString(),
|
||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
|
||||
// Cleanup base64 cache for this execution
|
||||
await cleanupExecutionBase64Cache(executionId)
|
||||
} catch (error: any) {
|
||||
const errorMessage = error.message || 'Unknown error'
|
||||
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)
|
||||
|
||||
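The hunk above threads `includeFileBase64` and `base64MaxBytes` through the execute route and hydrates file outputs before the response is built. A minimal sketch of that flow, assuming helper signatures inferred from the call sites (the real `hydrateUserFilesWithBase64` and `cleanupExecutionBase64Cache` implementations are not part of this diff):

```typescript
// Sketch only: the helper types below are assumptions inferred from the call sites
// in the hunk above, not the actual implementations in the repository.
type Hydrate = (
  output: unknown,
  opts: { requestId: string; executionId: string; maxBytes?: number }
) => Promise<unknown>
type Cleanup = (executionId: string) => Promise<void>

async function buildResponsePayload(
  result: { success: boolean; output: unknown; error?: string },
  includeFileBase64: boolean,
  opts: { requestId: string; executionId: string; maxBytes?: number },
  hydrate: Hydrate,
  cleanup: Cleanup
) {
  // Only pay the base64 cost when the caller opted in.
  const output = includeFileBase64 ? await hydrate(result.output, opts) : result.output
  // Release any cached base64 blobs once the response body is built.
  await cleanup(opts.executionId)
  return { ...result, output }
}
```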
27
apps/sim/app/changelog/components/branded-link.tsx
Normal file
@@ -0,0 +1,27 @@
'use client'

import Link from 'next/link'
import { useBrandedButtonClass } from '@/hooks/use-branded-button-class'

interface BrandedLinkProps {
  href: string
  children: React.ReactNode
  className?: string
  target?: string
  rel?: string
}

export function BrandedLink({ href, children, className = '', target, rel }: BrandedLinkProps) {
  const buttonClass = useBrandedButtonClass()

  return (
    <Link
      href={href}
      target={target}
      rel={rel}
      className={`${buttonClass} group inline-flex items-center justify-center gap-2 rounded-[10px] py-[6px] pr-[10px] pl-[12px] text-[15px] text-white transition-all ${className}`}
    >
      {children}
    </Link>
  )
}
|
||||
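For reference, a usage sketch of the new component; it mirrors the changelog adoption in the hunk below and assumes only that `useBrandedButtonClass` returns a class string:

```tsx
// Usage sketch of BrandedLink, modeled on the changelog page change that follows.
import { Github } from 'lucide-react'
import { BrandedLink } from '@/app/changelog/components/branded-link'

export function ReleasesButton() {
  return (
    <BrandedLink
      href='https://github.com/simstudioai/sim/releases'
      target='_blank'
      rel='noopener noreferrer'
    >
      <Github className='h-4 w-4' />
      View on GitHub
    </BrandedLink>
  )
}
```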
@@ -2,6 +2,7 @@ import { BookOpen, Github, Rss } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedLink } from '@/app/changelog/components/branded-link'
|
||||
import ChangelogList from '@/app/changelog/components/timeline-list'
|
||||
|
||||
export interface ChangelogEntry {
|
||||
@@ -66,25 +67,24 @@ export default async function ChangelogContent() {
|
||||
<hr className='mt-6 border-border' />
|
||||
|
||||
<div className='mt-6 flex flex-wrap items-center gap-3 text-sm'>
|
||||
<Link
|
||||
<BrandedLink
|
||||
href='https://github.com/simstudioai/sim/releases'
|
||||
target='_blank'
|
||||
rel='noopener noreferrer'
|
||||
className='group inline-flex items-center justify-center gap-2 rounded-[10px] border border-[#6F3DFA] bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] py-[6px] pr-[10px] pl-[12px] text-[14px] text-white shadow-[inset_0_2px_4px_0_#9B77FF] transition-all sm:text-[16px]'
|
||||
>
|
||||
<Github className='h-4 w-4' />
|
||||
View on GitHub
|
||||
</Link>
|
||||
</BrandedLink>
|
||||
<Link
|
||||
href='https://docs.sim.ai'
|
||||
className='inline-flex items-center gap-2 rounded-md border border-border px-3 py-1.5 hover:bg-muted'
|
||||
className='inline-flex items-center gap-2 rounded-[10px] border border-border py-[6px] pr-[10px] pl-[12px] text-[15px] transition-all hover:bg-muted'
|
||||
>
|
||||
<BookOpen className='h-4 w-4' />
|
||||
Documentation
|
||||
</Link>
|
||||
<Link
|
||||
href='/changelog.xml'
|
||||
className='inline-flex items-center gap-2 rounded-md border border-border px-3 py-1.5 hover:bg-muted'
|
||||
className='inline-flex items-center gap-2 rounded-[10px] border border-border py-[6px] pr-[10px] pl-[12px] text-[15px] transition-all hover:bg-muted'
|
||||
>
|
||||
<Rss className='h-4 w-4' />
|
||||
RSS Feed
|
||||
|
||||
@@ -117,7 +117,7 @@ export default function ChatClient({ identifier }: { identifier: string }) {
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null)
|
||||
const messagesContainerRef = useRef<HTMLDivElement>(null)
|
||||
const [starCount, setStarCount] = useState('25.1k')
|
||||
const [starCount, setStarCount] = useState('25.8k')
|
||||
const [conversationId, setConversationId] = useState('')
|
||||
|
||||
const [showScrollButton, setShowScrollButton] = useState(false)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { isUserFile } from '@/lib/core/utils/display-filters'
|
||||
import { isUserFileWithMetadata } from '@/lib/core/utils/user-file'
|
||||
import type { ChatFile, ChatMessage } from '@/app/chat/components/message/message'
|
||||
import { CHAT_ERROR_MESSAGES } from '@/app/chat/constants'
|
||||
|
||||
@@ -17,7 +17,7 @@ function extractFilesFromData(
|
||||
return files
|
||||
}
|
||||
|
||||
if (isUserFile(data)) {
|
||||
if (isUserFileWithMetadata(data)) {
|
||||
if (!seenIds.has(data.id)) {
|
||||
seenIds.add(data.id)
|
||||
files.push({
|
||||
@@ -232,7 +232,7 @@ export function useChatStreaming() {
|
||||
return null
|
||||
}
|
||||
|
||||
if (isUserFile(value)) {
|
||||
if (isUserFileWithMetadata(value)) {
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -285,7 +285,7 @@ export function useChatStreaming() {
|
||||
|
||||
const value = getOutputValue(blockOutputs, config.path)
|
||||
|
||||
if (isUserFile(value)) {
|
||||
if (isUserFileWithMetadata(value)) {
|
||||
extractedFiles.push({
|
||||
id: value.id,
|
||||
name: value.name,
|
||||
|
||||
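The hunks above and below narrow values with `isUserFileWithMetadata` before reading `id` and `name`. A sketch of what such a guard could look like; the real one lives in `@/lib/core/utils/user-file` and its exact shape is an assumption here:

```typescript
// Sketch only: illustrates the kind of type narrowing the hunks above rely on.
// Field names beyond id and name are assumptions.
interface UserFileWithMetadata {
  id: string
  name: string
  url?: string // assumed optional field
}

function isUserFileWithMetadataSketch(value: unknown): value is UserFileWithMetadata {
  return (
    typeof value === 'object' &&
    value !== null &&
    typeof (value as { id?: unknown }).id === 'string' &&
    typeof (value as { name?: unknown }).name === 'string'
  )
}
```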
@@ -207,7 +207,6 @@ function TemplateCardInner({
|
||||
isPannable={false}
|
||||
defaultZoom={0.8}
|
||||
fitPadding={0.2}
|
||||
lightweight
|
||||
/>
|
||||
) : (
|
||||
<div className='h-full w-full bg-[var(--surface-4)]' />
|
||||
|
||||
@@ -61,6 +61,7 @@ export function EditChunkModal({
|
||||
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
|
||||
const [pendingNavigation, setPendingNavigation] = useState<(() => void) | null>(null)
|
||||
const [tokenizerOn, setTokenizerOn] = useState(false)
|
||||
const [hoveredTokenIndex, setHoveredTokenIndex] = useState<number | null>(null)
|
||||
const textareaRef = useRef<HTMLTextAreaElement>(null)
|
||||
|
||||
const error = mutationError?.message ?? null
|
||||
@@ -254,6 +255,8 @@ export function EditChunkModal({
|
||||
style={{
|
||||
backgroundColor: getTokenBgColor(index),
|
||||
}}
|
||||
onMouseEnter={() => setHoveredTokenIndex(index)}
|
||||
onMouseLeave={() => setHoveredTokenIndex(null)}
|
||||
>
|
||||
{token}
|
||||
</span>
|
||||
@@ -281,6 +284,11 @@ export function EditChunkModal({
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>Tokenizer</span>
|
||||
<Switch checked={tokenizerOn} onCheckedChange={setTokenizerOn} />
|
||||
{tokenizerOn && hoveredTokenIndex !== null && (
|
||||
<span className='text-[12px] text-[var(--text-tertiary)]'>
|
||||
Token #{hoveredTokenIndex + 1}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
<span className='text-[12px] text-[var(--text-secondary)]'>
|
||||
{tokenCount.toLocaleString()}
|
||||
|
||||
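The tokenizer additions above track the hovered token and surface a `Token #N` readout. A condensed, self-contained sketch of that interaction, with the real tokenizer replaced by a simple whitespace split:

```tsx
// Condensed sketch of the hover interaction added above; styling and the real
// tokenizer are simplified (tokens here are just whitespace-split words).
import { useState } from 'react'

export function TokenHoverPreview({ text }: { text: string }) {
  const [hoveredTokenIndex, setHoveredTokenIndex] = useState<number | null>(null)
  const tokens = text.split(/\s+/).filter(Boolean)

  return (
    <div>
      {tokens.map((token, index) => (
        <span
          key={index}
          onMouseEnter={() => setHoveredTokenIndex(index)}
          onMouseLeave={() => setHoveredTokenIndex(null)}
        >
          {token}{' '}
        </span>
      ))}
      {hoveredTokenIndex !== null && <span>Token #{hoveredTokenIndex + 1}</span>}
    </div>
  )
}
```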
@@ -36,6 +36,7 @@ import {
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { SearchHighlight } from '@/components/ui/search-highlight'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
|
||||
import type { ChunkData } from '@/lib/knowledge/types'
|
||||
import {
|
||||
ChunkContextMenu,
|
||||
@@ -58,55 +59,6 @@ import {
|
||||
|
||||
const logger = createLogger('Document')
|
||||
|
||||
/**
|
||||
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
|
||||
*/
|
||||
function formatRelativeTime(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
|
||||
|
||||
if (diffInSeconds < 60) {
|
||||
return 'just now'
|
||||
}
|
||||
if (diffInSeconds < 3600) {
|
||||
const minutes = Math.floor(diffInSeconds / 60)
|
||||
return `${minutes}m ago`
|
||||
}
|
||||
if (diffInSeconds < 86400) {
|
||||
const hours = Math.floor(diffInSeconds / 3600)
|
||||
return `${hours}h ago`
|
||||
}
|
||||
if (diffInSeconds < 604800) {
|
||||
const days = Math.floor(diffInSeconds / 86400)
|
||||
return `${days}d ago`
|
||||
}
|
||||
if (diffInSeconds < 2592000) {
|
||||
const weeks = Math.floor(diffInSeconds / 604800)
|
||||
return `${weeks}w ago`
|
||||
}
|
||||
if (diffInSeconds < 31536000) {
|
||||
const months = Math.floor(diffInSeconds / 2592000)
|
||||
return `${months}mo ago`
|
||||
}
|
||||
const years = Math.floor(diffInSeconds / 31536000)
|
||||
return `${years}y ago`
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to absolute format for tooltip display
|
||||
*/
|
||||
function formatAbsoluteDate(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
return date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})
|
||||
}
|
||||
|
||||
interface DocumentProps {
|
||||
knowledgeBaseId: string
|
||||
documentId: string
|
||||
@@ -304,7 +256,6 @@ export function Document({
|
||||
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('')
|
||||
const [isSearching, setIsSearching] = useState(false)
|
||||
|
||||
const {
|
||||
chunks: initialChunks,
|
||||
@@ -344,7 +295,6 @@ export function Document({
|
||||
const handler = setTimeout(() => {
|
||||
startTransition(() => {
|
||||
setDebouncedSearchQuery(searchQuery)
|
||||
setIsSearching(searchQuery.trim().length > 0)
|
||||
})
|
||||
}, 200)
|
||||
|
||||
@@ -353,6 +303,7 @@ export function Document({
|
||||
}
|
||||
}, [searchQuery])
|
||||
|
||||
const isSearching = debouncedSearchQuery.trim().length > 0
|
||||
const showingSearch = isSearching && searchQuery.trim().length > 0 && searchResults.length > 0
|
||||
const SEARCH_PAGE_SIZE = 50
|
||||
const maxSearchPages = Math.ceil(searchResults.length / SEARCH_PAGE_SIZE)
|
||||
|
||||
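The local date helpers removed above are now imported from `@/lib/core/utils/formatting`. Based on the deleted copies, the shared utilities presumably behave like this:

```typescript
// Condensed from the local helpers deleted above; the shared module may differ in detail.
export function formatRelativeTime(dateString: string): string {
  const diffInSeconds = Math.floor((Date.now() - new Date(dateString).getTime()) / 1000)
  if (diffInSeconds < 60) return 'just now'
  if (diffInSeconds < 3600) return `${Math.floor(diffInSeconds / 60)}m ago`
  if (diffInSeconds < 86400) return `${Math.floor(diffInSeconds / 3600)}h ago`
  if (diffInSeconds < 604800) return `${Math.floor(diffInSeconds / 86400)}d ago`
  if (diffInSeconds < 2592000) return `${Math.floor(diffInSeconds / 604800)}w ago`
  if (diffInSeconds < 31536000) return `${Math.floor(diffInSeconds / 2592000)}mo ago`
  return `${Math.floor(diffInSeconds / 31536000)}y ago`
}

export function formatAbsoluteDate(dateString: string): string {
  return new Date(dateString).toLocaleDateString('en-US', {
    year: 'numeric',
    month: 'short',
    day: 'numeric',
    hour: '2-digit',
    minute: '2-digit',
  })
}
```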
@@ -27,6 +27,10 @@ import {
|
||||
ModalContent,
|
||||
ModalFooter,
|
||||
ModalHeader,
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverItem,
|
||||
PopoverTrigger,
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
@@ -40,8 +44,11 @@ import { Input } from '@/components/ui/input'
|
||||
import { SearchHighlight } from '@/components/ui/search-highlight'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
|
||||
import { ALL_TAG_SLOTS, type AllTagSlot, getFieldTypeForSlot } from '@/lib/knowledge/constants'
|
||||
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
|
||||
import type { DocumentData } from '@/lib/knowledge/types'
|
||||
import { formatFileSize } from '@/lib/uploads/utils/file-utils'
|
||||
import {
|
||||
ActionBar,
|
||||
AddDocumentsModal,
|
||||
@@ -189,8 +196,8 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='mt-[4px]'>
|
||||
<Skeleton className='h-[21px] w-[300px] rounded-[4px]' />
|
||||
<div>
|
||||
<Skeleton className='mt-[4px] h-[21px] w-[300px] rounded-[4px]' />
|
||||
</div>
|
||||
|
||||
<div className='mt-[16px] flex items-center gap-[8px]'>
|
||||
@@ -208,9 +215,12 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
|
||||
className='flex-1 border-0 bg-transparent px-0 font-medium text-[var(--text-secondary)] text-small leading-none placeholder:text-[var(--text-subtle)] focus-visible:ring-0 focus-visible:ring-offset-0'
|
||||
/>
|
||||
</div>
|
||||
<Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'>
|
||||
Add Documents
|
||||
</Button>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Skeleton className='h-[32px] w-[52px] rounded-[6px]' />
|
||||
<Button disabled variant='tertiary' className='h-[32px] rounded-[6px]'>
|
||||
Add Documents
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='mt-[12px] flex flex-1 flex-col overflow-hidden'>
|
||||
@@ -222,73 +232,11 @@ function KnowledgeBaseLoading({ knowledgeBaseName }: KnowledgeBaseLoadingProps)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
|
||||
*/
|
||||
function formatRelativeTime(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
|
||||
|
||||
if (diffInSeconds < 60) {
|
||||
return 'just now'
|
||||
}
|
||||
if (diffInSeconds < 3600) {
|
||||
const minutes = Math.floor(diffInSeconds / 60)
|
||||
return `${minutes}m ago`
|
||||
}
|
||||
if (diffInSeconds < 86400) {
|
||||
const hours = Math.floor(diffInSeconds / 3600)
|
||||
return `${hours}h ago`
|
||||
}
|
||||
if (diffInSeconds < 604800) {
|
||||
const days = Math.floor(diffInSeconds / 86400)
|
||||
return `${days}d ago`
|
||||
}
|
||||
if (diffInSeconds < 2592000) {
|
||||
const weeks = Math.floor(diffInSeconds / 604800)
|
||||
return `${weeks}w ago`
|
||||
}
|
||||
if (diffInSeconds < 31536000) {
|
||||
const months = Math.floor(diffInSeconds / 2592000)
|
||||
return `${months}mo ago`
|
||||
}
|
||||
const years = Math.floor(diffInSeconds / 31536000)
|
||||
return `${years}y ago`
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to absolute format for tooltip display
|
||||
*/
|
||||
function formatAbsoluteDate(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
return date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})
|
||||
}
|
||||
|
||||
interface KnowledgeBaseProps {
|
||||
id: string
|
||||
knowledgeBaseName?: string
|
||||
}
|
||||
|
||||
function getFileIcon(mimeType: string, filename: string) {
|
||||
const IconComponent = getDocumentIcon(mimeType, filename)
|
||||
return <IconComponent className='h-6 w-5 flex-shrink-0' />
|
||||
}
|
||||
|
||||
function formatFileSize(bytes: number): string {
|
||||
if (bytes === 0) return '0 Bytes'
|
||||
const k = 1024
|
||||
const sizes = ['Bytes', 'KB', 'MB', 'GB']
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k))
|
||||
return `${Number.parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`
|
||||
}
|
||||
|
||||
const AnimatedLoader = ({ className }: { className?: string }) => (
|
||||
<Loader2 className={cn(className, 'animate-spin')} />
|
||||
)
|
||||
@@ -336,53 +284,24 @@ const getStatusBadge = (doc: DocumentData) => {
|
||||
}
|
||||
}
|
||||
|
||||
const TAG_SLOTS = [
|
||||
'tag1',
|
||||
'tag2',
|
||||
'tag3',
|
||||
'tag4',
|
||||
'tag5',
|
||||
'tag6',
|
||||
'tag7',
|
||||
'number1',
|
||||
'number2',
|
||||
'number3',
|
||||
'number4',
|
||||
'number5',
|
||||
'date1',
|
||||
'date2',
|
||||
'boolean1',
|
||||
'boolean2',
|
||||
'boolean3',
|
||||
] as const
|
||||
|
||||
type TagSlot = (typeof TAG_SLOTS)[number]
|
||||
|
||||
interface TagValue {
|
||||
slot: TagSlot
|
||||
slot: AllTagSlot
|
||||
displayName: string
|
||||
value: string
|
||||
}
|
||||
|
||||
const TAG_FIELD_TYPES: Record<string, string> = {
|
||||
tag: 'text',
|
||||
number: 'number',
|
||||
date: 'date',
|
||||
boolean: 'boolean',
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes tag values for a document
|
||||
*/
|
||||
function getDocumentTags(doc: DocumentData, definitions: TagDefinition[]): TagValue[] {
|
||||
const result: TagValue[] = []
|
||||
|
||||
for (const slot of TAG_SLOTS) {
|
||||
for (const slot of ALL_TAG_SLOTS) {
|
||||
const raw = doc[slot]
|
||||
if (raw == null) continue
|
||||
|
||||
const def = definitions.find((d) => d.tagSlot === slot)
|
||||
const fieldType = def?.fieldType || TAG_FIELD_TYPES[slot.replace(/\d+$/, '')] || 'text'
|
||||
const fieldType = def?.fieldType || getFieldTypeForSlot(slot) || 'text'
|
||||
|
||||
let value: string
|
||||
if (fieldType === 'date') {
|
||||
@@ -424,6 +343,8 @@ export function KnowledgeBase({
|
||||
|
||||
const [searchQuery, setSearchQuery] = useState('')
|
||||
const [showTagsModal, setShowTagsModal] = useState(false)
|
||||
const [enabledFilter, setEnabledFilter] = useState<'all' | 'enabled' | 'disabled'>('all')
|
||||
const [isFilterPopoverOpen, setIsFilterPopoverOpen] = useState(false)
|
||||
|
||||
/**
|
||||
* Memoize the search query setter to prevent unnecessary re-renders
|
||||
@@ -434,6 +355,7 @@ export function KnowledgeBase({
|
||||
}, [])
|
||||
|
||||
const [selectedDocuments, setSelectedDocuments] = useState<Set<string>>(new Set())
|
||||
const [isSelectAllMode, setIsSelectAllMode] = useState(false)
|
||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||
const [showAddDocumentsModal, setShowAddDocumentsModal] = useState(false)
|
||||
const [showDeleteDocumentModal, setShowDeleteDocumentModal] = useState(false)
|
||||
@@ -460,7 +382,6 @@ export function KnowledgeBase({
|
||||
error: knowledgeBaseError,
|
||||
refresh: refreshKnowledgeBase,
|
||||
} = useKnowledgeBase(id)
|
||||
const [hasProcessingDocuments, setHasProcessingDocuments] = useState(false)
|
||||
|
||||
const {
|
||||
documents,
|
||||
@@ -469,6 +390,7 @@ export function KnowledgeBase({
|
||||
isFetching: isFetchingDocuments,
|
||||
isPlaceholderData: isPlaceholderDocuments,
|
||||
error: documentsError,
|
||||
hasProcessingDocuments,
|
||||
updateDocument,
|
||||
refreshDocuments,
|
||||
} = useKnowledgeBaseDocuments(id, {
|
||||
@@ -477,7 +399,14 @@ export function KnowledgeBase({
|
||||
offset: (currentPage - 1) * DOCUMENTS_PER_PAGE,
|
||||
sortBy,
|
||||
sortOrder,
|
||||
refetchInterval: hasProcessingDocuments && !isDeleting ? 3000 : false,
|
||||
refetchInterval: (data) => {
|
||||
if (isDeleting) return false
|
||||
const hasPending = data?.documents?.some(
|
||||
(doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
|
||||
)
|
||||
return hasPending ? 3000 : false
|
||||
},
|
||||
enabledFilter,
|
||||
})
|
||||
|
||||
const { tagDefinitions } = useKnowledgeBaseTagDefinitions(id)
|
||||
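The `refetchInterval` change above switches from component state to a callback that inspects the latest fetched data. The polling decision, isolated as a pure function (how `useKnowledgeBaseDocuments` forwards it to the underlying query library is not shown in this diff):

```typescript
// Sketch of the polling rule used above: poll every 3 seconds while any document is
// still pending or processing, stop otherwise, and never poll during a delete.
interface DocumentLike {
  processingStatus: string
}

function pollingInterval(
  data: { documents?: DocumentLike[] } | undefined,
  isDeleting: boolean
): number | false {
  if (isDeleting) return false
  const hasPending = data?.documents?.some(
    (doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
  )
  return hasPending ? 3000 : false
}
```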
@@ -543,52 +472,52 @@ export function KnowledgeBase({
|
||||
</TableHead>
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
const processing = documents.some(
|
||||
(doc) => doc.processingStatus === 'pending' || doc.processingStatus === 'processing'
|
||||
)
|
||||
setHasProcessingDocuments(processing)
|
||||
|
||||
if (processing) {
|
||||
checkForDeadProcesses()
|
||||
}
|
||||
}, [documents])
|
||||
|
||||
/**
|
||||
* Checks for documents with stale processing states and marks them as failed
|
||||
*/
|
||||
const checkForDeadProcesses = () => {
|
||||
const now = new Date()
|
||||
const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes
|
||||
const checkForDeadProcesses = useCallback(
|
||||
(docsToCheck: DocumentData[]) => {
|
||||
const now = new Date()
|
||||
const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes
|
||||
|
||||
const staleDocuments = documents.filter((doc) => {
|
||||
if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) {
|
||||
return false
|
||||
}
|
||||
|
||||
const processingDuration = now.getTime() - new Date(doc.processingStartedAt).getTime()
|
||||
return processingDuration > DEAD_PROCESS_THRESHOLD_MS
|
||||
})
|
||||
|
||||
if (staleDocuments.length === 0) return
|
||||
|
||||
logger.warn(`Found ${staleDocuments.length} documents with dead processes`)
|
||||
|
||||
staleDocuments.forEach((doc) => {
|
||||
updateDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
documentId: doc.id,
|
||||
updates: { markFailedDueToTimeout: true },
|
||||
},
|
||||
{
|
||||
onSuccess: () => {
|
||||
logger.info(`Successfully marked dead process as failed for document: ${doc.filename}`)
|
||||
},
|
||||
const staleDocuments = docsToCheck.filter((doc) => {
|
||||
if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) {
|
||||
return false
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
const processingDuration = now.getTime() - new Date(doc.processingStartedAt).getTime()
|
||||
return processingDuration > DEAD_PROCESS_THRESHOLD_MS
|
||||
})
|
||||
|
||||
if (staleDocuments.length === 0) return
|
||||
|
||||
logger.warn(`Found ${staleDocuments.length} documents with dead processes`)
|
||||
|
||||
staleDocuments.forEach((doc) => {
|
||||
updateDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
documentId: doc.id,
|
||||
updates: { markFailedDueToTimeout: true },
|
||||
},
|
||||
{
|
||||
onSuccess: () => {
|
||||
logger.info(
|
||||
`Successfully marked dead process as failed for document: ${doc.filename}`
|
||||
)
|
||||
},
|
||||
}
|
||||
)
|
||||
})
|
||||
},
|
||||
[id, updateDocumentMutation]
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
if (hasProcessingDocuments) {
|
||||
checkForDeadProcesses(documents)
|
||||
}
|
||||
}, [hasProcessingDocuments, documents, checkForDeadProcesses])
|
||||
|
||||
const handleToggleEnabled = (docId: string) => {
|
||||
const document = documents.find((doc) => doc.id === docId)
|
||||
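The `checkForDeadProcesses` refactor above keeps the same rule: anything still processing after ten minutes is treated as a dead process and marked failed. The detection step as a pure function, grounded in the code shown:

```typescript
// Pure-function sketch of the stale-process check above.
const DEAD_PROCESS_THRESHOLD_MS = 600 * 1000 // 10 minutes

interface ProcessingDoc {
  id: string
  processingStatus: string
  processingStartedAt?: string | null
}

function findStaleDocuments(docs: ProcessingDoc[], now: Date = new Date()): ProcessingDoc[] {
  return docs.filter((doc) => {
    if (doc.processingStatus !== 'processing' || !doc.processingStartedAt) return false
    const processingDuration = now.getTime() - new Date(doc.processingStartedAt).getTime()
    return processingDuration > DEAD_PROCESS_THRESHOLD_MS
  })
}
```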
@@ -748,6 +677,7 @@ export function KnowledgeBase({
|
||||
setSelectedDocuments(new Set(documents.map((doc) => doc.id)))
|
||||
} else {
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -793,6 +723,26 @@ export function KnowledgeBase({
|
||||
* Handles bulk enabling of selected documents
|
||||
*/
|
||||
const handleBulkEnable = () => {
|
||||
if (isSelectAllMode) {
|
||||
bulkDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
operation: 'enable',
|
||||
selectAll: true,
|
||||
enabledFilter,
|
||||
},
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
logger.info(`Successfully enabled ${result.successCount} documents`)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
refreshDocuments()
|
||||
},
|
||||
}
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const documentsToEnable = documents.filter(
|
||||
(doc) => selectedDocuments.has(doc.id) && !doc.enabled
|
||||
)
|
||||
@@ -821,6 +771,26 @@ export function KnowledgeBase({
|
||||
* Handles bulk disabling of selected documents
|
||||
*/
|
||||
const handleBulkDisable = () => {
|
||||
if (isSelectAllMode) {
|
||||
bulkDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
operation: 'disable',
|
||||
selectAll: true,
|
||||
enabledFilter,
|
||||
},
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
logger.info(`Successfully disabled ${result.successCount} documents`)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
refreshDocuments()
|
||||
},
|
||||
}
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const documentsToDisable = documents.filter(
|
||||
(doc) => selectedDocuments.has(doc.id) && doc.enabled
|
||||
)
|
||||
@@ -845,18 +815,35 @@ export function KnowledgeBase({
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens the bulk delete confirmation modal
|
||||
*/
|
||||
const handleBulkDelete = () => {
|
||||
if (selectedDocuments.size === 0) return
|
||||
setShowBulkDeleteModal(true)
|
||||
}
|
||||
|
||||
/**
|
||||
* Confirms and executes the bulk deletion of selected documents
|
||||
*/
|
||||
const confirmBulkDelete = () => {
|
||||
if (isSelectAllMode) {
|
||||
bulkDocumentMutation(
|
||||
{
|
||||
knowledgeBaseId: id,
|
||||
operation: 'delete',
|
||||
selectAll: true,
|
||||
enabledFilter,
|
||||
},
|
||||
{
|
||||
onSuccess: (result) => {
|
||||
logger.info(`Successfully deleted ${result.successCount} documents`)
|
||||
refreshDocuments()
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
},
|
||||
onSettled: () => {
|
||||
setShowBulkDeleteModal(false)
|
||||
},
|
||||
}
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const documentsToDelete = documents.filter((doc) => selectedDocuments.has(doc.id))
|
||||
|
||||
if (documentsToDelete.length === 0) return
|
||||
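The bulk handlers above gain a `selectAll` path that operates on every document matching the active filter instead of an explicit id list. The mutation input shape as used at these call sites (the optional `documentIds` field is an assumed alternative path, not shown in the diff):

```typescript
// Input shape inferred from the bulkDocumentMutation calls above.
type BulkOperation = 'enable' | 'disable' | 'delete'

interface BulkDocumentInput {
  knowledgeBaseId: string
  operation: BulkOperation
  // Select everything matching the current filter, page-independent...
  selectAll?: boolean
  enabledFilter?: 'all' | 'enabled' | 'disabled'
  // ...or (assumed) pass an explicit list of document ids instead.
  documentIds?: string[]
}
```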
@@ -881,14 +868,17 @@ export function KnowledgeBase({
|
||||
}
|
||||
|
||||
const selectedDocumentsList = documents.filter((doc) => selectedDocuments.has(doc.id))
|
||||
const enabledCount = selectedDocumentsList.filter((doc) => doc.enabled).length
|
||||
const disabledCount = selectedDocumentsList.filter((doc) => !doc.enabled).length
|
||||
const enabledCount = isSelectAllMode
|
||||
? enabledFilter === 'disabled'
|
||||
? 0
|
||||
: pagination.total
|
||||
: selectedDocumentsList.filter((doc) => doc.enabled).length
|
||||
const disabledCount = isSelectAllMode
|
||||
? enabledFilter === 'enabled'
|
||||
? 0
|
||||
: pagination.total
|
||||
: selectedDocumentsList.filter((doc) => !doc.enabled).length
|
||||
|
||||
/**
|
||||
* Handle right-click on a document row
|
||||
* If right-clicking on an unselected document, select only that document
|
||||
* If right-clicking on a selected document with multiple selections, keep all selections
|
||||
*/
|
||||
const handleDocumentContextMenu = useCallback(
|
||||
(e: React.MouseEvent, doc: DocumentData) => {
|
||||
const isCurrentlySelected = selectedDocuments.has(doc.id)
|
||||
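The select-all count derivation above, extracted as a pure function for clarity; it mirrors the ternaries in the hunk exactly:

```typescript
// In select-all mode the counts come from the page-independent total, constrained
// by the active filter; otherwise they come from the explicitly selected rows.
function deriveSelectionCounts(args: {
  isSelectAllMode: boolean
  enabledFilter: 'all' | 'enabled' | 'disabled'
  total: number
  selected: Array<{ enabled: boolean }>
}) {
  const { isSelectAllMode, enabledFilter, total, selected } = args
  const enabledCount = isSelectAllMode
    ? enabledFilter === 'disabled'
      ? 0
      : total
    : selected.filter((doc) => doc.enabled).length
  const disabledCount = isSelectAllMode
    ? enabledFilter === 'enabled'
      ? 0
      : total
    : selected.filter((doc) => !doc.enabled).length
  return { enabledCount, disabledCount }
}
```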
@@ -1005,11 +995,13 @@ export function KnowledgeBase({
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{knowledgeBase?.description && (
|
||||
<p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'>
|
||||
{knowledgeBase.description}
|
||||
</p>
|
||||
)}
|
||||
<div>
|
||||
{knowledgeBase?.description && (
|
||||
<p className='mt-[4px] line-clamp-2 max-w-[40vw] font-medium text-[14px] text-[var(--text-tertiary)]'>
|
||||
{knowledgeBase.description}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className='mt-[16px] flex items-center gap-[8px]'>
|
||||
<span className='text-[14px] text-[var(--text-muted)]'>
|
||||
@@ -1052,21 +1044,76 @@ export function KnowledgeBase({
|
||||
))}
|
||||
</div>
|
||||
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
onClick={handleAddDocuments}
|
||||
disabled={userPermissions.canEdit !== true}
|
||||
variant='tertiary'
|
||||
className='h-[32px] rounded-[6px]'
|
||||
>
|
||||
Add Documents
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
{userPermissions.canEdit !== true && (
|
||||
<Tooltip.Content>Write permission required to add documents</Tooltip.Content>
|
||||
)}
|
||||
</Tooltip.Root>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
<Popover open={isFilterPopoverOpen} onOpenChange={setIsFilterPopoverOpen}>
|
||||
<PopoverTrigger asChild>
|
||||
<Button variant='default' className='h-[32px] rounded-[6px]'>
|
||||
{enabledFilter === 'all'
|
||||
? 'All'
|
||||
: enabledFilter === 'enabled'
|
||||
? 'Enabled'
|
||||
: 'Disabled'}
|
||||
<ChevronDown className='ml-2 h-4 w-4 text-muted-foreground' />
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent align='end' side='bottom' sideOffset={4}>
|
||||
<div className='flex flex-col gap-[2px]'>
|
||||
<PopoverItem
|
||||
active={enabledFilter === 'all'}
|
||||
onClick={() => {
|
||||
setEnabledFilter('all')
|
||||
setIsFilterPopoverOpen(false)
|
||||
setCurrentPage(1)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
}}
|
||||
>
|
||||
All
|
||||
</PopoverItem>
|
||||
<PopoverItem
|
||||
active={enabledFilter === 'enabled'}
|
||||
onClick={() => {
|
||||
setEnabledFilter('enabled')
|
||||
setIsFilterPopoverOpen(false)
|
||||
setCurrentPage(1)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
}}
|
||||
>
|
||||
Enabled
|
||||
</PopoverItem>
|
||||
<PopoverItem
|
||||
active={enabledFilter === 'disabled'}
|
||||
onClick={() => {
|
||||
setEnabledFilter('disabled')
|
||||
setIsFilterPopoverOpen(false)
|
||||
setCurrentPage(1)
|
||||
setSelectedDocuments(new Set())
|
||||
setIsSelectAllMode(false)
|
||||
}}
|
||||
>
|
||||
Disabled
|
||||
</PopoverItem>
|
||||
</div>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
onClick={handleAddDocuments}
|
||||
disabled={userPermissions.canEdit !== true}
|
||||
variant='tertiary'
|
||||
className='h-[32px] rounded-[6px]'
|
||||
>
|
||||
Add Documents
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
{userPermissions.canEdit !== true && (
|
||||
<Tooltip.Content>Write permission required to add documents</Tooltip.Content>
|
||||
)}
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{error && !isLoadingKnowledgeBase && (
|
||||
@@ -1089,14 +1136,20 @@ export function KnowledgeBase({
|
||||
<div className='mt-[10px] flex h-64 items-center justify-center rounded-lg border border-muted-foreground/25 bg-muted/20'>
|
||||
<div className='text-center'>
|
||||
<p className='font-medium text-[var(--text-secondary)] text-sm'>
|
||||
{searchQuery ? 'No documents found' : 'No documents yet'}
|
||||
{searchQuery
|
||||
? 'No documents found'
|
||||
: enabledFilter !== 'all'
|
||||
? 'Nothing matches your filter'
|
||||
: 'No documents yet'}
|
||||
</p>
|
||||
<p className='mt-1 text-[var(--text-muted)] text-xs'>
|
||||
{searchQuery
|
||||
? 'Try a different search term'
|
||||
: userPermissions.canEdit === true
|
||||
? 'Add documents to get started'
|
||||
: 'Documents will appear here once added'}
|
||||
: enabledFilter !== 'all'
|
||||
? 'Try changing the filter'
|
||||
: userPermissions.canEdit === true
|
||||
? 'Add documents to get started'
|
||||
: 'Documents will appear here once added'}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -1120,7 +1173,7 @@ export function KnowledgeBase({
|
||||
{renderSortableHeader('tokenCount', 'Tokens', 'hidden w-[8%] lg:table-cell')}
|
||||
{renderSortableHeader('chunkCount', 'Chunks', 'w-[8%]')}
|
||||
{renderSortableHeader('uploadedAt', 'Uploaded', 'w-[11%]')}
|
||||
{renderSortableHeader('processingStatus', 'Status', 'w-[10%]')}
|
||||
{renderSortableHeader('enabled', 'Status', 'w-[10%]')}
|
||||
<TableHead className='w-[12%] px-[12px] py-[8px] text-[12px] text-[var(--text-secondary)]'>
|
||||
Tags
|
||||
</TableHead>
|
||||
@@ -1164,7 +1217,10 @@ export function KnowledgeBase({
|
||||
</TableCell>
|
||||
<TableCell className='w-[180px] max-w-[180px] px-[12px] py-[8px]'>
|
||||
<div className='flex min-w-0 items-center gap-[8px]'>
|
||||
{getFileIcon(doc.mimeType, doc.filename)}
|
||||
{(() => {
|
||||
const IconComponent = getDocumentIcon(doc.mimeType, doc.filename)
|
||||
return <IconComponent className='h-6 w-5 flex-shrink-0' />
|
||||
})()}
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<span
|
||||
@@ -1508,6 +1564,14 @@ export function KnowledgeBase({
|
||||
enabledCount={enabledCount}
|
||||
disabledCount={disabledCount}
|
||||
isLoading={isBulkOperating}
|
||||
totalCount={pagination.total}
|
||||
isAllPageSelected={isAllSelected}
|
||||
isAllSelected={isSelectAllMode}
|
||||
onSelectAll={() => setIsSelectAllMode(true)}
|
||||
onClearSelectAll={() => {
|
||||
setIsSelectAllMode(false)
|
||||
setSelectedDocuments(new Set())
|
||||
}}
|
||||
/>
|
||||
|
||||
<DocumentContextMenu
|
||||
|
||||
@@ -13,6 +13,11 @@ interface ActionBarProps {
|
||||
disabledCount?: number
|
||||
isLoading?: boolean
|
||||
className?: string
|
||||
totalCount?: number
|
||||
isAllPageSelected?: boolean
|
||||
isAllSelected?: boolean
|
||||
onSelectAll?: () => void
|
||||
onClearSelectAll?: () => void
|
||||
}
|
||||
|
||||
export function ActionBar({
|
||||
@@ -24,14 +29,21 @@ export function ActionBar({
|
||||
disabledCount = 0,
|
||||
isLoading = false,
|
||||
className,
|
||||
totalCount = 0,
|
||||
isAllPageSelected = false,
|
||||
isAllSelected = false,
|
||||
onSelectAll,
|
||||
onClearSelectAll,
|
||||
}: ActionBarProps) {
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
if (selectedCount === 0) return null
|
||||
if (selectedCount === 0 && !isAllSelected) return null
|
||||
|
||||
const canEdit = userPermissions.canEdit
|
||||
const showEnableButton = disabledCount > 0 && onEnable && canEdit
|
||||
const showDisableButton = enabledCount > 0 && onDisable && canEdit
|
||||
const showSelectAllOption =
|
||||
isAllPageSelected && !isAllSelected && totalCount > selectedCount && onSelectAll
|
||||
|
||||
return (
|
||||
<motion.div
|
||||
@@ -43,7 +55,31 @@ export function ActionBar({
|
||||
>
|
||||
<div className='flex items-center gap-[8px] rounded-[10px] border border-[var(--border)] bg-[var(--surface-2)] px-[8px] py-[6px]'>
|
||||
<span className='px-[4px] text-[13px] text-[var(--text-secondary)]'>
|
||||
{selectedCount} selected
|
||||
{isAllSelected ? totalCount : selectedCount} selected
|
||||
{showSelectAllOption && (
|
||||
<>
|
||||
{' · '}
|
||||
<button
|
||||
type='button'
|
||||
onClick={onSelectAll}
|
||||
className='text-[var(--brand-primary)] hover:underline'
|
||||
>
|
||||
Select all
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
{isAllSelected && onClearSelectAll && (
|
||||
<>
|
||||
{' · '}
|
||||
<button
|
||||
type='button'
|
||||
onClick={onClearSelectAll}
|
||||
className='text-[var(--brand-primary)] hover:underline'
|
||||
>
|
||||
Clear
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
|
||||
<div className='flex items-center gap-[5px]'>
|
||||
|
||||
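The `showSelectAllOption` condition above gates the cross-page "Select all" affordance. Isolated for readability (the `onSelectAll` presence check is omitted here since it is only a prop-availability guard):

```typescript
// The affordance only appears when the whole current page is selected, select-all
// mode is not already active, and there are more documents beyond this page.
function shouldShowSelectAll(args: {
  isAllPageSelected: boolean
  isAllSelected: boolean
  totalCount: number
  selectedCount: number
}): boolean {
  const { isAllPageSelected, isAllSelected, totalCount, selectedCount } = args
  return isAllPageSelected && !isAllSelected && totalCount > selectedCount
}
```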
@@ -123,7 +123,11 @@ export function RenameDocumentModal({
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button variant='tertiary' type='submit' disabled={isSubmitting || !name?.trim()}>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
type='submit'
|
||||
disabled={isSubmitting || !name?.trim() || name.trim() === initialName}
|
||||
>
|
||||
{isSubmitting ? 'Renaming...' : 'Rename'}
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { Badge, DocumentAttachment, Tooltip } from '@/components/emcn'
|
||||
import { formatAbsoluteDate, formatRelativeTime } from '@/lib/core/utils/formatting'
|
||||
import { BaseTagsModal } from '@/app/workspace/[workspaceId]/knowledge/[id]/components'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
@@ -21,55 +22,6 @@ interface BaseCardProps {
|
||||
onDelete?: (id: string) => Promise<void>
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to relative time (e.g., "2h ago", "3d ago")
|
||||
*/
|
||||
function formatRelativeTime(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
const now = new Date()
|
||||
const diffInSeconds = Math.floor((now.getTime() - date.getTime()) / 1000)
|
||||
|
||||
if (diffInSeconds < 60) {
|
||||
return 'just now'
|
||||
}
|
||||
if (diffInSeconds < 3600) {
|
||||
const minutes = Math.floor(diffInSeconds / 60)
|
||||
return `${minutes}m ago`
|
||||
}
|
||||
if (diffInSeconds < 86400) {
|
||||
const hours = Math.floor(diffInSeconds / 3600)
|
||||
return `${hours}h ago`
|
||||
}
|
||||
if (diffInSeconds < 604800) {
|
||||
const days = Math.floor(diffInSeconds / 86400)
|
||||
return `${days}d ago`
|
||||
}
|
||||
if (diffInSeconds < 2592000) {
|
||||
const weeks = Math.floor(diffInSeconds / 604800)
|
||||
return `${weeks}w ago`
|
||||
}
|
||||
if (diffInSeconds < 31536000) {
|
||||
const months = Math.floor(diffInSeconds / 2592000)
|
||||
return `${months}mo ago`
|
||||
}
|
||||
const years = Math.floor(diffInSeconds / 31536000)
|
||||
return `${years}y ago`
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a date string to absolute format for tooltip display
|
||||
*/
|
||||
function formatAbsoluteDate(dateString: string): string {
|
||||
const date = new Date(dateString)
|
||||
return date.toLocaleDateString('en-US', {
|
||||
year: 'numeric',
|
||||
month: 'short',
|
||||
day: 'numeric',
|
||||
hour: '2-digit',
|
||||
minute: '2-digit',
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Skeleton placeholder for a knowledge base card
|
||||
*/
|
||||
|
||||
@@ -344,53 +344,51 @@ export function CreateBaseModal({ open, onOpenChange }: CreateBaseModalProps) {
|
||||
<Textarea
|
||||
id='description'
|
||||
placeholder='Describe this knowledge base (optional)'
|
||||
rows={3}
|
||||
rows={4}
|
||||
{...register('description')}
|
||||
className={cn(errors.description && 'border-[var(--text-error)]')}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='space-y-[12px] rounded-[6px] bg-[var(--surface-5)] px-[12px] py-[14px]'>
|
||||
<div className='grid grid-cols-2 gap-[12px]'>
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label>
|
||||
<Input
|
||||
id='minChunkSize'
|
||||
placeholder='100'
|
||||
{...register('minChunkSize', { valueAsNumber: true })}
|
||||
className={cn(errors.minChunkSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='min-chunk-size'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='maxChunkSize'>Max Chunk Size (tokens)</Label>
|
||||
<Input
|
||||
id='maxChunkSize'
|
||||
placeholder='1024'
|
||||
{...register('maxChunkSize', { valueAsNumber: true })}
|
||||
className={cn(errors.maxChunkSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='max-chunk-size'
|
||||
/>
|
||||
</div>
|
||||
<div className='grid grid-cols-2 gap-[12px]'>
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='minChunkSize'>Min Chunk Size (characters)</Label>
|
||||
<Input
|
||||
id='minChunkSize'
|
||||
placeholder='100'
|
||||
{...register('minChunkSize', { valueAsNumber: true })}
|
||||
className={cn(errors.minChunkSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='min-chunk-size'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='overlapSize'>Overlap (tokens)</Label>
|
||||
<Label htmlFor='maxChunkSize'>Max Chunk Size (tokens)</Label>
|
||||
<Input
|
||||
id='overlapSize'
|
||||
placeholder='200'
|
||||
{...register('overlapSize', { valueAsNumber: true })}
|
||||
className={cn(errors.overlapSize && 'border-[var(--text-error)]')}
|
||||
id='maxChunkSize'
|
||||
placeholder='1024'
|
||||
{...register('maxChunkSize', { valueAsNumber: true })}
|
||||
className={cn(errors.maxChunkSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='overlap-size'
|
||||
name='max-chunk-size'
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
<Label htmlFor='overlapSize'>Overlap (tokens)</Label>
|
||||
<Input
|
||||
id='overlapSize'
|
||||
placeholder='200'
|
||||
{...register('overlapSize', { valueAsNumber: true })}
|
||||
className={cn(errors.overlapSize && 'border-[var(--text-error)]')}
|
||||
autoComplete='off'
|
||||
data-form-type='other'
|
||||
name='overlap-size'
|
||||
/>
|
||||
<p className='text-[11px] text-[var(--text-muted)]'>
|
||||
1 token ≈ 4 characters. Max chunk size and overlap are in tokens.
|
||||
</p>
|
||||
|
||||
@@ -59,7 +59,7 @@ export function EditKnowledgeBaseModal({
|
||||
handleSubmit,
|
||||
reset,
|
||||
watch,
|
||||
formState: { errors },
|
||||
formState: { errors, isDirty },
|
||||
} = useForm<FormValues>({
|
||||
resolver: zodResolver(FormSchema),
|
||||
defaultValues: {
|
||||
@@ -127,7 +127,7 @@ export function EditKnowledgeBaseModal({
|
||||
<Textarea
|
||||
id='description'
|
||||
placeholder='Describe this knowledge base (optional)'
|
||||
rows={3}
|
||||
rows={4}
|
||||
{...register('description')}
|
||||
className={cn(errors.description && 'border-[var(--text-error)]')}
|
||||
/>
|
||||
@@ -161,7 +161,7 @@ export function EditKnowledgeBaseModal({
|
||||
<Button
|
||||
variant='tertiary'
|
||||
type='submit'
|
||||
disabled={isSubmitting || !nameValue?.trim()}
|
||||
disabled={isSubmitting || !nameValue?.trim() || !isDirty}
|
||||
>
|
||||
{isSubmitting ? 'Saving...' : 'Save'}
|
||||
</Button>
|
||||
|
||||
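The `isDirty` gating above relies on react-hook-form's `formState.isDirty`, which only turns true once a field differs from `defaultValues`. A minimal, self-contained illustration of the pattern:

```tsx
// Minimal illustration: the submit button stays disabled until the user actually
// changes something relative to the form's defaultValues.
import { useForm } from 'react-hook-form'

function ExampleForm({ initialName }: { initialName: string }) {
  const {
    register,
    handleSubmit,
    formState: { errors, isDirty, isSubmitting },
  } = useForm<{ name: string }>({ defaultValues: { name: initialName } })

  return (
    <form onSubmit={handleSubmit((values) => console.log(values))}>
      <input {...register('name', { required: true })} />
      {errors.name && <span>Name is required</span>}
      <button type='submit' disabled={isSubmitting || !isDirty}>
        Save
      </button>
    </form>
  )
}
```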
@@ -16,8 +16,8 @@ import {
|
||||
import { redactApiKeys } from '@/lib/core/security/redaction'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import {
|
||||
BlockDetailsSidebar,
|
||||
getLeftmostBlockId,
|
||||
PreviewEditor,
|
||||
WorkflowPreview,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/preview'
|
||||
import { useExecutionSnapshot } from '@/hooks/queries/logs'
|
||||
@@ -248,11 +248,10 @@ export function ExecutionSnapshot({
|
||||
cursorStyle='pointer'
|
||||
executedBlocks={blockExecutions}
|
||||
selectedBlockId={pinnedBlockId}
|
||||
lightweight
|
||||
/>
|
||||
</div>
|
||||
{pinnedBlockId && workflowState.blocks[pinnedBlockId] && (
|
||||
<BlockDetailsSidebar
|
||||
<PreviewEditor
|
||||
block={workflowState.blocks[pinnedBlockId]}
|
||||
executionData={blockExecutions[pinnedBlockId]}
|
||||
allBlockExecutions={blockExecutions}
|
||||
|
||||
@@ -2,8 +2,7 @@
|
||||
|
||||
import type React from 'react'
|
||||
import { memo, useCallback, useMemo, useRef, useState } from 'react'
|
||||
import clsx from 'clsx'
|
||||
import { ArrowDown, ArrowUp, X } from 'lucide-react'
|
||||
import { ArrowDown, ArrowUp, Check, Clipboard, Search, X } from 'lucide-react'
|
||||
import { createPortal } from 'react-dom'
|
||||
import {
|
||||
Button,
|
||||
@@ -15,9 +14,11 @@ import {
|
||||
PopoverContent,
|
||||
PopoverDivider,
|
||||
PopoverItem,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { WorkflowIcon } from '@/components/icons'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||
import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config'
|
||||
import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
|
||||
import { getBlock, getBlockByToolName } from '@/blocks'
|
||||
@@ -26,7 +27,6 @@ import type { TraceSpan } from '@/stores/logs/filters/types'
|
||||
|
||||
interface TraceSpansProps {
|
||||
traceSpans?: TraceSpan[]
|
||||
totalDuration?: number
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -100,6 +100,20 @@ function parseTime(value?: string | number | null): number {
|
||||
return Number.isFinite(ms) ? ms : 0
|
||||
}
|
||||
|
||||
/**
 * Checks if a span or any of its descendants has an error
 */
function hasErrorInTree(span: TraceSpan): boolean {
  if (span.status === 'error') return true
  if (span.children && span.children.length > 0) {
    return span.children.some((child) => hasErrorInTree(child))
  }
  if (span.toolCalls && span.toolCalls.length > 0) {
    return span.toolCalls.some((tc) => tc.error)
  }
  return false
}
|
||||
|
||||
/**
|
||||
* Normalizes and sorts trace spans recursively.
|
||||
* Merges children from both span.children and span.output.childTraceSpans,
|
||||
@@ -142,14 +156,6 @@ function normalizeAndSortSpans(spans: TraceSpan[]): TraceSpan[] {
|
||||
|
||||
const DEFAULT_BLOCK_COLOR = '#6b7280'
|
||||
|
||||
/**
|
||||
* Formats duration in ms
|
||||
*/
|
||||
function formatDuration(ms: number): string {
|
||||
if (ms < 1000) return `${ms}ms`
|
||||
return `${(ms / 1000).toFixed(2)}s`
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets icon and color for a span type using block config
|
||||
*/
|
||||
@@ -230,11 +236,11 @@ function ProgressBar({
|
||||
}, [span, childSpans, workflowStartTime, totalDuration])
|
||||
|
||||
return (
|
||||
<div className='relative mb-[8px] h-[5px] w-full overflow-hidden rounded-[18px] bg-[var(--divider)]'>
|
||||
<div className='relative h-[5px] w-full overflow-hidden rounded-[18px] bg-[var(--divider)]'>
|
||||
{segments.map((segment, index) => (
|
||||
<div
|
||||
key={index}
|
||||
className='absolute h-full'
|
||||
className='absolute h-full opacity-70'
|
||||
style={{
|
||||
left: `${segment.startPercent}%`,
|
||||
width: `${segment.widthPercent}%`,
|
||||
@@ -246,143 +252,6 @@ function ProgressBar({
|
||||
)
|
||||
}
|
||||
|
||||
interface ExpandableRowHeaderProps {
|
||||
name: string
|
||||
duration: number
|
||||
isError: boolean
|
||||
isExpanded: boolean
|
||||
hasChildren: boolean
|
||||
showIcon: boolean
|
||||
icon: React.ComponentType<{ className?: string }> | null
|
||||
bgColor: string
|
||||
onToggle: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Reusable expandable row header with chevron, icon, name, and duration
|
||||
*/
|
||||
function ExpandableRowHeader({
|
||||
name,
|
||||
duration,
|
||||
isError,
|
||||
isExpanded,
|
||||
hasChildren,
|
||||
showIcon,
|
||||
icon: Icon,
|
||||
bgColor,
|
||||
onToggle,
|
||||
}: ExpandableRowHeaderProps) {
|
||||
return (
|
||||
<div
|
||||
className={clsx('group flex items-center justify-between', hasChildren && 'cursor-pointer')}
|
||||
onClick={hasChildren ? onToggle : undefined}
|
||||
onKeyDown={
|
||||
hasChildren
|
||||
? (e) => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
e.preventDefault()
|
||||
onToggle()
|
||||
}
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
role={hasChildren ? 'button' : undefined}
|
||||
tabIndex={hasChildren ? 0 : undefined}
|
||||
aria-expanded={hasChildren ? isExpanded : undefined}
|
||||
aria-label={hasChildren ? (isExpanded ? 'Collapse' : 'Expand') : undefined}
|
||||
>
|
||||
<div className='flex items-center gap-[8px]'>
|
||||
{hasChildren && (
|
||||
<ChevronDown
|
||||
className='h-[10px] w-[10px] flex-shrink-0 text-[var(--text-tertiary)] transition-transform duration-100 group-hover:text-[var(--text-primary)]'
|
||||
style={{ transform: isExpanded ? 'rotate(0deg)' : 'rotate(-90deg)' }}
|
||||
/>
|
||||
)}
|
||||
{showIcon && (
|
||||
<div
|
||||
className='relative flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
|
||||
style={{ background: bgColor }}
|
||||
>
|
||||
{Icon && <Icon className={clsx('text-white', '!h-[9px] !w-[9px]')} />}
|
||||
</div>
|
||||
)}
|
||||
<span
|
||||
className='font-medium text-[12px]'
|
||||
style={{ color: isError ? 'var(--text-error)' : 'var(--text-secondary)' }}
|
||||
>
|
||||
{name}
|
||||
</span>
|
||||
</div>
|
||||
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
|
||||
{formatDuration(duration)}
|
||||
</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
interface SpanContentProps {
|
||||
span: TraceSpan
|
||||
spanId: string
|
||||
isError: boolean
|
||||
workflowStartTime: number
|
||||
totalDuration: number
|
||||
expandedSections: Set<string>
|
||||
onToggle: (section: string) => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Reusable component for rendering span content (progress bar + input/output sections)
|
||||
*/
|
||||
function SpanContent({
|
||||
span,
|
||||
spanId,
|
||||
isError,
|
||||
workflowStartTime,
|
||||
totalDuration,
|
||||
expandedSections,
|
||||
onToggle,
|
||||
}: SpanContentProps) {
|
||||
const hasInput = Boolean(span.input)
|
||||
const hasOutput = Boolean(span.output)
|
||||
|
||||
return (
|
||||
<>
|
||||
<ProgressBar
|
||||
span={span}
|
||||
childSpans={span.children}
|
||||
workflowStartTime={workflowStartTime}
|
||||
totalDuration={totalDuration}
|
||||
/>
|
||||
|
||||
{hasInput && (
|
||||
<InputOutputSection
|
||||
label='Input'
|
||||
data={span.input}
|
||||
isError={false}
|
||||
spanId={spanId}
|
||||
sectionType='input'
|
||||
expandedSections={expandedSections}
|
||||
onToggle={onToggle}
|
||||
/>
|
||||
)}
|
||||
|
||||
{hasInput && hasOutput && <div className='border-[var(--border)] border-t border-dashed' />}
|
||||
|
||||
{hasOutput && (
|
||||
<InputOutputSection
|
||||
label={isError ? 'Error' : 'Output'}
|
||||
data={span.output}
|
||||
isError={isError}
|
||||
spanId={spanId}
|
||||
sectionType='output'
|
||||
expandedSections={expandedSections}
|
||||
onToggle={onToggle}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders input/output section with collapsible content, context menu, and search
|
||||
*/
|
||||
@@ -406,16 +275,14 @@ function InputOutputSection({
|
||||
const sectionKey = `${spanId}-${sectionType}`
|
||||
const isExpanded = expandedSections.has(sectionKey)
|
||||
const contentRef = useRef<HTMLDivElement>(null)
|
||||
const menuRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
// Context menu state
|
||||
const [isContextMenuOpen, setIsContextMenuOpen] = useState(false)
|
||||
const [contextMenuPosition, setContextMenuPosition] = useState({ x: 0, y: 0 })
|
||||
const [copied, setCopied] = useState(false)
|
||||
|
||||
// Code viewer features
|
||||
const {
|
||||
wrapText,
|
||||
toggleWrapText,
|
||||
isSearchActive,
|
||||
searchQuery,
|
||||
setSearchQuery,
|
||||
@@ -447,6 +314,8 @@ function InputOutputSection({
|
||||
|
||||
const handleCopy = useCallback(() => {
|
||||
navigator.clipboard.writeText(jsonString)
|
||||
setCopied(true)
|
||||
setTimeout(() => setCopied(false), 1500)
|
||||
closeContextMenu()
|
||||
}, [jsonString, closeContextMenu])
|
||||
|
||||
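The `handleCopy` callback above pairs a clipboard write with a transient `copied` flag. The same pattern as a small reusable hook, with the 1.5 second reset taken from the diff:

```tsx
// Copy-with-feedback pattern from handleCopy above, extracted as a tiny hook.
import { useCallback, useState } from 'react'

export function useCopyFeedback(resetMs = 1500) {
  const [copied, setCopied] = useState(false)

  const copy = useCallback(
    (text: string) => {
      navigator.clipboard.writeText(text)
      setCopied(true)
      setTimeout(() => setCopied(false), resetMs)
    },
    [resetMs]
  )

  return { copied, copy }
}
```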
@@ -455,13 +324,8 @@ function InputOutputSection({
|
||||
closeContextMenu()
|
||||
}, [activateSearch, closeContextMenu])
|
||||
|
||||
const handleToggleWrap = useCallback(() => {
|
||||
toggleWrapText()
|
||||
closeContextMenu()
|
||||
}, [toggleWrapText, closeContextMenu])
|
||||
|
||||
return (
|
||||
<div className='relative flex min-w-0 flex-col gap-[8px] overflow-hidden'>
|
||||
<div className='relative flex min-w-0 flex-col gap-[6px] overflow-hidden'>
|
||||
<div
|
||||
className='group flex cursor-pointer items-center justify-between'
|
||||
onClick={() => onToggle(sectionKey)}
|
||||
@@ -477,7 +341,7 @@ function InputOutputSection({
|
||||
aria-label={`${isExpanded ? 'Collapse' : 'Expand'} ${label.toLowerCase()}`}
|
||||
>
|
||||
<span
|
||||
className={clsx(
|
||||
className={cn(
|
||||
'font-medium text-[12px] transition-colors',
|
||||
isError
|
||||
? 'text-[var(--text-error)]'
|
||||
@@ -487,9 +351,7 @@ function InputOutputSection({
|
||||
{label}
|
||||
</span>
|
||||
<ChevronDown
|
||||
className={clsx(
|
||||
'h-[10px] w-[10px] text-[var(--text-tertiary)] transition-colors transition-transform group-hover:text-[var(--text-primary)]'
|
||||
)}
|
||||
className='h-[8px] w-[8px] text-[var(--text-tertiary)] transition-colors transition-transform group-hover:text-[var(--text-primary)]'
|
||||
style={{
|
||||
transform: isExpanded ? 'rotate(180deg)' : 'rotate(0deg)',
|
||||
}}
|
||||
@@ -497,16 +359,57 @@ function InputOutputSection({
|
||||
</div>
|
||||
{isExpanded && (
|
||||
<>
|
||||
<div ref={contentRef} onContextMenu={handleContextMenu}>
|
||||
<div ref={contentRef} onContextMenu={handleContextMenu} className='relative'>
|
||||
<Code.Viewer
|
||||
code={jsonString}
|
||||
language='json'
|
||||
className='!bg-[var(--surface-3)] max-h-[300px] min-h-0 max-w-full rounded-[6px] border-0 [word-break:break-all]'
|
||||
wrapText={wrapText}
|
||||
className='!bg-[var(--surface-4)] dark:!bg-[var(--surface-3)] max-h-[300px] min-h-0 max-w-full rounded-[6px] border-0 [word-break:break-all]'
|
||||
wrapText
|
||||
searchQuery={isSearchActive ? searchQuery : undefined}
|
||||
currentMatchIndex={currentMatchIndex}
|
||||
onMatchCountChange={handleMatchCountChange}
|
||||
/>
|
||||
{/* Glass action buttons overlay */}
|
||||
{!isSearchActive && (
|
||||
<div className='absolute top-[7px] right-[6px] z-10 flex gap-[4px]'>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
type='button'
|
||||
variant='default'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
handleCopy()
|
||||
}}
|
||||
className='h-[20px] w-[20px] cursor-pointer border border-[var(--border-1)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
|
||||
>
|
||||
{copied ? (
|
||||
<Check className='h-[10px] w-[10px] text-[var(--text-success)]' />
|
||||
) : (
|
||||
<Clipboard className='h-[10px] w-[10px]' />
|
||||
)}
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>{copied ? 'Copied' : 'Copy'}</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
type='button'
|
||||
variant='default'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
activateSearch()
|
||||
}}
|
||||
className='h-[20px] w-[20px] cursor-pointer border border-[var(--border-1)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
|
||||
>
|
||||
<Search className='h-[10px] w-[10px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>Search</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Search Overlay */}
|
||||
@@ -579,13 +482,10 @@ function InputOutputSection({
|
||||
height: '1px',
|
||||
}}
|
||||
/>
|
||||
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
|
||||
<PopoverContent align='start' side='bottom' sideOffset={4}>
|
||||
<PopoverItem onClick={handleCopy}>Copy</PopoverItem>
|
||||
<PopoverDivider />
|
||||
<PopoverItem onClick={handleSearch}>Search</PopoverItem>
|
||||
<PopoverItem showCheck={wrapText} onClick={handleToggleWrap}>
|
||||
Wrap Text
|
||||
</PopoverItem>
|
||||
</PopoverContent>
|
||||
</Popover>,
|
||||
document.body
|
||||
@@ -596,355 +496,229 @@ function InputOutputSection({
|
||||
)
|
||||
}
|
||||
|
||||
interface NestedBlockItemProps {
|
||||
interface TraceSpanNodeProps {
|
||||
span: TraceSpan
|
||||
parentId: string
index: number
workflowStartTime: number
totalDuration: number
depth: number
expandedNodes: Set<string>
expandedSections: Set<string>
onToggle: (section: string) => void
workflowStartTime: number
totalDuration: number
expandedChildren: Set<string>
onToggleChildren: (spanId: string) => void
onToggleNode: (nodeId: string) => void
onToggleSection: (section: string) => void
}

/**
 * Recursive component for rendering nested blocks at any depth
 * Recursive tree node component for rendering trace spans
 */
function NestedBlockItem({
const TraceSpanNode = memo(function TraceSpanNode({
span,
parentId,
index,
workflowStartTime,
totalDuration,
depth,
expandedNodes,
expandedSections,
onToggle,
workflowStartTime,
totalDuration,
expandedChildren,
onToggleChildren,
}: NestedBlockItemProps): React.ReactNode {
const spanId = span.id || `${parentId}-nested-${index}`
const isError = span.status === 'error'
const { icon: SpanIcon, bgColor } = getBlockIconAndColor(span.type, span.name)
const hasChildren = Boolean(span.children && span.children.length > 0)
const isChildrenExpanded = expandedChildren.has(spanId)

return (
<div className='flex min-w-0 flex-col gap-[8px] overflow-hidden'>
<ExpandableRowHeader
name={span.name}
duration={span.duration || 0}
isError={isError}
isExpanded={isChildrenExpanded}
hasChildren={hasChildren}
showIcon={!isIterationType(span.type)}
icon={SpanIcon}
bgColor={bgColor}
onToggle={() => onToggleChildren(spanId)}
/>

<SpanContent
span={span}
spanId={spanId}
isError={isError}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
expandedSections={expandedSections}
onToggle={onToggle}
/>

{/* Nested children */}
{hasChildren && isChildrenExpanded && (
<div className='mt-[2px] flex min-w-0 flex-col gap-[10px] overflow-hidden border-[var(--border)] border-l pl-[10px]'>
{span.children!.map((child, childIndex) => (
<NestedBlockItem
key={child.id || `${spanId}-child-${childIndex}`}
span={child}
parentId={spanId}
index={childIndex}
expandedSections={expandedSections}
onToggle={onToggle}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
expandedChildren={expandedChildren}
onToggleChildren={onToggleChildren}
/>
))}
</div>
)}
</div>
)
}

interface TraceSpanItemProps {
span: TraceSpan
totalDuration: number
workflowStartTime: number
isFirstSpan?: boolean
}

/**
 * Individual trace span card component.
 * Memoized to prevent re-renders when sibling spans change.
 */
const TraceSpanItem = memo(function TraceSpanItem({
span,
totalDuration,
workflowStartTime,
isFirstSpan = false,
}: TraceSpanItemProps): React.ReactNode {
const [expandedSections, setExpandedSections] = useState<Set<string>>(new Set())
const [expandedChildren, setExpandedChildren] = useState<Set<string>>(new Set())
const [isCardExpanded, setIsCardExpanded] = useState(false)
const toggleSet = useSetToggle()

onToggleNode,
onToggleSection,
}: TraceSpanNodeProps): React.ReactNode {
const spanId = span.id || `span-${span.name}-${span.startTime}`
const spanStartTime = new Date(span.startTime).getTime()
const spanEndTime = new Date(span.endTime).getTime()
const duration = span.duration || spanEndTime - spanStartTime

const hasChildren = Boolean(span.children && span.children.length > 0)
const hasToolCalls = Boolean(span.toolCalls && span.toolCalls.length > 0)
const isError = span.status === 'error'

const inlineChildTypes = new Set([
'tool',
'model',
'loop-iteration',
'parallel-iteration',
'workflow',
])

// For workflow-in-workflow blocks, all children should be rendered inline/nested
const isWorkflowBlock = span.type?.toLowerCase().includes('workflow')
const inlineChildren = isWorkflowBlock
? span.children || []
: span.children?.filter((child) => inlineChildTypes.has(child.type?.toLowerCase() || '')) || []
const otherChildren = isWorkflowBlock
? []
: span.children?.filter((child) => !inlineChildTypes.has(child.type?.toLowerCase() || '')) || []

const toolCallSpans = useMemo(() => {
if (!hasToolCalls) return []
return span.toolCalls!.map((toolCall, index) => {
const toolStartTime = toolCall.startTime
? new Date(toolCall.startTime).getTime()
: spanStartTime
const toolEndTime = toolCall.endTime
? new Date(toolCall.endTime).getTime()
: toolStartTime + (toolCall.duration || 0)

return {
id: `${spanId}-tool-${index}`,
name: toolCall.name,
type: 'tool',
duration: toolCall.duration || toolEndTime - toolStartTime,
startTime: new Date(toolStartTime).toISOString(),
endTime: new Date(toolEndTime).toISOString(),
status: toolCall.error ? ('error' as const) : ('success' as const),
input: toolCall.input,
output: toolCall.error
? { error: toolCall.error, ...(toolCall.output || {}) }
: toolCall.output,
} as TraceSpan
})
}, [hasToolCalls, span.toolCalls, spanId, spanStartTime])

const handleSectionToggle = useCallback(
(section: string) => toggleSet(setExpandedSections, section),
[toggleSet]
)

const handleChildrenToggle = useCallback(
(childSpanId: string) => toggleSet(setExpandedChildren, childSpanId),
[toggleSet]
)
const isDirectError = span.status === 'error'
const hasNestedError = hasErrorInTree(span)
const showErrorStyle = isDirectError || hasNestedError

const { icon: BlockIcon, bgColor } = getBlockIconAndColor(span.type, span.name)

// Check if this card has expandable inline content
const hasInlineContent =
(isWorkflowBlock && inlineChildren.length > 0) ||
(!isWorkflowBlock && (toolCallSpans.length > 0 || inlineChildren.length > 0))
// Root workflow execution is always expanded and has no toggle
const isRootWorkflow = depth === 0

const isExpandable = !isFirstSpan && hasInlineContent
// Build all children including tool calls
const allChildren = useMemo(() => {
const children: TraceSpan[] = []

// Add tool calls as child spans
if (span.toolCalls && span.toolCalls.length > 0) {
span.toolCalls.forEach((toolCall, index) => {
const toolStartTime = toolCall.startTime
? new Date(toolCall.startTime).getTime()
: spanStartTime
const toolEndTime = toolCall.endTime
? new Date(toolCall.endTime).getTime()
: toolStartTime + (toolCall.duration || 0)

children.push({
id: `${spanId}-tool-${index}`,
name: toolCall.name,
type: 'tool',
duration: toolCall.duration || toolEndTime - toolStartTime,
startTime: new Date(toolStartTime).toISOString(),
endTime: new Date(toolEndTime).toISOString(),
status: toolCall.error ? ('error' as const) : ('success' as const),
input: toolCall.input,
output: toolCall.error
? { error: toolCall.error, ...(toolCall.output || {}) }
: toolCall.output,
} as TraceSpan)
})
}

// Add regular children
if (span.children && span.children.length > 0) {
children.push(...span.children)
}

// Sort by start time
return children.sort((a, b) => parseTime(a.startTime) - parseTime(b.startTime))
}, [span, spanId, spanStartTime])

const hasChildren = allChildren.length > 0
const isExpanded = isRootWorkflow || expandedNodes.has(spanId)
const isToggleable = !isRootWorkflow

const hasInput = Boolean(span.input)
const hasOutput = Boolean(span.output)

// For progress bar - show child segments for workflow/iteration types
const lowerType = span.type?.toLowerCase() || ''
const showChildrenInProgressBar =
isIterationType(lowerType) || lowerType === 'workflow' || lowerType === 'workflow_input'

return (
<>
<div className='flex min-w-0 flex-col gap-[8px] overflow-hidden rounded-[6px] bg-[var(--surface-1)] px-[10px] py-[8px]'>
<ExpandableRowHeader
name={span.name}
duration={duration}
isError={isError}
isExpanded={isCardExpanded}
hasChildren={isExpandable}
showIcon={!isFirstSpan}
icon={BlockIcon}
bgColor={bgColor}
onToggle={() => setIsCardExpanded((prev) => !prev)}
/>

<SpanContent
span={span}
spanId={spanId}
isError={isError}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
expandedSections={expandedSections}
onToggle={handleSectionToggle}
/>

{/* For workflow blocks, keep children nested within the card (not as separate cards) */}
{!isFirstSpan && isWorkflowBlock && inlineChildren.length > 0 && isCardExpanded && (
<div className='mt-[2px] flex min-w-0 flex-col gap-[10px] overflow-hidden border-[var(--border)] border-l pl-[10px]'>
{inlineChildren.map((childSpan, index) => (
<NestedBlockItem
key={childSpan.id || `${spanId}-nested-${index}`}
span={childSpan}
parentId={spanId}
index={index}
expandedSections={expandedSections}
onToggle={handleSectionToggle}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
expandedChildren={expandedChildren}
onToggleChildren={handleChildrenToggle}
/>
))}
</div>
)}

{/* For non-workflow blocks, render inline children/tool calls */}
{!isFirstSpan && !isWorkflowBlock && isCardExpanded && (
<div className='mt-[2px] flex min-w-0 flex-col gap-[10px] overflow-hidden border-[var(--border)] border-l pl-[10px]'>
{[...toolCallSpans, ...inlineChildren].map((childSpan, index) => {
const childId = childSpan.id || `${spanId}-inline-${index}`
const childIsError = childSpan.status === 'error'
const childLowerType = childSpan.type?.toLowerCase() || ''
const hasNestedChildren = Boolean(childSpan.children && childSpan.children.length > 0)
const isNestedExpanded = expandedChildren.has(childId)
const showChildrenInProgressBar =
isIterationType(childLowerType) || childLowerType === 'workflow'
const { icon: ChildIcon, bgColor: childBgColor } = getBlockIconAndColor(
childSpan.type,
childSpan.name
)

return (
<div
key={`inline-${childId}`}
className='flex min-w-0 flex-col gap-[8px] overflow-hidden'
>
<ExpandableRowHeader
name={childSpan.name}
duration={childSpan.duration || 0}
isError={childIsError}
isExpanded={isNestedExpanded}
hasChildren={hasNestedChildren}
showIcon={!isIterationType(childSpan.type)}
icon={ChildIcon}
bgColor={childBgColor}
onToggle={() => handleChildrenToggle(childId)}
/>

<ProgressBar
span={childSpan}
childSpans={showChildrenInProgressBar ? childSpan.children : undefined}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
/>

{childSpan.input && (
<InputOutputSection
label='Input'
data={childSpan.input}
isError={false}
spanId={childId}
sectionType='input'
expandedSections={expandedSections}
onToggle={handleSectionToggle}
/>
)}

{childSpan.input && childSpan.output && (
<div className='border-[var(--border)] border-t border-dashed' />
)}

{childSpan.output && (
<InputOutputSection
label={childIsError ? 'Error' : 'Output'}
data={childSpan.output}
isError={childIsError}
spanId={childId}
sectionType='output'
expandedSections={expandedSections}
onToggle={handleSectionToggle}
/>
)}

{/* Nested children */}
{showChildrenInProgressBar && hasNestedChildren && isNestedExpanded && (
<div className='mt-[2px] flex min-w-0 flex-col gap-[10px] overflow-hidden border-[var(--border)] border-l pl-[10px]'>
{childSpan.children!.map((nestedChild, nestedIndex) => (
<NestedBlockItem
key={nestedChild.id || `${childId}-nested-${nestedIndex}`}
span={nestedChild}
parentId={childId}
index={nestedIndex}
expandedSections={expandedSections}
onToggle={handleSectionToggle}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
expandedChildren={expandedChildren}
onToggleChildren={handleChildrenToggle}
/>
))}
</div>
)}
</div>
)
})}
</div>
<div className='flex min-w-0 flex-col'>
{/* Node Header Row */}
<div
className={cn(
'group flex items-center justify-between gap-[8px] py-[6px]',
isToggleable && 'cursor-pointer'
)}
onClick={isToggleable ? () => onToggleNode(spanId) : undefined}
onKeyDown={
isToggleable
? (e) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault()
onToggleNode(spanId)
}
}
: undefined
}
role={isToggleable ? 'button' : undefined}
tabIndex={isToggleable ? 0 : undefined}
aria-expanded={isToggleable ? isExpanded : undefined}
aria-label={isToggleable ? (isExpanded ? 'Collapse' : 'Expand') : undefined}
>
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
{!isIterationType(span.type) && (
<div
className='relative flex h-[14px] w-[14px] flex-shrink-0 items-center justify-center overflow-hidden rounded-[4px]'
style={{ background: bgColor }}
>
{BlockIcon && <BlockIcon className='h-[9px] w-[9px] text-white' />}
</div>
)}
<span
className='min-w-0 max-w-[180px] truncate font-medium text-[12px]'
style={{ color: showErrorStyle ? 'var(--text-error)' : 'var(--text-secondary)' }}
>
{span.name}
</span>
{isToggleable && (
<ChevronDown
className='h-[8px] w-[8px] flex-shrink-0 text-[var(--text-tertiary)] transition-colors transition-transform duration-100 group-hover:text-[var(--text-primary)]'
style={{
transform: `translateY(-0.25px) ${isExpanded ? 'rotate(0deg)' : 'rotate(-90deg)'}`,
}}
/>
)}
</div>
<span className='flex-shrink-0 font-medium text-[12px] text-[var(--text-tertiary)]'>
{formatDuration(duration, { precision: 2 })}
</span>
</div>

{/* For the first span (workflow execution), render all children as separate top-level cards */}
{isFirstSpan &&
hasChildren &&
span.children!.map((childSpan, index) => (
<TraceSpanItem
key={childSpan.id || `${spanId}-child-${index}`}
span={childSpan}
totalDuration={totalDuration}
{/* Expanded Content */}
{isExpanded && (
<div className='flex min-w-0 flex-col gap-[10px]'>
{/* Progress Bar */}
<ProgressBar
span={span}
childSpans={showChildrenInProgressBar ? span.children : undefined}
workflowStartTime={workflowStartTime}
isFirstSpan={false}
totalDuration={totalDuration}
/>
))}

{!isFirstSpan &&
otherChildren.map((childSpan, index) => (
<TraceSpanItem
key={childSpan.id || `${spanId}-other-${index}`}
span={childSpan}
totalDuration={totalDuration}
workflowStartTime={workflowStartTime}
isFirstSpan={false}
/>
))}
</>
{/* Input/Output Sections */}
{(hasInput || hasOutput) && (
<div className='flex min-w-0 flex-col gap-[6px] overflow-hidden py-[2px]'>
{hasInput && (
<InputOutputSection
label='Input'
data={span.input}
isError={false}
spanId={spanId}
sectionType='input'
expandedSections={expandedSections}
onToggle={onToggleSection}
/>
)}

{hasInput && hasOutput && (
<div className='border-[var(--border)] border-t border-dashed' />
)}

{hasOutput && (
<InputOutputSection
label={isDirectError ? 'Error' : 'Output'}
data={span.output}
isError={isDirectError}
spanId={spanId}
sectionType='output'
expandedSections={expandedSections}
onToggle={onToggleSection}
/>
)}
</div>
)}

{/* Nested Children */}
{hasChildren && (
<div className='flex min-w-0 flex-col gap-[2px] border-[var(--border)] border-l pl-[10px]'>
{allChildren.map((child, index) => (
<div key={child.id || `${spanId}-child-${index}`} className='pl-[6px]'>
<TraceSpanNode
span={child}
workflowStartTime={workflowStartTime}
totalDuration={totalDuration}
depth={depth + 1}
expandedNodes={expandedNodes}
expandedSections={expandedSections}
onToggleNode={onToggleNode}
onToggleSection={onToggleSection}
/>
</div>
))}
</div>
)}
</div>
)}
</div>
)
})

/**
 * Displays workflow execution trace spans with nested structure.
 * Displays workflow execution trace spans with nested tree structure.
 * Memoized to prevent re-renders when parent LogDetails updates.
 */
export const TraceSpans = memo(function TraceSpans({
traceSpans,
totalDuration = 0,
}: TraceSpansProps) {
export const TraceSpans = memo(function TraceSpans({ traceSpans }: TraceSpansProps) {
const [expandedNodes, setExpandedNodes] = useState<Set<string>>(() => new Set())
const [expandedSections, setExpandedSections] = useState<Set<string>>(new Set())
const toggleSet = useSetToggle()

const { workflowStartTime, actualTotalDuration, normalizedSpans } = useMemo(() => {
if (!traceSpans || traceSpans.length === 0) {
return { workflowStartTime: 0, actualTotalDuration: totalDuration, normalizedSpans: [] }
return { workflowStartTime: 0, actualTotalDuration: 0, normalizedSpans: [] }
}

let earliest = Number.POSITIVE_INFINITY
@@ -962,26 +736,37 @@ export const TraceSpans = memo(function TraceSpans({
actualTotalDuration: latest - earliest,
normalizedSpans: normalizeAndSortSpans(traceSpans),
}
}, [traceSpans, totalDuration])
}, [traceSpans])

const handleToggleNode = useCallback(
(nodeId: string) => toggleSet(setExpandedNodes, nodeId),
[toggleSet]
)

const handleToggleSection = useCallback(
(section: string) => toggleSet(setExpandedSections, section),
[toggleSet]
)

if (!traceSpans || traceSpans.length === 0) {
return <div className='text-[12px] text-[var(--text-secondary)]'>No trace data available</div>
}

return (
<div className='flex w-full min-w-0 flex-col gap-[6px] overflow-hidden rounded-[6px] bg-[var(--surface-2)] px-[10px] py-[8px]'>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>Trace Span</span>
<div className='flex min-w-0 flex-col gap-[8px] overflow-hidden'>
{normalizedSpans.map((span, index) => (
<TraceSpanItem
key={span.id || index}
span={span}
totalDuration={actualTotalDuration}
workflowStartTime={workflowStartTime}
isFirstSpan={index === 0}
/>
))}
</div>
<div className='flex w-full min-w-0 flex-col overflow-hidden'>
{normalizedSpans.map((span, index) => (
<TraceSpanNode
key={span.id || index}
span={span}
workflowStartTime={workflowStartTime}
totalDuration={actualTotalDuration}
depth={0}
expandedNodes={expandedNodes}
expandedSections={expandedSections}
onToggleNode={handleToggleNode}
onToggleSection={handleToggleSection}
/>
))}
</div>
)
})

@@ -1,10 +1,23 @@
'use client'

import { memo, useEffect, useMemo, useRef, useState } from 'react'
import { ChevronUp, X } from 'lucide-react'
import { Button, Eye } from '@/components/emcn'
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { ArrowDown, ArrowUp, Check, ChevronUp, Clipboard, Search, X } from 'lucide-react'
import { createPortal } from 'react-dom'
import {
Button,
Code,
Eye,
Input,
Popover,
PopoverAnchor,
PopoverContent,
PopoverDivider,
PopoverItem,
Tooltip,
} from '@/components/emcn'
import { ScrollArea } from '@/components/ui/scroll-area'
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
import { cn } from '@/lib/core/utils/cn'
import {
ExecutionSnapshot,
FileCards,
@@ -17,11 +30,194 @@ import {
StatusBadge,
TriggerBadge,
} from '@/app/workspace/[workspaceId]/logs/utils'
import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'
import { usePermissionConfig } from '@/hooks/use-permission-config'
import { formatCost } from '@/providers/utils'
import type { WorkflowLog } from '@/stores/logs/filters/types'
import { useLogDetailsUIStore } from '@/stores/logs/store'

/**
 * Workflow Output section with code viewer, copy, search, and context menu functionality
 */
function WorkflowOutputSection({ output }: { output: Record<string, unknown> }) {
const contentRef = useRef<HTMLDivElement>(null)
const [copied, setCopied] = useState(false)

// Context menu state
const [isContextMenuOpen, setIsContextMenuOpen] = useState(false)
const [contextMenuPosition, setContextMenuPosition] = useState({ x: 0, y: 0 })

const {
isSearchActive,
searchQuery,
setSearchQuery,
matchCount,
currentMatchIndex,
activateSearch,
closeSearch,
goToNextMatch,
goToPreviousMatch,
handleMatchCountChange,
searchInputRef,
} = useCodeViewerFeatures({ contentRef })

const jsonString = useMemo(() => JSON.stringify(output, null, 2), [output])

const handleContextMenu = useCallback((e: React.MouseEvent) => {
e.preventDefault()
e.stopPropagation()
setContextMenuPosition({ x: e.clientX, y: e.clientY })
setIsContextMenuOpen(true)
}, [])

const closeContextMenu = useCallback(() => {
setIsContextMenuOpen(false)
}, [])

const handleCopy = useCallback(() => {
navigator.clipboard.writeText(jsonString)
setCopied(true)
setTimeout(() => setCopied(false), 1500)
closeContextMenu()
}, [jsonString, closeContextMenu])

const handleSearch = useCallback(() => {
activateSearch()
closeContextMenu()
}, [activateSearch, closeContextMenu])

return (
<div className='relative flex min-w-0 flex-col overflow-hidden'>
<div ref={contentRef} onContextMenu={handleContextMenu} className='relative'>
<Code.Viewer
code={jsonString}
language='json'
className='!bg-[var(--surface-4)] dark:!bg-[var(--surface-3)] max-h-[300px] min-h-0 max-w-full rounded-[6px] border-0 [word-break:break-all]'
wrapText
searchQuery={isSearchActive ? searchQuery : undefined}
currentMatchIndex={currentMatchIndex}
onMatchCountChange={handleMatchCountChange}
/>
{/* Glass action buttons overlay */}
{!isSearchActive && (
<div className='absolute top-[7px] right-[6px] z-10 flex gap-[4px]'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='default'
onClick={(e) => {
e.stopPropagation()
handleCopy()
}}
className='h-[20px] w-[20px] cursor-pointer border border-[var(--border-1)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
>
{copied ? (
<Check className='h-[10px] w-[10px] text-[var(--text-success)]' />
) : (
<Clipboard className='h-[10px] w-[10px]' />
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>{copied ? 'Copied' : 'Copy'}</Tooltip.Content>
</Tooltip.Root>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='default'
onClick={(e) => {
e.stopPropagation()
activateSearch()
}}
className='h-[20px] w-[20px] cursor-pointer border border-[var(--border-1)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
>
<Search className='h-[10px] w-[10px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>Search</Tooltip.Content>
</Tooltip.Root>
</div>
)}
</div>

{/* Search Overlay */}
{isSearchActive && (
<div
className='absolute top-0 right-0 z-30 flex h-[34px] items-center gap-[6px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-1)] px-[6px] shadow-sm'
onClick={(e) => e.stopPropagation()}
>
<Input
ref={searchInputRef}
type='text'
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
placeholder='Search...'
className='mr-[2px] h-[23px] w-[94px] text-[12px]'
/>
<span
className={cn(
'min-w-[45px] text-center text-[11px]',
matchCount > 0 ? 'text-[var(--text-secondary)]' : 'text-[var(--text-tertiary)]'
)}
>
{matchCount > 0 ? `${currentMatchIndex + 1}/${matchCount}` : '0/0'}
</span>
<Button
variant='ghost'
className='!p-1'
onClick={goToPreviousMatch}
disabled={matchCount === 0}
aria-label='Previous match'
>
<ArrowUp className='h-[12px] w-[12px]' />
</Button>
<Button
variant='ghost'
className='!p-1'
onClick={goToNextMatch}
disabled={matchCount === 0}
aria-label='Next match'
>
<ArrowDown className='h-[12px] w-[12px]' />
</Button>
<Button variant='ghost' className='!p-1' onClick={closeSearch} aria-label='Close search'>
<X className='h-[12px] w-[12px]' />
</Button>
</div>
)}

{/* Context Menu - rendered in portal to avoid transform/overflow clipping */}
{typeof document !== 'undefined' &&
createPortal(
<Popover
open={isContextMenuOpen}
onOpenChange={closeContextMenu}
variant='secondary'
size='sm'
colorScheme='inverted'
>
<PopoverAnchor
style={{
position: 'fixed',
left: `${contextMenuPosition.x}px`,
top: `${contextMenuPosition.y}px`,
width: '1px',
height: '1px',
}}
/>
<PopoverContent align='start' side='bottom' sideOffset={4}>
<PopoverItem onClick={handleCopy}>Copy</PopoverItem>
<PopoverDivider />
<PopoverItem onClick={handleSearch}>Search</PopoverItem>
</PopoverContent>
</Popover>,
document.body
)}
</div>
)
}

interface LogDetailsProps {
/** The log to display details for */
log: WorkflowLog | null
@@ -78,6 +274,18 @@ export const LogDetails = memo(function LogDetails({
return isWorkflowExecutionLog && log?.cost
}, [log, isWorkflowExecutionLog])

// Extract and clean the workflow final output (remove childTraceSpans for cleaner display)
const workflowOutput = useMemo(() => {
const executionData = log?.executionData as
| { finalOutput?: Record<string, unknown> }
| undefined
if (!executionData?.finalOutput) return null
const { childTraceSpans, ...cleanOutput } = executionData.finalOutput as {
childTraceSpans?: unknown
} & Record<string, unknown>
return cleanOutput
}, [log?.executionData])

useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === 'Escape' && isOpen) {
@@ -87,12 +295,12 @@ export const LogDetails = memo(function LogDetails({
if (isOpen) {
if (e.key === 'ArrowUp' && hasPrev && onNavigatePrev) {
e.preventDefault()
handleNavigate(onNavigatePrev)
onNavigatePrev()
}

if (e.key === 'ArrowDown' && hasNext && onNavigateNext) {
e.preventDefault()
handleNavigate(onNavigateNext)
onNavigateNext()
}
}
}
@@ -101,10 +309,6 @@ export const LogDetails = memo(function LogDetails({
return () => window.removeEventListener('keydown', handleKeyDown)
}, [isOpen, onClose, hasPrev, hasNext, onNavigatePrev, onNavigateNext])

const handleNavigate = (navigateFunction: () => void) => {
navigateFunction()
}

const formattedTimestamp = useMemo(
() => (log ? formatDate(log.createdAt) : null),
[log?.createdAt]
@@ -142,7 +346,7 @@ export const LogDetails = memo(function LogDetails({
<Button
variant='ghost'
className='!p-[4px]'
onClick={() => hasPrev && handleNavigate(onNavigatePrev!)}
onClick={() => hasPrev && onNavigatePrev?.()}
disabled={!hasPrev}
aria-label='Previous log'
>
@@ -151,7 +355,7 @@ export const LogDetails = memo(function LogDetails({
<Button
variant='ghost'
className='!p-[4px]'
onClick={() => hasNext && handleNavigate(onNavigateNext!)}
onClick={() => hasNext && onNavigateNext?.()}
disabled={!hasNext}
aria-label='Next log'
>
@@ -204,7 +408,7 @@ export const LogDetails = memo(function LogDetails({

{/* Execution ID */}
{log.executionId && (
<div className='flex flex-col gap-[6px] rounded-[6px] bg-[var(--surface-2)] px-[10px] py-[8px]'>
<div className='flex flex-col gap-[6px] rounded-[6px] border border-[var(--border)] bg-[var(--surface-2)] px-[10px] py-[8px]'>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
Execution ID
</span>
@@ -215,7 +419,7 @@ export const LogDetails = memo(function LogDetails({
)}

{/* Details Section */}
<div className='flex min-w-0 flex-col overflow-hidden'>
<div className='-my-[4px] flex min-w-0 flex-col overflow-hidden'>
{/* Level */}
<div className='flex h-[48px] items-center justify-between border-[var(--border)] border-b p-[8px]'>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
@@ -257,7 +461,7 @@ export const LogDetails = memo(function LogDetails({
Version
</span>
<div className='flex w-0 flex-1 justify-end'>
<span className='max-w-full truncate rounded-[6px] bg-[#14291B] px-[9px] py-[2px] font-medium text-[#86EFAC] text-[12px]'>
<span className='max-w-full truncate rounded-[6px] bg-[#bbf7d0] px-[9px] py-[2px] font-medium text-[#15803d] text-[12px] dark:bg-[#14291B] dark:text-[#86EFAC]'>
{log.deploymentVersionName || `v${log.deploymentVersion}`}
</span>
</div>
@@ -267,19 +471,35 @@ export const LogDetails = memo(function LogDetails({

{/* Workflow State */}
{isWorkflowExecutionLog && log.executionId && !permissionConfig.hideTraceSpans && (
<div className='flex flex-col gap-[6px] rounded-[6px] bg-[var(--surface-2)] px-[10px] py-[8px]'>
<div className='-mt-[8px] flex flex-col gap-[6px] rounded-[6px] border border-[var(--border)] bg-[var(--surface-2)] px-[10px] py-[8px]'>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
Workflow State
</span>
<button
<Button
variant='active'
onClick={() => setIsExecutionSnapshotOpen(true)}
className='flex items-center justify-between rounded-[6px] bg-[var(--surface-1)] px-[10px] py-[8px] transition-colors hover:bg-[var(--surface-4)]'
className='flex w-full items-center justify-between px-[10px] py-[6px]'
>
<span className='font-medium text-[12px] text-[var(--text-secondary)]'>
View Snapshot
</span>
<Eye className='h-[14px] w-[14px] text-[var(--text-subtle)]' />
</button>
<span className='font-medium text-[12px]'>View Snapshot</span>
<Eye className='h-[14px] w-[14px]' />
</Button>
</div>
)}

{/* Workflow Output */}
{isWorkflowExecutionLog && workflowOutput && !permissionConfig.hideTraceSpans && (
<div className='mt-[4px] flex flex-col gap-[6px] rounded-[6px] border border-[var(--border)] bg-[var(--surface-2)] px-[10px] py-[8px] dark:bg-transparent'>
<span
className={cn(
'font-medium text-[12px]',
workflowOutput.error
? 'text-[var(--text-error)]'
: 'text-[var(--text-tertiary)]'
)}
>
Workflow Output
</span>
<WorkflowOutputSection output={workflowOutput} />
</div>
)}

@@ -287,10 +507,12 @@ export const LogDetails = memo(function LogDetails({
{isWorkflowExecutionLog &&
log.executionData?.traceSpans &&
!permissionConfig.hideTraceSpans && (
<TraceSpans
traceSpans={log.executionData.traceSpans}
totalDuration={log.executionData.totalDuration}
/>
<div className='mt-[4px] flex flex-col gap-[6px] rounded-[6px] border border-[var(--border)] bg-[var(--surface-2)] px-[10px] py-[8px] dark:bg-transparent'>
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
Trace Span
</span>
<TraceSpans traceSpans={log.executionData.traceSpans} />
</div>
)}

{/* Files */}

@@ -19,6 +19,7 @@ import { DatePicker } from '@/components/emcn/components/date-picker/date-picker
import { cn } from '@/lib/core/utils/cn'
import { hasActiveFilters } from '@/lib/logs/filters'
import { getTriggerOptions } from '@/lib/logs/get-trigger-options'
import { type LogStatus, STATUS_CONFIG } from '@/app/workspace/[workspaceId]/logs/utils'
import { getBlock } from '@/blocks/registry'
import { useFolderStore } from '@/stores/folders/store'
import { useFilterStore } from '@/stores/logs/filters/store'
@@ -211,12 +212,12 @@ export function LogsToolbar({
}, [level])

const statusOptions: ComboboxOption[] = useMemo(
() => [
{ value: 'error', label: 'Error', icon: getColorIcon('var(--text-error)') },
{ value: 'info', label: 'Info', icon: getColorIcon('var(--terminal-status-info-color)') },
{ value: 'running', label: 'Running', icon: getColorIcon('#22c55e') },
{ value: 'pending', label: 'Pending', icon: getColorIcon('#f59e0b') },
],
() =>
(Object.keys(STATUS_CONFIG) as LogStatus[]).map((status) => ({
value: status,
label: STATUS_CONFIG[status].label,
icon: getColorIcon(STATUS_CONFIG[status].color),
})),
[]
)

@@ -242,12 +243,8 @@ export function LogsToolbar({

const selectedStatusColor = useMemo(() => {
if (selectedStatuses.length !== 1) return null
const status = selectedStatuses[0]
if (status === 'error') return 'var(--text-error)'
if (status === 'info') return 'var(--terminal-status-info-color)'
if (status === 'running') return '#22c55e'
if (status === 'pending') return '#f59e0b'
return null
const status = selectedStatuses[0] as LogStatus
return STATUS_CONFIG[status]?.color ?? null
}, [selectedStatuses])

const workflowOptions: ComboboxOption[] = useMemo(

@@ -5,7 +5,6 @@ import { getIntegrationMetadata } from '@/lib/logs/get-trigger-options'
import { getBlock } from '@/blocks/registry'
import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types'

/** Column configuration for logs table - shared between header and rows */
export const LOG_COLUMNS = {
date: { width: 'w-[8%]', minWidth: 'min-w-[70px]', label: 'Date' },
time: { width: 'w-[12%]', minWidth: 'min-w-[90px]', label: 'Time' },
@@ -16,10 +15,8 @@ export const LOG_COLUMNS = {
duration: { width: 'w-[20%]', minWidth: 'min-w-[100px]', label: 'Duration' },
} as const

/** Type-safe column key derived from LOG_COLUMNS */
export type LogColumnKey = keyof typeof LOG_COLUMNS

/** Ordered list of column keys for rendering table headers */
export const LOG_COLUMN_ORDER: readonly LogColumnKey[] = [
'date',
'time',
@@ -30,7 +27,6 @@ export const LOG_COLUMN_ORDER: readonly LogColumnKey[] = [
'duration',
] as const

/** Possible execution status values for workflow logs */
export type LogStatus = 'error' | 'pending' | 'running' | 'info' | 'cancelled'

/**
@@ -53,30 +49,28 @@ export function getDisplayStatus(status: string | null | undefined): LogStatus {
}
}

/** Configuration mapping log status to Badge variant and display label */
const STATUS_VARIANT_MAP: Record<
export const STATUS_CONFIG: Record<
LogStatus,
{ variant: React.ComponentProps<typeof Badge>['variant']; label: string }
{ variant: React.ComponentProps<typeof Badge>['variant']; label: string; color: string }
> = {
error: { variant: 'red', label: 'Error' },
pending: { variant: 'amber', label: 'Pending' },
running: { variant: 'green', label: 'Running' },
cancelled: { variant: 'gray', label: 'Cancelled' },
info: { variant: 'gray', label: 'Info' },
error: { variant: 'red', label: 'Error', color: 'var(--text-error)' },
pending: { variant: 'amber', label: 'Pending', color: '#f59e0b' },
running: { variant: 'green', label: 'Running', color: '#22c55e' },
cancelled: { variant: 'orange', label: 'Cancelled', color: '#f97316' },
info: { variant: 'gray', label: 'Info', color: 'var(--terminal-status-info-color)' },
}

/** Configuration mapping core trigger types to Badge color variants */
const TRIGGER_VARIANT_MAP: Record<string, React.ComponentProps<typeof Badge>['variant']> = {
manual: 'gray-secondary',
api: 'blue',
schedule: 'green',
chat: 'purple',
webhook: 'orange',
mcp: 'cyan',
a2a: 'teal',
}

interface StatusBadgeProps {
/** The execution status to display */
status: LogStatus
}

@@ -86,14 +80,13 @@ interface StatusBadgeProps {
 * @returns A Badge with dot indicator and status label
 */
export const StatusBadge = React.memo(({ status }: StatusBadgeProps) => {
const config = STATUS_VARIANT_MAP[status]
const config = STATUS_CONFIG[status]
return React.createElement(Badge, { variant: config.variant, dot: true }, config.label)
})

StatusBadge.displayName = 'StatusBadge'

interface TriggerBadgeProps {
/** The trigger type identifier (e.g., 'manual', 'api', or integration block type) */
trigger: string
}


@@ -213,7 +213,6 @@ function TemplateCardInner({
isPannable={false}
defaultZoom={0.8}
fitPadding={0.2}
lightweight
cursorStyle='pointer'
/>
) : (

@@ -2,12 +2,12 @@ import { memo, useCallback } from 'react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
import { Button, Copy, Tooltip, Trash2 } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { validateTriggerPaste } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useNotificationStore } from '@/stores/notifications'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getUniqueBlockName, prepareDuplicateBlockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const DEFAULT_DUPLICATE_OFFSET = { x: 50, y: 50 }
@@ -48,29 +48,38 @@ export const ActionBar = memo(
collaborativeBatchToggleBlockEnabled,
collaborativeBatchToggleBlockHandles,
} = useCollaborativeWorkflow()
const { activeWorkflowId, setPendingSelection } = useWorkflowRegistry()
const { setPendingSelection } = useWorkflowRegistry()

const addNotification = useNotificationStore((s) => s.addNotification)

const handleDuplicateBlock = useCallback(() => {
const blocks = useWorkflowStore.getState().blocks
const sourceBlock = blocks[blockId]
if (!sourceBlock) return
const { copyBlocks, preparePasteData, activeWorkflowId } = useWorkflowRegistry.getState()
const existingBlocks = useWorkflowStore.getState().blocks
copyBlocks([blockId])

const newId = crypto.randomUUID()
const newName = getUniqueBlockName(sourceBlock.name, blocks)
const subBlockValues =
useSubBlockStore.getState().workflowValues[activeWorkflowId || '']?.[blockId] || {}
const pasteData = preparePasteData(DEFAULT_DUPLICATE_OFFSET)
if (!pasteData) return

const { block, subBlockValues: filteredValues } = prepareDuplicateBlockState({
sourceBlock,
newId,
newName,
positionOffset: DEFAULT_DUPLICATE_OFFSET,
subBlockValues,
})
const blocks = Object.values(pasteData.blocks)
const validation = validateTriggerPaste(blocks, existingBlocks, 'duplicate')
if (!validation.isValid) {
addNotification({
level: 'error',
message: validation.message!,
workflowId: activeWorkflowId || undefined,
})
return
}

setPendingSelection([newId])
collaborativeBatchAddBlocks([block], [], {}, {}, { [newId]: filteredValues })
}, [blockId, activeWorkflowId, collaborativeBatchAddBlocks, setPendingSelection])
setPendingSelection(blocks.map((b) => b.id))
collaborativeBatchAddBlocks(
blocks,
pasteData.edges,
pasteData.loops,
pasteData.parallels,
pasteData.subBlockValues
)
}, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection])

const { isEnabled, horizontalHandles, parentId, parentType } = useWorkflowStore(
useCallback(
@@ -90,7 +99,7 @@ export const ActionBar = memo(

const userPermissions = useUserPermissionsContext()

const isStartBlock = isValidStartBlockType(blockType)
const isStartBlock = isInputDefinitionTrigger(blockType)
const isResponseBlock = blockType === 'response'
const isNoteBlock = blockType === 'note'
const isSubflowBlock = blockType === 'loop' || blockType === 'parallel'
@@ -119,7 +128,7 @@ export const ActionBar = memo(
'dark:border-transparent dark:bg-[var(--surface-4)]'
)}
>
{!isNoteBlock && !isSubflowBlock && (
{!isNoteBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
@@ -142,7 +151,30 @@ export const ActionBar = memo(
</Tooltip.Root>
)}

{!isStartBlock && !isResponseBlock && !isSubflowBlock && (
{isSubflowBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeBatchToggleBlockEnabled([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled}
>
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
</Tooltip.Content>
</Tooltip.Root>
)}

{!isStartBlock && !isResponseBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button

@@ -8,7 +8,7 @@ import {
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'

/**
 * Block information for context menu actions
@@ -74,12 +74,16 @@ export function BlockMenu({
const allEnabled = selectedBlocks.every((b) => b.enabled)
const allDisabled = selectedBlocks.every((b) => !b.enabled)

const hasStarterBlock = selectedBlocks.some((b) => isValidStartBlockType(b.type))
const hasSingletonBlock = selectedBlocks.some(
(b) =>
TriggerUtils.requiresSingleInstance(b.type) || TriggerUtils.isSingleInstanceBlockType(b.type)
)
const hasTriggerBlock = selectedBlocks.some((b) => TriggerUtils.isTriggerBlock(b))
const allNoteBlocks = selectedBlocks.every((b) => b.type === 'note')
const isSubflow =
isSingleBlock && (selectedBlocks[0]?.type === 'loop' || selectedBlocks[0]?.type === 'parallel')

const canRemoveFromSubflow = showRemoveFromSubflow && !hasStarterBlock
const canRemoveFromSubflow = showRemoveFromSubflow && !hasTriggerBlock

const getToggleEnabledLabel = () => {
if (allEnabled) return 'Disable'
@@ -127,7 +131,7 @@ export function BlockMenu({
<span>Paste</span>
<span className='ml-auto opacity-70 group-hover:opacity-100'>⌘V</span>
</PopoverItem>
{!hasStarterBlock && (
{!hasSingletonBlock && (
<PopoverItem
disabled={disableEdit}
onClick={() => {

@@ -26,7 +26,6 @@ export interface CanvasMenuProps {
onOpenLogs: () => void
onToggleVariables: () => void
onToggleChat: () => void
onInvite: () => void
isVariablesOpen?: boolean
isChatOpen?: boolean
hasClipboard?: boolean
@@ -55,15 +54,12 @@ export function CanvasMenu({
onOpenLogs,
onToggleVariables,
onToggleChat,
onInvite,
isVariablesOpen = false,
isChatOpen = false,
hasClipboard = false,
disableEdit = false,
disableAdmin = false,
canUndo = false,
canRedo = false,
isInvitationsDisabled = false,
}: CanvasMenuProps) {
return (
<Popover
@@ -179,22 +175,6 @@ export function CanvasMenu({
>
{isChatOpen ? 'Close Chat' : 'Open Chat'}
</PopoverItem>

{/* Admin action - hidden when invitations are disabled */}
{!isInvitationsDisabled && (
<>
<PopoverDivider />
<PopoverItem
disabled={disableAdmin}
onClick={() => {
onInvite()
onClose()
}}
>
Invite to Workspace
</PopoverItem>
</>
)}
</PopoverContent>
</Popover>
)

@@ -886,17 +886,16 @@ export function Chat() {
onMouseDown={(e) => e.stopPropagation()}
>
{shouldShowConfigureStartInputsButton && (
<Badge
variant='outline'
className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
<div
className='flex flex-none cursor-pointer items-center whitespace-nowrap rounded-[6px] border border-[var(--border-1)] bg-[var(--surface-5)] px-[9px] py-[2px] font-medium font-sans text-[12px] text-[var(--text-primary)] hover:bg-[var(--surface-7)] dark:hover:border-[var(--surface-7)] dark:hover:bg-[var(--border-1)]'
title='Add chat inputs to Start block'
onMouseDown={(e) => {
e.stopPropagation()
handleConfigureStartInputs()
}}
>
<span className='whitespace-nowrap text-[12px]'>Add inputs</span>
</Badge>
<span className='whitespace-nowrap'>Add inputs</span>
</div>
)}

<OutputSelect

@@ -129,10 +129,6 @@ export function OutputSelect({
? baselineWorkflow.blocks?.[block.id]?.subBlocks?.responseFormat?.value
: subBlockValues?.[block.id]?.responseFormat
const responseFormat = parseResponseFormatSafely(responseFormatValue, block.id)
const operationValue =
shouldUseBaseline && baselineWorkflow
? baselineWorkflow.blocks?.[block.id]?.subBlocks?.operation?.value
: subBlockValues?.[block.id]?.operation

let outputsToProcess: Record<string, unknown> = {}

@@ -146,10 +142,20 @@ export function OutputSelect({
outputsToProcess = blockConfig?.outputs || {}
}
} else {
const toolOutputs =
blockConfig && typeof operationValue === 'string'
? getToolOutputs(blockConfig, operationValue)
: {}
// Build subBlocks object for tool selector
const rawSubBlockValues =
shouldUseBaseline && baselineWorkflow
? baselineWorkflow.blocks?.[block.id]?.subBlocks
: subBlockValues?.[block.id]
const subBlocks: Record<string, { value: unknown }> = {}
if (rawSubBlockValues && typeof rawSubBlockValues === 'object') {
for (const [key, val] of Object.entries(rawSubBlockValues)) {
// Handle both { value: ... } and raw value formats
subBlocks[key] = val && typeof val === 'object' && 'value' in val ? val : { value: val }
}
}

const toolOutputs = blockConfig ? getToolOutputs(blockConfig, subBlocks) : {}
outputsToProcess =
Object.keys(toolOutputs).length > 0 ? toolOutputs : blockConfig?.outputs || {}
}

@@ -138,18 +138,24 @@ export const Notifications = memo(function Notifications() {
}`}
>
<div className='flex h-full flex-col justify-between px-[8px] pt-[6px] pb-[8px]'>
<div
className={`font-medium text-[12px] leading-[16px] ${
hasAction ? 'line-clamp-2' : 'line-clamp-4'
}`}
>
<div className='flex items-start gap-[8px]'>
<div
className={`min-w-0 flex-1 font-medium text-[12px] leading-[16px] ${
hasAction ? 'line-clamp-2' : 'line-clamp-4'
}`}
>
{notification.level === 'error' && (
<span className='mr-[6px] mb-[2.75px] inline-block h-[6px] w-[6px] rounded-[2px] bg-[var(--text-error)] align-middle' />
)}
{notification.message}
</div>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={() => removeNotification(notification.id)}
aria-label='Dismiss notification'
className='!p-1.5 -m-1.5 float-right ml-[16px]'
className='!p-1.5 -m-1.5 shrink-0'
>
<X className='h-3 w-3' />
</Button>
@@ -158,10 +164,6 @@ export const Notifications = memo(function Notifications() {
<Tooltip.Shortcut keys='⌘E'>Clear all</Tooltip.Shortcut>
</Tooltip.Content>
</Tooltip.Root>
{notification.level === 'error' && (
<span className='mr-[6px] mb-[2.75px] inline-block h-[6px] w-[6px] rounded-[2px] bg-[var(--text-error)] align-middle' />
)}
{notification.message}
</div>
{hasAction && (
<Button

@@ -78,6 +78,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
mode,
setMode,
isAborting,
maskCredentialValue,
} = useCopilotStore()

const messageCheckpoints = isUser ? allMessageCheckpoints[message.id] || [] : []
@@ -210,7 +211,10 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
const isLastTextBlock =
index === message.contentBlocks!.length - 1 && block.type === 'text'
const parsed = parseSpecialTags(block.content)
const cleanBlockContent = parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
// Mask credential IDs in the displayed content
const cleanBlockContent = maskCredentialValue(
parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
)

if (!cleanBlockContent.trim()) return null

@@ -238,7 +242,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
return (
<div key={blockKey} className='w-full'>
<ThinkingBlock
content={block.content}
content={maskCredentialValue(block.content)}
isStreaming={isActivelyStreaming}
hasFollowingContent={hasFollowingContent}
hasSpecialTags={hasSpecialTags}
@@ -261,7 +265,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
}
return null
})
}, [message.contentBlocks, isActivelyStreaming, parsedTags, isLastMessage])
}, [message.contentBlocks, isActivelyStreaming, parsedTags, isLastMessage, maskCredentialValue])

if (isUser) {
return (

@@ -782,6 +782,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
const [isExpanded, setIsExpanded] = useState(true)
const [duration, setDuration] = useState(0)
const startTimeRef = useRef<number>(Date.now())
const maskCredentialValue = useCopilotStore((s) => s.maskCredentialValue)
const wasStreamingRef = useRef(false)

// Only show streaming animations for current message
@@ -816,14 +817,16 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
currentText += parsed.cleanContent
} else if (block.type === 'subagent_tool_call' && block.toolCall) {
if (currentText.trim()) {
segments.push({ type: 'text', content: currentText })
// Mask any credential IDs in the accumulated text before displaying
segments.push({ type: 'text', content: maskCredentialValue(currentText) })
currentText = ''
}
segments.push({ type: 'tool', block })
}
}
if (currentText.trim()) {
segments.push({ type: 'text', content: currentText })
// Mask any credential IDs in the accumulated text before displaying
segments.push({ type: 'text', content: maskCredentialValue(currentText) })
}

const allParsed = parseSpecialTags(allRawText)
@@ -952,6 +955,7 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
toolCall: CopilotToolCall
}) {
const blocks = useWorkflowStore((s) => s.blocks)
const maskCredentialValue = useCopilotStore((s) => s.maskCredentialValue)

const cachedBlockInfoRef = useRef<Record<string, { name: string; type: string }>>({})

@@ -983,6 +987,7 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
title: string
value: any
isPassword?: boolean
isCredential?: boolean
}

interface BlockChange {
@@ -1091,6 +1096,7 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
title: subBlockConfig.title ?? subBlockConfig.id,
value,
isPassword: subBlockConfig.password === true,
isCredential: subBlockConfig.type === 'oauth-input',
})
}
}
@@ -1172,8 +1178,15 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
{subBlocksToShow && subBlocksToShow.length > 0 && (
<div className='border-[var(--border-1)] border-t px-2.5 py-1.5'>
{subBlocksToShow.map((sb) => {
// Mask password fields like the canvas does
const displayValue = sb.isPassword ? '•••' : getDisplayValue(sb.value)
// Mask password fields and credential IDs
let displayValue: string
if (sb.isPassword) {
displayValue = '•••'
} else {
// Get display value first, then mask any credential IDs that might be in it
const rawValue = getDisplayValue(sb.value)
displayValue = maskCredentialValue(rawValue)
}
return (
<div key={sb.id} className='flex items-start gap-1.5 py-0.5 text-[11px]'>
<span
@@ -1412,10 +1425,13 @@ function RunSkipButtons({
setIsProcessing(true)
setButtonsHidden(true)
try {
// Add to auto-allowed list first
// Add to auto-allowed list - this also executes all pending integration tools of this type
await addAutoAllowedTool(toolCall.name)
// Then execute
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
// For client tools with interrupts (not integration tools), we still need to call handleRun
// since executeIntegrationTool only works for server-side tools
if (!isIntegrationTool(toolCall.name)) {
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
}
} finally {
setIsProcessing(false)
actionInProgressRef.current = false
@@ -1438,10 +1454,10 @@ function RunSkipButtons({

if (buttonsHidden) return null

// Hide "Always Allow" for integration tools (only show for client tools with interrupts)
const showAlwaysAllow = !isIntegrationTool(toolCall.name)
// Show "Always Allow" for all tools that require confirmation
const showAlwaysAllow = true

// Standardized buttons for all interrupt tools: Allow, (Always Allow for client tools only), Skip
// Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
return (
<div className='mt-[10px] flex gap-[6px]'>
<Button onClick={onRun} disabled={isProcessing} variant='tertiary'>

@@ -105,10 +105,10 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
isSendingMessage,
])

/** Load auto-allowed tools once on mount */
/** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
const hasLoadedAutoAllowedToolsRef = useRef(false)
useEffect(() => {
if (hasMountedRef.current && !hasLoadedAutoAllowedToolsRef.current) {
if (!hasLoadedAutoAllowedToolsRef.current) {
hasLoadedAutoAllowedToolsRef.current = true
loadAutoAllowedTools().catch((err) => {
logger.warn('[Copilot] Failed to load auto-allowed tools', err)

@@ -5,7 +5,6 @@ import { createLogger } from '@sim/logger'
import { Check, Clipboard } from 'lucide-react'
import { useParams } from 'next/navigation'
import {
Badge,
Button,
ButtonGroup,
ButtonGroupItem,
@@ -883,14 +882,13 @@ console.log(data);`
<code className='text-[10px]'><start.files></code>.
</p>
{missingFields.any && (
<Badge
variant='outline'
className='flex-none cursor-pointer whitespace-nowrap rounded-[6px]'
<div
className='flex flex-none cursor-pointer items-center whitespace-nowrap rounded-[6px] border border-[var(--border-1)] bg-[var(--surface-5)] px-[9px] py-[2px] font-medium font-sans text-[12px] text-[var(--text-primary)] hover:bg-[var(--surface-7)] dark:hover:border-[var(--surface-7)] dark:hover:bg-[var(--border-1)]'
title='Add required A2A input fields to Start block'
onClick={handleAddA2AInputs}
>
<span className='whitespace-nowrap text-[12px]'>Add inputs</span>
</Badge>
<span className='whitespace-nowrap'>Add inputs</span>
</div>
)}
</div>
</div>

@@ -17,7 +17,7 @@ import { Skeleton } from '@/components/ui'
|
||||
import { isDev } from '@/lib/core/config/feature-flags'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { getBaseUrl, getEmailDomain } from '@/lib/core/utils/urls'
|
||||
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
|
||||
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
||||
import {
|
||||
type FieldConfig,
|
||||
useCreateForm,
|
||||
@@ -147,7 +147,7 @@ export function FormDeploy({
|
||||
|
||||
useEffect(() => {
|
||||
const blocks = Object.values(useWorkflowStore.getState().blocks)
|
||||
const startBlock = blocks.find((b) => isValidStartBlockType(b.type))
|
||||
const startBlock = blocks.find((b) => isInputDefinitionTrigger(b.type))
|
||||
|
||||
if (startBlock) {
|
||||
const inputFormat = useSubBlockStore.getState().getValue(startBlock.id, 'inputFormat')
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
Textarea,
|
||||
} from '@/components/emcn'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
|
||||
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
|
||||
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
||||
import type { InputFormatField } from '@/lib/workflows/types'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
@@ -52,7 +52,7 @@ export function ApiInfoModal({ open, onOpenChange, workflowId }: ApiInfoModalPro
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
if (!block || typeof block !== 'object') continue
|
||||
const blockType = (block as { type?: string }).type
|
||||
if (blockType && isValidStartBlockType(blockType)) {
|
||||
if (blockType && isInputDefinitionTrigger(blockType)) {
|
||||
return blockId
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,8 +18,8 @@ import {
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
BlockDetailsSidebar,
|
||||
getLeftmostBlockId,
|
||||
PreviewEditor,
|
||||
WorkflowPreview,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/preview'
|
||||
import { useDeploymentVersionState, useRevertToVersion } from '@/hooks/queries/workflows'
|
||||
@@ -337,7 +337,7 @@ export function GeneralDeploy({
|
||||
/>
|
||||
</div>
|
||||
{expandedSelectedBlockId && workflowToShow.blocks?.[expandedSelectedBlockId] && (
|
||||
<BlockDetailsSidebar
|
||||
<PreviewEditor
|
||||
block={workflowToShow.blocks[expandedSelectedBlockId]}
|
||||
workflowVariables={workflowToShow.variables}
|
||||
loops={workflowToShow.loops}
|
||||
|
||||
@@ -15,7 +15,7 @@ import {
|
||||
import { Skeleton } from '@/components/ui'
|
||||
import { generateToolInputSchema, sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||
import { normalizeInputFormatValue } from '@/lib/workflows/input-format'
|
||||
import { isValidStartBlockType } from '@/lib/workflows/triggers/start-block-types'
|
||||
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
||||
import type { InputFormatField } from '@/lib/workflows/types'
|
||||
import {
|
||||
useAddWorkflowMcpTool,
|
||||
@@ -107,7 +107,7 @@ export function McpDeploy({
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
if (!block || typeof block !== 'object') continue
|
||||
const blockType = (block as { type?: string }).type
|
||||
if (blockType && isValidStartBlockType(blockType)) {
|
||||
if (blockType && isInputDefinitionTrigger(blockType)) {
|
||||
return blockId
|
||||
}
|
||||
}
|
||||
|
||||
@@ -446,7 +446,6 @@ const OGCaptureContainer = forwardRef<HTMLDivElement>((_, ref) => {
|
||||
isPannable={false}
|
||||
defaultZoom={0.8}
|
||||
fitPadding={0.2}
|
||||
lightweight
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -35,9 +35,9 @@ import { WandPromptBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/comp
|
||||
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
|
||||
import { useWand } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-wand'
|
||||
import type { GenerationType } from '@/blocks/types'
|
||||
import { normalizeName } from '@/executor/constants'
|
||||
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
|
||||
import { useTagSelection } from '@/hooks/kb/use-tag-selection'
|
||||
import { normalizeName } from '@/stores/workflows/utils'
|
||||
|
||||
const logger = createLogger('Code')
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import { useReactFlow } from 'reactflow'
|
||||
import { useStoreWithEqualityFn } from 'zustand/traditional'
|
||||
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
@@ -102,7 +103,8 @@ export const ComboBox = memo(function ComboBox({
|
||||
[blockConfig?.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = blockState?.data?.canonicalModes
|
||||
const dependencyValues = useSubBlockStore(
|
||||
const dependencyValues = useStoreWithEqualityFn(
|
||||
useSubBlockStore,
|
||||
useCallback(
|
||||
(state) => {
|
||||
if (dependsOnFields.length === 0 || !activeWorkflowId) return []
|
||||
|
||||
@@ -32,9 +32,9 @@ import {
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
|
||||
import { normalizeName } from '@/executor/constants'
|
||||
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
|
||||
import { useTagSelection } from '@/hooks/kb/use-tag-selection'
|
||||
import { normalizeName } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
const logger = createLogger('ConditionInput')
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import { useStoreWithEqualityFn } from 'zustand/traditional'
|
||||
import { Badge } from '@/components/emcn'
|
||||
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
@@ -100,7 +101,8 @@ export const Dropdown = memo(function Dropdown({
|
||||
[blockConfig?.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = blockState?.data?.canonicalModes
|
||||
const dependencyValues = useSubBlockStore(
|
||||
const dependencyValues = useStoreWithEqualityFn(
|
||||
useSubBlockStore,
|
||||
useCallback(
|
||||
(state) => {
|
||||
if (dependsOnFields.length === 0 || !activeWorkflowId) return []
|
||||
|
||||
@@ -8,9 +8,10 @@ import { Button, Combobox } from '@/components/emcn/components'
|
||||
import { Progress } from '@/components/ui/progress'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace'
|
||||
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import { useSubBlockValue } from '../../hooks/use-sub-block-value'
|
||||
|
||||
const logger = createLogger('FileUpload')
|
||||
|
||||
@@ -85,14 +86,47 @@ export function FileUpload({
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a file's MIME type matches the accepted types
|
||||
* Supports exact matches, wildcard patterns (e.g., 'image/*'), and '*' for all types
|
||||
*/
|
||||
const isFileTypeAccepted = (fileType: string | undefined, accepted: string): boolean => {
|
||||
if (accepted === '*') return true
|
||||
if (!fileType) return false
|
||||
|
||||
const acceptedList = accepted.split(',').map((t) => t.trim().toLowerCase())
|
||||
const normalizedFileType = fileType.toLowerCase()
|
||||
|
||||
return acceptedList.some((acceptedType) => {
|
||||
if (acceptedType === normalizedFileType) return true
|
||||
|
||||
if (acceptedType.endsWith('/*')) {
|
||||
const typePrefix = acceptedType.slice(0, -1) // 'image/' from 'image/*'
|
||||
return normalizedFileType.startsWith(typePrefix)
|
||||
}
|
||||
|
||||
if (acceptedType.startsWith('.')) {
|
||||
const extension = acceptedType.slice(1).toLowerCase()
|
||||
const fileExtension = getExtensionFromMimeType(normalizedFileType)
|
||||
if (fileExtension === extension) return true
|
||||
return normalizedFileType.endsWith(`/${extension}`)
|
||||
}
|
||||
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
const availableWorkspaceFiles = workspaceFiles.filter((workspaceFile) => {
|
||||
const existingFiles = Array.isArray(value) ? value : value ? [value] : []
|
||||
return !existingFiles.some(
|
||||
|
||||
const isAlreadySelected = existingFiles.some(
|
||||
(existing) =>
|
||||
existing.name === workspaceFile.name ||
|
||||
existing.path?.includes(workspaceFile.key) ||
|
||||
existing.key === workspaceFile.key
|
||||
)
|
||||
|
||||
return !isAlreadySelected
|
||||
})
|
||||
|
||||
useEffect(() => {
|
||||
@@ -421,23 +455,23 @@ export function FileUpload({
|
||||
return (
|
||||
<div
|
||||
key={fileKey}
|
||||
className='flex items-center justify-between rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] px-[8px] py-[6px] hover:border-[var(--surface-7)] hover:bg-[var(--surface-5)] dark:bg-[var(--surface-5)] dark:hover:bg-[var(--border-1)]'
|
||||
className='relative rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] px-[8px] py-[6px] hover:border-[var(--surface-7)] hover:bg-[var(--surface-5)] dark:bg-[var(--surface-5)] dark:hover:bg-[var(--border-1)]'
|
||||
>
|
||||
<div className='flex-1 truncate pr-2 text-sm' title={file.name}>
|
||||
<div className='truncate pr-[24px] text-sm' title={file.name}>
|
||||
<span className='text-[var(--text-primary)]'>{truncateMiddle(file.name)}</span>
|
||||
<span className='ml-2 text-[var(--text-muted)]'>({formatFileSize(file.size)})</span>
|
||||
</div>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
className='h-5 w-5 shrink-0 p-0'
|
||||
className='-translate-y-1/2 absolute top-1/2 right-[4px] h-6 w-6 p-0'
|
||||
onClick={(e) => handleRemoveFile(file, e)}
|
||||
disabled={isDeleting}
|
||||
>
|
||||
{isDeleting ? (
|
||||
<div className='h-3.5 w-3.5 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
|
||||
<div className='h-4 w-4 animate-spin rounded-full border-[1.5px] border-current border-t-transparent' />
|
||||
) : (
|
||||
<X className='h-3.5 w-3.5' />
|
||||
<X className='h-4 w-4 opacity-50' />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
@@ -468,19 +502,30 @@ export function FileUpload({
|
||||
const comboboxOptions = useMemo(
|
||||
() => [
|
||||
{ label: 'Upload New File', value: '__upload_new__' },
|
||||
...availableWorkspaceFiles.map((file) => ({
|
||||
label: file.name,
|
||||
value: file.id,
|
||||
})),
|
||||
...availableWorkspaceFiles.map((file) => {
|
||||
const isAccepted =
|
||||
!acceptedTypes || acceptedTypes === '*' || isFileTypeAccepted(file.type, acceptedTypes)
|
||||
return {
|
||||
label: file.name,
|
||||
value: file.id,
|
||||
disabled: !isAccepted,
|
||||
}
|
||||
}),
|
||||
],
|
||||
[availableWorkspaceFiles]
|
||||
[availableWorkspaceFiles, acceptedTypes]
|
||||
)
|
||||
|
||||
const handleComboboxChange = (value: string) => {
|
||||
setInputValue(value)
|
||||
|
||||
const isValidOption =
|
||||
value === '__upload_new__' || availableWorkspaceFiles.some((file) => file.id === value)
|
||||
const selectedFile = availableWorkspaceFiles.find((file) => file.id === value)
|
||||
const isAcceptedType =
|
||||
selectedFile &&
|
||||
(!acceptedTypes ||
|
||||
acceptedTypes === '*' ||
|
||||
isFileTypeAccepted(selectedFile.type, acceptedTypes))
|
||||
|
||||
const isValidOption = value === '__upload_new__' || isAcceptedType
|
||||
|
||||
if (!isValidOption) {
|
||||
return
|
||||
|
||||
@@ -2,9 +2,8 @@
|
||||
|
||||
import type { ReactNode } from 'react'
|
||||
import { splitReferenceSegment } from '@/lib/workflows/sanitization/references'
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
import { normalizeName, REFERENCE } from '@/executor/constants'
|
||||
import { createCombinedPattern } from '@/executor/utils/reference-validation'
|
||||
import { normalizeName } from '@/stores/workflows/utils'
|
||||
|
||||
export interface HighlightContext {
|
||||
accessiblePrefixes?: Set<string>
|
||||
|
||||
@@ -34,3 +34,4 @@ export { Text } from './text/text'
|
||||
export { TimeInput } from './time-input/time-input'
|
||||
export { ToolInput } from './tool-input/tool-input'
|
||||
export { VariablesInput } from './variables-input/variables-input'
|
||||
export { WorkflowSelectorInput } from './workflow-selector/workflow-selector-input'
|
||||
|
||||
@@ -2,12 +2,13 @@ import { useMemo, useRef, useState } from 'react'
|
||||
import { Badge, Input } from '@/components/emcn'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
|
||||
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
|
||||
import { TagDropdown } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown'
|
||||
import { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
|
||||
import { useWorkflowInputFields } from '@/hooks/queries/workflows'
|
||||
import { useWorkflowState } from '@/hooks/queries/workflows'
|
||||
|
||||
/**
|
||||
* Props for the InputMappingField component
|
||||
@@ -70,7 +71,11 @@ export function InputMapping({
|
||||
const overlayRefs = useRef<Map<string, HTMLDivElement>>(new Map())
|
||||
|
||||
const workflowId = typeof selectedWorkflowId === 'string' ? selectedWorkflowId : undefined
|
||||
const { data: childInputFields = [], isLoading } = useWorkflowInputFields(workflowId)
|
||||
const { data: workflowState, isLoading } = useWorkflowState(workflowId)
|
||||
const childInputFields = useMemo(
|
||||
() => (workflowState?.blocks ? extractInputFieldsFromBlocks(workflowState.blocks) : []),
|
||||
[workflowState?.blocks]
|
||||
)
|
||||
const [collapsedFields, setCollapsedFields] = useState<Record<string, boolean>>({})
|
||||
|
||||
const valueObj: Record<string, string> = useMemo(() => {
|
||||
|
||||
@@ -1,4 +1,12 @@
|
||||
import { useCallback, useEffect, useImperativeHandle, useMemo, useRef, useState } from 'react'
|
||||
import {
|
||||
useCallback,
|
||||
useEffect,
|
||||
useImperativeHandle,
|
||||
useLayoutEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
|
||||
import { Button, Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
|
||||
@@ -382,93 +390,138 @@ export function MessagesInput({
|
||||
textareaRefs.current[fieldId]?.focus()
|
||||
}, [])
|
||||
|
||||
const autoResizeTextarea = useCallback((fieldId: string) => {
|
||||
const syncOverlay = useCallback((fieldId: string) => {
|
||||
const textarea = textareaRefs.current[fieldId]
|
||||
if (!textarea) return
|
||||
const overlay = overlayRefs.current[fieldId]
|
||||
if (!textarea || !overlay) return
|
||||
|
||||
// If user has manually resized, respect their chosen height and only sync overlay.
|
||||
if (userResizedRef.current[fieldId]) {
|
||||
const currentHeight =
|
||||
textarea.offsetHeight || Number.parseFloat(textarea.style.height) || MIN_TEXTAREA_HEIGHT_PX
|
||||
const clampedHeight = Math.max(MIN_TEXTAREA_HEIGHT_PX, currentHeight)
|
||||
textarea.style.height = `${clampedHeight}px`
|
||||
if (overlay) {
|
||||
overlay.style.height = `${clampedHeight}px`
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
textarea.style.height = 'auto'
|
||||
const naturalHeight = textarea.scrollHeight || MIN_TEXTAREA_HEIGHT_PX
|
||||
const nextHeight = Math.min(
|
||||
MAX_TEXTAREA_HEIGHT_PX,
|
||||
Math.max(MIN_TEXTAREA_HEIGHT_PX, naturalHeight)
|
||||
)
|
||||
textarea.style.height = `${nextHeight}px`
|
||||
|
||||
if (overlay) {
|
||||
overlay.style.height = `${nextHeight}px`
|
||||
}
|
||||
overlay.style.width = `${textarea.clientWidth}px`
|
||||
overlay.scrollTop = textarea.scrollTop
|
||||
overlay.scrollLeft = textarea.scrollLeft
|
||||
}, [])
|
||||
|
||||
const handleResizeStart = useCallback((fieldId: string, e: React.MouseEvent<HTMLDivElement>) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
const autoResizeTextarea = useCallback(
|
||||
(fieldId: string) => {
|
||||
const textarea = textareaRefs.current[fieldId]
|
||||
const overlay = overlayRefs.current[fieldId]
|
||||
if (!textarea) return
|
||||
|
||||
const textarea = textareaRefs.current[fieldId]
|
||||
if (!textarea) return
|
||||
|
||||
const startHeight = textarea.offsetHeight || textarea.scrollHeight || MIN_TEXTAREA_HEIGHT_PX
|
||||
|
||||
isResizingRef.current = true
|
||||
resizeStateRef.current = {
|
||||
fieldId,
|
||||
startY: e.clientY,
|
||||
startHeight,
|
||||
}
|
||||
|
||||
const handleMouseMove = (moveEvent: MouseEvent) => {
|
||||
if (!isResizingRef.current || !resizeStateRef.current) return
|
||||
|
||||
const { fieldId: activeFieldId, startY, startHeight } = resizeStateRef.current
|
||||
const deltaY = moveEvent.clientY - startY
|
||||
const nextHeight = Math.max(MIN_TEXTAREA_HEIGHT_PX, startHeight + deltaY)
|
||||
|
||||
const activeTextarea = textareaRefs.current[activeFieldId]
|
||||
if (activeTextarea) {
|
||||
activeTextarea.style.height = `${nextHeight}px`
|
||||
if (!textarea.value.trim()) {
|
||||
userResizedRef.current[fieldId] = false
|
||||
}
|
||||
|
||||
const overlay = overlayRefs.current[activeFieldId]
|
||||
if (userResizedRef.current[fieldId]) {
|
||||
if (overlay) {
|
||||
overlay.style.height = `${textarea.offsetHeight}px`
|
||||
}
|
||||
syncOverlay(fieldId)
|
||||
return
|
||||
}
|
||||
|
||||
textarea.style.height = 'auto'
|
||||
const scrollHeight = textarea.scrollHeight
|
||||
const height = Math.min(
|
||||
MAX_TEXTAREA_HEIGHT_PX,
|
||||
Math.max(MIN_TEXTAREA_HEIGHT_PX, scrollHeight)
|
||||
)
|
||||
|
||||
textarea.style.height = `${height}px`
|
||||
if (overlay) {
|
||||
overlay.style.height = `${nextHeight}px`
|
||||
}
|
||||
}
|
||||
|
||||
const handleMouseUp = () => {
|
||||
if (resizeStateRef.current) {
|
||||
const { fieldId: activeFieldId } = resizeStateRef.current
|
||||
userResizedRef.current[activeFieldId] = true
|
||||
overlay.style.height = `${height}px`
|
||||
}
|
||||
|
||||
isResizingRef.current = false
|
||||
resizeStateRef.current = null
|
||||
document.removeEventListener('mousemove', handleMouseMove)
|
||||
document.removeEventListener('mouseup', handleMouseUp)
|
||||
}
|
||||
syncOverlay(fieldId)
|
||||
},
|
||||
[syncOverlay]
|
||||
)
|
||||
|
||||
document.addEventListener('mousemove', handleMouseMove)
|
||||
document.addEventListener('mouseup', handleMouseUp)
|
||||
}, [])
|
||||
const handleResizeStart = useCallback(
|
||||
(fieldId: string, e: React.MouseEvent<HTMLDivElement>) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
|
||||
useEffect(() => {
|
||||
const textarea = textareaRefs.current[fieldId]
|
||||
if (!textarea) return
|
||||
|
||||
const startHeight = textarea.offsetHeight || textarea.scrollHeight || MIN_TEXTAREA_HEIGHT_PX
|
||||
|
||||
isResizingRef.current = true
|
||||
resizeStateRef.current = {
|
||||
fieldId,
|
||||
startY: e.clientY,
|
||||
startHeight,
|
||||
}
|
||||
|
||||
const handleMouseMove = (moveEvent: MouseEvent) => {
|
||||
if (!isResizingRef.current || !resizeStateRef.current) return
|
||||
|
||||
const { fieldId: activeFieldId, startY, startHeight } = resizeStateRef.current
|
||||
const deltaY = moveEvent.clientY - startY
|
||||
const nextHeight = Math.max(MIN_TEXTAREA_HEIGHT_PX, startHeight + deltaY)
|
||||
|
||||
const activeTextarea = textareaRefs.current[activeFieldId]
|
||||
const overlay = overlayRefs.current[activeFieldId]
|
||||
|
||||
if (activeTextarea) {
|
||||
activeTextarea.style.height = `${nextHeight}px`
|
||||
}
|
||||
|
||||
if (overlay) {
|
||||
overlay.style.height = `${nextHeight}px`
|
||||
if (activeTextarea) {
|
||||
overlay.scrollTop = activeTextarea.scrollTop
|
||||
overlay.scrollLeft = activeTextarea.scrollLeft
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const handleMouseUp = () => {
|
||||
if (resizeStateRef.current) {
|
||||
const { fieldId: activeFieldId } = resizeStateRef.current
|
||||
userResizedRef.current[activeFieldId] = true
|
||||
syncOverlay(activeFieldId)
|
||||
}
|
||||
|
||||
isResizingRef.current = false
|
||||
resizeStateRef.current = null
|
||||
document.removeEventListener('mousemove', handleMouseMove)
|
||||
document.removeEventListener('mouseup', handleMouseUp)
|
||||
}
|
||||
|
||||
document.addEventListener('mousemove', handleMouseMove)
|
||||
document.addEventListener('mouseup', handleMouseUp)
|
||||
},
|
||||
[syncOverlay]
|
||||
)
|
||||
|
||||
useLayoutEffect(() => {
|
||||
currentMessages.forEach((_, index) => {
|
||||
const fieldId = `message-${index}`
|
||||
autoResizeTextarea(fieldId)
|
||||
autoResizeTextarea(`message-${index}`)
|
||||
})
|
||||
}, [currentMessages, autoResizeTextarea])
|
||||
|
||||
useEffect(() => {
|
||||
const observers: ResizeObserver[] = []
|
||||
|
||||
for (let i = 0; i < currentMessages.length; i++) {
|
||||
const fieldId = `message-${i}`
|
||||
const textarea = textareaRefs.current[fieldId]
|
||||
const overlay = overlayRefs.current[fieldId]
|
||||
|
||||
if (textarea && overlay) {
|
||||
const observer = new ResizeObserver(() => {
|
||||
overlay.style.width = `${textarea.clientWidth}px`
|
||||
})
|
||||
observer.observe(textarea)
|
||||
observers.push(observer)
|
||||
}
|
||||
}
|
||||
|
||||
return () => {
|
||||
observers.forEach((observer) => observer.disconnect())
|
||||
}
|
||||
}, [currentMessages.length])
|
||||
|
||||
return (
|
||||
<div className='flex w-full flex-col gap-[10px]'>
|
||||
{currentMessages.map((message, index) => (
|
||||
@@ -621,19 +674,15 @@ export function MessagesInput({
|
||||
</div>
|
||||
|
||||
{/* Content Input with overlay for variable highlighting */}
|
||||
<div className='relative w-full'>
|
||||
<div className='relative w-full overflow-hidden'>
|
||||
<textarea
|
||||
ref={(el) => {
|
||||
textareaRefs.current[fieldId] = el
|
||||
}}
|
||||
className='allow-scroll box-border min-h-[80px] w-full resize-none whitespace-pre-wrap break-words border-none bg-transparent px-[8px] pt-[8px] font-[inherit] font-medium text-sm text-transparent leading-[inherit] caret-[var(--text-primary)] outline-none placeholder:text-[var(--text-muted)] focus:outline-none focus-visible:outline-none disabled:cursor-not-allowed'
|
||||
rows={3}
|
||||
className='relative z-[2] m-0 box-border h-auto min-h-[80px] w-full resize-none overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-words border-none bg-transparent px-[8px] py-[8px] font-medium font-sans text-sm text-transparent leading-[1.5] caret-[var(--text-primary)] outline-none [-ms-overflow-style:none] [scrollbar-width:none] placeholder:text-[var(--text-muted)] focus:outline-none focus-visible:outline-none disabled:cursor-not-allowed [&::-webkit-scrollbar]:hidden'
|
||||
placeholder='Enter message content...'
|
||||
value={message.content}
|
||||
onChange={(e) => {
|
||||
fieldHandlers.onChange(e)
|
||||
autoResizeTextarea(fieldId)
|
||||
}}
|
||||
onChange={fieldHandlers.onChange}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Tab' && !isPreview && !disabled) {
|
||||
e.preventDefault()
|
||||
@@ -670,12 +719,13 @@ export function MessagesInput({
|
||||
ref={(el) => {
|
||||
overlayRefs.current[fieldId] = el
|
||||
}}
|
||||
className='scrollbar-none pointer-events-none absolute top-0 left-0 box-border w-full overflow-auto whitespace-pre-wrap break-words border-none bg-transparent px-[8px] pt-[8px] font-[inherit] font-medium text-[var(--text-primary)] text-sm leading-[inherit]'
|
||||
className='pointer-events-none absolute top-0 left-0 z-[1] m-0 box-border w-full overflow-y-auto overflow-x-hidden whitespace-pre-wrap break-words border-none bg-transparent px-[8px] py-[8px] font-medium font-sans text-[var(--text-primary)] text-sm leading-[1.5] [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden'
|
||||
>
|
||||
{formatDisplayText(message.content, {
|
||||
accessiblePrefixes,
|
||||
highlightAll: !accessiblePrefixes,
|
||||
})}
|
||||
{message.content.endsWith('\n') && '\u200B'}
|
||||
</div>
|
||||
|
||||
{/* Env var dropdown for this message */}
|
||||
@@ -705,7 +755,7 @@ export function MessagesInput({
|
||||
|
||||
{!isPreview && !disabled && (
|
||||
<div
|
||||
className='absolute right-1 bottom-1 flex h-4 w-4 cursor-ns-resize items-center justify-center rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] dark:bg-[var(--surface-5)]'
|
||||
className='absolute right-1 bottom-1 z-[3] flex h-4 w-4 cursor-ns-resize items-center justify-center rounded-[4px] border border-[var(--border-1)] bg-[var(--surface-5)] dark:bg-[var(--surface-5)]'
|
||||
onMouseDown={(e) => handleResizeStart(fieldId, e)}
|
||||
onDragStart={(e) => {
|
||||
e.preventDefault()
|
||||
|
||||
@@ -28,6 +28,7 @@ interface Field {
|
||||
name: string
|
||||
type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
|
||||
value?: string
|
||||
description?: string
|
||||
collapsed?: boolean
|
||||
}
|
||||
|
||||
@@ -41,7 +42,9 @@ interface FieldFormatProps {
|
||||
placeholder?: string
|
||||
showType?: boolean
|
||||
showValue?: boolean
|
||||
showDescription?: boolean
|
||||
valuePlaceholder?: string
|
||||
descriptionPlaceholder?: string
|
||||
config?: any
|
||||
}
|
||||
|
||||
@@ -73,6 +76,7 @@ const createDefaultField = (): Field => ({
|
||||
name: '',
|
||||
type: 'string',
|
||||
value: '',
|
||||
description: '',
|
||||
collapsed: false,
|
||||
})
|
||||
|
||||
@@ -93,7 +97,9 @@ export function FieldFormat({
|
||||
placeholder = 'fieldName',
|
||||
showType = true,
|
||||
showValue = false,
|
||||
showDescription = false,
|
||||
valuePlaceholder = 'Enter default value',
|
||||
descriptionPlaceholder = 'Describe this field',
|
||||
}: FieldFormatProps) {
|
||||
const [storeValue, setStoreValue] = useSubBlockValue<Field[]>(blockId, subBlockId)
|
||||
const valueInputRefs = useRef<Record<string, HTMLInputElement | HTMLTextAreaElement>>({})
|
||||
@@ -554,6 +560,18 @@ export function FieldFormat({
|
||||
</div>
|
||||
)}
|
||||
|
||||
{showDescription && (
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Description</Label>
|
||||
<Input
|
||||
value={field.description ?? ''}
|
||||
onChange={(e) => updateField(field.id, 'description', e.target.value)}
|
||||
placeholder={descriptionPlaceholder}
|
||||
disabled={isReadOnly}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{showValue && (
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Value</Label>
|
||||
@@ -568,8 +586,10 @@ export function FieldFormat({
|
||||
)
|
||||
}
|
||||
|
||||
export function InputFormat(props: Omit<FieldFormatProps, 'title' | 'placeholder'>) {
|
||||
return <FieldFormat {...props} title='Input' placeholder='firstName' />
|
||||
export function InputFormat(
|
||||
props: Omit<FieldFormatProps, 'title' | 'placeholder' | 'showDescription'>
|
||||
) {
|
||||
return <FieldFormat {...props} title='Input' placeholder='firstName' showDescription={true} />
|
||||
}
|
||||
|
||||
export function ResponseFormat(
|
||||
|
||||
@@ -35,11 +35,11 @@ import type {
|
||||
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
|
||||
import { getBlock } from '@/blocks'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { normalizeName } from '@/executor/constants'
|
||||
import type { Variable } from '@/stores/panel'
|
||||
import { useVariablesStore } from '@/stores/panel'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { normalizeName } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
@@ -241,13 +241,16 @@ const getOutputTypeForPath = (
|
||||
const blockState = useWorkflowStore.getState().blocks[blockId]
|
||||
const subBlocks = mergedSubBlocksOverride ?? (blockState?.subBlocks || {})
|
||||
return getBlockOutputType(block.type, outputPath, subBlocks)
|
||||
} else {
|
||||
const operationValue = getSubBlockValue(blockId, 'operation')
|
||||
if (blockConfig && operationValue) {
|
||||
return getToolOutputType(blockConfig, operationValue, outputPath)
|
||||
}
|
||||
} else if (blockConfig?.tools?.config?.tool) {
|
||||
const blockState = useWorkflowStore.getState().blocks[blockId]
|
||||
const subBlocks = mergedSubBlocksOverride ?? (blockState?.subBlocks || {})
|
||||
return getToolOutputType(blockConfig, subBlocks, outputPath)
|
||||
}
|
||||
return 'any'
|
||||
|
||||
const subBlocks =
|
||||
mergedSubBlocksOverride ?? useWorkflowStore.getState().blocks[blockId]?.subBlocks
|
||||
const triggerMode = block?.triggerMode && blockConfig?.triggers?.enabled
|
||||
return getBlockOutputType(block?.type ?? '', outputPath, subBlocks, triggerMode)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1211,11 +1214,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
: allTags
|
||||
}
|
||||
} else {
|
||||
const operationValue =
|
||||
mergedSubBlocks?.operation?.value ?? getSubBlockValue(activeSourceBlockId, 'operation')
|
||||
const toolOutputPaths = operationValue
|
||||
? getToolOutputPaths(blockConfig, operationValue, mergedSubBlocks)
|
||||
: []
|
||||
const toolOutputPaths = getToolOutputPaths(blockConfig, mergedSubBlocks)
|
||||
|
||||
if (toolOutputPaths.length > 0) {
|
||||
blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
@@ -1313,15 +1312,16 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
if (currentLoop && isLoopBlock) {
|
||||
containingLoopBlockId = blockId
|
||||
const loopType = currentLoop.loopType || 'for'
|
||||
const contextualTags: string[] = ['index']
|
||||
if (loopType === 'forEach') {
|
||||
contextualTags.push('currentItem')
|
||||
contextualTags.push('items')
|
||||
}
|
||||
|
||||
const loopBlock = blocks[blockId]
|
||||
if (loopBlock) {
|
||||
const loopBlockName = loopBlock.name || loopBlock.type
|
||||
const normalizedLoopName = normalizeName(loopBlockName)
|
||||
const contextualTags: string[] = [`${normalizedLoopName}.index`]
|
||||
if (loopType === 'forEach') {
|
||||
contextualTags.push(`${normalizedLoopName}.currentItem`)
|
||||
contextualTags.push(`${normalizedLoopName}.items`)
|
||||
}
|
||||
|
||||
loopBlockGroup = {
|
||||
blockName: loopBlockName,
|
||||
@@ -1329,21 +1329,23 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
blockType: 'loop',
|
||||
tags: contextualTags,
|
||||
distance: 0,
|
||||
isContextual: true,
|
||||
}
|
||||
}
|
||||
} else if (containingLoop) {
|
||||
const [loopId, loop] = containingLoop
|
||||
containingLoopBlockId = loopId
|
||||
const loopType = loop.loopType || 'for'
|
||||
const contextualTags: string[] = ['index']
|
||||
if (loopType === 'forEach') {
|
||||
contextualTags.push('currentItem')
|
||||
contextualTags.push('items')
|
||||
}
|
||||
|
||||
const containingLoopBlock = blocks[loopId]
|
||||
if (containingLoopBlock) {
|
||||
const loopBlockName = containingLoopBlock.name || containingLoopBlock.type
|
||||
const normalizedLoopName = normalizeName(loopBlockName)
|
||||
const contextualTags: string[] = [`${normalizedLoopName}.index`]
|
||||
if (loopType === 'forEach') {
|
||||
contextualTags.push(`${normalizedLoopName}.currentItem`)
|
||||
contextualTags.push(`${normalizedLoopName}.items`)
|
||||
}
|
||||
|
||||
loopBlockGroup = {
|
||||
blockName: loopBlockName,
|
||||
@@ -1351,6 +1353,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
blockType: 'loop',
|
||||
tags: contextualTags,
|
||||
distance: 0,
|
||||
isContextual: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1364,15 +1367,16 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const [parallelId, parallel] = containingParallel
|
||||
containingParallelBlockId = parallelId
|
||||
const parallelType = parallel.parallelType || 'count'
|
||||
const contextualTags: string[] = ['index']
|
||||
if (parallelType === 'collection') {
|
||||
contextualTags.push('currentItem')
|
||||
contextualTags.push('items')
|
||||
}
|
||||
|
||||
const containingParallelBlock = blocks[parallelId]
|
||||
if (containingParallelBlock) {
|
||||
const parallelBlockName = containingParallelBlock.name || containingParallelBlock.type
|
||||
const normalizedParallelName = normalizeName(parallelBlockName)
|
||||
const contextualTags: string[] = [`${normalizedParallelName}.index`]
|
||||
if (parallelType === 'collection') {
|
||||
contextualTags.push(`${normalizedParallelName}.currentItem`)
|
||||
contextualTags.push(`${normalizedParallelName}.items`)
|
||||
}
|
||||
|
||||
parallelBlockGroup = {
|
||||
blockName: parallelBlockName,
|
||||
@@ -1380,6 +1384,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
blockType: 'parallel',
|
||||
tags: contextualTags,
|
||||
distance: 0,
|
||||
isContextual: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1535,7 +1540,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
|
||||
if (dynamicOutputs.length > 0) {
|
||||
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
|
||||
// For self-reference, only show url and resumeEndpoint (not response format fields)
|
||||
blockTags = isSelfReference
|
||||
? allTags.filter((tag) => tag.endsWith('.url') || tag.endsWith('.resumeEndpoint'))
|
||||
: allTags
|
||||
@@ -1543,11 +1547,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
blockTags = [`${normalizedBlockName}.url`, `${normalizedBlockName}.resumeEndpoint`]
|
||||
}
|
||||
} else {
|
||||
const operationValue =
|
||||
mergedSubBlocks?.operation?.value ?? getSubBlockValue(accessibleBlockId, 'operation')
|
||||
const toolOutputPaths = operationValue
|
||||
? getToolOutputPaths(blockConfig, operationValue, mergedSubBlocks)
|
||||
: []
|
||||
const toolOutputPaths = getToolOutputPaths(blockConfig, mergedSubBlocks)
|
||||
|
||||
if (toolOutputPaths.length > 0) {
|
||||
blockTags = toolOutputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
@@ -1651,38 +1651,29 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const nestedBlockTagGroups: NestedBlockTagGroup[] = useMemo(() => {
|
||||
return filteredBlockTagGroups.map((group: BlockTagGroup) => {
|
||||
const normalizedBlockName = normalizeName(group.blockName)
|
||||
|
||||
// Handle loop/parallel contextual tags (index, currentItem, items)
|
||||
const directTags: NestedTag[] = []
|
||||
const tagsForTree: string[] = []
|
||||
|
||||
group.tags.forEach((tag: string) => {
|
||||
const tagParts = tag.split('.')
|
||||
|
||||
// Loop/parallel contextual tags without block prefix
|
||||
if (
|
||||
(group.blockType === 'loop' || group.blockType === 'parallel') &&
|
||||
tagParts.length === 1
|
||||
) {
|
||||
if (tagParts.length === 1) {
|
||||
directTags.push({
|
||||
key: tag,
|
||||
display: tag,
|
||||
fullTag: tag,
|
||||
})
|
||||
} else if (tagParts.length === 2) {
|
||||
// Direct property like blockname.property
|
||||
directTags.push({
|
||||
key: tagParts[1],
|
||||
display: tagParts[1],
|
||||
fullTag: tag,
|
||||
})
|
||||
} else {
|
||||
// Nested property - add to tree builder
|
||||
tagsForTree.push(tag)
|
||||
}
|
||||
})
|
||||
|
||||
// Build recursive tree from nested tags
|
||||
const nestedTags = [...directTags, ...buildNestedTagTree(tagsForTree, normalizedBlockName)]
|
||||
|
||||
return {
|
||||
@@ -1789,7 +1780,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
mergedSubBlocks
|
||||
)
|
||||
|
||||
if (fieldType === 'files' || fieldType === 'array') {
|
||||
if (fieldType === 'files' || fieldType === 'file[]' || fieldType === 'array') {
|
||||
const blockName = parts[0]
|
||||
const remainingPath = parts.slice(2).join('.')
|
||||
processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`
|
||||
@@ -1806,13 +1797,19 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
processedTag = tag
|
||||
}
|
||||
} else if (
|
||||
blockGroup &&
|
||||
blockGroup?.isContextual &&
|
||||
(blockGroup.blockType === 'loop' || blockGroup.blockType === 'parallel')
|
||||
) {
|
||||
if (!tag.includes('.') && ['index', 'currentItem', 'items'].includes(tag)) {
|
||||
processedTag = `${blockGroup.blockType}.${tag}`
|
||||
const tagParts = tag.split('.')
|
||||
if (tagParts.length === 1) {
|
||||
processedTag = blockGroup.blockType
|
||||
} else {
|
||||
processedTag = tag
|
||||
const lastPart = tagParts[tagParts.length - 1]
|
||||
if (['index', 'currentItem', 'items'].includes(lastPart)) {
|
||||
processedTag = `${blockGroup.blockType}.${lastPart}`
|
||||
} else {
|
||||
processedTag = tag
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -7,6 +7,8 @@ export interface BlockTagGroup {
|
||||
blockType: string
|
||||
tags: string[]
|
||||
distance: number
|
||||
/** True if this is a contextual group (loop/parallel iteration context available inside the subflow) */
|
||||
isContextual?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -29,6 +29,7 @@ import {
|
||||
type OAuthProvider,
|
||||
type OAuthService,
|
||||
} from '@/lib/oauth'
|
||||
import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import {
|
||||
CheckboxList,
|
||||
@@ -65,7 +66,7 @@ import { useForceRefreshMcpTools, useMcpServers, useStoredMcpTools } from '@/hoo
|
||||
import {
|
||||
useChildDeploymentStatus,
|
||||
useDeployChildWorkflow,
|
||||
useWorkflowInputFields,
|
||||
useWorkflowState,
|
||||
useWorkflows,
|
||||
} from '@/hooks/queries/workflows'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
@@ -771,7 +772,11 @@ function WorkflowInputMapperSyncWrapper({
|
||||
disabled: boolean
|
||||
workflowId: string
|
||||
}) {
|
||||
const { data: inputFields = [], isLoading } = useWorkflowInputFields(workflowId)
|
||||
const { data: workflowState, isLoading } = useWorkflowState(workflowId)
|
||||
const inputFields = useMemo(
|
||||
() => (workflowState?.blocks ? extractInputFieldsFromBlocks(workflowState.blocks) : []),
|
||||
[workflowState?.blocks]
|
||||
)
|
||||
|
||||
const parsedValue = useMemo(() => {
|
||||
try {
|
||||
|
||||
@@ -0,0 +1,45 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import type { SelectorContext } from '@/hooks/selectors/types'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
interface WorkflowSelectorInputProps {
|
||||
blockId: string
|
||||
subBlock: SubBlockConfig
|
||||
disabled?: boolean
|
||||
isPreview?: boolean
|
||||
previewValue?: string | null
|
||||
}
|
||||
|
||||
export function WorkflowSelectorInput({
|
||||
blockId,
|
||||
subBlock,
|
||||
disabled = false,
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
}: WorkflowSelectorInputProps) {
|
||||
const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId)
|
||||
|
||||
const context: SelectorContext = useMemo(
|
||||
() => ({
|
||||
excludeWorkflowId: activeWorkflowId ?? undefined,
|
||||
}),
|
||||
[activeWorkflowId]
|
||||
)
|
||||
|
||||
return (
|
||||
<SelectorCombobox
|
||||
blockId={blockId}
|
||||
subBlock={subBlock}
|
||||
selectorKey='sim.workflows'
|
||||
selectorContext={context}
|
||||
disabled={disabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue}
|
||||
placeholder={subBlock.placeholder || 'Select workflow...'}
|
||||
/>
|
||||
)
|
||||
}
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import { useStoreWithEqualityFn } from 'zustand/traditional'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
isNonEmptyValue,
|
||||
@@ -151,7 +152,7 @@ export function useDependsOnGate(
|
||||
|
||||
// Get values for all dependency fields (both all and any)
|
||||
// Use isEqual to prevent re-renders when dependency values haven't actually changed
|
||||
const dependencyValuesMap = useSubBlockStore(dependencySelector, isEqual)
|
||||
const dependencyValuesMap = useStoreWithEqualityFn(useSubBlockStore, dependencySelector, isEqual)
|
||||
|
||||
const depsSatisfied = useMemo(() => {
|
||||
// Check all fields (AND logic) - all must be satisfied
|
||||
|
||||
@@ -2,6 +2,7 @@ import { useCallback, useEffect, useRef } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { isEqual } from 'lodash'
|
||||
import { useShallow } from 'zustand/react/shallow'
|
||||
import { useStoreWithEqualityFn } from 'zustand/traditional'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { getProviderFromModel } from '@/providers/utils'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
@@ -58,7 +59,8 @@ export function useSubBlockValue<T = any>(
|
||||
const streamingValueRef = useRef<T | null>(null)
|
||||
const wasStreamingRef = useRef<boolean>(false)
|
||||
|
||||
const storeValue = useSubBlockStore(
|
||||
const storeValue = useStoreWithEqualityFn(
|
||||
useSubBlockStore,
|
||||
useCallback(
|
||||
(state) => {
|
||||
// If the active workflow ID isn't available yet, return undefined so we can fall back to initialValue
|
||||
@@ -92,7 +94,8 @@ export function useSubBlockValue<T = any>(
|
||||
|
||||
// Always call this hook unconditionally - don't wrap it in a condition
|
||||
// Optimized: only re-render if model value actually changes
|
||||
const modelSubBlockValue = useSubBlockStore(
|
||||
const modelSubBlockValue = useStoreWithEqualityFn(
|
||||
useSubBlockStore,
|
||||
useCallback((state) => (blockId ? state.getValue(blockId, 'model') : null), [blockId]),
|
||||
(a, b) => a === b
|
||||
)
|
||||
|
||||
@@ -40,6 +40,7 @@ import {
|
||||
TimeInput,
|
||||
ToolInput,
|
||||
VariablesInput,
|
||||
WorkflowSelectorInput,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
@@ -90,7 +91,6 @@ const isFieldRequired = (config: SubBlockConfig, subBlockValues?: Record<string,
|
||||
if (!config.required) return false
|
||||
if (typeof config.required === 'boolean') return config.required
|
||||
|
||||
// Helper function to evaluate a condition
|
||||
const evalCond = (
|
||||
cond: {
|
||||
field: string
|
||||
@@ -132,7 +132,6 @@ const isFieldRequired = (config: SubBlockConfig, subBlockValues?: Record<string,
|
||||
return match
|
||||
}
|
||||
|
||||
// If required is a condition object or function, evaluate it
|
||||
const condition = typeof config.required === 'function' ? config.required() : config.required
|
||||
return evalCond(condition, subBlockValues || {})
|
||||
}
|
||||
@@ -378,7 +377,6 @@ function SubBlockComponent({
|
||||
setIsValidJson(isValid)
|
||||
}
|
||||
|
||||
// Check if wand is enabled for this sub-block
|
||||
const isWandEnabled = config.wandConfig?.enabled ?? false
|
||||
|
||||
/**
|
||||
@@ -438,8 +436,6 @@ function SubBlockComponent({
|
||||
| null
|
||||
| undefined
|
||||
|
||||
// Use dependsOn gating to compute final disabled state
|
||||
// Only pass previewContextValues when in preview mode to avoid format mismatches
|
||||
const { finalDisabled: gatedDisabled } = useDependsOnGate(blockId, config, {
|
||||
disabled,
|
||||
isPreview,
|
||||
@@ -869,6 +865,17 @@ function SubBlockComponent({
|
||||
/>
|
||||
)
|
||||
|
||||
case 'workflow-selector':
|
||||
return (
|
||||
<WorkflowSelectorInput
|
||||
blockId={blockId}
|
||||
subBlock={config}
|
||||
disabled={isDisabled}
|
||||
isPreview={isPreview}
|
||||
previewValue={previewValue as string | null}
|
||||
/>
|
||||
)
|
||||
|
||||
case 'mcp-server-selector':
|
||||
return (
|
||||
<McpServerSelector
|
||||
|
||||
@@ -68,7 +68,7 @@ export function SubflowEditor({
|
||||
<div className='flex flex-1 flex-col overflow-hidden pt-[0px]'>
|
||||
{/* Subflow Editor Section */}
|
||||
<div ref={subBlocksRef} className='subblocks-section flex flex-1 flex-col overflow-hidden'>
|
||||
<div className='flex-1 overflow-y-auto overflow-x-hidden px-[8px] pt-[5px] pb-[8px]'>
|
||||
<div className='flex-1 overflow-y-auto overflow-x-hidden px-[8px] pt-[9px] pb-[8px]'>
|
||||
{/* Type Selection */}
|
||||
<div>
|
||||
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
|
||||
|
||||
@@ -2,8 +2,18 @@
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { isEqual } from 'lodash'
|
||||
import { BookOpen, Check, ChevronDown, ChevronUp, Pencil } from 'lucide-react'
|
||||
import {
|
||||
BookOpen,
|
||||
Check,
|
||||
ChevronDown,
|
||||
ChevronUp,
|
||||
ExternalLink,
|
||||
Loader2,
|
||||
Pencil,
|
||||
} from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { useShallow } from 'zustand/react/shallow'
|
||||
import { useStoreWithEqualityFn } from 'zustand/traditional'
|
||||
import { Button, Tooltip } from '@/components/emcn'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
@@ -28,8 +38,10 @@ import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/component
|
||||
import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
|
||||
import { getSubBlockStableKey } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/utils'
|
||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import { WorkflowPreview } from '@/app/workspace/[workspaceId]/w/components/preview'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockType } from '@/blocks/types'
|
||||
import { useWorkflowState } from '@/hooks/queries/workflows'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { usePanelEditorStore } from '@/stores/panel'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
@@ -84,6 +96,14 @@ export function Editor() {
|
||||
// Get subflow display properties from configs
|
||||
const subflowConfig = isSubflow ? (currentBlock.type === 'loop' ? LoopTool : ParallelTool) : null
|
||||
|
||||
// Check if selected block is a workflow block
|
||||
const isWorkflowBlock =
|
||||
currentBlock && (currentBlock.type === 'workflow' || currentBlock.type === 'workflow_input')
|
||||
|
||||
// Get workspace ID from params
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
|
||||
// Refs for resize functionality
|
||||
const subBlocksRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
@@ -99,7 +119,8 @@ export function Editor() {
|
||||
currentWorkflow.isSnapshotView
|
||||
)
|
||||
|
||||
const blockSubBlockValues = useSubBlockStore(
|
||||
const blockSubBlockValues = useStoreWithEqualityFn(
|
||||
useSubBlockStore,
|
||||
useCallback(
|
||||
(state) => {
|
||||
if (!activeWorkflowId || !currentBlockId) return EMPTY_SUBBLOCK_VALUES
|
||||
@@ -252,11 +273,11 @@ export function Editor() {
|
||||
|
||||
// Trigger rename mode when signaled from context menu
|
||||
useEffect(() => {
|
||||
if (shouldFocusRename && currentBlock && !isSubflow) {
|
||||
if (shouldFocusRename && currentBlock) {
|
||||
handleStartRename()
|
||||
setShouldFocusRename(false)
|
||||
}
|
||||
}, [shouldFocusRename, currentBlock, isSubflow, handleStartRename, setShouldFocusRename])
|
||||
}, [shouldFocusRename, currentBlock, handleStartRename, setShouldFocusRename])
|
||||
|
||||
/**
|
||||
* Handles opening documentation link in a new secure tab.
|
||||
@@ -268,6 +289,22 @@ export function Editor() {
|
||||
}
|
||||
}
|
||||
|
||||
// Get child workflow ID for workflow blocks
|
||||
const childWorkflowId = isWorkflowBlock ? blockSubBlockValues?.workflowId : null
|
||||
|
||||
// Fetch child workflow state for preview (only for workflow blocks with a selected workflow)
|
||||
const { data: childWorkflowState, isLoading: isLoadingChildWorkflow } =
|
||||
useWorkflowState(childWorkflowId)
|
||||
|
||||
/**
|
||||
* Handles opening the child workflow in a new tab.
|
||||
*/
|
||||
const handleOpenChildWorkflow = useCallback(() => {
|
||||
if (childWorkflowId && workspaceId) {
|
||||
window.open(`/workspace/${workspaceId}/w/${childWorkflowId}`, '_blank', 'noopener,noreferrer')
|
||||
}
|
||||
}, [childWorkflowId, workspaceId])
|
||||
|
||||
// Determine if connections are at minimum height (collapsed state)
|
||||
const isConnectionsAtMinHeight = connectionsHeight <= 35
|
||||
|
||||
@@ -320,7 +357,7 @@ export function Editor() {
|
||||
</div>
|
||||
<div className='flex shrink-0 items-center gap-[8px]'>
|
||||
{/* Rename button */}
|
||||
{currentBlock && !isSubflow && (
|
||||
{currentBlock && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
@@ -406,7 +443,66 @@ export function Editor() {
|
||||
className='subblocks-section flex flex-1 flex-col overflow-hidden'
|
||||
>
|
||||
<div className='flex-1 overflow-y-auto overflow-x-hidden px-[8px] pt-[12px] pb-[8px] [overflow-anchor:none]'>
|
||||
{subBlocks.length === 0 ? (
|
||||
{/* Workflow Preview - only for workflow blocks with a selected child workflow */}
|
||||
{isWorkflowBlock && childWorkflowId && (
|
||||
<>
|
||||
<div className='subblock-content flex flex-col gap-[9.5px]'>
|
||||
<div className='pl-[2px] font-medium text-[13px] text-[var(--text-primary)] leading-none'>
|
||||
Workflow Preview
|
||||
</div>
|
||||
<div className='relative h-[160px] overflow-hidden rounded-[4px] border border-[var(--border)]'>
|
||||
{isLoadingChildWorkflow ? (
|
||||
<div className='flex h-full items-center justify-center bg-[var(--surface-3)]'>
|
||||
<Loader2 className='h-5 w-5 animate-spin text-[var(--text-tertiary)]' />
|
||||
</div>
|
||||
) : childWorkflowState ? (
|
||||
<>
|
||||
<div className='[&_*:active]:!cursor-grabbing [&_*]:!cursor-grab [&_.react-flow__handle]:!hidden h-full w-full'>
|
||||
<WorkflowPreview
|
||||
workflowState={childWorkflowState}
|
||||
height={160}
|
||||
width='100%'
|
||||
isPannable={true}
|
||||
defaultZoom={0.6}
|
||||
fitPadding={0.15}
|
||||
cursorStyle='grab'
|
||||
/>
|
||||
</div>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
type='button'
|
||||
variant='ghost'
|
||||
onClick={handleOpenChildWorkflow}
|
||||
className='absolute right-[6px] bottom-[6px] z-10 h-[24px] w-[24px] cursor-pointer border border-[var(--border)] bg-[var(--surface-2)] p-0 hover:bg-[var(--surface-4)]'
|
||||
>
|
||||
<ExternalLink className='h-[12px] w-[12px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>Open workflow</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
</>
|
||||
) : (
|
||||
<div className='flex h-full items-center justify-center bg-[var(--surface-3)]'>
|
||||
<span className='text-[13px] text-[var(--text-tertiary)]'>
|
||||
Unable to load preview
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
|
||||
<div
|
||||
className='h-[1.25px]'
|
||||
style={{
|
||||
backgroundImage:
|
||||
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
{subBlocks.length === 0 && !isWorkflowBlock ? (
|
||||
<div className='flex h-full items-center justify-center text-center text-[#8D8D8D] text-[13px]'>
|
||||
This block has no subblocks
|
||||
</div>
|
||||
|
||||
@@ -1,5 +1,4 @@
import { useCallback } from 'react'
import { shallow } from 'zustand/shallow'
import { useShallow } from 'zustand/react/shallow'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

@@ -13,35 +12,26 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
 */
export function useEditorBlockProperties(blockId: string | null, isSnapshotView: boolean) {
  const normalBlockProps = useWorkflowStore(
    useCallback(
      (state) => {
        if (!blockId) return { advancedMode: false, triggerMode: false }
        const block = state.blocks?.[blockId]
        return {
          advancedMode: block?.advancedMode ?? false,
          triggerMode: block?.triggerMode ?? false,
        }
      },
      [blockId]
    ),
    shallow
    useShallow((state) => {
      if (!blockId) return { advancedMode: false, triggerMode: false }
      const block = state.blocks?.[blockId]
      return {
        advancedMode: block?.advancedMode ?? false,
        triggerMode: block?.triggerMode ?? false,
      }
    })
  )

  const baselineBlockProps = useWorkflowDiffStore(
    useCallback(
      (state) => {
        if (!blockId) return { advancedMode: false, triggerMode: false }
        const block = state.baselineWorkflow?.blocks?.[blockId]
        return {
          advancedMode: block?.advancedMode ?? false,
          triggerMode: block?.triggerMode ?? false,
        }
      },
      [blockId]
    ),
    shallow
    useShallow((state) => {
      if (!blockId) return { advancedMode: false, triggerMode: false }
      const block = state.baselineWorkflow?.blocks?.[blockId]
      return {
        advancedMode: block?.advancedMode ?? false,
        triggerMode: block?.triggerMode ?? false,
      }
    })
  )

  // Use the appropriate props based on view mode
  return isSnapshotView ? baselineBlockProps : normalBlockProps
}
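The two hunks above replace zustand's older selector-plus-equality-function pattern (a `useCallback` selector with `shallow` passed as the second argument) with the `useShallow` wrapper from `zustand/react/shallow`, so the store hook is called with a single argument. A minimal sketch of the same migration against a made-up store (the store and its fields are illustrative only):

import { create } from 'zustand'
import { useShallow } from 'zustand/react/shallow'

// Hypothetical store, used only to illustrate the selector migration.
interface BearState {
  bears: number
  honey: number
  eat: () => void
}

export const useBearStore = create<BearState>((set) => ({
  bears: 1,
  honey: 10,
  eat: () => set((s) => ({ honey: s.honey - 1 })),
}))

// Before (zustand v4 style): useBearStore(selector, shallow), with shallow from 'zustand/shallow'.
// After: wrap the selector in useShallow and call the bound hook with a single argument.
export function BearSummary() {
  const { bears, honey } = useBearStore(
    useShallow((s: BearState) => ({ bears: s.bears, honey: s.honey }))
  )
  return `${bears} bears, ${honey} honey`
}

The wrapped selector still returns a fresh object on every run, but `useShallow` compares the previous and next results shallowly, so the component only re-renders when a selected field actually changes.
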
@@ -1,6 +1,7 @@
import { memo, useMemo, useRef } from 'react'
import { RepeatIcon, SplitIcon } from 'lucide-react'
import { Handle, type NodeProps, Position, useReactFlow } from 'reactflow'
import { Badge } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
import { type DiffStatus, hasDiffStatus } from '@/lib/workflows/diff/types'

@@ -78,6 +79,7 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
      ? currentBlock.is_diff
      : undefined

  const isEnabled = currentBlock?.enabled ?? true
  const isPreview = data?.isPreview || false

  // Focus state

@@ -184,14 +186,21 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
        <div className='flex min-w-0 flex-1 items-center gap-[10px]'>
          <div
            className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
            style={{ backgroundColor: blockIconBg }}
            style={{ backgroundColor: isEnabled ? blockIconBg : 'gray' }}
          >
            <BlockIcon className='h-[16px] w-[16px] text-white' />
          </div>
          <span className='font-medium text-[16px]' title={blockName}>
          <span
            className={cn(
              'truncate font-medium text-[16px]',
              !isEnabled && 'text-[var(--text-muted)]'
            )}
            title={blockName}
          >
            {blockName}
          </span>
        </div>
        {!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
      </div>

      {!isPreview && (
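The last hunk above adds a disabled state to subflow nodes: the icon background and title are dimmed and a `gray-secondary` "disabled" Badge is appended when the block's `enabled` flag is false, with `cn` dropping the falsy class. A small sketch of that conditional-class pattern (the `cn` below is only a stand-in for the project's helper at '@/lib/core/utils/cn', assumed to do clsx-style joining):

// Minimal stand-in: keep truthy class strings and join them with spaces.
const cn = (...parts: Array<string | false | null | undefined>): string =>
  parts.filter(Boolean).join(' ')

// Compute the title classes the way the hunk does: the muted-text class is
// only included when the block is disabled.
export function titleClasses(isEnabled: boolean): string {
  return cn('truncate font-medium text-[16px]', !isEnabled && 'text-[var(--text-muted)]')
}

// titleClasses(true)  -> 'truncate font-medium text-[16px]'
// titleClasses(false) -> 'truncate font-medium text-[16px] text-[var(--text-muted)]'
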
@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { isEqual } from 'lodash'
import { useParams } from 'next/navigation'
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Badge, Tooltip } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl } from '@/lib/core/utils/urls'

@@ -526,7 +527,8 @@ const SubBlockRow = memo(function SubBlockRow({
   * Subscribe only to variables for this workflow to avoid re-renders from other workflows.
   * Uses isEqual for deep comparison since Object.fromEntries creates a new object each time.
   */
  const workflowVariables = useVariablesStore(
  const workflowVariables = useStoreWithEqualityFn(
    useVariablesStore,
    useCallback(
      (state) => {
        if (!workflowId) return {}

@@ -729,7 +731,8 @@ export const WorkflowBlock = memo(function WorkflowBlock({
  const isStarterBlock = type === 'starter'
  const isWebhookTriggerBlock = type === 'webhook' || type === 'generic_webhook'

  const blockSubBlockValues = useSubBlockStore(
  const blockSubBlockValues = useStoreWithEqualityFn(
    useSubBlockStore,
    useCallback(
      (state) => {
        if (!activeWorkflowId) return EMPTY_SUBBLOCK_VALUES
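Both hunks in this file switch from calling the bound store hook with an equality argument to `useStoreWithEqualityFn` from `zustand/traditional`, which takes the store, the selector, and the equality function explicitly; the comment in the diff explains why a deep `isEqual` is needed (the selector builds a new object on every run). A minimal sketch of that pattern with a made-up variables store (the store shape and field names are illustrative):

import { useCallback } from 'react'
import { isEqual } from 'lodash'
import { create } from 'zustand'
import { useStoreWithEqualityFn } from 'zustand/traditional'

// Hypothetical store, used only to illustrate the subscription pattern.
interface VariablesState {
  variables: Record<string, { workflowId: string; value: unknown }>
}

export const useVariablesStore = create<VariablesState>(() => ({ variables: {} }))

// Select only this workflow's variables; isEqual keeps the fresh object returned
// by Object.fromEntries from triggering a re-render when nothing actually changed.
export function useWorkflowVariables(workflowId: string | null) {
  return useStoreWithEqualityFn(
    useVariablesStore,
    useCallback(
      (state: VariablesState) => {
        if (!workflowId) return {}
        return Object.fromEntries(
          Object.entries(state.variables).filter(([, v]) => v.workflowId === workflowId)
        )
      },
      [workflowId]
    ),
    isEqual
  )
}
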
Some files were not shown because too many files have changed in this diff.