mirror of https://github.com/simstudioai/sim.git
synced 2026-01-13 08:57:55 -05:00

Compare commits: v0.5.56...feat/copil (19 commits)

4ee863a9ce
23f4305bc0
42e496f5ff
23b3dacd1a
d55072a45f
684ad5aeec
a3dff1027f
0aec9ef571
cb4db20a5f
4941b5224b
7f18d96d32
e347486f50
e21cc1132b
ab32a19cf4
ead2413b95
9a16e7c20f
283a521614
92fabe785d
3ed177520a
@@ -2,7 +2,6 @@
title: Router
---

import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Image } from '@/components/ui/image'

@@ -102,11 +101,18 @@ Input (Lead) → Router
  └── [Self-serve] → Workflow (Automated Onboarding)
```

## Error Handling

When the Router cannot determine an appropriate route for the given context, it will route to the **error path** instead of arbitrarily selecting a route. This happens when:

- The context doesn't clearly match any of the defined route descriptions
- The AI determines that none of the available routes are appropriate

## Best Practices

- **Write clear route descriptions**: Each route description should clearly explain when that route should be selected. Be specific about the criteria.
- **Make routes mutually exclusive**: When possible, ensure route descriptions don't overlap to prevent ambiguous routing decisions (see the example below).
- **Include an error/fallback route**: Add a catch-all route for unexpected inputs that don't match other routes.
- **Connect an error path**: Handle cases where no route matches by connecting an error handler for graceful fallback behavior.
- **Use descriptive route titles**: Route titles appear in the workflow canvas, so make them meaningful for readability.
- **Test with diverse inputs**: Ensure the Router handles various input types, edge cases, and unexpected content.
- **Monitor routing performance**: Review routing decisions regularly and refine route descriptions based on actual usage patterns.
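A hypothetical pair of mutually exclusive route descriptions, for illustration only: "Select this route when the lead explicitly asks about pricing, contracts, or talking to sales" versus "Select this route when the lead asks a purely technical or documentation question and expresses no buying intent." Neither wording is taken from the Sim docs; the point is that the two criteria cannot both be true for the same input.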
@@ -37,7 +37,7 @@ This integration empowers Sim agents to automate data management tasks within yo

## Usage Instructions

Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, and Delete operations on DynamoDB tables.
Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, Delete, and Introspect operations on DynamoDB tables.

@@ -185,6 +185,27 @@ Delete an item from a DynamoDB table

| --------- | ---- | ----------- |
| `message` | string | Operation status message |

### `dynamodb_introspect`

Introspect DynamoDB to list tables or get detailed schema information for a specific table

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
| `accessKeyId` | string | Yes | AWS access key ID |
| `secretAccessKey` | string | Yes | AWS secret access key |
| `tableName` | string | No | Optional table name to get detailed schema. If not provided, lists all tables. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | List of table names in the region |
| `tableDetails` | object | Detailed schema information for a specific table |

## Notes
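A minimal TypeScript sketch of the request and response shapes implied by the two tables above; the interface names are hypothetical and only the field names and their optionality come from the documentation.

// Sketch only: illustrative interfaces derived from the dynamodb_introspect docs above.
interface DynamoDBIntrospectParams {
  region: string          // e.g. 'us-east-1'
  accessKeyId: string
  secretAccessKey: string
  tableName?: string      // omit to list all tables in the region
}

interface DynamoDBIntrospectResult {
  message: string         // operation status message
  tables: string[]        // table names in the region
  tableDetails?: object   // detailed schema, only when tableName was provided
}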
@@ -362,6 +362,29 @@ Get comprehensive statistics about the Elasticsearch cluster.

| `nodes` | object | Node statistics including count and versions |
| `indices` | object | Index statistics including document count and store size |

### `elasticsearch_list_indices`

List all indices in the Elasticsearch cluster with their health, status, and statistics.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deploymentType` | string | Yes | Deployment type: self_hosted or cloud |
| `host` | string | No | Elasticsearch host URL \(for self-hosted\) |
| `cloudId` | string | No | Elastic Cloud ID \(for cloud deployments\) |
| `authMethod` | string | Yes | Authentication method: api_key or basic_auth |
| `apiKey` | string | No | Elasticsearch API key |
| `username` | string | No | Username for basic auth |
| `password` | string | No | Password for basic auth |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Summary message about the indices |
| `indices` | json | Array of index information objects |

## Notes
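The connection parameters pair up with the chosen deploymentType and authMethod: host goes with self_hosted, cloudId with cloud, apiKey with api_key, and username/password with basic_auth. A hypothetical sketch of the two combinations, with all concrete values as placeholders:

// Two hypothetical parameter sets for elasticsearch_list_indices; only the parameter
// names and enum values come from the table above, the concrete values are placeholders.
const selfHostedBasicAuth = {
  deploymentType: 'self_hosted',
  host: 'http://localhost:9200',
  authMethod: 'basic_auth',
  username: 'elastic',
  password: 'changeme',
}

const cloudApiKey = {
  deploymentType: 'cloud',
  cloudId: 'my-deployment:abc123',
  authMethod: 'api_key',
  apiKey: 'base64-encoded-key',
}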
@@ -96,13 +96,13 @@ Download a file from Google Drive with complete metadata (exports Google Workspa

| `fileId` | string | Yes | The ID of the file to download |
| `mimeType` | string | No | The MIME type to export Google Workspace files to \(optional\) |
| `fileName` | string | No | Optional filename override |
| `includeRevisions` | boolean | No | Whether to include revision history in the metadata \(default: true\) |
| `includeRevisions` | boolean | No | Whether to include revision history in the metadata \(default: true, returns first 100 revisions\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Downloaded file stored in execution files |
| `file` | object | Downloaded file data |

### `google_drive_list`
@@ -172,6 +172,30 @@ Execute MongoDB aggregation pipeline

| `documents` | array | Array of documents returned from aggregation |
| `documentCount` | number | Number of documents returned |

### `mongodb_introspect`

Introspect MongoDB database to list databases, collections, and indexes

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MongoDB server hostname or IP address |
| `port` | number | Yes | MongoDB server port \(default: 27017\) |
| `database` | string | No | Database name to introspect \(optional - if not provided, lists all databases\) |
| `username` | string | No | MongoDB username |
| `password` | string | No | MongoDB password |
| `authSource` | string | No | Authentication database |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `databases` | array | Array of database names |
| `collections` | array | Array of collection info with name, type, document count, and indexes |

## Notes
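A minimal sketch of calling the introspection endpoint that backs this tool (apps/sim/app/api/tools/mongodb/introspect/route.ts, added in this changeset); the relative URL is inferred from the Next.js app-router path and the connection values are placeholders.

// Illustrative caller for the MongoDB introspection route; values are placeholders.
async function listMongoDatabases() {
  const res = await fetch('/api/tools/mongodb/introspect', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      host: 'localhost', // MongoDB server hostname or IP
      port: 27017,
      ssl: 'preferred',
      // database omitted: the tool lists all databases instead of one database's collections
    }),
  })
  return res.json() // { message, databases, collections }
}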
@@ -157,6 +157,29 @@ Execute raw SQL query on MySQL database

| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |

### `mysql_introspect`

Introspect MySQL database schema to retrieve table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `databases` | array | List of available databases on the server |

## Notes
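A small consumer sketch for the tables array; the element shape follows the MySQLIntrospectionResult interface added to the MySQL utils module later in this changeset, while summarizeSchema itself is hypothetical.

// Sketch only: summarizes introspected tables as "db.table: col type, ..." strings.
interface IntrospectedTable {
  name: string
  database: string
  columns: Array<{ name: string; type: string; nullable: boolean }>
}

function summarizeSchema(tables: IntrospectedTable[]): string[] {
  return tables.map(
    (t) => `${t.database}.${t.name}: ${t.columns.map((c) => `${c.name} ${c.type}`).join(', ')}`
  )
}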
@@ -168,6 +168,33 @@ Execute arbitrary Cypher queries on Neo4j graph database for complex operations

| `recordCount` | number | Number of records returned |
| `summary` | json | Execution summary with timing and counters |

### `neo4j_introspect`

Introspect a Neo4j database to discover its schema including node labels, relationship types, properties, constraints, and indexes.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | Neo4j server hostname or IP address |
| `port` | number | Yes | Neo4j server port \(default: 7687 for Bolt protocol\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Neo4j username |
| `password` | string | Yes | Neo4j password |
| `encryption` | string | No | Connection encryption mode \(enabled, disabled\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `labels` | array | Array of node labels in the database |
| `relationshipTypes` | array | Array of relationship types in the database |
| `nodeSchemas` | array | Array of node schemas with their properties |
| `relationshipSchemas` | array | Array of relationship schemas with their properties |
| `constraints` | array | Array of database constraints |
| `indexes` | array | Array of database indexes |

## Notes
@@ -157,6 +157,30 @@ Execute raw SQL query on PostgreSQL database

| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |

### `postgresql_introspect`

Introspect PostgreSQL database schema to retrieve table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `schema` | string | No | Schema to introspect \(default: public\) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `schemas` | array | List of available schemas in the database |

## Notes
@@ -165,6 +165,32 @@ Execute raw SQL on Amazon RDS using the Data API

| `rows` | array | Array of rows returned or affected |
| `rowCount` | number | Number of rows affected |

### `rds_introspect`

Introspect Amazon RDS Aurora database schema to retrieve table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
| `accessKeyId` | string | Yes | AWS access key ID |
| `secretAccessKey` | string | Yes | AWS secret access key |
| `resourceArn` | string | Yes | ARN of the Aurora DB cluster |
| `secretArn` | string | Yes | ARN of the Secrets Manager secret containing DB credentials |
| `database` | string | No | Database name \(optional\) |
| `schema` | string | No | Schema to introspect \(default: public for PostgreSQL, database name for MySQL\) |
| `engine` | string | No | Database engine \(aurora-postgresql or aurora-mysql\). Auto-detected if not provided. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `engine` | string | Detected database engine type |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `schemas` | array | List of available schemas in the database |

## Notes
@@ -261,6 +261,25 @@ Call a PostgreSQL function in Supabase

| `message` | string | Operation status message |
| `results` | json | Result returned from the function |

### `supabase_introspect`

Introspect Supabase database schema to get table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | Your Supabase project ID \(e.g., jdrkgepadsdopsntdlom\) |
| `schema` | string | No | Database schema to introspect \(defaults to all user schemas, commonly "public"\) |
| `apiKey` | string | Yes | Your Supabase service role secret key |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |

### `supabase_storage_upload`

Upload a file to a Supabase storage bucket
@@ -96,6 +96,7 @@ const ChatMessageSchema = z.object({
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
commands: z.array(z.string()).optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
@@ -131,6 +132,7 @@ export async function POST(req: NextRequest) {
|
||||
provider,
|
||||
conversationId,
|
||||
contexts,
|
||||
commands,
|
||||
} = ChatMessageSchema.parse(body)
|
||||
// Ensure we have a consistent user message ID for this request
|
||||
const userMessageIdToUse = userMessageId || crypto.randomUUID()
|
||||
@@ -458,6 +460,7 @@ export async function POST(req: NextRequest) {
|
||||
...(integrationTools.length > 0 && { tools: integrationTools }),
|
||||
...(baseTools.length > 0 && { baseTools }),
|
||||
...(credentials && { credentials }),
|
||||
...(commands && commands.length > 0 && { commands }),
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -802,49 +805,29 @@ export async function POST(req: NextRequest) {
|
||||
toolNames: toolCalls.map((tc) => tc?.name).filter(Boolean),
|
||||
})
|
||||
|
||||
// Save messages to database after streaming completes (including aborted messages)
|
||||
// NOTE: Messages are saved by the client via update-messages endpoint with full contentBlocks.
|
||||
// Server only updates conversationId here to avoid overwriting client's richer save.
|
||||
if (currentChat) {
|
||||
const updatedMessages = [...conversationHistory, userMessage]
|
||||
|
||||
// Save assistant message if there's any content or tool calls (even partial from abort)
|
||||
if (assistantContent.trim() || toolCalls.length > 0) {
|
||||
const assistantMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'assistant',
|
||||
content: assistantContent,
|
||||
timestamp: new Date().toISOString(),
|
||||
...(toolCalls.length > 0 && { toolCalls }),
|
||||
}
|
||||
updatedMessages.push(assistantMessage)
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Saving assistant message with content (${assistantContent.length} chars) and ${toolCalls.length} tool calls`
|
||||
)
|
||||
} else {
|
||||
logger.info(
|
||||
`[${tracker.requestId}] No assistant content or tool calls to save (aborted before response)`
|
||||
)
|
||||
}
|
||||
|
||||
// Persist only a safe conversationId to avoid continuing from a state that expects tool outputs
|
||||
const previousConversationId = currentChat?.conversationId as string | undefined
|
||||
const responseId = lastSafeDoneResponseId || previousConversationId || undefined
|
||||
|
||||
// Update chat in database immediately (without title)
|
||||
await db
|
||||
.update(copilotChats)
|
||||
.set({
|
||||
messages: updatedMessages,
|
||||
updatedAt: new Date(),
|
||||
...(responseId ? { conversationId: responseId } : {}),
|
||||
})
|
||||
.where(eq(copilotChats.id, actualChatId!))
|
||||
if (responseId) {
|
||||
await db
|
||||
.update(copilotChats)
|
||||
.set({
|
||||
updatedAt: new Date(),
|
||||
conversationId: responseId,
|
||||
})
|
||||
.where(eq(copilotChats.id, actualChatId!))
|
||||
|
||||
logger.info(`[${tracker.requestId}] Updated chat ${actualChatId} with new messages`, {
|
||||
messageCount: updatedMessages.length,
|
||||
savedUserMessage: true,
|
||||
savedAssistantMessage: assistantContent.trim().length > 0,
|
||||
updatedConversationId: responseId || null,
|
||||
})
|
||||
logger.info(
|
||||
`[${tracker.requestId}] Updated conversationId for chat ${actualChatId}`,
|
||||
{
|
||||
updatedConversationId: responseId,
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`[${tracker.requestId}] Error processing stream:`, error)
|
||||
|
||||
@@ -17,25 +17,30 @@ const logger = createLogger('CopilotChatUpdateAPI')
|
||||
const UpdateMessagesSchema = z.object({
|
||||
chatId: z.string(),
|
||||
messages: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
role: z.enum(['user', 'assistant']),
|
||||
content: z.string(),
|
||||
timestamp: z.string(),
|
||||
toolCalls: z.array(z.any()).optional(),
|
||||
contentBlocks: z.array(z.any()).optional(),
|
||||
fileAttachments: z
|
||||
.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
key: z.string(),
|
||||
filename: z.string(),
|
||||
media_type: z.string(),
|
||||
size: z.number(),
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
})
|
||||
z
|
||||
.object({
|
||||
id: z.string(),
|
||||
role: z.enum(['user', 'assistant', 'system']),
|
||||
content: z.string(),
|
||||
timestamp: z.string(),
|
||||
toolCalls: z.array(z.any()).optional(),
|
||||
contentBlocks: z.array(z.any()).optional(),
|
||||
fileAttachments: z
|
||||
.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
key: z.string(),
|
||||
filename: z.string(),
|
||||
media_type: z.string(),
|
||||
size: z.number(),
|
||||
})
|
||||
)
|
||||
.optional(),
|
||||
contexts: z.array(z.any()).optional(),
|
||||
citations: z.array(z.any()).optional(),
|
||||
errorType: z.string().optional(),
|
||||
})
|
||||
.passthrough() // Preserve any additional fields for future compatibility
|
||||
),
|
||||
planArtifact: z.string().nullable().optional(),
|
||||
config: z
|
||||
@@ -57,8 +62,33 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
|
||||
// Debug: Log what we received
|
||||
const lastMsg = body.messages?.[body.messages.length - 1]
|
||||
if (lastMsg?.role === 'assistant') {
|
||||
logger.info(`[${tracker.requestId}] Received messages to save`, {
|
||||
messageCount: body.messages?.length,
|
||||
lastMsgId: lastMsg.id,
|
||||
lastMsgContentLength: lastMsg.content?.length || 0,
|
||||
lastMsgContentBlockCount: lastMsg.contentBlocks?.length || 0,
|
||||
lastMsgContentBlockTypes: lastMsg.contentBlocks?.map((b: any) => b?.type) || [],
|
||||
})
|
||||
}
|
||||
|
||||
const { chatId, messages, planArtifact, config } = UpdateMessagesSchema.parse(body)
|
||||
|
||||
// Debug: Log what we're about to save
|
||||
const lastMsgParsed = messages[messages.length - 1]
|
||||
if (lastMsgParsed?.role === 'assistant') {
|
||||
logger.info(`[${tracker.requestId}] Parsed messages to save`, {
|
||||
messageCount: messages.length,
|
||||
lastMsgId: lastMsgParsed.id,
|
||||
lastMsgContentLength: lastMsgParsed.content?.length || 0,
|
||||
lastMsgContentBlockCount: lastMsgParsed.contentBlocks?.length || 0,
|
||||
lastMsgContentBlockTypes: lastMsgParsed.contentBlocks?.map((b: any) => b?.type) || [],
|
||||
})
|
||||
}
|
||||
|
||||
// Verify that the chat belongs to the user
|
||||
const [chat] = await db
|
||||
.select()
|
||||
|
||||
@@ -1,134 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getCopilotModel } from '@/lib/copilot/config'
|
||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
||||
import type { CopilotProviderConfig } from '@/lib/copilot/types'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
const logger = createLogger('ContextUsageAPI')
|
||||
|
||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||
|
||||
const ContextUsageRequestSchema = z.object({
|
||||
chatId: z.string(),
|
||||
model: z.string(),
|
||||
workflowId: z.string(),
|
||||
provider: z.any().optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot/context-usage
|
||||
* Fetch context usage from sim-agent API
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
try {
|
||||
logger.info('[Context Usage API] Request received')
|
||||
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
logger.warn('[Context Usage API] No session/user ID')
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
logger.info('[Context Usage API] Request body', body)
|
||||
|
||||
const parsed = ContextUsageRequestSchema.safeParse(body)
|
||||
|
||||
if (!parsed.success) {
|
||||
logger.warn('[Context Usage API] Invalid request body', parsed.error.errors)
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request body', details: parsed.error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { chatId, model, workflowId, provider } = parsed.data
|
||||
const userId = session.user.id // Get userId from session, not from request
|
||||
|
||||
logger.info('[Context Usage API] Request validated', { chatId, model, userId, workflowId })
|
||||
|
||||
// Build provider config similar to chat route
|
||||
let providerConfig: CopilotProviderConfig | undefined = provider
|
||||
if (!providerConfig) {
|
||||
const defaults = getCopilotModel('chat')
|
||||
const modelToUse = env.COPILOT_MODEL || defaults.model
|
||||
const providerEnv = env.COPILOT_PROVIDER as any
|
||||
|
||||
if (providerEnv) {
|
||||
if (providerEnv === 'azure-openai') {
|
||||
providerConfig = {
|
||||
provider: 'azure-openai',
|
||||
model: modelToUse,
|
||||
apiKey: env.AZURE_OPENAI_API_KEY,
|
||||
apiVersion: env.AZURE_OPENAI_API_VERSION,
|
||||
endpoint: env.AZURE_OPENAI_ENDPOINT,
|
||||
}
|
||||
} else if (providerEnv === 'vertex') {
|
||||
providerConfig = {
|
||||
provider: 'vertex',
|
||||
model: modelToUse,
|
||||
apiKey: env.COPILOT_API_KEY,
|
||||
vertexProject: env.VERTEX_PROJECT,
|
||||
vertexLocation: env.VERTEX_LOCATION,
|
||||
}
|
||||
} else {
|
||||
providerConfig = {
|
||||
provider: providerEnv,
|
||||
model: modelToUse,
|
||||
apiKey: env.COPILOT_API_KEY,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Call sim-agent API
|
||||
const requestPayload = {
|
||||
chatId,
|
||||
model,
|
||||
userId,
|
||||
workflowId,
|
||||
...(providerConfig ? { provider: providerConfig } : {}),
|
||||
}
|
||||
|
||||
logger.info('[Context Usage API] Calling sim-agent', {
|
||||
url: `${SIM_AGENT_API_URL}/api/get-context-usage`,
|
||||
payload: requestPayload,
|
||||
})
|
||||
|
||||
const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/get-context-usage`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify(requestPayload),
|
||||
})
|
||||
|
||||
logger.info('[Context Usage API] Sim-agent response', {
|
||||
status: simAgentResponse.status,
|
||||
ok: simAgentResponse.ok,
|
||||
})
|
||||
|
||||
if (!simAgentResponse.ok) {
|
||||
const errorText = await simAgentResponse.text().catch(() => '')
|
||||
logger.warn('[Context Usage API] Sim agent request failed', {
|
||||
status: simAgentResponse.status,
|
||||
error: errorText,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ error: 'Failed to fetch context usage from sim-agent' },
|
||||
{ status: simAgentResponse.status }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await simAgentResponse.json()
|
||||
logger.info('[Context Usage API] Sim-agent data received', data)
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
logger.error('Error fetching context usage:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
apps/sim/app/api/tools/dynamodb/introspect/route.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createRawDynamoDBClient, describeTable, listTables } from '@/app/api/tools/dynamodb/utils'

const logger = createLogger('DynamoDBIntrospectAPI')

const IntrospectSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  tableName: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(`[${requestId}] Introspecting DynamoDB in region ${params.region}`)

    const client = createRawDynamoDBClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    try {
      const { tables } = await listTables(client)

      if (params.tableName) {
        logger.info(`[${requestId}] Describing table: ${params.tableName}`)
        const { tableDetails } = await describeTable(client, params.tableName)

        logger.info(`[${requestId}] Table description completed for '${params.tableName}'`)

        return NextResponse.json({
          message: `Table '${params.tableName}' described successfully.`,
          tables,
          tableDetails,
        })
      }

      logger.info(`[${requestId}] Listed ${tables.length} tables`)

      return NextResponse.json({
        message: `Found ${tables.length} table(s) in region '${params.region}'.`,
        tables,
      })
    } finally {
      client.destroy()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] DynamoDB introspection failed:`, error)

    return NextResponse.json(
      { error: `DynamoDB introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
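A hypothetical caller for the route above; the relative URL follows the file's app-router path, and the region, credentials, and table name are placeholders.

// Sketch only: posts the documented parameters to the new introspection route.
async function describeUsersTable() {
  const res = await fetch('/api/tools/dynamodb/introspect', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      region: 'us-east-1',
      accessKeyId: process.env.AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
      tableName: 'users', // omit to list every table in the region instead
    }),
  })
  return res.json() // { message, tables, tableDetails? }
}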
@@ -1,4 +1,4 @@
|
||||
import { DynamoDBClient } from '@aws-sdk/client-dynamodb'
|
||||
import { DescribeTableCommand, DynamoDBClient, ListTablesCommand } from '@aws-sdk/client-dynamodb'
|
||||
import {
|
||||
DeleteCommand,
|
||||
DynamoDBDocumentClient,
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
ScanCommand,
|
||||
UpdateCommand,
|
||||
} from '@aws-sdk/lib-dynamodb'
|
||||
import type { DynamoDBConnectionConfig } from '@/tools/dynamodb/types'
|
||||
import type { DynamoDBConnectionConfig, DynamoDBTableSchema } from '@/tools/dynamodb/types'
|
||||
|
||||
export function createDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBDocumentClient {
|
||||
const client = new DynamoDBClient({
|
||||
@@ -172,3 +172,99 @@ export async function deleteItem(
|
||||
await client.send(command)
|
||||
return { success: true }
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a raw DynamoDB client for operations that don't require DocumentClient
|
||||
*/
|
||||
export function createRawDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBClient {
|
||||
return new DynamoDBClient({
|
||||
region: config.region,
|
||||
credentials: {
|
||||
accessKeyId: config.accessKeyId,
|
||||
secretAccessKey: config.secretAccessKey,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists all DynamoDB tables in the configured region
|
||||
*/
|
||||
export async function listTables(client: DynamoDBClient): Promise<{ tables: string[] }> {
|
||||
const tables: string[] = []
|
||||
let exclusiveStartTableName: string | undefined
|
||||
|
||||
do {
|
||||
const command = new ListTablesCommand({
|
||||
ExclusiveStartTableName: exclusiveStartTableName,
|
||||
})
|
||||
|
||||
const response = await client.send(command)
|
||||
if (response.TableNames) {
|
||||
tables.push(...response.TableNames)
|
||||
}
|
||||
exclusiveStartTableName = response.LastEvaluatedTableName
|
||||
} while (exclusiveStartTableName)
|
||||
|
||||
return { tables }
|
||||
}
|
||||
|
||||
/**
|
||||
* Describes a specific DynamoDB table and returns its schema information
|
||||
*/
|
||||
export async function describeTable(
|
||||
client: DynamoDBClient,
|
||||
tableName: string
|
||||
): Promise<{ tableDetails: DynamoDBTableSchema }> {
|
||||
const command = new DescribeTableCommand({
|
||||
TableName: tableName,
|
||||
})
|
||||
|
||||
const response = await client.send(command)
|
||||
const table = response.Table
|
||||
|
||||
if (!table) {
|
||||
throw new Error(`Table '${tableName}' not found`)
|
||||
}
|
||||
|
||||
const tableDetails: DynamoDBTableSchema = {
|
||||
tableName: table.TableName || tableName,
|
||||
tableStatus: table.TableStatus || 'UNKNOWN',
|
||||
keySchema:
|
||||
table.KeySchema?.map((key) => ({
|
||||
attributeName: key.AttributeName || '',
|
||||
keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
|
||||
})) || [],
|
||||
attributeDefinitions:
|
||||
table.AttributeDefinitions?.map((attr) => ({
|
||||
attributeName: attr.AttributeName || '',
|
||||
attributeType: (attr.AttributeType as 'S' | 'N' | 'B') || 'S',
|
||||
})) || [],
|
||||
globalSecondaryIndexes:
|
||||
table.GlobalSecondaryIndexes?.map((gsi) => ({
|
||||
indexName: gsi.IndexName || '',
|
||||
keySchema:
|
||||
gsi.KeySchema?.map((key) => ({
|
||||
attributeName: key.AttributeName || '',
|
||||
keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
|
||||
})) || [],
|
||||
projectionType: gsi.Projection?.ProjectionType || 'ALL',
|
||||
indexStatus: gsi.IndexStatus || 'UNKNOWN',
|
||||
})) || [],
|
||||
localSecondaryIndexes:
|
||||
table.LocalSecondaryIndexes?.map((lsi) => ({
|
||||
indexName: lsi.IndexName || '',
|
||||
keySchema:
|
||||
lsi.KeySchema?.map((key) => ({
|
||||
attributeName: key.AttributeName || '',
|
||||
keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
|
||||
})) || [],
|
||||
projectionType: lsi.Projection?.ProjectionType || 'ALL',
|
||||
indexStatus: 'ACTIVE',
|
||||
})) || [],
|
||||
itemCount: Number(table.ItemCount) || 0,
|
||||
tableSizeBytes: Number(table.TableSizeBytes) || 0,
|
||||
billingMode: table.BillingModeSummary?.BillingMode || 'PROVISIONED',
|
||||
}
|
||||
|
||||
return { tableDetails }
|
||||
}
|
||||
|
||||
apps/sim/app/api/tools/mongodb/introspect/route.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createMongoDBConnection, executeIntrospect } from '../utils'

const logger = createLogger('MongoDBIntrospectAPI')

const IntrospectSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().optional(),
  username: z.string().optional(),
  password: z.string().optional(),
  authSource: z.string().optional(),
  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)
  let client = null

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting MongoDB at ${params.host}:${params.port}${params.database ? `/${params.database}` : ''}`
    )

    client = await createMongoDBConnection({
      host: params.host,
      port: params.port,
      database: params.database || 'admin',
      username: params.username,
      password: params.password,
      authSource: params.authSource,
      ssl: params.ssl,
    })

    const result = await executeIntrospect(client, params.database)

    logger.info(
      `[${requestId}] Introspection completed: ${result.databases.length} databases, ${result.collections.length} collections`
    )

    return NextResponse.json({
      message: result.message,
      databases: result.databases,
      collections: result.collections,
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] MongoDB introspect failed:`, error)

    return NextResponse.json(
      { error: `MongoDB introspect failed: ${errorMessage}` },
      { status: 500 }
    )
  } finally {
    if (client) {
      await client.close()
    }
  }
}
@@ -1,5 +1,5 @@
|
||||
import { MongoClient } from 'mongodb'
|
||||
import type { MongoDBConnectionConfig } from '@/tools/mongodb/types'
|
||||
import type { MongoDBCollectionInfo, MongoDBConnectionConfig } from '@/tools/mongodb/types'
|
||||
|
||||
export async function createMongoDBConnection(config: MongoDBConnectionConfig) {
|
||||
const credentials =
|
||||
@@ -129,3 +129,59 @@ export function sanitizeCollectionName(name: string): string {
|
||||
}
|
||||
return name
|
||||
}
|
||||
|
||||
/**
|
||||
* Introspect MongoDB to get databases, collections, and indexes
|
||||
*/
|
||||
export async function executeIntrospect(
|
||||
client: MongoClient,
|
||||
database?: string
|
||||
): Promise<{
|
||||
message: string
|
||||
databases: string[]
|
||||
collections: MongoDBCollectionInfo[]
|
||||
}> {
|
||||
const databases: string[] = []
|
||||
const collections: MongoDBCollectionInfo[] = []
|
||||
|
||||
if (database) {
|
||||
databases.push(database)
|
||||
const db = client.db(database)
|
||||
const collectionList = await db.listCollections().toArray()
|
||||
|
||||
for (const collInfo of collectionList) {
|
||||
const coll = db.collection(collInfo.name)
|
||||
const indexes = await coll.indexes()
|
||||
const documentCount = await coll.estimatedDocumentCount()
|
||||
|
||||
collections.push({
|
||||
name: collInfo.name,
|
||||
type: collInfo.type || 'collection',
|
||||
documentCount,
|
||||
indexes: indexes.map((idx) => ({
|
||||
name: idx.name || '',
|
||||
key: idx.key as Record<string, number>,
|
||||
unique: idx.unique || false,
|
||||
sparse: idx.sparse,
|
||||
})),
|
||||
})
|
||||
}
|
||||
} else {
|
||||
const admin = client.db().admin()
|
||||
const dbList = await admin.listDatabases()
|
||||
|
||||
for (const dbInfo of dbList.databases) {
|
||||
databases.push(dbInfo.name)
|
||||
}
|
||||
}
|
||||
|
||||
const message = database
|
||||
? `Found ${collections.length} collections in database '${database}'`
|
||||
: `Found ${databases.length} databases`
|
||||
|
||||
return {
|
||||
message,
|
||||
databases,
|
||||
collections,
|
||||
}
|
||||
}
|
||||
|
||||
70
apps/sim/app/api/tools/mysql/introspect/route.ts
Normal file
70
apps/sim/app/api/tools/mysql/introspect/route.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createMySQLConnection, executeIntrospect } from '@/app/api/tools/mysql/utils'
|
||||
|
||||
const logger = createLogger('MySQLIntrospectAPI')
|
||||
|
||||
const IntrospectSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = IntrospectSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspecting MySQL schema on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const connection = await createMySQLConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeIntrospect(connection, params.database)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Schema introspection completed. Found ${result.tables.length} table(s) in database '${params.database}'.`,
|
||||
tables: result.tables,
|
||||
databases: result.databases,
|
||||
})
|
||||
} finally {
|
||||
await connection.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] MySQL introspection failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `MySQL introspection failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -166,3 +166,146 @@ function sanitizeSingleIdentifier(identifier: string): string {
|
||||
|
||||
return `\`${cleaned}\``
|
||||
}
|
||||
|
||||
export interface MySQLIntrospectionResult {
|
||||
tables: Array<{
|
||||
name: string
|
||||
database: string
|
||||
columns: Array<{
|
||||
name: string
|
||||
type: string
|
||||
nullable: boolean
|
||||
default: string | null
|
||||
isPrimaryKey: boolean
|
||||
isForeignKey: boolean
|
||||
autoIncrement: boolean
|
||||
references?: {
|
||||
table: string
|
||||
column: string
|
||||
}
|
||||
}>
|
||||
primaryKey: string[]
|
||||
foreignKeys: Array<{
|
||||
column: string
|
||||
referencesTable: string
|
||||
referencesColumn: string
|
||||
}>
|
||||
indexes: Array<{
|
||||
name: string
|
||||
columns: string[]
|
||||
unique: boolean
|
||||
}>
|
||||
}>
|
||||
databases: string[]
|
||||
}
|
||||
|
||||
export async function executeIntrospect(
|
||||
connection: mysql.Connection,
|
||||
databaseName: string
|
||||
): Promise<MySQLIntrospectionResult> {
|
||||
const [databasesRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA
|
||||
WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
|
||||
ORDER BY SCHEMA_NAME`
|
||||
)
|
||||
const databases = databasesRows.map((row) => row.SCHEMA_NAME)
|
||||
|
||||
const [tablesRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES
|
||||
WHERE TABLE_SCHEMA = ? AND TABLE_TYPE = 'BASE TABLE'
|
||||
ORDER BY TABLE_NAME`,
|
||||
[databaseName]
|
||||
)
|
||||
|
||||
const tables = []
|
||||
|
||||
for (const tableRow of tablesRows) {
|
||||
const tableName = tableRow.TABLE_NAME
|
||||
|
||||
const [columnsRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT COLUMN_NAME, DATA_TYPE, COLUMN_TYPE, IS_NULLABLE, COLUMN_DEFAULT, EXTRA
|
||||
FROM INFORMATION_SCHEMA.COLUMNS
|
||||
WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?
|
||||
ORDER BY ORDINAL_POSITION`,
|
||||
[databaseName, tableName]
|
||||
)
|
||||
|
||||
const [pkRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
|
||||
WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND CONSTRAINT_NAME = 'PRIMARY'
|
||||
ORDER BY ORDINAL_POSITION`,
|
||||
[databaseName, tableName]
|
||||
)
|
||||
const primaryKeyColumns = pkRows.map((row) => row.COLUMN_NAME)
|
||||
|
||||
const [fkRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT kcu.COLUMN_NAME, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME
|
||||
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu
|
||||
WHERE kcu.TABLE_SCHEMA = ? AND kcu.TABLE_NAME = ? AND kcu.REFERENCED_TABLE_NAME IS NOT NULL`,
|
||||
[databaseName, tableName]
|
||||
)
|
||||
|
||||
const foreignKeys = fkRows.map((row) => ({
|
||||
column: row.COLUMN_NAME,
|
||||
referencesTable: row.REFERENCED_TABLE_NAME,
|
||||
referencesColumn: row.REFERENCED_COLUMN_NAME,
|
||||
}))
|
||||
|
||||
const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))
|
||||
|
||||
const [indexRows] = await connection.execute<mysql.RowDataPacket[]>(
|
||||
`SELECT INDEX_NAME, COLUMN_NAME, SEQ_IN_INDEX, NON_UNIQUE
|
||||
FROM INFORMATION_SCHEMA.STATISTICS
|
||||
WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND INDEX_NAME != 'PRIMARY'
|
||||
ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
|
||||
[databaseName, tableName]
|
||||
)
|
||||
|
||||
const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
|
||||
for (const row of indexRows) {
|
||||
const indexName = row.INDEX_NAME
|
||||
if (!indexMap.has(indexName)) {
|
||||
indexMap.set(indexName, {
|
||||
name: indexName,
|
||||
columns: [],
|
||||
unique: row.NON_UNIQUE === 0,
|
||||
})
|
||||
}
|
||||
indexMap.get(indexName)!.columns.push(row.COLUMN_NAME)
|
||||
}
|
||||
const indexes = Array.from(indexMap.values())
|
||||
|
||||
const columns = columnsRows.map((col) => {
|
||||
const columnName = col.COLUMN_NAME
|
||||
const fk = foreignKeys.find((f) => f.column === columnName)
|
||||
const isAutoIncrement = col.EXTRA?.toLowerCase().includes('auto_increment') || false
|
||||
|
||||
return {
|
||||
name: columnName,
|
||||
type: col.COLUMN_TYPE || col.DATA_TYPE,
|
||||
nullable: col.IS_NULLABLE === 'YES',
|
||||
default: col.COLUMN_DEFAULT,
|
||||
isPrimaryKey: primaryKeyColumns.includes(columnName),
|
||||
isForeignKey: fkColumnSet.has(columnName),
|
||||
autoIncrement: isAutoIncrement,
|
||||
...(fk && {
|
||||
references: {
|
||||
table: fk.referencesTable,
|
||||
column: fk.referencesColumn,
|
||||
},
|
||||
}),
|
||||
}
|
||||
})
|
||||
|
||||
tables.push({
|
||||
name: tableName,
|
||||
database: databaseName,
|
||||
columns,
|
||||
primaryKey: primaryKeyColumns,
|
||||
foreignKeys,
|
||||
indexes,
|
||||
})
|
||||
}
|
||||
|
||||
return { tables, databases }
|
||||
}
|
||||
|
||||
199
apps/sim/app/api/tools/neo4j/introspect/route.ts
Normal file
199
apps/sim/app/api/tools/neo4j/introspect/route.ts
Normal file
@@ -0,0 +1,199 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createNeo4jDriver } from '@/app/api/tools/neo4j/utils'
|
||||
import type { Neo4jNodeSchema, Neo4jRelationshipSchema } from '@/tools/neo4j/types'
|
||||
|
||||
const logger = createLogger('Neo4jIntrospectAPI')
|
||||
|
||||
const IntrospectSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
encryption: z.enum(['enabled', 'disabled']).default('disabled'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
let driver = null
|
||||
let session = null
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = IntrospectSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspecting Neo4j database at ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
driver = await createNeo4jDriver({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
encryption: params.encryption,
|
||||
})
|
||||
|
||||
session = driver.session({ database: params.database })
|
||||
|
||||
const labelsResult = await session.run(
|
||||
'CALL db.labels() YIELD label RETURN label ORDER BY label'
|
||||
)
|
||||
const labels: string[] = labelsResult.records.map((record) => record.get('label') as string)
|
||||
|
||||
const relationshipTypesResult = await session.run(
|
||||
'CALL db.relationshipTypes() YIELD relationshipType RETURN relationshipType ORDER BY relationshipType'
|
||||
)
|
||||
const relationshipTypes: string[] = relationshipTypesResult.records.map(
|
||||
(record) => record.get('relationshipType') as string
|
||||
)
|
||||
|
||||
const nodeSchemas: Neo4jNodeSchema[] = []
|
||||
try {
|
||||
const nodePropertiesResult = await session.run(
|
||||
'CALL db.schema.nodeTypeProperties() YIELD nodeLabels, propertyName, propertyTypes RETURN nodeLabels, propertyName, propertyTypes'
|
||||
)
|
||||
|
||||
const nodePropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()
|
||||
|
||||
for (const record of nodePropertiesResult.records) {
|
||||
const nodeLabels = record.get('nodeLabels') as string[]
|
||||
const propertyName = record.get('propertyName') as string
|
||||
const propertyTypes = record.get('propertyTypes') as string[]
|
||||
|
||||
const labelKey = nodeLabels.join(':')
|
||||
if (!nodePropertiesMap.has(labelKey)) {
|
||||
nodePropertiesMap.set(labelKey, [])
|
||||
}
|
||||
nodePropertiesMap.get(labelKey)!.push({ name: propertyName, types: propertyTypes })
|
||||
}
|
||||
|
||||
for (const [labelKey, properties] of nodePropertiesMap) {
|
||||
nodeSchemas.push({
|
||||
label: labelKey,
|
||||
properties,
|
||||
})
|
||||
}
|
||||
} catch (nodePropsError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not fetch node properties (may not be supported in this Neo4j version): ${nodePropsError}`
|
||||
)
|
||||
}
|
||||
|
||||
const relationshipSchemas: Neo4jRelationshipSchema[] = []
|
||||
try {
|
||||
const relPropertiesResult = await session.run(
|
||||
'CALL db.schema.relTypeProperties() YIELD relationshipType, propertyName, propertyTypes RETURN relationshipType, propertyName, propertyTypes'
|
||||
)
|
||||
|
||||
const relPropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()
|
||||
|
||||
for (const record of relPropertiesResult.records) {
|
||||
const relType = record.get('relationshipType') as string
|
||||
const propertyName = record.get('propertyName') as string | null
|
||||
const propertyTypes = record.get('propertyTypes') as string[]
|
||||
|
||||
if (!relPropertiesMap.has(relType)) {
|
||||
relPropertiesMap.set(relType, [])
|
||||
}
|
||||
if (propertyName) {
|
||||
relPropertiesMap.get(relType)!.push({ name: propertyName, types: propertyTypes })
|
||||
}
|
||||
}
|
||||
|
||||
for (const [relType, properties] of relPropertiesMap) {
|
||||
relationshipSchemas.push({
|
||||
type: relType,
|
||||
properties,
|
||||
})
|
||||
}
|
||||
} catch (relPropsError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not fetch relationship properties (may not be supported in this Neo4j version): ${relPropsError}`
|
||||
)
|
||||
}
|
||||
|
||||
const constraints: Array<{
|
||||
name: string
|
||||
type: string
|
||||
entityType: string
|
||||
properties: string[]
|
||||
}> = []
|
||||
try {
|
||||
const constraintsResult = await session.run('SHOW CONSTRAINTS')
|
||||
|
||||
for (const record of constraintsResult.records) {
|
||||
const name = record.get('name') as string
|
||||
const type = record.get('type') as string
|
||||
const entityType = record.get('entityType') as string
|
||||
const properties = (record.get('properties') as string[]) || []
|
||||
|
||||
constraints.push({ name, type, entityType, properties })
|
||||
}
|
||||
} catch (constraintsError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not fetch constraints (may not be supported in this Neo4j version): ${constraintsError}`
|
||||
)
|
||||
}
|
||||
|
||||
const indexes: Array<{ name: string; type: string; entityType: string; properties: string[] }> =
|
||||
[]
|
||||
try {
|
||||
const indexesResult = await session.run('SHOW INDEXES')
|
||||
|
||||
for (const record of indexesResult.records) {
|
||||
const name = record.get('name') as string
|
||||
const type = record.get('type') as string
|
||||
const entityType = record.get('entityType') as string
|
||||
const properties = (record.get('properties') as string[]) || []
|
||||
|
||||
indexes.push({ name, type, entityType, properties })
|
||||
}
|
||||
} catch (indexesError) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not fetch indexes (may not be supported in this Neo4j version): ${indexesError}`
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspection completed: ${labels.length} labels, ${relationshipTypes.length} relationship types, ${constraints.length} constraints, ${indexes.length} indexes`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Database introspection completed: found ${labels.length} labels, ${relationshipTypes.length} relationship types, ${nodeSchemas.length} node schemas, ${relationshipSchemas.length} relationship schemas, ${constraints.length} constraints, ${indexes.length} indexes`,
|
||||
labels,
|
||||
relationshipTypes,
|
||||
nodeSchemas,
|
||||
relationshipSchemas,
|
||||
constraints,
|
||||
indexes,
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] Neo4j introspection failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `Neo4j introspection failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
} finally {
|
||||
if (session) {
|
||||
await session.close()
|
||||
}
|
||||
if (driver) {
|
||||
await driver.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
71
apps/sim/app/api/tools/postgresql/introspect/route.ts
Normal file
71
apps/sim/app/api/tools/postgresql/introspect/route.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { randomUUID } from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createPostgresConnection, executeIntrospect } from '@/app/api/tools/postgresql/utils'
|
||||
|
||||
const logger = createLogger('PostgreSQLIntrospectAPI')
|
||||
|
||||
const IntrospectSchema = z.object({
|
||||
host: z.string().min(1, 'Host is required'),
|
||||
port: z.coerce.number().int().positive('Port must be a positive integer'),
|
||||
database: z.string().min(1, 'Database name is required'),
|
||||
username: z.string().min(1, 'Username is required'),
|
||||
password: z.string().min(1, 'Password is required'),
|
||||
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
|
||||
schema: z.string().default('public'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const params = IntrospectSchema.parse(body)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspecting PostgreSQL schema on ${params.host}:${params.port}/${params.database}`
|
||||
)
|
||||
|
||||
const sql = createPostgresConnection({
|
||||
host: params.host,
|
||||
port: params.port,
|
||||
database: params.database,
|
||||
username: params.username,
|
||||
password: params.password,
|
||||
ssl: params.ssl,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await executeIntrospect(sql, params.schema)
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
message: `Schema introspection completed. Found ${result.tables.length} table(s) in schema '${params.schema}'.`,
|
||||
tables: result.tables,
|
||||
schemas: result.schemas,
|
||||
})
|
||||
} finally {
|
||||
await sql.end()
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error(`[${requestId}] PostgreSQL introspection failed:`, error)
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `PostgreSQL introspection failed: ${errorMessage}` },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
@@ -187,3 +187,184 @@ export async function executeDelete(
    rowCount,
  }
}

export interface IntrospectionResult {
  tables: Array<{
    name: string
    schema: string
    columns: Array<{
      name: string
      type: string
      nullable: boolean
      default: string | null
      isPrimaryKey: boolean
      isForeignKey: boolean
      references?: {
        table: string
        column: string
      }
    }>
    primaryKey: string[]
    foreignKeys: Array<{
      column: string
      referencesTable: string
      referencesColumn: string
    }>
    indexes: Array<{
      name: string
      columns: string[]
      unique: boolean
    }>
  }>
  schemas: string[]
}

export async function executeIntrospect(
  sql: any,
  schemaName = 'public'
): Promise<IntrospectionResult> {
  const schemasResult = await sql`
    SELECT schema_name
    FROM information_schema.schemata
    WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
    ORDER BY schema_name
  `
  const schemas = schemasResult.map((row: { schema_name: string }) => row.schema_name)

  const tablesResult = await sql`
    SELECT table_name, table_schema
    FROM information_schema.tables
    WHERE table_schema = ${schemaName}
      AND table_type = 'BASE TABLE'
    ORDER BY table_name
  `

  const tables = []

  for (const tableRow of tablesResult) {
    const tableName = tableRow.table_name
    const tableSchema = tableRow.table_schema

    const columnsResult = await sql`
      SELECT
        c.column_name,
        c.data_type,
        c.is_nullable,
        c.column_default,
        c.udt_name
      FROM information_schema.columns c
      WHERE c.table_schema = ${tableSchema}
        AND c.table_name = ${tableName}
      ORDER BY c.ordinal_position
    `

    const pkResult = await sql`
      SELECT kcu.column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu
        ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
      WHERE tc.constraint_type = 'PRIMARY KEY'
        AND tc.table_schema = ${tableSchema}
        AND tc.table_name = ${tableName}
    `
    const primaryKeyColumns = pkResult.map((row: { column_name: string }) => row.column_name)

    const fkResult = await sql`
      SELECT
        kcu.column_name,
        ccu.table_name AS foreign_table_name,
        ccu.column_name AS foreign_column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu
        ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
      JOIN information_schema.constraint_column_usage ccu
        ON ccu.constraint_name = tc.constraint_name
        AND ccu.table_schema = tc.table_schema
      WHERE tc.constraint_type = 'FOREIGN KEY'
        AND tc.table_schema = ${tableSchema}
        AND tc.table_name = ${tableName}
    `

    const foreignKeys = fkResult.map(
      (row: { column_name: string; foreign_table_name: string; foreign_column_name: string }) => ({
        column: row.column_name,
        referencesTable: row.foreign_table_name,
        referencesColumn: row.foreign_column_name,
      })
    )

    const fkColumnSet = new Set(foreignKeys.map((fk: { column: string }) => fk.column))

    const indexesResult = await sql`
      SELECT
        i.relname AS index_name,
        a.attname AS column_name,
        ix.indisunique AS is_unique
      FROM pg_class t
      JOIN pg_index ix ON t.oid = ix.indrelid
      JOIN pg_class i ON i.oid = ix.indexrelid
      JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
      JOIN pg_namespace n ON n.oid = t.relnamespace
      WHERE t.relkind = 'r'
        AND n.nspname = ${tableSchema}
        AND t.relname = ${tableName}
        AND NOT ix.indisprimary
      ORDER BY i.relname, a.attnum
    `

    const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
    for (const row of indexesResult) {
      const indexName = row.index_name
      if (!indexMap.has(indexName)) {
        indexMap.set(indexName, {
          name: indexName,
          columns: [],
          unique: row.is_unique,
        })
      }
      indexMap.get(indexName)!.columns.push(row.column_name)
    }
    const indexes = Array.from(indexMap.values())

    const columns = columnsResult.map(
      (col: {
        column_name: string
        data_type: string
        is_nullable: string
        column_default: string | null
        udt_name: string
      }) => {
        const columnName = col.column_name
        const fk = foreignKeys.find((f: { column: string }) => f.column === columnName)

        return {
          name: columnName,
          type: col.data_type === 'USER-DEFINED' ? col.udt_name : col.data_type,
          nullable: col.is_nullable === 'YES',
          default: col.column_default,
          isPrimaryKey: primaryKeyColumns.includes(columnName),
          isForeignKey: fkColumnSet.has(columnName),
          ...(fk && {
            references: {
              table: fk.referencesTable,
              column: fk.referencesColumn,
            },
          }),
        }
      }
    )

    tables.push({
      name: tableName,
      schema: tableSchema,
      columns,
      primaryKey: primaryKeyColumns,
      foreignKeys,
      indexes,
    })
  }

  return { tables, schemas }
}
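
A minimal usage sketch of the helper above, assuming (as the tagged-template queries suggest) that `createPostgresConnection` returns a postgres.js-style client; all connection values are placeholders.

```ts
// Sketch under stated assumptions; not taken verbatim from the diff.
const sql = createPostgresConnection({
  host: 'localhost',
  port: 5432,
  database: 'app',
  username: 'readonly_user',
  password: 'placeholder-password',
  ssl: 'disabled',
})
try {
  const { tables, schemas } = await executeIntrospect(sql, 'public')
  for (const table of tables) {
    // e.g. "public.users: 8 columns, pk [id]"
    console.log(
      `${table.schema}.${table.name}: ${table.columns.length} columns, pk [${table.primaryKey.join(', ')}]`
    )
  }
} finally {
  await sql.end()
}
```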

apps/sim/app/api/tools/rds/introspect/route.ts (new file, 80 lines)
@@ -0,0 +1,80 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createRdsClient, executeIntrospect, type RdsEngine } from '@/app/api/tools/rds/utils'

const logger = createLogger('RDSIntrospectAPI')

const IntrospectSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  resourceArn: z.string().min(1, 'Resource ARN is required'),
  secretArn: z.string().min(1, 'Secret ARN is required'),
  database: z.string().optional(),
  schema: z.string().optional(),
  engine: z.enum(['aurora-postgresql', 'aurora-mysql']).optional(),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting RDS Aurora database${params.database ? ` (${params.database})` : ''}`
    )

    const client = createRdsClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      resourceArn: params.resourceArn,
      secretArn: params.secretArn,
      database: params.database,
    })

    try {
      const result = await executeIntrospect(
        client,
        params.resourceArn,
        params.secretArn,
        params.database,
        params.schema,
        params.engine as RdsEngine | undefined
      )

      logger.info(
        `[${requestId}] Introspection completed successfully. Engine: ${result.engine}, found ${result.tables.length} tables`
      )

      return NextResponse.json({
        message: `Schema introspection completed. Engine: ${result.engine}. Found ${result.tables.length} table(s).`,
        engine: result.engine,
        tables: result.tables,
        schemas: result.schemas,
      })
    } finally {
      client.destroy()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] RDS introspection failed:`, error)

    return NextResponse.json(
      { error: `RDS introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
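
For illustration, a hedged example of the request body this route accepts. Field names come from `IntrospectSchema` above; all values are placeholders, and the optional fields follow the behavior implemented in the `executeIntrospect` hunk below (the engine is auto-detected when omitted, and the schema default depends on the detected engine).

```ts
// Placeholder payload matching IntrospectSchema above.
const payload = {
  region: 'us-east-1',
  accessKeyId: 'AKIA...placeholder',
  secretAccessKey: 'placeholder-secret',
  resourceArn: 'arn:aws:rds:us-east-1:123456789012:cluster:example-cluster',
  secretArn: 'arn:aws:secretsmanager:us-east-1:123456789012:secret:example-secret',
  database: 'app',                      // optional
  schema: 'public',                     // optional
  engine: 'aurora-postgresql' as const, // optional; auto-detected when omitted
}
```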
@@ -241,3 +241,487 @@ export async function executeDelete(
|
||||
|
||||
return executeStatement(client, resourceArn, secretArn, database, sql, parameters)
|
||||
}
|
||||
|
||||
export type RdsEngine = 'aurora-postgresql' | 'aurora-mysql'
|
||||
|
||||
export interface RdsIntrospectionResult {
|
||||
engine: RdsEngine
|
||||
tables: Array<{
|
||||
name: string
|
||||
schema: string
|
||||
columns: Array<{
|
||||
name: string
|
||||
type: string
|
||||
nullable: boolean
|
||||
default: string | null
|
||||
isPrimaryKey: boolean
|
||||
isForeignKey: boolean
|
||||
references?: {
|
||||
table: string
|
||||
column: string
|
||||
}
|
||||
}>
|
||||
primaryKey: string[]
|
||||
foreignKeys: Array<{
|
||||
column: string
|
||||
referencesTable: string
|
||||
referencesColumn: string
|
||||
}>
|
||||
indexes: Array<{
|
||||
name: string
|
||||
columns: string[]
|
||||
unique: boolean
|
||||
}>
|
||||
}>
|
||||
schemas: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Detects the database engine by querying SELECT VERSION()
|
||||
*/
|
||||
export async function detectEngine(
|
||||
client: RDSDataClient,
|
||||
resourceArn: string,
|
||||
secretArn: string,
|
||||
database: string | undefined
|
||||
): Promise<RdsEngine> {
|
||||
const result = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
'SELECT VERSION()'
|
||||
)
|
||||
|
||||
if (result.rows.length > 0) {
|
||||
const versionRow = result.rows[0] as Record<string, unknown>
|
||||
const versionValue = Object.values(versionRow)[0]
|
||||
const versionString = String(versionValue).toLowerCase()
|
||||
|
||||
if (versionString.includes('postgresql') || versionString.includes('postgres')) {
|
||||
return 'aurora-postgresql'
|
||||
}
|
||||
if (versionString.includes('mysql') || versionString.includes('mariadb')) {
|
||||
return 'aurora-mysql'
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error('Unable to detect database engine. Please specify the engine parameter.')
|
||||
}
|
||||
|
||||
/**
|
||||
* Introspects PostgreSQL schema using INFORMATION_SCHEMA
|
||||
*/
|
||||
async function introspectPostgresql(
|
||||
client: RDSDataClient,
|
||||
resourceArn: string,
|
||||
secretArn: string,
|
||||
database: string | undefined,
|
||||
schemaName: string
|
||||
): Promise<RdsIntrospectionResult> {
|
||||
const schemasResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT schema_name FROM information_schema.schemata
|
||||
WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
|
||||
ORDER BY schema_name`
|
||||
)
|
||||
const schemas = schemasResult.rows.map((row) => (row as { schema_name: string }).schema_name)
|
||||
|
||||
const tablesResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT table_name, table_schema
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = :schemaName
|
||||
AND table_type = 'BASE TABLE'
|
||||
ORDER BY table_name`,
|
||||
[{ name: 'schemaName', value: { stringValue: schemaName } }]
|
||||
)
|
||||
|
||||
const tables = []
|
||||
|
||||
for (const tableRow of tablesResult.rows) {
|
||||
const row = tableRow as { table_name: string; table_schema: string }
|
||||
const tableName = row.table_name
|
||||
const tableSchema = row.table_schema
|
||||
|
||||
const columnsResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT
|
||||
c.column_name,
|
||||
c.data_type,
|
||||
c.is_nullable,
|
||||
c.column_default,
|
||||
c.udt_name
|
||||
FROM information_schema.columns c
|
||||
WHERE c.table_schema = :tableSchema
|
||||
AND c.table_name = :tableName
|
||||
ORDER BY c.ordinal_position`,
|
||||
[
|
||||
{ name: 'tableSchema', value: { stringValue: tableSchema } },
|
||||
{ name: 'tableName', value: { stringValue: tableName } },
|
||||
]
|
||||
)
|
||||
|
||||
const pkResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT kcu.column_name
|
||||
FROM information_schema.table_constraints tc
|
||||
JOIN information_schema.key_column_usage kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
WHERE tc.constraint_type = 'PRIMARY KEY'
|
||||
AND tc.table_schema = :tableSchema
|
||||
AND tc.table_name = :tableName`,
|
||||
[
|
||||
{ name: 'tableSchema', value: { stringValue: tableSchema } },
|
||||
{ name: 'tableName', value: { stringValue: tableName } },
|
||||
]
|
||||
)
|
||||
const primaryKeyColumns = pkResult.rows.map((r) => (r as { column_name: string }).column_name)
|
||||
|
||||
const fkResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT
|
||||
kcu.column_name,
|
||||
ccu.table_name AS foreign_table_name,
|
||||
ccu.column_name AS foreign_column_name
|
||||
FROM information_schema.table_constraints tc
|
||||
JOIN information_schema.key_column_usage kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
AND tc.table_schema = kcu.table_schema
|
||||
JOIN information_schema.constraint_column_usage ccu
|
||||
ON ccu.constraint_name = tc.constraint_name
|
||||
AND ccu.table_schema = tc.table_schema
|
||||
WHERE tc.constraint_type = 'FOREIGN KEY'
|
||||
AND tc.table_schema = :tableSchema
|
||||
AND tc.table_name = :tableName`,
|
||||
[
|
||||
{ name: 'tableSchema', value: { stringValue: tableSchema } },
|
||||
{ name: 'tableName', value: { stringValue: tableName } },
|
||||
]
|
||||
)
|
||||
|
||||
const foreignKeys = fkResult.rows.map((r) => {
|
||||
const fkRow = r as {
|
||||
column_name: string
|
||||
foreign_table_name: string
|
||||
foreign_column_name: string
|
||||
}
|
||||
return {
|
||||
column: fkRow.column_name,
|
||||
referencesTable: fkRow.foreign_table_name,
|
||||
referencesColumn: fkRow.foreign_column_name,
|
||||
}
|
||||
})
|
||||
|
||||
const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))
|
||||
|
||||
const indexesResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT
|
||||
i.relname AS index_name,
|
||||
a.attname AS column_name,
|
||||
ix.indisunique AS is_unique
|
||||
FROM pg_class t
|
||||
JOIN pg_index ix ON t.oid = ix.indrelid
|
||||
JOIN pg_class i ON i.oid = ix.indexrelid
|
||||
JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
|
||||
JOIN pg_namespace n ON n.oid = t.relnamespace
|
||||
WHERE t.relkind = 'r'
|
||||
AND n.nspname = :tableSchema
|
||||
AND t.relname = :tableName
|
||||
AND NOT ix.indisprimary
|
||||
ORDER BY i.relname, a.attnum`,
|
||||
[
|
||||
{ name: 'tableSchema', value: { stringValue: tableSchema } },
|
||||
{ name: 'tableName', value: { stringValue: tableName } },
|
||||
]
|
||||
)
|
||||
|
||||
const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
|
||||
for (const idxRow of indexesResult.rows) {
|
||||
const idx = idxRow as { index_name: string; column_name: string; is_unique: boolean }
|
||||
const indexName = idx.index_name
|
||||
if (!indexMap.has(indexName)) {
|
||||
indexMap.set(indexName, {
|
||||
name: indexName,
|
||||
columns: [],
|
||||
unique: idx.is_unique,
|
||||
})
|
||||
}
|
||||
indexMap.get(indexName)!.columns.push(idx.column_name)
|
||||
}
|
||||
const indexes = Array.from(indexMap.values())
|
||||
|
||||
const columns = columnsResult.rows.map((colRow) => {
|
||||
const col = colRow as {
|
||||
column_name: string
|
||||
data_type: string
|
||||
is_nullable: string
|
||||
column_default: string | null
|
||||
udt_name: string
|
||||
}
|
||||
const columnName = col.column_name
|
||||
const fk = foreignKeys.find((f) => f.column === columnName)
|
||||
|
||||
return {
|
||||
name: columnName,
|
||||
type: col.data_type === 'USER-DEFINED' ? col.udt_name : col.data_type,
|
||||
nullable: col.is_nullable === 'YES',
|
||||
default: col.column_default,
|
||||
isPrimaryKey: primaryKeyColumns.includes(columnName),
|
||||
isForeignKey: fkColumnSet.has(columnName),
|
||||
...(fk && {
|
||||
references: {
|
||||
table: fk.referencesTable,
|
||||
column: fk.referencesColumn,
|
||||
},
|
||||
}),
|
||||
}
|
||||
})
|
||||
|
||||
tables.push({
|
||||
name: tableName,
|
||||
schema: tableSchema,
|
||||
columns,
|
||||
primaryKey: primaryKeyColumns,
|
||||
foreignKeys,
|
||||
indexes,
|
||||
})
|
||||
}
|
||||
|
||||
return { engine: 'aurora-postgresql', tables, schemas }
|
||||
}
|
||||
|
||||
/**
|
||||
* Introspects MySQL schema using INFORMATION_SCHEMA
|
||||
*/
|
||||
async function introspectMysql(
|
||||
client: RDSDataClient,
|
||||
resourceArn: string,
|
||||
secretArn: string,
|
||||
database: string | undefined,
|
||||
schemaName: string
|
||||
): Promise<RdsIntrospectionResult> {
|
||||
const schemasResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT SCHEMA_NAME as schema_name FROM information_schema.SCHEMATA
|
||||
WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
|
||||
ORDER BY SCHEMA_NAME`
|
||||
)
|
||||
const schemas = schemasResult.rows.map((row) => (row as { schema_name: string }).schema_name)
|
||||
|
||||
const tablesResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT TABLE_NAME as table_name, TABLE_SCHEMA as table_schema
|
||||
FROM information_schema.TABLES
|
||||
WHERE TABLE_SCHEMA = :schemaName
|
||||
AND TABLE_TYPE = 'BASE TABLE'
|
||||
ORDER BY TABLE_NAME`,
|
||||
[{ name: 'schemaName', value: { stringValue: schemaName } }]
|
||||
)
|
||||
|
||||
const tables = []
|
||||
|
||||
for (const tableRow of tablesResult.rows) {
|
||||
const row = tableRow as { table_name: string; table_schema: string }
|
||||
const tableName = row.table_name
|
||||
const tableSchema = row.table_schema
|
||||
|
||||
const columnsResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT
|
||||
COLUMN_NAME as column_name,
|
||||
DATA_TYPE as data_type,
|
||||
IS_NULLABLE as is_nullable,
|
||||
COLUMN_DEFAULT as column_default,
|
||||
COLUMN_TYPE as column_type,
|
||||
COLUMN_KEY as column_key
|
||||
FROM information_schema.COLUMNS
|
||||
WHERE TABLE_SCHEMA = :tableSchema
|
||||
AND TABLE_NAME = :tableName
|
||||
ORDER BY ORDINAL_POSITION`,
|
||||
[
|
||||
{ name: 'tableSchema', value: { stringValue: tableSchema } },
|
||||
{ name: 'tableName', value: { stringValue: tableName } },
|
||||
]
|
||||
)
|
||||
|
||||
const pkResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT COLUMN_NAME as column_name
|
||||
FROM information_schema.KEY_COLUMN_USAGE
|
||||
WHERE TABLE_SCHEMA = :tableSchema
|
||||
AND TABLE_NAME = :tableName
|
||||
AND CONSTRAINT_NAME = 'PRIMARY'
|
||||
ORDER BY ORDINAL_POSITION`,
|
||||
[
|
||||
{ name: 'tableSchema', value: { stringValue: tableSchema } },
|
||||
{ name: 'tableName', value: { stringValue: tableName } },
|
||||
]
|
||||
)
|
||||
const primaryKeyColumns = pkResult.rows.map((r) => (r as { column_name: string }).column_name)
|
||||
|
||||
const fkResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT
|
||||
kcu.COLUMN_NAME as column_name,
|
||||
kcu.REFERENCED_TABLE_NAME as foreign_table_name,
|
||||
kcu.REFERENCED_COLUMN_NAME as foreign_column_name
|
||||
FROM information_schema.KEY_COLUMN_USAGE kcu
|
||||
WHERE kcu.TABLE_SCHEMA = :tableSchema
|
||||
AND kcu.TABLE_NAME = :tableName
|
||||
AND kcu.REFERENCED_TABLE_NAME IS NOT NULL`,
|
||||
[
|
||||
{ name: 'tableSchema', value: { stringValue: tableSchema } },
|
||||
{ name: 'tableName', value: { stringValue: tableName } },
|
||||
]
|
||||
)
|
||||
|
||||
const foreignKeys = fkResult.rows.map((r) => {
|
||||
const fkRow = r as {
|
||||
column_name: string
|
||||
foreign_table_name: string
|
||||
foreign_column_name: string
|
||||
}
|
||||
return {
|
||||
column: fkRow.column_name,
|
||||
referencesTable: fkRow.foreign_table_name,
|
||||
referencesColumn: fkRow.foreign_column_name,
|
||||
}
|
||||
})
|
||||
|
||||
const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))
|
||||
|
||||
const indexesResult = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
`SELECT
|
||||
INDEX_NAME as index_name,
|
||||
COLUMN_NAME as column_name,
|
||||
NON_UNIQUE as non_unique
|
||||
FROM information_schema.STATISTICS
|
||||
WHERE TABLE_SCHEMA = :tableSchema
|
||||
AND TABLE_NAME = :tableName
|
||||
AND INDEX_NAME != 'PRIMARY'
|
||||
ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
|
||||
[
|
||||
{ name: 'tableSchema', value: { stringValue: tableSchema } },
|
||||
{ name: 'tableName', value: { stringValue: tableName } },
|
||||
]
|
||||
)
|
||||
|
||||
const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
|
||||
for (const idxRow of indexesResult.rows) {
|
||||
const idx = idxRow as { index_name: string; column_name: string; non_unique: number }
|
||||
const indexName = idx.index_name
|
||||
if (!indexMap.has(indexName)) {
|
||||
indexMap.set(indexName, {
|
||||
name: indexName,
|
||||
columns: [],
|
||||
unique: idx.non_unique === 0,
|
||||
})
|
||||
}
|
||||
indexMap.get(indexName)!.columns.push(idx.column_name)
|
||||
}
|
||||
const indexes = Array.from(indexMap.values())
|
||||
|
||||
const columns = columnsResult.rows.map((colRow) => {
|
||||
const col = colRow as {
|
||||
column_name: string
|
||||
data_type: string
|
||||
is_nullable: string
|
||||
column_default: string | null
|
||||
column_type: string
|
||||
column_key: string
|
||||
}
|
||||
const columnName = col.column_name
|
||||
const fk = foreignKeys.find((f) => f.column === columnName)
|
||||
|
||||
return {
|
||||
name: columnName,
|
||||
type: col.column_type || col.data_type,
|
||||
nullable: col.is_nullable === 'YES',
|
||||
default: col.column_default,
|
||||
isPrimaryKey: col.column_key === 'PRI',
|
||||
isForeignKey: fkColumnSet.has(columnName),
|
||||
...(fk && {
|
||||
references: {
|
||||
table: fk.referencesTable,
|
||||
column: fk.referencesColumn,
|
||||
},
|
||||
}),
|
||||
}
|
||||
})
|
||||
|
||||
tables.push({
|
||||
name: tableName,
|
||||
schema: tableSchema,
|
||||
columns,
|
||||
primaryKey: primaryKeyColumns,
|
||||
foreignKeys,
|
||||
indexes,
|
||||
})
|
||||
}
|
||||
|
||||
return { engine: 'aurora-mysql', tables, schemas }
|
||||
}
|
||||
|
||||
/**
|
||||
* Introspects RDS Aurora database schema with auto-detection of engine type
|
||||
*/
|
||||
export async function executeIntrospect(
|
||||
client: RDSDataClient,
|
||||
resourceArn: string,
|
||||
secretArn: string,
|
||||
database: string | undefined,
|
||||
schemaName?: string,
|
||||
engine?: RdsEngine
|
||||
): Promise<RdsIntrospectionResult> {
|
||||
const detectedEngine = engine || (await detectEngine(client, resourceArn, secretArn, database))
|
||||
|
||||
if (detectedEngine === 'aurora-postgresql') {
|
||||
const schema = schemaName || 'public'
|
||||
return introspectPostgresql(client, resourceArn, secretArn, database, schema)
|
||||
}
|
||||
const schema = schemaName || database || ''
|
||||
if (!schema) {
|
||||
throw new Error('Schema or database name is required for MySQL introspection')
|
||||
}
|
||||
return introspectMysql(client, resourceArn, secretArn, database, schema)
|
||||
}
|
||||
|
||||
@@ -462,9 +462,6 @@ export default function PlaygroundPage() {
|
||||
<Avatar size='lg'>
|
||||
<AvatarFallback>LG</AvatarFallback>
|
||||
</Avatar>
|
||||
<Avatar size='xl'>
|
||||
<AvatarFallback>XL</AvatarFallback>
|
||||
</Avatar>
|
||||
</VariantRow>
|
||||
<VariantRow label='with image'>
|
||||
<Avatar size='md'>
|
||||
@@ -505,9 +502,6 @@ export default function PlaygroundPage() {
|
||||
<Avatar size='lg' status='online'>
|
||||
<AvatarFallback>LG</AvatarFallback>
|
||||
</Avatar>
|
||||
<Avatar size='xl' status='online'>
|
||||
<AvatarFallback>XL</AvatarFallback>
|
||||
</Avatar>
|
||||
</VariantRow>
|
||||
</Section>
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import type { GlobalCommand } from '@/app/workspace/[workspaceId]/providers/glob
|
||||
* ad-hoc ids or shortcuts to ensure a single source of truth.
|
||||
*/
|
||||
export type CommandId =
|
||||
| 'accept-diff-changes'
|
||||
| 'add-agent'
|
||||
| 'goto-templates'
|
||||
| 'goto-logs'
|
||||
@@ -43,6 +44,11 @@ export interface CommandDefinition {
|
||||
* All global commands must be declared here to be usable.
|
||||
*/
|
||||
export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
|
||||
'accept-diff-changes': {
|
||||
id: 'accept-diff-changes',
|
||||
shortcut: 'Mod+Shift+Enter',
|
||||
allowInEditable: true,
|
||||
},
|
||||
'add-agent': {
|
||||
id: 'add-agent',
|
||||
shortcut: 'Mod+Shift+A',
|
||||
|
||||
@@ -2,10 +2,10 @@
|
||||
|
||||
import { useCallback } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Layout, LibraryBig, Search } from 'lucide-react'
|
||||
import { Layout, Search } from 'lucide-react'
|
||||
import Image from 'next/image'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { Button, Library } from '@/components/emcn'
|
||||
import { AgentIcon } from '@/components/icons'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { usePreventZoom } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
@@ -41,7 +41,7 @@ const commands: CommandItem[] = [
|
||||
},
|
||||
{
|
||||
label: 'Logs',
|
||||
icon: LibraryBig,
|
||||
icon: Library,
|
||||
shortcut: 'L',
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { memo, useCallback } from 'react'
|
||||
import { memo, useCallback, useMemo } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import clsx from 'clsx'
|
||||
import { Eye, EyeOff } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
|
||||
import { createCommand } from '@/app/workspace/[workspaceId]/utils/commands-utils'
|
||||
import { usePreventZoom } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import { useCopilotStore } from '@/stores/panel'
|
||||
import { useCopilotStore, usePanelStore } from '@/stores/panel'
|
||||
import { useTerminalStore } from '@/stores/terminal'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
@@ -15,28 +15,20 @@ const logger = createLogger('DiffControls')
|
||||
|
||||
export const DiffControls = memo(function DiffControls() {
|
||||
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
|
||||
const {
|
||||
isShowingDiff,
|
||||
isDiffReady,
|
||||
hasActiveDiff,
|
||||
toggleDiffView,
|
||||
acceptChanges,
|
||||
rejectChanges,
|
||||
baselineWorkflow,
|
||||
} = useWorkflowDiffStore(
|
||||
useCallback(
|
||||
(state) => ({
|
||||
isShowingDiff: state.isShowingDiff,
|
||||
isDiffReady: state.isDiffReady,
|
||||
hasActiveDiff: state.hasActiveDiff,
|
||||
toggleDiffView: state.toggleDiffView,
|
||||
acceptChanges: state.acceptChanges,
|
||||
rejectChanges: state.rejectChanges,
|
||||
baselineWorkflow: state.baselineWorkflow,
|
||||
}),
|
||||
[]
|
||||
const isPanelResizing = usePanelStore((state) => state.isResizing)
|
||||
const { isDiffReady, hasActiveDiff, acceptChanges, rejectChanges, baselineWorkflow } =
|
||||
useWorkflowDiffStore(
|
||||
useCallback(
|
||||
(state) => ({
|
||||
isDiffReady: state.isDiffReady,
|
||||
hasActiveDiff: state.hasActiveDiff,
|
||||
acceptChanges: state.acceptChanges,
|
||||
rejectChanges: state.rejectChanges,
|
||||
baselineWorkflow: state.baselineWorkflow,
|
||||
}),
|
||||
[]
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
const { updatePreviewToolCallState, currentChat, messages } = useCopilotStore(
|
||||
useCallback(
|
||||
@@ -53,11 +45,6 @@ export const DiffControls = memo(function DiffControls() {
|
||||
useCallback((state) => ({ activeWorkflowId: state.activeWorkflowId }), [])
|
||||
)
|
||||
|
||||
const handleToggleDiff = useCallback(() => {
|
||||
logger.info('Toggling diff view', { currentState: isShowingDiff })
|
||||
toggleDiffView()
|
||||
}, [isShowingDiff, toggleDiffView])
|
||||
|
||||
const createCheckpoint = useCallback(async () => {
|
||||
if (!activeWorkflowId || !currentChat?.id) {
|
||||
logger.warn('Cannot create checkpoint: missing workflowId or chatId', {
|
||||
@@ -206,54 +193,47 @@ export const DiffControls = memo(function DiffControls() {
|
||||
}
|
||||
}, [activeWorkflowId, currentChat, messages, baselineWorkflow])
|
||||
|
||||
const handleAccept = useCallback(async () => {
|
||||
const handleAccept = useCallback(() => {
|
||||
logger.info('Accepting proposed changes with backup protection')
|
||||
|
||||
// Resolve target toolCallId for build/edit and update to terminal success state in the copilot store
|
||||
// This happens synchronously first for instant UI feedback
|
||||
try {
|
||||
// Create a checkpoint before applying changes so it appears under the triggering user message
|
||||
await createCheckpoint().catch((error) => {
|
||||
logger.warn('Failed to create checkpoint before accept:', error)
|
||||
})
|
||||
|
||||
// Resolve target toolCallId for build/edit and update to terminal success state in the copilot store
|
||||
try {
|
||||
const { toolCallsById, messages } = useCopilotStore.getState()
|
||||
let id: string | undefined
|
||||
outer: for (let mi = messages.length - 1; mi >= 0; mi--) {
|
||||
const m = messages[mi]
|
||||
if (m.role !== 'assistant' || !m.contentBlocks) continue
|
||||
const blocks = m.contentBlocks as any[]
|
||||
for (let bi = blocks.length - 1; bi >= 0; bi--) {
|
||||
const b = blocks[bi]
|
||||
if (b?.type === 'tool_call') {
|
||||
const tn = b.toolCall?.name
|
||||
if (tn === 'edit_workflow') {
|
||||
id = b.toolCall?.id
|
||||
break outer
|
||||
}
|
||||
const { toolCallsById, messages } = useCopilotStore.getState()
|
||||
let id: string | undefined
|
||||
outer: for (let mi = messages.length - 1; mi >= 0; mi--) {
|
||||
const m = messages[mi]
|
||||
if (m.role !== 'assistant' || !m.contentBlocks) continue
|
||||
const blocks = m.contentBlocks as any[]
|
||||
for (let bi = blocks.length - 1; bi >= 0; bi--) {
|
||||
const b = blocks[bi]
|
||||
if (b?.type === 'tool_call') {
|
||||
const tn = b.toolCall?.name
|
||||
if (tn === 'edit_workflow') {
|
||||
id = b.toolCall?.id
|
||||
break outer
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!id) {
|
||||
const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
|
||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||
}
|
||||
if (id) updatePreviewToolCallState('accepted', id)
|
||||
} catch {}
|
||||
}
|
||||
if (!id) {
|
||||
const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
|
||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||
}
|
||||
if (id) updatePreviewToolCallState('accepted', id)
|
||||
} catch {}
|
||||
|
||||
// Accept changes without blocking the UI; errors will be logged by the store handler
|
||||
acceptChanges().catch((error) => {
|
||||
logger.error('Failed to accept changes (background):', error)
|
||||
})
|
||||
// Accept changes without blocking the UI; errors will be logged by the store handler
|
||||
acceptChanges().catch((error) => {
|
||||
logger.error('Failed to accept changes (background):', error)
|
||||
})
|
||||
|
||||
logger.info('Accept triggered; UI will update optimistically')
|
||||
} catch (error) {
|
||||
logger.error('Failed to accept changes:', error)
|
||||
// Create checkpoint in the background (fire-and-forget) so it doesn't block UI
|
||||
createCheckpoint().catch((error) => {
|
||||
logger.warn('Failed to create checkpoint after accept:', error)
|
||||
})
|
||||
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
logger.error('Workflow update failed:', errorMessage)
|
||||
alert(`Failed to save workflow changes: ${errorMessage}`)
|
||||
}
|
||||
logger.info('Accept triggered; UI will update optimistically')
|
||||
}, [createCheckpoint, updatePreviewToolCallState, acceptChanges])
|
||||
|
||||
const handleReject = useCallback(() => {
|
||||
@@ -293,54 +273,82 @@ export const DiffControls = memo(function DiffControls() {
|
||||
|
||||
const preventZoomRef = usePreventZoom()
|
||||
|
||||
// Register global command to accept changes (Cmd/Ctrl + Shift + Enter)
|
||||
const acceptCommand = useMemo(
|
||||
() =>
|
||||
createCommand({
|
||||
id: 'accept-diff-changes',
|
||||
handler: () => {
|
||||
if (hasActiveDiff && isDiffReady) {
|
||||
handleAccept()
|
||||
}
|
||||
},
|
||||
}),
|
||||
[hasActiveDiff, isDiffReady, handleAccept]
|
||||
)
|
||||
useRegisterGlobalCommands([acceptCommand])
|
||||
|
||||
// Don't show anything if no diff is available or diff is not ready
|
||||
if (!hasActiveDiff || !isDiffReady) {
|
||||
return null
|
||||
}
|
||||
|
||||
const isResizing = isTerminalResizing || isPanelResizing
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={preventZoomRef}
|
||||
className={clsx(
|
||||
'-translate-x-1/2 fixed left-1/2 z-30',
|
||||
!isTerminalResizing && 'transition-[bottom] duration-100 ease-out'
|
||||
'fixed z-30',
|
||||
!isResizing && 'transition-[bottom,right] duration-100 ease-out'
|
||||
)}
|
||||
style={{ bottom: 'calc(var(--terminal-height) + 40px)' }}
|
||||
style={{
|
||||
bottom: 'calc(var(--terminal-height) + 16px)',
|
||||
right: 'calc(var(--panel-width) + 16px)',
|
||||
}}
|
||||
>
|
||||
<div className='flex items-center gap-[6px] rounded-[10px] p-[6px]'>
|
||||
{/* Toggle (left, icon-only) */}
|
||||
<Button
|
||||
variant='active'
|
||||
onClick={handleToggleDiff}
|
||||
className='h-[30px] w-[30px] rounded-[8px] p-0'
|
||||
title={isShowingDiff ? 'View original' : 'Preview changes'}
|
||||
>
|
||||
{isShowingDiff ? (
|
||||
<Eye className='h-[14px] w-[14px]' />
|
||||
) : (
|
||||
<EyeOff className='h-[14px] w-[14px]' />
|
||||
)}
|
||||
</Button>
|
||||
|
||||
{/* Reject */}
|
||||
<Button
|
||||
variant='active'
|
||||
<div
|
||||
className='group relative flex h-[30px] overflow-hidden rounded-[4px]'
|
||||
style={{ isolation: 'isolate' }}
|
||||
>
|
||||
{/* Reject side */}
|
||||
<button
|
||||
onClick={handleReject}
|
||||
className='h-[30px] rounded-[8px] px-3'
|
||||
title='Reject changes'
|
||||
className='relative flex h-full items-center border border-[var(--border)] bg-[var(--surface-4)] pr-[20px] pl-[12px] font-medium text-[13px] text-[var(--text-secondary)] transition-colors hover:border-[var(--border-1)] hover:bg-[var(--surface-6)] hover:text-[var(--text-primary)] dark:hover:bg-[var(--surface-5)]'
|
||||
style={{
|
||||
clipPath: 'polygon(0 0, calc(100% + 10px) 0, 100% 100%, 0 100%)',
|
||||
borderRadius: '4px 0 0 4px',
|
||||
}}
|
||||
>
|
||||
Reject
|
||||
</Button>
|
||||
|
||||
{/* Accept */}
|
||||
<Button
|
||||
variant='tertiary'
|
||||
</button>
|
||||
{/* Slanted divider - split gray/green */}
|
||||
<div
|
||||
className='pointer-events-none absolute top-0 bottom-0 z-10'
|
||||
style={{
|
||||
left: '66px',
|
||||
width: '2px',
|
||||
transform: 'skewX(-18.4deg)',
|
||||
background:
|
||||
'linear-gradient(to right, var(--border) 50%, color-mix(in srgb, var(--brand-tertiary-2) 70%, black) 50%)',
|
||||
}}
|
||||
/>
|
||||
{/* Accept side */}
|
||||
<button
|
||||
onClick={handleAccept}
|
||||
className='h-[30px] rounded-[8px] px-3'
|
||||
title='Accept changes'
|
||||
title='Accept changes (⇧⌘⏎)'
|
||||
className='-ml-[10px] relative flex h-full items-center border border-[rgba(0,0,0,0.15)] bg-[var(--brand-tertiary-2)] pr-[12px] pl-[20px] font-medium text-[13px] text-[var(--text-inverse)] transition-[background-color,border-color,fill,stroke] hover:brightness-110 dark:border-[rgba(255,255,255,0.1)]'
|
||||
style={{
|
||||
clipPath: 'polygon(10px 0, 100% 0, 100% 100%, 0 100%)',
|
||||
borderRadius: '0 4px 4px 0',
|
||||
}}
|
||||
>
|
||||
Accept
|
||||
</Button>
|
||||
<kbd className='ml-2 rounded border border-white/20 bg-white/10 px-1.5 py-0.5 font-medium font-sans text-[10px]'>
|
||||
⇧⌘<span className='translate-y-[-1px]'>⏎</span>
|
||||
</kbd>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -11,6 +11,7 @@ import {
|
||||
openCopilotWithMessage,
|
||||
useNotificationStore,
|
||||
} from '@/stores/notifications'
|
||||
import { useSidebarStore } from '@/stores/sidebar/store'
|
||||
import { useTerminalStore } from '@/stores/terminal'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
@@ -19,7 +20,7 @@ const MAX_VISIBLE_NOTIFICATIONS = 4
|
||||
|
||||
/**
|
||||
* Notifications display component
|
||||
* Positioned in the bottom-right workspace area, aligned with terminal and panel spacing
|
||||
* Positioned in the bottom-left workspace area, reactive to sidebar width and terminal height
|
||||
* Shows both global notifications and workflow-specific notifications
|
||||
*/
|
||||
export const Notifications = memo(function Notifications() {
|
||||
@@ -36,6 +37,7 @@ export const Notifications = memo(function Notifications() {
|
||||
.slice(0, MAX_VISIBLE_NOTIFICATIONS)
|
||||
}, [allNotifications, activeWorkflowId])
|
||||
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
|
||||
const isSidebarResizing = useSidebarStore((state) => state.isResizing)
|
||||
|
||||
/**
|
||||
* Executes a notification action and handles side effects.
|
||||
@@ -103,12 +105,14 @@ export const Notifications = memo(function Notifications() {
|
||||
return null
|
||||
}
|
||||
|
||||
const isResizing = isTerminalResizing || isSidebarResizing
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={preventZoomRef}
|
||||
className={clsx(
|
||||
'fixed right-[calc(var(--panel-width)+16px)] bottom-[calc(var(--terminal-height)+16px)] z-30 flex flex-col items-end',
|
||||
!isTerminalResizing && 'transition-[bottom] duration-100 ease-out'
|
||||
'fixed bottom-[calc(var(--terminal-height)+16px)] left-[calc(var(--sidebar-width)+16px)] z-30 flex flex-col items-start',
|
||||
!isResizing && 'transition-[bottom,left] duration-100 ease-out'
|
||||
)}
|
||||
>
|
||||
{[...visibleNotifications].reverse().map((notification, index, stacked) => {
|
||||
|
||||
@@ -326,8 +326,8 @@ export default function CopilotMarkdownRenderer({ content }: CopilotMarkdownRend
|
||||
),
|
||||
|
||||
table: ({ children }: React.TableHTMLAttributes<HTMLTableElement>) => (
|
||||
<div className='my-4 max-w-full overflow-x-auto'>
|
||||
<table className='min-w-full table-auto border border-[var(--border-1)] font-season text-sm'>
|
||||
<div className='my-3 max-w-full overflow-x-auto'>
|
||||
<table className='min-w-full table-auto border border-[var(--border-1)] font-season text-xs'>
|
||||
{children}
|
||||
</table>
|
||||
</div>
|
||||
@@ -346,12 +346,12 @@ export default function CopilotMarkdownRenderer({ content }: CopilotMarkdownRend
|
||||
</tr>
|
||||
),
|
||||
th: ({ children }: React.ThHTMLAttributes<HTMLTableCellElement>) => (
|
||||
<th className='border-[var(--border-1)] border-r px-4 py-2 align-top font-base text-[var(--text-secondary)] last:border-r-0 dark:font-[470]'>
|
||||
<th className='border-[var(--border-1)] border-r px-2.5 py-1.5 align-top font-base text-[var(--text-secondary)] last:border-r-0 dark:font-[470]'>
|
||||
{children}
|
||||
</th>
|
||||
),
|
||||
td: ({ children }: React.TdHTMLAttributes<HTMLTableCellElement>) => (
|
||||
<td className='break-words border-[var(--border-1)] border-r px-4 py-2 align-top font-base text-[var(--text-primary)] last:border-r-0 dark:font-[470]'>
|
||||
<td className='break-words border-[var(--border-1)] border-r px-2.5 py-1.5 align-top font-base text-[var(--text-primary)] last:border-r-0 dark:font-[470]'>
|
||||
{children}
|
||||
</td>
|
||||
),
|
||||
|
||||
@@ -3,75 +3,23 @@
|
||||
import { useEffect, useRef, useState } from 'react'
|
||||
import clsx from 'clsx'
|
||||
import { ChevronUp } from 'lucide-react'
|
||||
import CopilotMarkdownRenderer from './markdown-renderer'
|
||||
|
||||
/**
|
||||
* Max height for thinking content before internal scrolling kicks in
|
||||
*/
|
||||
const THINKING_MAX_HEIGHT = 200
|
||||
|
||||
/**
|
||||
* Interval for auto-scroll during streaming (ms)
|
||||
*/
|
||||
const SCROLL_INTERVAL = 100
|
||||
|
||||
/**
|
||||
* Timer update interval in milliseconds
|
||||
*/
|
||||
const TIMER_UPDATE_INTERVAL = 100
|
||||
|
||||
/**
|
||||
* Milliseconds threshold for displaying as seconds
|
||||
*/
|
||||
const SECONDS_THRESHOLD = 1000
|
||||
|
||||
/**
|
||||
* Props for the ShimmerOverlayText component
|
||||
*/
|
||||
interface ShimmerOverlayTextProps {
|
||||
/** Label text to display */
|
||||
label: string
|
||||
/** Value text to display */
|
||||
value: string
|
||||
/** Whether the shimmer animation is active */
|
||||
active?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* ShimmerOverlayText component for thinking block
|
||||
* Applies shimmer effect to the "Thought for X.Xs" text during streaming
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Text with optional shimmer overlay effect
|
||||
*/
|
||||
function ShimmerOverlayText({ label, value, active = false }: ShimmerOverlayTextProps) {
|
||||
return (
|
||||
<span className='relative inline-block'>
|
||||
<span className='text-[var(--text-tertiary)]'>{label}</span>
|
||||
<span className='text-[var(--text-muted)]'>{value}</span>
|
||||
{active ? (
|
||||
<span
|
||||
aria-hidden='true'
|
||||
className='pointer-events-none absolute inset-0 select-none overflow-hidden'
|
||||
>
|
||||
<span
|
||||
className='block text-transparent'
|
||||
style={{
|
||||
backgroundImage:
|
||||
'linear-gradient(90deg, rgba(255,255,255,0) 0%, rgba(255,255,255,0.85) 50%, rgba(255,255,255,0) 100%)',
|
||||
backgroundSize: '200% 100%',
|
||||
backgroundRepeat: 'no-repeat',
|
||||
WebkitBackgroundClip: 'text',
|
||||
backgroundClip: 'text',
|
||||
animation: 'thinking-shimmer 1.4s ease-in-out infinite',
|
||||
mixBlendMode: 'screen',
|
||||
}}
|
||||
>
|
||||
{label}
|
||||
{value}
|
||||
</span>
|
||||
</span>
|
||||
) : null}
|
||||
<style>{`
|
||||
@keyframes thinking-shimmer {
|
||||
0% { background-position: 150% 0; }
|
||||
50% { background-position: 0% 0; }
|
||||
100% { background-position: -150% 0; }
|
||||
}
|
||||
`}</style>
|
||||
</span>
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for the ThinkingBlock component
|
||||
*/
|
||||
@@ -80,16 +28,19 @@ interface ThinkingBlockProps {
|
||||
content: string
|
||||
/** Whether the block is currently streaming */
|
||||
isStreaming?: boolean
|
||||
/** Persisted duration from content block */
|
||||
duration?: number
|
||||
/** Persisted start time from content block */
|
||||
startTime?: number
|
||||
/** Whether there are more content blocks after this one (e.g., tool calls) */
|
||||
hasFollowingContent?: boolean
|
||||
/** Custom label for the thinking block (e.g., "Thinking", "Exploring"). Defaults to "Thought" */
|
||||
label?: string
|
||||
/** Whether special tags (plan, options) are present - triggers collapse */
|
||||
hasSpecialTags?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* ThinkingBlock component displays AI reasoning/thinking process
|
||||
* Shows collapsible content with duration timer
|
||||
* Auto-expands during streaming and collapses when complete
|
||||
* Auto-collapses when a tool call or other content comes in after it
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Thinking block with expandable content and timer
|
||||
@@ -97,112 +48,248 @@ interface ThinkingBlockProps {
|
||||
export function ThinkingBlock({
|
||||
content,
|
||||
isStreaming = false,
|
||||
duration: persistedDuration,
|
||||
startTime: persistedStartTime,
|
||||
hasFollowingContent = false,
|
||||
label = 'Thought',
|
||||
hasSpecialTags = false,
|
||||
}: ThinkingBlockProps) {
|
||||
const [isExpanded, setIsExpanded] = useState(false)
|
||||
const [duration, setDuration] = useState(persistedDuration ?? 0)
|
||||
const [duration, setDuration] = useState(0)
|
||||
const [userHasScrolledAway, setUserHasScrolledAway] = useState(false)
|
||||
const userCollapsedRef = useRef<boolean>(false)
|
||||
const startTimeRef = useRef<number>(persistedStartTime ?? Date.now())
|
||||
|
||||
/**
|
||||
* Updates start time reference when persisted start time changes
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (typeof persistedStartTime === 'number') {
|
||||
startTimeRef.current = persistedStartTime
|
||||
}
|
||||
}, [persistedStartTime])
|
||||
const scrollContainerRef = useRef<HTMLDivElement>(null)
|
||||
const startTimeRef = useRef<number>(Date.now())
|
||||
const lastScrollTopRef = useRef(0)
|
||||
const programmaticScrollRef = useRef(false)
|
||||
|
||||
/**
|
||||
* Auto-expands block when streaming with content
|
||||
* Auto-collapses when streaming ends
|
||||
* Auto-collapses when streaming ends OR when following content arrives
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (!isStreaming) {
|
||||
// Collapse if streaming ended or if there's following content (like a tool call)
|
||||
if (!isStreaming || hasFollowingContent) {
|
||||
setIsExpanded(false)
|
||||
userCollapsedRef.current = false
|
||||
setUserHasScrolledAway(false)
|
||||
return
|
||||
}
|
||||
|
||||
if (!userCollapsedRef.current && content && content.trim().length > 0) {
|
||||
setIsExpanded(true)
|
||||
}
|
||||
}, [isStreaming, content])
|
||||
}, [isStreaming, content, hasFollowingContent])
|
||||
|
||||
/**
|
||||
* Updates duration timer during streaming
|
||||
* Uses persisted duration when available
|
||||
*/
|
||||
// Reset start time when streaming begins
|
||||
useEffect(() => {
|
||||
if (typeof persistedDuration === 'number') {
|
||||
setDuration(persistedDuration)
|
||||
return
|
||||
if (isStreaming && !hasFollowingContent) {
|
||||
startTimeRef.current = Date.now()
|
||||
setDuration(0)
|
||||
setUserHasScrolledAway(false)
|
||||
}
|
||||
}, [isStreaming, hasFollowingContent])
|
||||
|
||||
// Update duration timer during streaming (stop when following content arrives)
|
||||
useEffect(() => {
|
||||
// Stop timer if not streaming or if there's following content (thinking is done)
|
||||
if (!isStreaming || hasFollowingContent) return
|
||||
|
||||
const interval = setInterval(() => {
|
||||
setDuration(Date.now() - startTimeRef.current)
|
||||
}, TIMER_UPDATE_INTERVAL)
|
||||
|
||||
return () => clearInterval(interval)
|
||||
}, [isStreaming, hasFollowingContent])
|
||||
|
||||
// Handle scroll events to detect user scrolling away
|
||||
useEffect(() => {
|
||||
const container = scrollContainerRef.current
|
||||
if (!container || !isExpanded) return
|
||||
|
||||
const handleScroll = () => {
|
||||
if (programmaticScrollRef.current) return
|
||||
|
||||
const { scrollTop, scrollHeight, clientHeight } = container
|
||||
const distanceFromBottom = scrollHeight - scrollTop - clientHeight
|
||||
const isNearBottom = distanceFromBottom <= 20
|
||||
|
||||
const delta = scrollTop - lastScrollTopRef.current
|
||||
const movedUp = delta < -2
|
||||
|
||||
if (movedUp && !isNearBottom) {
|
||||
setUserHasScrolledAway(true)
|
||||
}
|
||||
|
||||
// Re-stick if user scrolls back to bottom
|
||||
if (userHasScrolledAway && isNearBottom) {
|
||||
setUserHasScrolledAway(false)
|
||||
}
|
||||
|
||||
lastScrollTopRef.current = scrollTop
|
||||
}
|
||||
|
||||
if (isStreaming) {
|
||||
const interval = setInterval(() => {
|
||||
setDuration(Date.now() - startTimeRef.current)
|
||||
}, TIMER_UPDATE_INTERVAL)
|
||||
return () => clearInterval(interval)
|
||||
}
|
||||
container.addEventListener('scroll', handleScroll, { passive: true })
|
||||
lastScrollTopRef.current = container.scrollTop
|
||||
|
||||
setDuration(Date.now() - startTimeRef.current)
|
||||
}, [isStreaming, persistedDuration])
|
||||
return () => container.removeEventListener('scroll', handleScroll)
|
||||
}, [isExpanded, userHasScrolledAway])
|
||||
|
||||
// Smart auto-scroll: only scroll if user hasn't scrolled away
|
||||
useEffect(() => {
|
||||
if (!isStreaming || !isExpanded || userHasScrolledAway) return
|
||||
|
||||
const intervalId = window.setInterval(() => {
|
||||
const container = scrollContainerRef.current
|
||||
if (!container) return
|
||||
|
||||
const { scrollTop, scrollHeight, clientHeight } = container
|
||||
const distanceFromBottom = scrollHeight - scrollTop - clientHeight
|
||||
const isNearBottom = distanceFromBottom <= 50
|
||||
|
||||
if (isNearBottom) {
|
||||
programmaticScrollRef.current = true
|
||||
container.scrollTo({
|
||||
top: container.scrollHeight,
|
||||
behavior: 'smooth',
|
||||
})
|
||||
window.setTimeout(() => {
|
||||
programmaticScrollRef.current = false
|
||||
}, 150)
|
||||
}
|
||||
}, SCROLL_INTERVAL)
|
||||
|
||||
return () => window.clearInterval(intervalId)
|
||||
}, [isStreaming, isExpanded, userHasScrolledAway])
|
||||
|
||||
/**
|
||||
* Formats duration in milliseconds to human-readable format
|
||||
* @param ms - Duration in milliseconds
|
||||
* @returns Formatted string (e.g., "150ms" or "2.5s")
|
||||
* Formats duration in milliseconds to seconds
|
||||
* Always shows seconds, rounded to nearest whole second, minimum 1s
|
||||
*/
|
||||
const formatDuration = (ms: number) => {
|
||||
if (ms < SECONDS_THRESHOLD) {
|
||||
return `${ms}ms`
|
||||
}
|
||||
const seconds = (ms / SECONDS_THRESHOLD).toFixed(1)
|
||||
const seconds = Math.max(1, Math.round(ms / 1000))
|
||||
return `${seconds}s`
|
||||
}
|
||||
|
||||
const hasContent = content && content.trim().length > 0
|
||||
// Thinking is "done" when streaming ends OR when there's following content (like a tool call) OR when special tags appear
|
||||
const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags
|
||||
const durationText = `${label} for ${formatDuration(duration)}`
|
||||
// Convert past tense label to present tense for streaming (e.g., "Thought" → "Thinking")
|
||||
const getStreamingLabel = (lbl: string) => {
|
||||
if (lbl === 'Thought') return 'Thinking'
|
||||
if (lbl.endsWith('ed')) return `${lbl.slice(0, -2)}ing`
|
||||
return lbl
|
||||
}
|
||||
const streamingLabel = getStreamingLabel(label)
|
||||
|
||||
// During streaming: show header with shimmer effect + expanded content
|
||||
if (!isThinkingDone) {
|
||||
return (
|
||||
<div>
|
||||
{/* Define shimmer keyframes */}
|
||||
<style>{`
|
||||
@keyframes thinking-shimmer {
|
||||
0% { background-position: 150% 0; }
|
||||
50% { background-position: 0% 0; }
|
||||
100% { background-position: -150% 0; }
|
||||
}
|
||||
`}</style>
|
||||
<button
|
||||
onClick={() => {
|
||||
setIsExpanded((v) => {
|
||||
const next = !v
|
||||
if (!next) userCollapsedRef.current = true
|
||||
return next
|
||||
})
|
||||
}}
|
||||
className='group inline-flex items-center gap-1 text-left font-[470] font-season text-[var(--text-secondary)] text-sm transition-colors hover:text-[var(--text-primary)]'
|
||||
type='button'
|
||||
>
|
||||
<span className='relative inline-block'>
|
||||
<span className='text-[var(--text-tertiary)]'>{streamingLabel}</span>
|
||||
<span
|
||||
aria-hidden='true'
|
||||
className='pointer-events-none absolute inset-0 select-none overflow-hidden'
|
||||
>
|
||||
<span
|
||||
className='block text-transparent'
|
||||
style={{
|
||||
backgroundImage:
|
||||
'linear-gradient(90deg, rgba(255,255,255,0) 0%, rgba(255,255,255,0.85) 50%, rgba(255,255,255,0) 100%)',
|
||||
backgroundSize: '200% 100%',
|
||||
backgroundRepeat: 'no-repeat',
|
||||
WebkitBackgroundClip: 'text',
|
||||
backgroundClip: 'text',
|
||||
animation: 'thinking-shimmer 1.4s ease-in-out infinite',
|
||||
mixBlendMode: 'screen',
|
||||
}}
|
||||
>
|
||||
{streamingLabel}
|
||||
</span>
|
||||
</span>
|
||||
</span>
|
||||
{hasContent && (
|
||||
<ChevronUp
|
||||
className={clsx(
|
||||
'h-3 w-3 transition-all group-hover:opacity-100',
|
||||
isExpanded ? 'rotate-180 opacity-100' : 'rotate-90 opacity-0'
|
||||
)}
|
||||
aria-hidden='true'
|
||||
/>
|
||||
)}
|
||||
</button>
|
||||
|
||||
<div
|
||||
ref={scrollContainerRef}
|
||||
className={clsx(
|
||||
'overflow-y-auto transition-all duration-300 ease-in-out',
|
||||
isExpanded ? 'mt-1.5 max-h-[200px] opacity-100' : 'max-h-0 opacity-0'
|
||||
)}
|
||||
>
|
||||
{/* Render markdown during streaming with thinking text styling */}
|
||||
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.3] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 [&_br]:!leading-[0.5] [&_table]:!my-2 [&_th]:!px-2 [&_th]:!py-1 [&_th]:!text-[11px] [&_td]:!px-2 [&_td]:!py-1 [&_td]:!text-[11px] whitespace-pre-wrap font-[470] font-season text-[12px] text-[var(--text-muted)]'>
|
||||
<CopilotMarkdownRenderer content={content} />
|
||||
<span className='ml-1 inline-block h-2 w-1 animate-pulse bg-[var(--text-muted)]' />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// After done: show collapsible header with duration
|
||||
return (
|
||||
<div className='mt-1 mb-0'>
|
||||
<div>
|
||||
<button
|
||||
onClick={() => {
|
||||
setIsExpanded((v) => {
|
||||
const next = !v
|
||||
// If user collapses during streaming, remember to not auto-expand again
|
||||
if (!next && isStreaming) userCollapsedRef.current = true
|
||||
return next
|
||||
})
|
||||
setIsExpanded((v) => !v)
|
||||
}}
|
||||
className='mb-1 inline-flex items-center gap-1 text-left font-[470] font-season text-[var(--text-secondary)] text-sm transition-colors hover:text-[var(--text-primary)]'
|
||||
className='group inline-flex items-center gap-1 text-left font-[470] font-season text-[var(--text-secondary)] text-sm transition-colors hover:text-[var(--text-primary)]'
|
||||
type='button'
|
||||
disabled={!hasContent}
|
||||
>
|
||||
<ShimmerOverlayText
|
||||
label='Thought'
|
||||
value={` for ${formatDuration(duration)}`}
|
||||
active={isStreaming}
|
||||
/>
|
||||
<span className='text-[var(--text-tertiary)]'>{durationText}</span>
|
||||
{hasContent && (
|
||||
<ChevronUp
|
||||
className={clsx('h-3 w-3 transition-transform', isExpanded && 'rotate-180')}
|
||||
className={clsx(
|
||||
'h-3 w-3 transition-all group-hover:opacity-100',
|
||||
isExpanded ? 'rotate-180 opacity-100' : 'rotate-90 opacity-0'
|
||||
)}
|
||||
aria-hidden='true'
|
||||
/>
|
||||
)}
|
||||
</button>
|
||||
|
||||
{isExpanded && (
|
||||
<div className='ml-1 border-[var(--border-1)] border-l-2 pl-2'>
|
||||
<pre className='whitespace-pre-wrap font-[470] font-season text-[12px] text-[var(--text-tertiary)] leading-[1.15rem]'>
|
||||
{content}
|
||||
{isStreaming && (
|
||||
<span className='ml-1 inline-block h-2 w-1 animate-pulse bg-[var(--text-tertiary)]' />
|
||||
)}
|
||||
</pre>
|
||||
<div
|
||||
ref={scrollContainerRef}
|
||||
className={clsx(
|
||||
'overflow-y-auto transition-all duration-300 ease-in-out',
|
||||
isExpanded ? 'mt-1.5 max-h-[200px] opacity-100' : 'max-h-0 opacity-0'
|
||||
)}
|
||||
>
|
||||
{/* Use markdown renderer for completed content */}
|
||||
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.3] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 [&_br]:!leading-[0.5] [&_table]:!my-2 [&_th]:!px-2 [&_th]:!py-1 [&_th]:!text-[11px] [&_td]:!px-2 [&_td]:!py-1 [&_td]:!text-[11px] whitespace-pre-wrap font-[470] font-season text-[12px] text-[var(--text-muted)]'>
|
||||
<CopilotMarkdownRenderer content={content} />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
'use client'
|
||||
|
||||
import { type FC, memo, useMemo, useState } from 'react'
|
||||
import { Check, Copy, RotateCcw, ThumbsDown, ThumbsUp } from 'lucide-react'
|
||||
import { type FC, memo, useCallback, useMemo, useState } from 'react'
|
||||
import { RotateCcw } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { ToolCall } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components'
|
||||
import {
|
||||
OptionsSelector,
|
||||
parseSpecialTags,
|
||||
ToolCall,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components'
|
||||
import {
|
||||
FileAttachmentDisplay,
|
||||
SmoothStreamingText,
|
||||
@@ -15,8 +19,6 @@ import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId
|
||||
import {
|
||||
useCheckpointManagement,
|
||||
useMessageEditing,
|
||||
useMessageFeedback,
|
||||
useSuccessTimers,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/hooks'
|
||||
import { UserInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/user-input'
|
||||
import type { CopilotMessage as CopilotMessageType } from '@/stores/panel'
|
||||
@@ -40,6 +42,8 @@ interface CopilotMessageProps {
|
||||
onEditModeChange?: (isEditing: boolean, cancelCallback?: () => void) => void
|
||||
/** Callback when revert mode changes */
|
||||
onRevertModeChange?: (isReverting: boolean) => void
|
||||
/** Whether this is the last message in the conversation */
|
||||
isLastMessage?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -59,6 +63,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
checkpointCount = 0,
|
||||
onEditModeChange,
|
||||
onRevertModeChange,
|
||||
isLastMessage = false,
|
||||
}) => {
|
||||
const isUser = message.role === 'user'
|
||||
const isAssistant = message.role === 'assistant'
|
||||
@@ -88,22 +93,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
// UI state
|
||||
const [isHoveringMessage, setIsHoveringMessage] = useState(false)
|
||||
|
||||
// Success timers hook
|
||||
const {
|
||||
showCopySuccess,
|
||||
showUpvoteSuccess,
|
||||
showDownvoteSuccess,
|
||||
handleCopy,
|
||||
setShowUpvoteSuccess,
|
||||
setShowDownvoteSuccess,
|
||||
} = useSuccessTimers()
|
||||
|
||||
// Message feedback hook
|
||||
const { handleUpvote, handleDownvote } = useMessageFeedback(message, messages, {
|
||||
setShowUpvoteSuccess,
|
||||
setShowDownvoteSuccess,
|
||||
})
|
||||
|
||||
// Checkpoint management hook
|
||||
const {
|
||||
showRestoreConfirmation,
|
||||
@@ -153,14 +142,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
pendingEditRef,
|
||||
})
|
||||
|
||||
/**
|
||||
* Handles copying message content to clipboard
|
||||
* Uses the success timer hook to show feedback
|
||||
*/
|
||||
const handleCopyContent = () => {
|
||||
handleCopy(message.content)
|
||||
}
|
||||
|
||||
// Get clean text content with double newline parsing
|
||||
const cleanTextContent = useMemo(() => {
|
||||
if (!message.content) return ''
|
||||
@@ -169,6 +150,42 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
return message.content.replace(/\n{3,}/g, '\n\n')
|
||||
}, [message.content])
|
||||
|
||||
// Parse special tags from message content (options, plan)
|
||||
// Parse during streaming to show options/plan as they stream in
|
||||
const parsedTags = useMemo(() => {
|
||||
if (isUser) return null
|
||||
|
||||
// Try message.content first
|
||||
if (message.content) {
|
||||
const parsed = parseSpecialTags(message.content)
|
||||
if (parsed.options || parsed.plan) return parsed
|
||||
}
|
||||
|
||||
// During streaming, check content blocks for options/plan
|
||||
if (isStreaming && message.contentBlocks && message.contentBlocks.length > 0) {
|
||||
for (const block of message.contentBlocks) {
|
||||
if (block.type === 'text' && block.content) {
|
||||
const parsed = parseSpecialTags(block.content)
|
||||
if (parsed.options || parsed.plan) return parsed
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return message.content ? parseSpecialTags(message.content) : null
|
||||
}, [message.content, message.contentBlocks, isUser, isStreaming])
|
||||
|
||||
// Get sendMessage from store for continuation actions
|
||||
const sendMessage = useCopilotStore((s) => s.sendMessage)
|
||||
|
||||
// Handler for option selection
|
||||
const handleOptionSelect = useCallback(
|
||||
(_optionKey: string, optionText: string) => {
|
||||
// Send the option text as a message
|
||||
sendMessage(optionText)
|
||||
},
|
||||
[sendMessage]
|
||||
)
|
||||
|
||||
// Memoize content blocks to avoid re-rendering unchanged blocks
|
||||
const memoizedContentBlocks = useMemo(() => {
|
||||
if (!message.contentBlocks || message.contentBlocks.length === 0) {
|
||||
@@ -179,8 +196,12 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
if (block.type === 'text') {
|
||||
const isLastTextBlock =
|
||||
index === message.contentBlocks!.length - 1 && block.type === 'text'
|
||||
// Clean content for this text block
|
||||
const cleanBlockContent = block.content.replace(/\n{3,}/g, '\n\n')
|
||||
// Always strip special tags from display (they're rendered separately as options/plan)
|
||||
const parsed = parseSpecialTags(block.content)
|
||||
const cleanBlockContent = parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
|
||||
|
||||
// Skip if no content after stripping tags
|
||||
if (!cleanBlockContent.trim()) return null
|
||||
|
||||
// Use smooth streaming for the last text block if we're streaming
|
||||
const shouldUseSmoothing = isStreaming && isLastTextBlock
|
||||
@@ -201,19 +222,14 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
)
|
||||
}
|
||||
if (block.type === 'thinking') {
|
||||
const isLastBlock = index === message.contentBlocks!.length - 1
|
||||
// Consider the thinking block streaming if the overall message is streaming
|
||||
// and the block has not been finalized with a duration yet. This avoids
|
||||
// freezing the timer when new blocks are appended after the thinking block.
|
||||
const isStreamingThinking = isStreaming && (block as any).duration == null
|
||||
|
||||
// Check if there are any blocks after this one (tool calls, text, etc.)
|
||||
const hasFollowingContent = index < message.contentBlocks!.length - 1
|
||||
return (
|
||||
<div key={`thinking-${index}-${block.timestamp || index}`} className='w-full'>
|
||||
<ThinkingBlock
|
||||
content={block.content}
|
||||
isStreaming={isStreamingThinking}
|
||||
duration={block.duration}
|
||||
startTime={block.startTime}
|
||||
isStreaming={isStreaming}
|
||||
hasFollowingContent={hasFollowingContent}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
@@ -330,14 +346,18 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
const contexts: any[] = Array.isArray((message as any).contexts)
|
||||
? ((message as any).contexts as any[])
|
||||
: []
|
||||
const labels = contexts
|
||||
.filter((c) => c?.kind !== 'current_workflow')
|
||||
.map((c) => c?.label)
|
||||
.filter(Boolean) as string[]
|
||||
if (!labels.length) return text
|
||||
|
||||
// Build tokens with their prefixes (@ for mentions, / for commands)
|
||||
const tokens = contexts
|
||||
.filter((c) => c?.kind !== 'current_workflow' && c?.label)
|
||||
.map((c) => {
|
||||
const prefix = c?.kind === 'slash_command' ? '/' : '@'
|
||||
return `${prefix}${c.label}`
|
||||
})
|
||||
if (!tokens.length) return text
|
||||
|
||||
const escapeRegex = (s: string) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
||||
const pattern = new RegExp(`@(${labels.map(escapeRegex).join('|')})`, 'g')
|
||||
const pattern = new RegExp(`(${tokens.map(escapeRegex).join('|')})`, 'g')
|
||||
|
||||
const nodes: React.ReactNode[] = []
|
||||
let lastIndex = 0
|
||||
@@ -454,66 +474,15 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
{/* Content blocks in chronological order */}
|
||||
{memoizedContentBlocks}
|
||||
|
||||
{/* Show streaming indicator if streaming but no text content yet after tool calls */}
|
||||
{isStreaming &&
|
||||
!message.content &&
|
||||
message.contentBlocks?.every((block) => block.type === 'tool_call') && (
|
||||
<StreamingIndicator />
|
||||
)}
|
||||
|
||||
{/* Streaming indicator when no content yet */}
|
||||
{!cleanTextContent && !message.contentBlocks?.length && isStreaming && (
|
||||
<StreamingIndicator />
|
||||
)}
|
||||
{/* Always show streaming indicator at the end while streaming */}
|
||||
{isStreaming && <StreamingIndicator />}
|
||||
|
||||
{message.errorType === 'usage_limit' && (
|
||||
<div className='mt-3 flex gap-1.5'>
|
||||
<div className='flex gap-1.5'>
|
||||
<UsageLimitActions />
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Action buttons for completed messages */}
|
||||
{!isStreaming && cleanTextContent && (
|
||||
<div className='flex items-center gap-[8px] pt-[8px]'>
|
||||
<Button
|
||||
onClick={handleCopyContent}
|
||||
variant='ghost'
|
||||
title='Copy'
|
||||
className='!h-[14px] !w-[14px] !p-0'
|
||||
>
|
||||
{showCopySuccess ? (
|
||||
<Check className='h-[14px] w-[14px]' strokeWidth={2} />
|
||||
) : (
|
||||
<Copy className='h-[14px] w-[14px]' strokeWidth={2} />
|
||||
)}
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleUpvote}
|
||||
variant='ghost'
|
||||
title='Upvote'
|
||||
className='!h-[14px] !w-[14px] !p-0'
|
||||
>
|
||||
{showUpvoteSuccess ? (
|
||||
<Check className='h-[14px] w-[14px]' strokeWidth={2} />
|
||||
) : (
|
||||
<ThumbsUp className='h-[14px] w-[14px]' strokeWidth={2} />
|
||||
)}
|
||||
</Button>
|
||||
<Button
|
||||
onClick={handleDownvote}
|
||||
variant='ghost'
|
||||
title='Downvote'
|
||||
className='!h-[14px] !w-[14px] !p-0'
|
||||
>
|
||||
{showDownvoteSuccess ? (
|
||||
<Check className='h-[14px] w-[14px]' strokeWidth={2} />
|
||||
) : (
|
||||
<ThumbsDown className='h-[14px] w-[14px]' strokeWidth={2} />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Citations if available */}
|
||||
{message.citations && message.citations.length > 0 && (
|
||||
<div className='pt-1'>
|
||||
@@ -533,6 +502,20 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Options selector when agent presents choices - streams in but disabled until complete */}
|
||||
{/* Disabled for previous messages (not isLastMessage) so only the latest options are interactive */}
|
||||
{parsedTags?.options && Object.keys(parsedTags.options).length > 0 && (
|
||||
<OptionsSelector
|
||||
options={parsedTags.options}
|
||||
onSelect={handleOptionSelect}
|
||||
disabled={!isLastMessage || isSendingMessage || isStreaming}
|
||||
enableKeyboardNav={
|
||||
isLastMessage && !isStreaming && parsedTags.optionsComplete === true
|
||||
}
|
||||
streaming={isStreaming || !parsedTags.optionsComplete}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
@@ -570,6 +553,11 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
return false
|
||||
}
|
||||
|
||||
// If isLastMessage changed, re-render (for options visibility)
|
||||
if (prevProps.isLastMessage !== nextProps.isLastMessage) {
|
||||
return false
|
||||
}
|
||||
|
||||
// For streaming messages, check if content actually changed
|
||||
if (nextProps.isStreaming) {
|
||||
const prevBlocks = prevMessage.contentBlocks || []
|
||||
|
||||
@@ -1,5 +1,6 @@
export * from './copilot-message/copilot-message'
export * from './plan-mode-section/plan-mode-section'
export * from './queued-messages/queued-messages'
export * from './todo-list/todo-list'
export * from './tool-call/tool-call'
export * from './user-input/user-input'

@@ -0,0 +1,102 @@
'use client'

import { useCallback, useState } from 'react'
import { ArrowUp, ChevronDown, ChevronRight, Trash2 } from 'lucide-react'
import { useCopilotStore } from '@/stores/panel/copilot/store'

/**
 * Displays queued messages in a Cursor-style collapsible panel above the input box.
 */
export function QueuedMessages() {
  const messageQueue = useCopilotStore((s) => s.messageQueue)
  const removeFromQueue = useCopilotStore((s) => s.removeFromQueue)
  const sendNow = useCopilotStore((s) => s.sendNow)

  const [isExpanded, setIsExpanded] = useState(true)

  const handleRemove = useCallback(
    (id: string) => {
      removeFromQueue(id)
    },
    [removeFromQueue]
  )

  const handleSendNow = useCallback(
    async (id: string) => {
      await sendNow(id)
    },
    [sendNow]
  )

  if (messageQueue.length === 0) return null

  return (
    <div className='mx-2 overflow-hidden rounded-t-lg border border-black/[0.08] border-b-0 bg-[var(--bg-secondary)] dark:border-white/[0.08]'>
      {/* Header */}
      <button
        type='button'
        onClick={() => setIsExpanded(!isExpanded)}
        className='flex w-full items-center justify-between px-2.5 py-1.5 transition-colors hover:bg-[var(--bg-tertiary)]'
      >
        <div className='flex items-center gap-1.5'>
          {isExpanded ? (
            <ChevronDown className='h-3 w-3 text-[var(--text-tertiary)]' />
          ) : (
            <ChevronRight className='h-3 w-3 text-[var(--text-tertiary)]' />
          )}
          <span className='font-medium text-[var(--text-secondary)] text-xs'>
            {messageQueue.length} Queued
          </span>
        </div>
      </button>

      {/* Message list */}
      {isExpanded && (
        <div>
          {messageQueue.map((msg) => (
            <div
              key={msg.id}
              className='group flex items-center gap-2 border-black/[0.04] border-t px-2.5 py-1.5 hover:bg-[var(--bg-tertiary)] dark:border-white/[0.04]'
            >
              {/* Radio indicator */}
              <div className='flex h-3 w-3 shrink-0 items-center justify-center'>
                <div className='h-2.5 w-2.5 rounded-full border border-[var(--text-tertiary)]/50' />
              </div>

              {/* Message content */}
              <div className='min-w-0 flex-1'>
                <p className='truncate text-[var(--text-primary)] text-xs'>{msg.content}</p>
              </div>

              {/* Actions - always visible */}
              <div className='flex shrink-0 items-center gap-0.5'>
                <button
                  type='button'
                  onClick={(e) => {
                    e.stopPropagation()
                    handleSendNow(msg.id)
                  }}
                  className='rounded p-0.5 text-[var(--text-tertiary)] transition-colors hover:bg-[var(--bg-quaternary)] hover:text-[var(--text-primary)]'
                  title='Send now (aborts current stream)'
                >
                  <ArrowUp className='h-3 w-3' />
                </button>
                <button
                  type='button'
                  onClick={(e) => {
                    e.stopPropagation()
                    handleRemove(msg.id)
                  }}
                  className='rounded p-0.5 text-[var(--text-tertiary)] transition-colors hover:bg-red-500/10 hover:text-red-400'
                  title='Remove from queue'
                >
                  <Trash2 className='h-3 w-3' />
                </button>
              </div>
            </div>
          ))}
        </div>
      )}
    </div>
  )
}

File diff suppressed because it is too large

@@ -1,76 +0,0 @@
'use client'

import { useMemo } from 'react'
import { Tooltip } from '@/components/emcn'

interface ContextUsageIndicatorProps {
  /** Usage percentage (0-100) */
  percentage: number
  /** Size of the indicator in pixels */
  size?: number
  /** Stroke width in pixels */
  strokeWidth?: number
}

/**
 * Circular context usage indicator showing percentage of context window used.
 * Displays a progress ring that changes color based on usage level.
 *
 * @param props - Component props
 * @returns Rendered context usage indicator
 */
export function ContextUsageIndicator({
  percentage,
  size = 20,
  strokeWidth = 2,
}: ContextUsageIndicatorProps) {
  const radius = (size - strokeWidth) / 2
  const circumference = radius * 2 * Math.PI
  const offset = circumference - (percentage / 100) * circumference

  const color = useMemo(() => {
    if (percentage >= 90) return 'var(--text-error)'
    if (percentage >= 75) return 'var(--warning)'
    return 'var(--text-muted)'
  }, [percentage])

  const displayPercentage = useMemo(() => {
    return Math.round(percentage)
  }, [percentage])

  return (
    <Tooltip.Root delayDuration={100}>
      <Tooltip.Trigger asChild>
        <div
          className='flex cursor-pointer items-center justify-center transition-opacity hover:opacity-80'
          style={{ width: size, height: size }}
        >
          <svg width={size} height={size} className='rotate-[-90deg]'>
            <circle
              cx={size / 2}
              cy={size / 2}
              r={radius}
              stroke='currentColor'
              strokeWidth={strokeWidth}
              fill='none'
              className='text-muted-foreground/20'
            />
            <circle
              cx={size / 2}
              cy={size / 2}
              r={radius}
              stroke={color}
              strokeWidth={strokeWidth}
              fill='none'
              strokeDasharray={circumference}
              strokeDashoffset={offset}
              className='transition-all duration-300 ease-in-out'
              strokeLinecap='round'
            />
          </svg>
        </div>
      </Tooltip.Trigger>
      <Tooltip.Content side='top'>{displayPercentage}% context used</Tooltip.Content>
    </Tooltip.Root>
  )
}

@@ -1,6 +1,6 @@
export { AttachedFilesDisplay } from './attached-files-display/attached-files-display'
export { ContextPills } from './context-pills/context-pills'
export { ContextUsageIndicator } from './context-usage-indicator/context-usage-indicator'
export { MentionMenu } from './mention-menu/mention-menu'
export { ModeSelector } from './mode-selector/mode-selector'
export { ModelSelector } from './model-selector/model-selector'
export { SlashMenu } from './slash-menu/slash-menu'

@@ -0,0 +1,247 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import {
|
||||
Popover,
|
||||
PopoverAnchor,
|
||||
PopoverBackButton,
|
||||
PopoverContent,
|
||||
PopoverFolder,
|
||||
PopoverItem,
|
||||
PopoverScrollArea,
|
||||
} from '@/components/emcn'
|
||||
import type { useMentionMenu } from '../../hooks/use-mention-menu'
|
||||
|
||||
/**
|
||||
* Top-level slash command options
|
||||
*/
|
||||
const TOP_LEVEL_COMMANDS = [
|
||||
{ id: 'plan', label: 'plan' },
|
||||
{ id: 'debug', label: 'debug' },
|
||||
{ id: 'fast', label: 'fast' },
|
||||
{ id: 'superagent', label: 'superagent' },
|
||||
{ id: 'deploy', label: 'deploy' },
|
||||
] as const
|
||||
|
||||
/**
|
||||
* Web submenu commands
|
||||
*/
|
||||
const WEB_COMMANDS = [
|
||||
{ id: 'search', label: 'search' },
|
||||
{ id: 'research', label: 'research' },
|
||||
{ id: 'crawl', label: 'crawl' },
|
||||
{ id: 'read', label: 'read' },
|
||||
{ id: 'scrape', label: 'scrape' },
|
||||
] as const
|
||||
|
||||
/**
|
||||
* All command labels for filtering
|
||||
*/
|
||||
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
|
||||
|
||||
interface SlashMenuProps {
|
||||
mentionMenu: ReturnType<typeof useMentionMenu>
|
||||
message: string
|
||||
onSelectCommand: (command: string) => void
|
||||
}
|
||||
|
||||
/**
|
||||
* SlashMenu component for slash command dropdown.
|
||||
* Shows command options when user types '/'.
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Rendered slash menu
|
||||
*/
|
||||
export function SlashMenu({ mentionMenu, message, onSelectCommand }: SlashMenuProps) {
|
||||
const {
|
||||
mentionMenuRef,
|
||||
menuListRef,
|
||||
getActiveSlashQueryAtPosition,
|
||||
getCaretPos,
|
||||
submenuActiveIndex,
|
||||
mentionActiveIndex,
|
||||
openSubmenuFor,
|
||||
setOpenSubmenuFor,
|
||||
} = mentionMenu
|
||||
|
||||
/**
|
||||
* Get the current query string after /
|
||||
*/
|
||||
const currentQuery = useMemo(() => {
|
||||
const caretPos = getCaretPos()
|
||||
const active = getActiveSlashQueryAtPosition(caretPos, message)
|
||||
return active?.query.trim().toLowerCase() || ''
|
||||
}, [message, getCaretPos, getActiveSlashQueryAtPosition])
|
||||
|
||||
/**
|
||||
* Filter commands based on query (search across all commands when there's a query)
|
||||
*/
|
||||
const filteredCommands = useMemo(() => {
|
||||
if (!currentQuery) return null // Show folder view when no query
|
||||
return ALL_COMMANDS.filter((cmd) => cmd.label.toLowerCase().includes(currentQuery))
|
||||
}, [currentQuery])
|
||||
|
||||
// Show aggregated view when there's a query
|
||||
const showAggregatedView = currentQuery.length > 0
|
||||
|
||||
// Compute caret viewport position via mirror technique for precise anchoring
|
||||
const textareaEl = mentionMenu.textareaRef.current
|
||||
if (!textareaEl) return null
|
||||
|
||||
const getCaretViewport = (textarea: HTMLTextAreaElement, caretPosition: number, text: string) => {
|
||||
const textareaRect = textarea.getBoundingClientRect()
|
||||
const style = window.getComputedStyle(textarea)
|
||||
|
||||
const mirrorDiv = document.createElement('div')
|
||||
mirrorDiv.style.position = 'absolute'
|
||||
mirrorDiv.style.visibility = 'hidden'
|
||||
mirrorDiv.style.whiteSpace = 'pre-wrap'
|
||||
mirrorDiv.style.wordWrap = 'break-word'
|
||||
mirrorDiv.style.font = style.font
|
||||
mirrorDiv.style.padding = style.padding
|
||||
mirrorDiv.style.border = style.border
|
||||
mirrorDiv.style.width = style.width
|
||||
mirrorDiv.style.lineHeight = style.lineHeight
|
||||
mirrorDiv.style.boxSizing = style.boxSizing
|
||||
mirrorDiv.style.letterSpacing = style.letterSpacing
|
||||
mirrorDiv.style.textTransform = style.textTransform
|
||||
mirrorDiv.style.textIndent = style.textIndent
|
||||
mirrorDiv.style.textAlign = style.textAlign
|
||||
|
||||
mirrorDiv.textContent = text.substring(0, caretPosition)
|
||||
|
||||
const caretMarker = document.createElement('span')
|
||||
caretMarker.style.display = 'inline-block'
|
||||
caretMarker.style.width = '0px'
|
||||
caretMarker.style.padding = '0'
|
||||
caretMarker.style.border = '0'
|
||||
mirrorDiv.appendChild(caretMarker)
|
||||
|
||||
document.body.appendChild(mirrorDiv)
|
||||
const markerRect = caretMarker.getBoundingClientRect()
|
||||
const mirrorRect = mirrorDiv.getBoundingClientRect()
|
||||
document.body.removeChild(mirrorDiv)
|
||||
|
||||
const leftOffset = markerRect.left - mirrorRect.left - textarea.scrollLeft
|
||||
const topOffset = markerRect.top - mirrorRect.top - textarea.scrollTop
|
||||
|
||||
return {
|
||||
left: textareaRect.left + leftOffset,
|
||||
top: textareaRect.top + topOffset,
|
||||
}
|
||||
}
|
||||
|
||||
const caretPos = getCaretPos()
|
||||
const caretViewport = getCaretViewport(textareaEl, caretPos, message)
|
||||
|
||||
// Decide preferred side based on available space
|
||||
const margin = 8
|
||||
const spaceAbove = caretViewport.top - margin
|
||||
const spaceBelow = window.innerHeight - caretViewport.top - margin
|
||||
const side: 'top' | 'bottom' = spaceBelow >= spaceAbove ? 'bottom' : 'top'
|
||||
|
||||
// Check if we're in folder navigation mode (no query, not in submenu)
|
||||
const isInFolderNavigationMode = !openSubmenuFor && !showAggregatedView
|
||||
|
||||
return (
|
||||
<Popover
|
||||
open={true}
|
||||
onOpenChange={() => {
|
||||
/* controlled externally */
|
||||
}}
|
||||
>
|
||||
<PopoverAnchor asChild>
|
||||
<div
|
||||
style={{
|
||||
position: 'fixed',
|
||||
top: `${caretViewport.top}px`,
|
||||
left: `${caretViewport.left}px`,
|
||||
width: '1px',
|
||||
height: '1px',
|
||||
pointerEvents: 'none',
|
||||
}}
|
||||
/>
|
||||
</PopoverAnchor>
|
||||
<PopoverContent
|
||||
ref={mentionMenuRef}
|
||||
side={side}
|
||||
align='start'
|
||||
collisionPadding={6}
|
||||
maxHeight={360}
|
||||
className='pointer-events-auto'
|
||||
style={{
|
||||
width: `180px`,
|
||||
}}
|
||||
onOpenAutoFocus={(e) => e.preventDefault()}
|
||||
onCloseAutoFocus={(e) => e.preventDefault()}
|
||||
>
|
||||
<PopoverBackButton />
|
||||
<PopoverScrollArea ref={menuListRef} className='space-y-[2px]'>
|
||||
{openSubmenuFor === 'Web' ? (
|
||||
// Web submenu view
|
||||
<>
|
||||
{WEB_COMMANDS.map((cmd, index) => (
|
||||
<PopoverItem
|
||||
key={cmd.id}
|
||||
onClick={() => onSelectCommand(cmd.label)}
|
||||
data-idx={index}
|
||||
active={index === submenuActiveIndex}
|
||||
>
|
||||
<span className='truncate capitalize'>{cmd.label}</span>
|
||||
</PopoverItem>
|
||||
))}
|
||||
</>
|
||||
) : showAggregatedView ? (
|
||||
// Aggregated filtered view
|
||||
<>
|
||||
{filteredCommands && filteredCommands.length === 0 ? (
|
||||
<div className='px-[8px] py-[8px] text-[12px] text-[var(--text-muted)]'>
|
||||
No commands found
|
||||
</div>
|
||||
) : (
|
||||
filteredCommands?.map((cmd, index) => (
|
||||
<PopoverItem
|
||||
key={cmd.id}
|
||||
onClick={() => onSelectCommand(cmd.label)}
|
||||
data-idx={index}
|
||||
active={index === submenuActiveIndex}
|
||||
>
|
||||
<span className='truncate capitalize'>{cmd.label}</span>
|
||||
</PopoverItem>
|
||||
))
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
// Folder navigation view
|
||||
<>
|
||||
{TOP_LEVEL_COMMANDS.map((cmd, index) => (
|
||||
<PopoverItem
|
||||
key={cmd.id}
|
||||
onClick={() => onSelectCommand(cmd.label)}
|
||||
data-idx={index}
|
||||
active={isInFolderNavigationMode && index === mentionActiveIndex}
|
||||
>
|
||||
<span className='truncate capitalize'>{cmd.label}</span>
|
||||
</PopoverItem>
|
||||
))}
|
||||
|
||||
<PopoverFolder
|
||||
id='web'
|
||||
title='Web'
|
||||
onOpen={() => setOpenSubmenuFor('Web')}
|
||||
active={isInFolderNavigationMode && mentionActiveIndex === TOP_LEVEL_COMMANDS.length}
|
||||
data-idx={TOP_LEVEL_COMMANDS.length}
|
||||
>
|
||||
{WEB_COMMANDS.map((cmd) => (
|
||||
<PopoverItem key={cmd.id} onClick={() => onSelectCommand(cmd.label)}>
|
||||
<span className='truncate capitalize'>{cmd.label}</span>
|
||||
</PopoverItem>
|
||||
))}
|
||||
</PopoverFolder>
|
||||
</>
|
||||
)}
|
||||
</PopoverScrollArea>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
}
|
||||
@@ -63,6 +63,9 @@ export function useContextManagement({ message }: UseContextManagementProps) {
|
||||
if (c.kind === 'docs') {
|
||||
return true // Only one docs context allowed
|
||||
}
|
||||
if (c.kind === 'slash_command' && 'command' in context && 'command' in c) {
|
||||
return c.command === (context as any).command
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
@@ -103,6 +106,8 @@ export function useContextManagement({ message }: UseContextManagementProps) {
|
||||
return (c as any).executionId !== (contextToRemove as any).executionId
|
||||
case 'docs':
|
||||
return false // Remove docs (only one docs context)
|
||||
case 'slash_command':
|
||||
return (c as any).command !== (contextToRemove as any).command
|
||||
default:
|
||||
return c.label !== contextToRemove.label
|
||||
}
|
||||
@@ -118,7 +123,7 @@ export function useContextManagement({ message }: UseContextManagementProps) {
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Synchronizes selected contexts with inline @label tokens in the message.
|
||||
* Synchronizes selected contexts with inline @label or /label tokens in the message.
|
||||
* Removes contexts whose labels are no longer present in the message.
|
||||
*/
|
||||
useEffect(() => {
|
||||
@@ -130,17 +135,14 @@ export function useContextManagement({ message }: UseContextManagementProps) {
|
||||
setSelectedContexts((prev) => {
|
||||
if (prev.length === 0) return prev
|
||||
|
||||
const presentLabels = new Set<string>()
|
||||
const labels = prev.map((c) => c.label).filter(Boolean)
|
||||
|
||||
for (const label of labels) {
|
||||
const token = ` @${label} `
|
||||
if (message.includes(token)) {
|
||||
presentLabels.add(label)
|
||||
}
|
||||
}
|
||||
|
||||
const filtered = prev.filter((c) => !!c.label && presentLabels.has(c.label))
|
||||
const filtered = prev.filter((c) => {
|
||||
if (!c.label) return false
|
||||
// Check for slash command tokens or mention tokens based on kind
|
||||
const isSlashCommand = c.kind === 'slash_command'
|
||||
const prefix = isSlashCommand ? '/' : '@'
|
||||
const token = ` ${prefix}${c.label} `
|
||||
return message.includes(token)
|
||||
})
|
||||
return filtered.length === prev.length ? prev : filtered
|
||||
})
|
||||
}, [message])
|
||||
|
||||
@@ -178,11 +178,12 @@ export function useFileAttachments(props: UseFileAttachmentsProps) {
|
||||
|
||||
/**
|
||||
* Opens file picker dialog
|
||||
* Note: We allow file selection even when isLoading (streaming) so users can prepare images for the next message
|
||||
*/
|
||||
const handleFileSelect = useCallback(() => {
|
||||
if (disabled || isLoading) return
|
||||
if (disabled) return
|
||||
fileInputRef.current?.click()
|
||||
}, [disabled, isLoading])
|
||||
}, [disabled])
|
||||
|
||||
/**
|
||||
* Handles file input change event
|
||||
|
||||
@@ -113,6 +113,62 @@ export function useMentionMenu({
|
||||
[message, selectedContexts]
|
||||
)
|
||||
|
||||
/**
|
||||
* Finds active slash command query at the given position
|
||||
*
|
||||
* @param pos - Position in the text to check
|
||||
* @param textOverride - Optional text override (for checking during input)
|
||||
* @returns Active slash query object or null if no active slash command
|
||||
*/
|
||||
const getActiveSlashQueryAtPosition = useCallback(
|
||||
(pos: number, textOverride?: string) => {
|
||||
const text = textOverride ?? message
|
||||
const before = text.slice(0, pos)
|
||||
const slashIndex = before.lastIndexOf('/')
|
||||
if (slashIndex === -1) return null
|
||||
|
||||
// Ensure '/' starts a token (start or whitespace before)
|
||||
if (slashIndex > 0 && !/\s/.test(before.charAt(slashIndex - 1))) return null
|
||||
|
||||
// Check if this '/' is part of a completed slash token ( /command )
|
||||
if (selectedContexts.length > 0) {
|
||||
const labels = selectedContexts.map((c) => c.label).filter(Boolean) as string[]
|
||||
for (const label of labels) {
|
||||
// Space-wrapped token: " /label "
|
||||
const token = ` /${label} `
|
||||
let fromIndex = 0
|
||||
while (fromIndex <= text.length) {
|
||||
const idx = text.indexOf(token, fromIndex)
|
||||
if (idx === -1) break
|
||||
|
||||
const tokenStart = idx
|
||||
const tokenEnd = idx + token.length
|
||||
const slashPositionInToken = idx + 1 // position of / in " /label "
|
||||
|
||||
if (slashIndex === slashPositionInToken) {
|
||||
return null
|
||||
}
|
||||
|
||||
if (pos > tokenStart && pos < tokenEnd) {
|
||||
return null
|
||||
}
|
||||
|
||||
fromIndex = tokenEnd
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const segment = before.slice(slashIndex + 1)
|
||||
// Close the popup if user types space immediately after /
|
||||
if (segment.length > 0 && /^\s/.test(segment)) {
|
||||
return null
|
||||
}
|
||||
|
||||
return { query: segment, start: slashIndex, end: pos }
|
||||
},
|
||||
[message, selectedContexts]
|
||||
)
|
||||
|
||||
/**
|
||||
* Gets the submenu query text
|
||||
*
|
||||
@@ -217,6 +273,40 @@ export function useMentionMenu({
|
||||
[message, getActiveMentionQueryAtPosition, onMessageChange]
|
||||
)
|
||||
|
||||
/**
|
||||
* Replaces active slash command with a label
|
||||
*
|
||||
* @param label - Label to replace the slash command with
|
||||
* @returns True if replacement was successful, false if no active slash command found
|
||||
*/
|
||||
const replaceActiveSlashWith = useCallback(
|
||||
(label: string) => {
|
||||
const textarea = textareaRef.current
|
||||
if (!textarea) return false
|
||||
const pos = textarea.selectionStart ?? message.length
|
||||
const active = getActiveSlashQueryAtPosition(pos)
|
||||
if (!active) return false
|
||||
|
||||
const before = message.slice(0, active.start)
|
||||
const after = message.slice(active.end)
|
||||
|
||||
// Always include leading space, avoid duplicate if one exists
|
||||
const needsLeadingSpace = !before.endsWith(' ')
|
||||
const insertion = `${needsLeadingSpace ? ' ' : ''}/${label} `
|
||||
|
||||
const next = `${before}${insertion}${after}`
|
||||
onMessageChange(next)
|
||||
|
||||
setTimeout(() => {
|
||||
const cursorPos = before.length + insertion.length
|
||||
textarea.setSelectionRange(cursorPos, cursorPos)
|
||||
textarea.focus()
|
||||
}, 0)
|
||||
return true
|
||||
},
|
||||
[message, getActiveSlashQueryAtPosition, onMessageChange]
|
||||
)
|
||||
|
||||
/**
|
||||
* Scrolls active item into view in the menu
|
||||
*
|
||||
@@ -304,10 +394,12 @@ export function useMentionMenu({
|
||||
// Operations
|
||||
getCaretPos,
|
||||
getActiveMentionQueryAtPosition,
|
||||
getActiveSlashQueryAtPosition,
|
||||
getSubmenuQuery,
|
||||
resetActiveMentionQuery,
|
||||
insertAtCursor,
|
||||
replaceActiveMentionWith,
|
||||
replaceActiveSlashWith,
|
||||
scrollActiveItemIntoView,
|
||||
closeMentionMenu,
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ export function useMentionTokens({
|
||||
setSelectedContexts,
|
||||
}: UseMentionTokensProps) {
|
||||
/**
|
||||
* Computes all mention ranges in the message
|
||||
* Computes all mention ranges in the message (both @mentions and /commands)
|
||||
*
|
||||
* @returns Array of mention ranges sorted by start position
|
||||
*/
|
||||
@@ -55,8 +55,13 @@ export function useMentionTokens({
|
||||
const uniqueLabels = Array.from(new Set(labels))
|
||||
|
||||
for (const label of uniqueLabels) {
|
||||
// Space-wrapped token: " @label " (search from start)
|
||||
const token = ` @${label} `
|
||||
// Find matching context to determine if it's a slash command
|
||||
const matchingContext = selectedContexts.find((c) => c.label === label)
|
||||
const isSlashCommand = matchingContext?.kind === 'slash_command'
|
||||
const prefix = isSlashCommand ? '/' : '@'
|
||||
|
||||
// Space-wrapped token: " @label " or " /label " (search from start)
|
||||
const token = ` ${prefix}${label} `
|
||||
let fromIndex = 0
|
||||
while (fromIndex <= message.length) {
|
||||
const idx = message.indexOf(token, fromIndex)
|
||||
|
||||
@@ -21,6 +21,7 @@ import {
|
||||
MentionMenu,
|
||||
ModelSelector,
|
||||
ModeSelector,
|
||||
SlashMenu,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/components'
|
||||
import { NEAR_TOP_THRESHOLD } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/constants'
|
||||
import {
|
||||
@@ -117,13 +118,13 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
const selectedModel =
|
||||
selectedModelOverride !== undefined ? selectedModelOverride : copilotStore.selectedModel
|
||||
const setSelectedModel = onModelChangeOverride || copilotStore.setSelectedModel
|
||||
const contextUsage = copilotStore.contextUsage
|
||||
|
||||
// Internal state
|
||||
const [internalMessage, setInternalMessage] = useState('')
|
||||
const [isNearTop, setIsNearTop] = useState(false)
|
||||
const [containerRef, setContainerRef] = useState<HTMLDivElement | null>(null)
|
||||
const [inputContainerRef, setInputContainerRef] = useState<HTMLDivElement | null>(null)
|
||||
const [showSlashMenu, setShowSlashMenu] = useState(false)
|
||||
|
||||
// Controlled vs uncontrolled message state
|
||||
const message = controlledValue !== undefined ? controlledValue : internalMessage
|
||||
@@ -300,7 +301,8 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
async (overrideMessage?: string, options: { preserveInput?: boolean } = {}) => {
|
||||
const targetMessage = overrideMessage ?? message
|
||||
const trimmedMessage = targetMessage.trim()
|
||||
if (!trimmedMessage || disabled || isLoading) return
|
||||
// Allow submission even when isLoading - store will queue the message
|
||||
if (!trimmedMessage || disabled) return
|
||||
|
||||
const failedUploads = fileAttachments.attachedFiles.filter((f) => !f.uploading && !f.key)
|
||||
if (failedUploads.length > 0) {
|
||||
@@ -370,20 +372,113 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}
|
||||
}, [onAbort, isLoading])
|
||||
|
||||
const handleSlashCommandSelect = useCallback(
|
||||
(command: string) => {
|
||||
// Capitalize the command for display
|
||||
const capitalizedCommand = command.charAt(0).toUpperCase() + command.slice(1)
|
||||
|
||||
// Replace the active slash query with the capitalized command
|
||||
mentionMenu.replaceActiveSlashWith(capitalizedCommand)
|
||||
|
||||
// Add as a context so it gets highlighted
|
||||
contextManagement.addContext({
|
||||
kind: 'slash_command',
|
||||
command,
|
||||
label: capitalizedCommand,
|
||||
})
|
||||
|
||||
setShowSlashMenu(false)
|
||||
mentionMenu.textareaRef.current?.focus()
|
||||
},
|
||||
[mentionMenu, contextManagement]
|
||||
)
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: KeyboardEvent<HTMLTextAreaElement>) => {
|
||||
// Escape key handling
|
||||
if (e.key === 'Escape' && mentionMenu.showMentionMenu) {
|
||||
if (e.key === 'Escape' && (mentionMenu.showMentionMenu || showSlashMenu)) {
|
||||
e.preventDefault()
|
||||
if (mentionMenu.openSubmenuFor) {
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
mentionMenu.setSubmenuQueryStart(null)
|
||||
} else {
|
||||
mentionMenu.closeMentionMenu()
|
||||
setShowSlashMenu(false)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Arrow navigation in slash menu
|
||||
if (showSlashMenu) {
|
||||
const TOP_LEVEL_COMMANDS = ['plan', 'debug', 'fast', 'superagent', 'deploy']
|
||||
const WEB_COMMANDS = ['search', 'research', 'crawl', 'read', 'scrape']
|
||||
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
|
||||
|
||||
const caretPos = mentionMenu.getCaretPos()
|
||||
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caretPos, message)
|
||||
const query = activeSlash?.query.trim().toLowerCase() || ''
|
||||
const showAggregatedView = query.length > 0
|
||||
|
||||
if (e.key === 'ArrowDown' || e.key === 'ArrowUp') {
|
||||
e.preventDefault()
|
||||
|
||||
if (mentionMenu.openSubmenuFor === 'Web') {
|
||||
// Navigate in Web submenu
|
||||
const last = WEB_COMMANDS.length - 1
|
||||
mentionMenu.setSubmenuActiveIndex((prev) => {
|
||||
const next =
|
||||
e.key === 'ArrowDown' ? (prev >= last ? 0 : prev + 1) : prev <= 0 ? last : prev - 1
|
||||
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
|
||||
return next
|
||||
})
|
||||
} else if (showAggregatedView) {
|
||||
// Navigate in filtered view
|
||||
const filtered = ALL_COMMANDS.filter((cmd) => cmd.includes(query))
|
||||
const last = Math.max(0, filtered.length - 1)
|
||||
mentionMenu.setSubmenuActiveIndex((prev) => {
|
||||
if (filtered.length === 0) return 0
|
||||
const next =
|
||||
e.key === 'ArrowDown' ? (prev >= last ? 0 : prev + 1) : prev <= 0 ? last : prev - 1
|
||||
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
|
||||
return next
|
||||
})
|
||||
} else {
|
||||
// Navigate in folder view (top-level + Web folder)
|
||||
const totalItems = TOP_LEVEL_COMMANDS.length + 1 // +1 for Web folder
|
||||
const last = totalItems - 1
|
||||
mentionMenu.setMentionActiveIndex((prev) => {
|
||||
const next =
|
||||
e.key === 'ArrowDown' ? (prev >= last ? 0 : prev + 1) : prev <= 0 ? last : prev - 1
|
||||
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
|
||||
return next
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Arrow right to enter Web submenu
|
||||
if (e.key === 'ArrowRight') {
|
||||
e.preventDefault()
|
||||
if (!showAggregatedView && !mentionMenu.openSubmenuFor) {
|
||||
// Check if Web folder is selected (it's after all top-level commands)
|
||||
if (mentionMenu.mentionActiveIndex === TOP_LEVEL_COMMANDS.length) {
|
||||
mentionMenu.setOpenSubmenuFor('Web')
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Arrow left to exit submenu
|
||||
if (e.key === 'ArrowLeft') {
|
||||
e.preventDefault()
|
||||
if (mentionMenu.openSubmenuFor) {
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Arrow navigation in mention menu
|
||||
if (mentionKeyboard.handleArrowNavigation(e)) return
|
||||
if (mentionKeyboard.handleArrowRight(e)) return
|
||||
@@ -392,6 +487,41 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
// Enter key handling
|
||||
if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
|
||||
e.preventDefault()
|
||||
if (showSlashMenu) {
|
||||
const TOP_LEVEL_COMMANDS = ['plan', 'debug', 'fast', 'superagent', 'deploy']
|
||||
const WEB_COMMANDS = ['search', 'research', 'crawl', 'read', 'scrape']
|
||||
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]
|
||||
|
||||
const caretPos = mentionMenu.getCaretPos()
|
||||
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caretPos, message)
|
||||
const query = activeSlash?.query.trim().toLowerCase() || ''
|
||||
const showAggregatedView = query.length > 0
|
||||
|
||||
if (mentionMenu.openSubmenuFor === 'Web') {
|
||||
// Select from Web submenu
|
||||
const selectedCommand = WEB_COMMANDS[mentionMenu.submenuActiveIndex] || WEB_COMMANDS[0]
|
||||
handleSlashCommandSelect(selectedCommand)
|
||||
} else if (showAggregatedView) {
|
||||
// Select from filtered view
|
||||
const filtered = ALL_COMMANDS.filter((cmd) => cmd.includes(query))
|
||||
if (filtered.length > 0) {
|
||||
const selectedCommand = filtered[mentionMenu.submenuActiveIndex] || filtered[0]
|
||||
handleSlashCommandSelect(selectedCommand)
|
||||
}
|
||||
} else {
|
||||
// Folder navigation view
|
||||
const selectedIndex = mentionMenu.mentionActiveIndex
|
||||
if (selectedIndex < TOP_LEVEL_COMMANDS.length) {
|
||||
// Top-level command selected
|
||||
handleSlashCommandSelect(TOP_LEVEL_COMMANDS[selectedIndex])
|
||||
} else if (selectedIndex === TOP_LEVEL_COMMANDS.length) {
|
||||
// Web folder selected - open it
|
||||
mentionMenu.setOpenSubmenuFor('Web')
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
if (!mentionMenu.showMentionMenu) {
|
||||
handleSubmit()
|
||||
} else {
|
||||
@@ -469,7 +599,15 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}
|
||||
}
|
||||
},
|
||||
[mentionMenu, mentionKeyboard, handleSubmit, message.length, mentionTokensWithContext]
|
||||
[
|
||||
mentionMenu,
|
||||
mentionKeyboard,
|
||||
handleSubmit,
|
||||
handleSlashCommandSelect,
|
||||
message,
|
||||
mentionTokensWithContext,
|
||||
showSlashMenu,
|
||||
]
|
||||
)
|
||||
|
||||
const handleInputChange = useCallback(
|
||||
@@ -481,9 +619,14 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
if (disableMentions) return
|
||||
|
||||
const caret = e.target.selectionStart ?? newValue.length
|
||||
const active = mentionMenu.getActiveMentionQueryAtPosition(caret, newValue)
|
||||
|
||||
if (active) {
|
||||
// Check for @ mention trigger
|
||||
const activeMention = mentionMenu.getActiveMentionQueryAtPosition(caret, newValue)
|
||||
// Check for / slash command trigger
|
||||
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caret, newValue)
|
||||
|
||||
if (activeMention) {
|
||||
setShowSlashMenu(false)
|
||||
mentionMenu.setShowMentionMenu(true)
|
||||
mentionMenu.setInAggregated(false)
|
||||
if (mentionMenu.openSubmenuFor) {
|
||||
@@ -492,10 +635,17 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
mentionMenu.setMentionActiveIndex(0)
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}
|
||||
} else if (activeSlash) {
|
||||
mentionMenu.setShowMentionMenu(false)
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
mentionMenu.setSubmenuQueryStart(null)
|
||||
setShowSlashMenu(true)
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
} else {
|
||||
mentionMenu.setShowMentionMenu(false)
|
||||
mentionMenu.setOpenSubmenuFor(null)
|
||||
mentionMenu.setSubmenuQueryStart(null)
|
||||
setShowSlashMenu(false)
|
||||
}
|
||||
},
|
||||
[setMessage, mentionMenu, disableMentions]
|
||||
@@ -542,6 +692,32 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}, [disabled, isLoading, mentionMenu, message, setMessage])
|
||||
|
||||
const handleOpenSlashMenu = useCallback(() => {
|
||||
if (disabled || isLoading) return
|
||||
const textarea = mentionMenu.textareaRef.current
|
||||
if (!textarea) return
|
||||
textarea.focus()
|
||||
const pos = textarea.selectionStart ?? message.length
|
||||
const needsSpaceBefore = pos > 0 && !/\s/.test(message.charAt(pos - 1))
|
||||
|
||||
const insertText = needsSpaceBefore ? ' /' : '/'
|
||||
const start = textarea.selectionStart ?? message.length
|
||||
const end = textarea.selectionEnd ?? message.length
|
||||
const before = message.slice(0, start)
|
||||
const after = message.slice(end)
|
||||
const next = `${before}${insertText}${after}`
|
||||
setMessage(next)
|
||||
|
||||
setTimeout(() => {
|
||||
const newPos = before.length + insertText.length
|
||||
textarea.setSelectionRange(newPos, newPos)
|
||||
textarea.focus()
|
||||
}, 0)
|
||||
|
||||
setShowSlashMenu(true)
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
}, [disabled, isLoading, mentionMenu, message, setMessage])
|
||||
|
||||
const canSubmit = message.trim().length > 0 && !disabled && !isLoading
|
||||
const showAbortButton = isLoading && onAbort
|
||||
|
||||
@@ -643,6 +819,18 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
<AtSign className='h-3 w-3' strokeWidth={1.75} />
|
||||
</Badge>
|
||||
|
||||
<Badge
|
||||
variant='outline'
|
||||
onClick={handleOpenSlashMenu}
|
||||
title='Insert /'
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] p-[4.5px]',
|
||||
(disabled || isLoading) && 'cursor-not-allowed'
|
||||
)}
|
||||
>
|
||||
<span className='flex h-3 w-3 items-center justify-center text-[11px] font-medium leading-none'>/</span>
|
||||
</Badge>
|
||||
|
||||
{/* Selected Context Pills */}
|
||||
<ContextPills
|
||||
contexts={contextManagement.selectedContexts}
|
||||
@@ -717,6 +905,18 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
/>,
|
||||
document.body
|
||||
)}
|
||||
|
||||
{/* Slash Menu Portal */}
|
||||
{!disableMentions &&
|
||||
showSlashMenu &&
|
||||
createPortal(
|
||||
<SlashMenu
|
||||
mentionMenu={mentionMenu}
|
||||
message={message}
|
||||
onSelectCommand={handleSlashCommandSelect}
|
||||
/>,
|
||||
document.body
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Bottom Row: Mode Selector + Model Selector + Attach Button + Send Button */}
|
||||
@@ -746,7 +946,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
title='Attach file'
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] border-0 bg-transparent p-[0px] dark:bg-transparent',
|
||||
(disabled || isLoading) && 'cursor-not-allowed opacity-50'
|
||||
disabled && 'cursor-not-allowed opacity-50'
|
||||
)}
|
||||
>
|
||||
<Image className='!h-3.5 !w-3.5 scale-x-110' />
|
||||
@@ -802,7 +1002,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Hidden File Input */}
|
||||
{/* Hidden File Input - enabled during streaming so users can prepare images for the next message */}
|
||||
<input
|
||||
ref={fileAttachments.fileInputRef}
|
||||
type='file'
|
||||
@@ -810,7 +1010,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
className='hidden'
|
||||
accept='image/*'
|
||||
multiple
|
||||
disabled={disabled || isLoading}
|
||||
disabled={disabled}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -22,9 +22,11 @@ import {
|
||||
PopoverTrigger,
|
||||
} from '@/components/emcn'
|
||||
import { Trash } from '@/components/emcn/icons/trash'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import {
|
||||
CopilotMessage,
|
||||
PlanModeSection,
|
||||
QueuedMessages,
|
||||
TodoList,
|
||||
UserInput,
|
||||
Welcome,
|
||||
@@ -99,7 +101,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
loadChats,
|
||||
messageCheckpoints,
|
||||
currentChat,
|
||||
fetchContextUsage,
|
||||
selectChat,
|
||||
deleteChat,
|
||||
areChatsFresh,
|
||||
@@ -118,14 +119,15 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
chatsLoadedForWorkflow,
|
||||
setCopilotWorkflowId,
|
||||
loadChats,
|
||||
fetchContextUsage,
|
||||
loadAutoAllowedTools,
|
||||
currentChat,
|
||||
isSendingMessage,
|
||||
})
|
||||
|
||||
// Handle scroll management
|
||||
const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage)
|
||||
// Handle scroll management (80px stickiness for copilot)
|
||||
const { scrollAreaRef, scrollToBottom } = useScrollManagement(messages, isSendingMessage, {
|
||||
stickinessThreshold: 80,
|
||||
})
|
||||
|
||||
// Handle chat history grouping
|
||||
const { groupedChats, handleHistoryDropdownOpen: handleHistoryDropdownOpenHook } = useChatHistory(
|
||||
@@ -298,7 +300,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
*/
|
||||
const handleSubmit = useCallback(
|
||||
async (query: string, fileAttachments?: MessageFileAttachment[], contexts?: any[]) => {
|
||||
if (!query || isSendingMessage || !activeWorkflowId) return
|
||||
// Allow submission even when isSendingMessage - store will queue the message
|
||||
if (!query || !activeWorkflowId) return
|
||||
|
||||
if (showPlanTodos) {
|
||||
const store = useCopilotStore.getState()
|
||||
@@ -316,7 +319,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
logger.error('Failed to send message:', error)
|
||||
}
|
||||
},
|
||||
[isSendingMessage, activeWorkflowId, sendMessage, showPlanTodos]
|
||||
[activeWorkflowId, sendMessage, showPlanTodos]
|
||||
)
|
||||
|
||||
/**
|
||||
@@ -443,7 +446,13 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
<span className='min-w-0 flex-1 truncate'>
|
||||
{chat.title || 'New Chat'}
|
||||
</span>
|
||||
<div className='flex flex-shrink-0 items-center gap-[4px] opacity-0 transition-opacity group-hover:opacity-100'>
|
||||
<div
|
||||
className={cn(
|
||||
'flex flex-shrink-0 items-center gap-[4px]',
|
||||
currentChat?.id !== chat.id &&
|
||||
'opacity-0 transition-opacity group-hover:opacity-100'
|
||||
)}
|
||||
>
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='h-[16px] w-[16px] p-0'
|
||||
@@ -563,6 +572,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
onRevertModeChange={(isReverting) =>
|
||||
handleRevertModeChange(message.id, isReverting)
|
||||
}
|
||||
isLastMessage={index === messages.length - 1}
|
||||
/>
|
||||
)
|
||||
})}
|
||||
@@ -588,6 +598,9 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Queued messages (shown when messages are waiting) */}
|
||||
<QueuedMessages />
|
||||
|
||||
{/* Input area with integrated mode selector */}
|
||||
<div className='flex-shrink-0 px-[8px] pb-[8px]'>
|
||||
<UserInput
|
||||
|
||||
@@ -11,7 +11,6 @@ interface UseCopilotInitializationProps {
|
||||
chatsLoadedForWorkflow: string | null
|
||||
setCopilotWorkflowId: (workflowId: string | null) => Promise<void>
|
||||
loadChats: (forceRefresh?: boolean) => Promise<void>
|
||||
fetchContextUsage: () => Promise<void>
|
||||
loadAutoAllowedTools: () => Promise<void>
|
||||
currentChat: any
|
||||
isSendingMessage: boolean
|
||||
@@ -30,7 +29,6 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
chatsLoadedForWorkflow,
|
||||
setCopilotWorkflowId,
|
||||
loadChats,
|
||||
fetchContextUsage,
|
||||
loadAutoAllowedTools,
|
||||
currentChat,
|
||||
isSendingMessage,
|
||||
@@ -102,18 +100,6 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
||||
isSendingMessage,
|
||||
])
|
||||
|
||||
/**
|
||||
* Fetch context usage when component is initialized and has a current chat
|
||||
*/
|
||||
useEffect(() => {
|
||||
if (isInitialized && currentChat?.id && activeWorkflowId) {
|
||||
logger.info('[Copilot] Component initialized, fetching context usage')
|
||||
fetchContextUsage().catch((err) => {
|
||||
logger.warn('[Copilot] Failed to fetch context usage on mount', err)
|
||||
})
|
||||
}
|
||||
}, [isInitialized, currentChat?.id, activeWorkflowId, fetchContextUsage])
|
||||
|
||||
/**
|
||||
* Load auto-allowed tools once on mount
|
||||
*/
|
||||
|
||||
@@ -654,17 +654,20 @@ export function ConditionInput({
|
||||
}
|
||||
|
||||
const removeBlock = (id: string) => {
|
||||
if (isPreview || disabled || conditionalBlocks.length <= 2) return
|
||||
if (isPreview || disabled) return
|
||||
// Condition mode requires at least 2 blocks (if/else), router mode requires at least 1
|
||||
const minBlocks = isRouterMode ? 1 : 2
|
||||
if (conditionalBlocks.length <= minBlocks) return
|
||||
|
||||
// Remove any associated edges before removing the block
|
||||
const handlePrefix = isRouterMode ? `router-${id}` : `condition-${id}`
|
||||
const edgeIdsToRemove = edges
|
||||
.filter((edge) => edge.sourceHandle?.startsWith(`condition-${id}`))
|
||||
.filter((edge) => edge.sourceHandle?.startsWith(handlePrefix))
|
||||
.map((edge) => edge.id)
|
||||
if (edgeIdsToRemove.length > 0) {
|
||||
batchRemoveEdges(edgeIdsToRemove)
|
||||
}
|
||||
|
||||
if (conditionalBlocks.length === 1) return
|
||||
shouldPersistRef.current = true
|
||||
setConditionalBlocks((blocks) => updateBlockTitles(blocks.filter((block) => block.id !== id)))
|
||||
|
||||
@@ -816,7 +819,9 @@ export function ConditionInput({
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={() => removeBlock(block.id)}
|
||||
disabled={isPreview || disabled || conditionalBlocks.length === 1}
|
||||
disabled={
|
||||
isPreview || disabled || conditionalBlocks.length <= (isRouterMode ? 1 : 2)
|
||||
}
|
||||
className='h-auto p-0 text-[var(--text-error)] hover:text-[var(--text-error)]'
|
||||
>
|
||||
<Trash className='h-[14px] w-[14px]' />
|
||||
|
||||
@@ -16,7 +16,7 @@ import {
|
||||
Switch,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { McpIcon } from '@/components/icons'
|
||||
import { McpIcon, WorkflowIcon } from '@/components/icons'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import {
|
||||
getIssueBadgeLabel,
|
||||
@@ -30,6 +30,7 @@ import {
|
||||
type OAuthProvider,
|
||||
type OAuthService,
|
||||
} from '@/lib/oauth'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import {
|
||||
CheckboxList,
|
||||
Code,
|
||||
@@ -769,9 +770,10 @@ function WorkflowToolDeployBadge({
|
||||
}) {
|
||||
const { isDeployed, needsRedeploy, isLoading, refetch } = useChildDeployment(workflowId)
|
||||
const [isDeploying, setIsDeploying] = useState(false)
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const deployWorkflow = useCallback(async () => {
|
||||
if (isDeploying || !workflowId) return
|
||||
if (isDeploying || !workflowId || !userPermissions.canAdmin) return
|
||||
|
||||
try {
|
||||
setIsDeploying(true)
|
||||
@@ -796,7 +798,7 @@ function WorkflowToolDeployBadge({
|
||||
} finally {
|
||||
setIsDeploying(false)
|
||||
}
|
||||
}, [isDeploying, workflowId, refetch, onDeploySuccess])
|
||||
}, [isDeploying, workflowId, refetch, onDeploySuccess, userPermissions.canAdmin])
|
||||
|
||||
if (isLoading || (isDeployed && !needsRedeploy)) {
|
||||
return null
|
||||
@@ -811,13 +813,13 @@ function WorkflowToolDeployBadge({
|
||||
<Tooltip.Trigger asChild>
|
||||
<Badge
|
||||
variant={!isDeployed ? 'red' : 'amber'}
|
||||
className='cursor-pointer'
|
||||
className={userPermissions.canAdmin ? 'cursor-pointer' : 'cursor-not-allowed'}
|
||||
size='sm'
|
||||
dot
|
||||
onClick={(e: React.MouseEvent) => {
|
||||
e.stopPropagation()
|
||||
e.preventDefault()
|
||||
if (!isDeploying) {
|
||||
if (!isDeploying && userPermissions.canAdmin) {
|
||||
deployWorkflow()
|
||||
}
|
||||
}}
|
||||
@@ -826,7 +828,13 @@ function WorkflowToolDeployBadge({
|
||||
</Badge>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
<span className='text-sm'>{!isDeployed ? 'Click to deploy' : 'Click to redeploy'}</span>
|
||||
<span className='text-sm'>
|
||||
{!userPermissions.canAdmin
|
||||
? 'Admin permission required to deploy'
|
||||
: !isDeployed
|
||||
? 'Click to deploy'
|
||||
: 'Click to redeploy'}
|
||||
</span>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)
|
||||
@@ -933,6 +941,13 @@ export function ToolInput({
|
||||
const forceRefreshMcpTools = useForceRefreshMcpTools()
|
||||
const openSettingsModal = useSettingsModalStore((state) => state.openModal)
|
||||
const mcpDataLoading = mcpLoading || mcpServersLoading
|
||||
|
||||
// Fetch workflows for the Workflows section in the dropdown
|
||||
const { data: workflowsList = [] } = useWorkflows(workspaceId, { syncRegistry: false })
|
||||
const availableWorkflows = useMemo(
|
||||
() => workflowsList.filter((w) => w.id !== workflowId),
|
||||
[workflowsList, workflowId]
|
||||
)
|
||||
const hasRefreshedRef = useRef(false)
|
||||
|
||||
const hasMcpTools = selectedTools.some((tool) => tool.type === 'mcp')
|
||||
@@ -1735,6 +1750,36 @@ export function ToolInput({
|
||||
})
|
||||
}
|
||||
|
||||
// Workflows section - shows available workflows that can be executed as tools
|
||||
if (availableWorkflows.length > 0) {
|
||||
groups.push({
|
||||
section: 'Workflows',
|
||||
items: availableWorkflows.map((workflow) => ({
|
||||
label: workflow.name,
|
||||
value: `workflow-${workflow.id}`,
|
||||
iconElement: createToolIcon('#6366F1', WorkflowIcon),
|
||||
onSelect: () => {
|
||||
const newTool: StoredTool = {
|
||||
type: 'workflow',
|
||||
title: 'Workflow',
|
||||
toolId: 'workflow_executor',
|
||||
params: {
|
||||
workflowId: workflow.id,
|
||||
},
|
||||
isExpanded: true,
|
||||
usageControl: 'auto',
|
||||
}
|
||||
setStoreValue([
|
||||
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
|
||||
newTool,
|
||||
])
|
||||
setOpen(false)
|
||||
},
|
||||
disabled: isPreview || disabled,
|
||||
})),
|
||||
})
|
||||
}
|
||||
|
||||
return groups
|
||||
}, [
|
||||
customTools,
|
||||
@@ -1749,6 +1794,7 @@ export function ToolInput({
|
||||
handleSelectTool,
|
||||
permissionConfig.disableCustomTools,
|
||||
permissionConfig.disableMcpTools,
|
||||
availableWorkflows,
|
||||
])
|
||||
|
||||
const toolRequiresOAuth = (toolId: string): boolean => {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { useCallback, useEffect } from 'react'
|
||||
import { PANEL_WIDTH } from '@/stores/constants'
|
||||
import { usePanelStore } from '@/stores/panel'
|
||||
|
||||
@@ -10,15 +10,14 @@ import { usePanelStore } from '@/stores/panel'
|
||||
* @returns Resize state and handlers
|
||||
*/
|
||||
export function usePanelResize() {
|
||||
const { setPanelWidth } = usePanelStore()
|
||||
const [isResizing, setIsResizing] = useState(false)
|
||||
const { setPanelWidth, isResizing, setIsResizing } = usePanelStore()
|
||||
|
||||
/**
|
||||
* Handles mouse down on resize handle
|
||||
*/
|
||||
const handleMouseDown = useCallback(() => {
|
||||
setIsResizing(true)
|
||||
}, [])
|
||||
}, [setIsResizing])
|
||||
|
||||
/**
|
||||
* Setup resize event listeners and body styles when resizing
|
||||
@@ -52,7 +51,7 @@ export function usePanelResize() {
|
||||
document.body.style.cursor = ''
|
||||
document.body.style.userSelect = ''
|
||||
}
|
||||
}, [isResizing, setPanelWidth])
|
||||
}, [isResizing, setPanelWidth, setIsResizing])
|
||||
|
||||
return {
|
||||
isResizing,
|
||||
|
||||
@@ -108,7 +108,7 @@ export function Panel() {
|
||||
// Delete workflow hook
|
||||
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
|
||||
workspaceId,
|
||||
getWorkflowIds: () => activeWorkflowId || '',
|
||||
workflowIds: activeWorkflowId || '',
|
||||
isActive: true,
|
||||
onSuccess: () => setIsDeleteModalOpen(false),
|
||||
})
|
||||
|
||||
@@ -136,7 +136,7 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
|
||||
const ringStyles = cn(
|
||||
hasRing && 'ring-[1.75px]',
|
||||
(isFocused || isPreviewSelected) && 'ring-[var(--brand-secondary)]',
|
||||
diffStatus === 'new' && 'ring-[#22C55F]',
|
||||
diffStatus === 'new' && 'ring-[var(--brand-tertiary-2)]',
|
||||
diffStatus === 'edited' && 'ring-[var(--warning)]'
|
||||
)
|
||||
|
||||
@@ -148,7 +148,7 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
|
||||
ref={blockRef}
|
||||
onClick={() => setCurrentBlockId(id)}
|
||||
className={cn(
|
||||
'relative cursor-pointer select-none rounded-[8px] border border-[var(--border)]',
|
||||
'relative cursor-pointer select-none rounded-[8px] border border-[var(--border-1)]',
|
||||
'transition-block-bg transition-ring',
|
||||
'z-[20]'
|
||||
)}
|
||||
|
||||
@@ -306,6 +306,7 @@ export function Terminal() {
|
||||
const terminalRef = useRef<HTMLElement>(null)
|
||||
const prevEntriesLengthRef = useRef(0)
|
||||
const prevWorkflowEntriesLengthRef = useRef(0)
|
||||
const isTerminalFocusedRef = useRef(false)
|
||||
const {
|
||||
setTerminalHeight,
|
||||
lastExpandedHeight,
|
||||
@@ -540,8 +541,11 @@ export function Terminal() {
|
||||
/**
|
||||
* Handle row click - toggle if clicking same entry
|
||||
* Disables auto-selection when user manually selects, re-enables when deselecting
|
||||
* Also focuses the terminal to enable keyboard navigation
|
||||
*/
|
||||
const handleRowClick = useCallback((entry: ConsoleEntry) => {
|
||||
// Focus the terminal to enable keyboard navigation
|
||||
terminalRef.current?.focus()
|
||||
setSelectedEntry((prev) => {
|
||||
const isDeselecting = prev?.id === entry.id
|
||||
setAutoSelectEnabled(isDeselecting)
|
||||
@@ -562,6 +566,26 @@ export function Terminal() {
|
||||
setIsToggling(false)
|
||||
}, [])

  /**
   * Handle terminal focus - enables keyboard navigation
   */
  const handleTerminalFocus = useCallback(() => {
    isTerminalFocusedRef.current = true
  }, [])

  /**
   * Handle terminal blur - disables keyboard navigation
   */
  const handleTerminalBlur = useCallback((e: React.FocusEvent) => {
    // Only blur if focus is moving outside the terminal
    if (!terminalRef.current?.contains(e.relatedTarget as Node)) {
      isTerminalFocusedRef.current = false
    }
  }, [])

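Not part of the diff: a minimal standalone sketch of the blur-containment check used by `handleTerminalBlur`, written against plain DOM types as an assumption-level illustration.

```ts
// Illustrative only: a blur counts as "focus left the terminal" when the newly
// focused element (relatedTarget) is not inside the terminal container.
function isFocusLeaving(container: HTMLElement, e: FocusEvent): boolean {
  return !container.contains(e.relatedTarget as Node | null)
}
```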
/**
|
||||
* Handle copy output to clipboard
|
||||
*/
|
||||
const handleCopy = useCallback(() => {
|
||||
if (!selectedEntry) return
|
||||
|
||||
@@ -792,9 +816,12 @@ export function Terminal() {
|
||||
/**
|
||||
* Handle keyboard navigation through logs
|
||||
* Disables auto-selection when user manually navigates
|
||||
* Only active when the terminal is focused
|
||||
*/
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (e: KeyboardEvent) => {
|
||||
// Only handle navigation when terminal is focused
|
||||
if (!isTerminalFocusedRef.current) return
|
||||
if (isEventFromEditableElement(e)) return
|
||||
const activeElement = document.activeElement as HTMLElement | null
|
||||
const toolbarRoot = document.querySelector(
|
||||
@@ -829,9 +856,12 @@ export function Terminal() {
|
||||
/**
|
||||
* Handle keyboard navigation for input/output toggle
|
||||
* Left arrow shows output, right arrow shows input
|
||||
* Only active when the terminal is focused
|
||||
*/
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (e: KeyboardEvent) => {
|
||||
// Only handle navigation when terminal is focused
|
||||
if (!isTerminalFocusedRef.current) return
|
||||
// Ignore when typing/navigating inside editable inputs/editors
|
||||
if (isEventFromEditableElement(e)) return
|
||||
|
||||
@@ -936,6 +966,9 @@ export function Terminal() {
|
||||
isToggling && 'transition-[height] duration-100 ease-out'
|
||||
)}
|
||||
onTransitionEnd={handleTransitionEnd}
|
||||
onFocus={handleTerminalFocus}
|
||||
onBlur={handleTerminalBlur}
|
||||
tabIndex={-1}
|
||||
aria-label='Terminal'
|
||||
>
|
||||
<div className='relative flex h-full border-[var(--border)] border-t'>
|
||||
|
||||
@@ -87,8 +87,8 @@ export const ActionBar = memo(
|
||||
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
// Check for start_trigger (unified start block) - prevent duplication but allow deletion
|
||||
const isStartBlock = blockType === 'starter' || blockType === 'start_trigger'
|
||||
const isResponseBlock = blockType === 'response'
|
||||
const isNoteBlock = blockType === 'note'
|
||||
|
||||
/**
|
||||
@@ -140,7 +140,7 @@ export const ActionBar = memo(
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
|
||||
{!isStartBlock && (
|
||||
{!isStartBlock && !isResponseBlock && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
|
||||
@@ -199,8 +199,9 @@ const tryParseJson = (value: unknown): unknown => {
|
||||
|
||||
/**
|
||||
* Formats a subblock value for display, intelligently handling nested objects and arrays.
|
||||
* Used by both the canvas workflow blocks and copilot edit summaries.
|
||||
*/
|
||||
const getDisplayValue = (value: unknown): string => {
|
||||
export const getDisplayValue = (value: unknown): string => {
|
||||
if (value == null || value === '') return '-'
|
||||
|
||||
// Try parsing JSON strings first
|
||||
@@ -630,10 +631,13 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
? ((credValue as { value?: unknown }).value as string | undefined)
|
||||
: (credValue as string | undefined)
|
||||
if (prevCredRef.current !== cred) {
|
||||
const hadPreviousCredential = prevCredRef.current !== undefined
|
||||
prevCredRef.current = cred
|
||||
const keys = Object.keys(current)
|
||||
const dependentKeys = keys.filter((k) => k !== 'credential')
|
||||
dependentKeys.forEach((k) => collaborativeSetSubblockValue(id, k, ''))
|
||||
if (hadPreviousCredential) {
|
||||
const keys = Object.keys(current)
|
||||
const dependentKeys = keys.filter((k) => k !== 'credential')
|
||||
dependentKeys.forEach((k) => collaborativeSetSubblockValue(id, k, ''))
|
||||
}
|
||||
}
|
||||
}, [id, collaborativeSetSubblockValue])
|
||||
|
||||
@@ -863,7 +867,8 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
return parsed.map((item: unknown, index: number) => {
|
||||
const routeItem = item as { id?: string; value?: string }
|
||||
return {
|
||||
id: routeItem?.id ?? `${id}-route-${index}`,
|
||||
// Use stable ID format that matches ConditionInput's generateStableId
|
||||
id: routeItem?.id ?? `${id}-route${index + 1}`,
|
||||
value: routeItem?.value ?? '',
|
||||
}
|
||||
})
|
||||
@@ -873,7 +878,8 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
logger.warn('Failed to parse router routes value', { error, blockId: id })
|
||||
}
|
||||
|
||||
return [{ id: `${id}-route-route1`, value: '' }]
|
||||
// Fallback must match ConditionInput's default: generateStableId(blockId, 'route1') = `${blockId}-route1`
|
||||
return [{ id: `${id}-route1`, value: '' }]
|
||||
}, [type, subBlockState, id])
|
||||
|
||||
/**
|
||||
@@ -1015,11 +1021,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
<Tooltip.Trigger asChild>
|
||||
<Badge
|
||||
variant={!childIsDeployed ? 'red' : 'amber'}
|
||||
className='cursor-pointer'
|
||||
className={userPermissions.canAdmin ? 'cursor-pointer' : 'cursor-not-allowed'}
|
||||
dot
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
if (childWorkflowId && !isDeploying) {
|
||||
if (childWorkflowId && !isDeploying && userPermissions.canAdmin) {
|
||||
deployWorkflow(childWorkflowId)
|
||||
}
|
||||
}}
|
||||
@@ -1029,7 +1035,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
<span className='text-sm'>
|
||||
{!childIsDeployed ? 'Click to deploy' : 'Click to redeploy'}
|
||||
{!userPermissions.canAdmin
|
||||
? 'Admin permission required to deploy'
|
||||
: !childIsDeployed
|
||||
? 'Click to deploy'
|
||||
: 'Click to redeploy'}
|
||||
</span>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
|
||||
@@ -93,7 +93,7 @@ const WorkflowEdgeComponent = ({
|
||||
} else if (isErrorEdge) {
|
||||
color = 'var(--text-error)'
|
||||
} else if (edgeDiffStatus === 'new') {
|
||||
color = 'var(--brand-tertiary)'
|
||||
color = 'var(--brand-tertiary-2)'
|
||||
} else if (edgeRunStatus === 'success') {
|
||||
// Use green for preview mode, default for canvas execution
|
||||
color = previewExecutionStatus ? 'var(--brand-tertiary-2)' : 'var(--border-success)'
|
||||
|
||||
@@ -4,7 +4,7 @@ export {
|
||||
computeParentUpdateEntries,
|
||||
getClampedPositionForNode,
|
||||
isInEditableElement,
|
||||
selectNodesDeferred,
|
||||
resolveParentChildSelectionConflicts,
|
||||
validateTriggerPaste,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-canvas-helpers'
|
||||
export { useFloatBoundarySync, useFloatDrag, useFloatResize } from './float'
|
||||
@@ -12,7 +12,7 @@ export { useAutoLayout } from './use-auto-layout'
|
||||
export { BLOCK_DIMENSIONS, useBlockDimensions } from './use-block-dimensions'
|
||||
export { useBlockVisual } from './use-block-visual'
|
||||
export { type CurrentWorkflow, useCurrentWorkflow } from './use-current-workflow'
|
||||
export { useNodeUtilities } from './use-node-utilities'
|
||||
export { calculateContainerDimensions, useNodeUtilities } from './use-node-utilities'
|
||||
export { usePreventZoom } from './use-prevent-zoom'
|
||||
export { useScrollManagement } from './use-scroll-management'
|
||||
export { useWorkflowExecution } from './use-workflow-execution'
|
||||
|
||||
@@ -62,6 +62,47 @@ export function clampPositionToContainer(
|
||||
}
|
||||
}

/**
 * Calculates container dimensions based on child block positions.
 * Single source of truth for container sizing - ensures consistency between
 * live drag updates and final dimension calculations.
 *
 * @param childPositions - Array of child positions with their dimensions
 * @returns Calculated width and height for the container
 */
export function calculateContainerDimensions(
  childPositions: Array<{ x: number; y: number; width: number; height: number }>
): { width: number; height: number } {
  if (childPositions.length === 0) {
    return {
      width: CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
      height: CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
    }
  }

  let maxRight = 0
  let maxBottom = 0

  for (const child of childPositions) {
    maxRight = Math.max(maxRight, child.x + child.width)
    maxBottom = Math.max(maxBottom, child.y + child.height)
  }

  const width = Math.max(
    CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
    CONTAINER_DIMENSIONS.LEFT_PADDING + maxRight + CONTAINER_DIMENSIONS.RIGHT_PADDING
  )
  const height = Math.max(
    CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
    CONTAINER_DIMENSIONS.HEADER_HEIGHT +
      CONTAINER_DIMENSIONS.TOP_PADDING +
      maxBottom +
      CONTAINER_DIMENSIONS.BOTTOM_PADDING
  )

  return { width, height }
}

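Not part of the diff: a hedged sketch of how the new helper behaves. The constant values below are assumptions for illustration only, not the repository's actual CONTAINER_DIMENSIONS.

```ts
// Illustrative sketch: grow the container to the furthest child edge plus
// padding, never shrinking below the default size. Constant values are assumed.
const DIMS = {
  DEFAULT_WIDTH: 400,
  DEFAULT_HEIGHT: 200,
  LEFT_PADDING: 40,
  RIGHT_PADDING: 40,
  HEADER_HEIGHT: 50,
  TOP_PADDING: 20,
  BOTTOM_PADDING: 20,
} as const

type ChildRect = { x: number; y: number; width: number; height: number }

function containerSize(children: ChildRect[]): { width: number; height: number } {
  if (children.length === 0) {
    return { width: DIMS.DEFAULT_WIDTH, height: DIMS.DEFAULT_HEIGHT }
  }
  const maxRight = Math.max(...children.map((c) => c.x + c.width))
  const maxBottom = Math.max(...children.map((c) => c.y + c.height))
  return {
    width: Math.max(DIMS.DEFAULT_WIDTH, DIMS.LEFT_PADDING + maxRight + DIMS.RIGHT_PADDING),
    height: Math.max(
      DIMS.DEFAULT_HEIGHT,
      DIMS.HEADER_HEIGHT + DIMS.TOP_PADDING + maxBottom + DIMS.BOTTOM_PADDING
    ),
  }
}

// A single child at (100, 60) sized 250x120 yields 40+350+40 = 430 wide and
// 50+20+180+20 = 270 tall under these assumed paddings.
console.log(containerSize([{ x: 100, y: 60, width: 250, height: 120 }]))
```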
/**
|
||||
* Hook providing utilities for node position, hierarchy, and dimension calculations
|
||||
*/
|
||||
@@ -306,36 +347,16 @@ export function useNodeUtilities(blocks: Record<string, any>) {
|
||||
(id) => currentBlocks[id]?.data?.parentId === nodeId
|
||||
)
|
||||
|
||||
if (childBlockIds.length === 0) {
|
||||
return {
|
||||
width: CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
|
||||
height: CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
|
||||
}
|
||||
}
|
||||
const childPositions = childBlockIds
|
||||
.map((childId) => {
|
||||
const child = currentBlocks[childId]
|
||||
if (!child?.position) return null
|
||||
const { width, height } = getBlockDimensions(childId)
|
||||
return { x: child.position.x, y: child.position.y, width, height }
|
||||
})
|
||||
.filter((p): p is NonNullable<typeof p> => p !== null)
|
||||
|
||||
let maxRight = 0
|
||||
let maxBottom = 0
|
||||
|
||||
for (const childId of childBlockIds) {
|
||||
const child = currentBlocks[childId]
|
||||
if (!child?.position) continue
|
||||
|
||||
const { width: childWidth, height: childHeight } = getBlockDimensions(childId)
|
||||
|
||||
maxRight = Math.max(maxRight, child.position.x + childWidth)
|
||||
maxBottom = Math.max(maxBottom, child.position.y + childHeight)
|
||||
}
|
||||
|
||||
const width = Math.max(
|
||||
CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
|
||||
maxRight + CONTAINER_DIMENSIONS.RIGHT_PADDING
|
||||
)
|
||||
const height = Math.max(
|
||||
CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
|
||||
maxBottom + CONTAINER_DIMENSIONS.BOTTOM_PADDING
|
||||
)
|
||||
|
||||
return { width, height }
|
||||
return calculateContainerDimensions(childPositions)
|
||||
},
|
||||
[getBlockDimensions]
|
||||
)
|
||||
|
||||
@@ -12,6 +12,12 @@ interface UseScrollManagementOptions {
|
||||
* - `auto`: immediate scroll to bottom (used by floating chat to avoid jitter).
|
||||
*/
|
||||
behavior?: 'auto' | 'smooth'
|
||||
/**
|
||||
* Distance from bottom (in pixels) within which auto-scroll stays active.
|
||||
* Lower values = less sticky (user can scroll away easier).
|
||||
* Default is 100px.
|
||||
*/
|
||||
stickinessThreshold?: number
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -34,6 +40,7 @@ export function useScrollManagement(
|
||||
const programmaticScrollInProgressRef = useRef(false)
|
||||
const lastScrollTopRef = useRef(0)
|
||||
const scrollBehavior: 'auto' | 'smooth' = options?.behavior ?? 'smooth'
|
||||
const stickinessThreshold = options?.stickinessThreshold ?? 100
|
||||
|
||||
/**
|
||||
* Scrolls the container to the bottom with smooth animation
|
||||
@@ -74,7 +81,7 @@ export function useScrollManagement(
|
||||
const { scrollTop, scrollHeight, clientHeight } = scrollContainer
|
||||
const distanceFromBottom = scrollHeight - scrollTop - clientHeight
|
||||
|
||||
const nearBottom = distanceFromBottom <= 100
|
||||
const nearBottom = distanceFromBottom <= stickinessThreshold
|
||||
setIsNearBottom(nearBottom)
|
||||
|
||||
if (isSendingMessage) {
|
||||
@@ -95,7 +102,7 @@ export function useScrollManagement(
|
||||
|
||||
// Track last scrollTop for direction detection
|
||||
lastScrollTopRef.current = scrollTop
|
||||
}, [getScrollContainer, isSendingMessage, userHasScrolledDuringStream])
|
||||
}, [getScrollContainer, isSendingMessage, userHasScrolledDuringStream, stickinessThreshold])
|
||||
|
||||
// Attach scroll listener
|
||||
useEffect(() => {
|
||||
@@ -174,14 +181,20 @@ export function useScrollManagement(
|
||||
|
||||
const { scrollTop, scrollHeight, clientHeight } = scrollContainer
|
||||
const distanceFromBottom = scrollHeight - scrollTop - clientHeight
|
||||
const nearBottom = distanceFromBottom <= 120
|
||||
const nearBottom = distanceFromBottom <= stickinessThreshold
|
||||
if (nearBottom) {
|
||||
scrollToBottom()
|
||||
}
|
||||
}, 100)
|
||||
|
||||
return () => window.clearInterval(intervalId)
|
||||
}, [isSendingMessage, userHasScrolledDuringStream, getScrollContainer, scrollToBottom])
|
||||
}, [
|
||||
isSendingMessage,
|
||||
userHasScrolledDuringStream,
|
||||
getScrollContainer,
|
||||
scrollToBottom,
|
||||
stickinessThreshold,
|
||||
])
|
||||
|
||||
return {
|
||||
scrollAreaRef,
|
||||
|
||||
@@ -50,7 +50,7 @@ export function getBlockRingStyles(options: BlockRingOptions): {
|
||||
!isPending &&
|
||||
!isDeletedBlock &&
|
||||
diffStatus === 'new' &&
|
||||
'ring-[var(--brand-tertiary)]',
|
||||
'ring-[var(--brand-tertiary-2)]',
|
||||
!isActive &&
|
||||
!isPending &&
|
||||
!isDeletedBlock &&
|
||||
|
||||
@@ -23,7 +23,8 @@ interface TriggerValidationResult {
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that pasting/duplicating trigger blocks won't violate constraints.
|
||||
* Validates that pasting/duplicating blocks won't violate constraints.
|
||||
* Checks both trigger constraints and single-instance block constraints.
|
||||
* Returns validation result with error message if invalid.
|
||||
*/
|
||||
export function validateTriggerPaste(
|
||||
@@ -43,6 +44,12 @@ export function validateTriggerPaste(
|
||||
return { isValid: false, message }
|
||||
}
|
||||
}
|
||||
|
||||
const singleInstanceIssue = TriggerUtils.getSingleInstanceBlockIssue(existingBlocks, block.type)
|
||||
if (singleInstanceIssue) {
|
||||
const message = `A workflow can only have one ${singleInstanceIssue.blockName} block. ${action === 'paste' ? 'Please remove the existing one before pasting.' : 'Cannot duplicate.'}`
|
||||
return { isValid: false, message }
|
||||
}
|
||||
}
|
||||
return { isValid: true }
|
||||
}
|
||||
@@ -58,27 +65,6 @@ export function clearDragHighlights(): void {
|
||||
document.body.style.cursor = ''
|
||||
}
|
||||
|
||||
/**
|
||||
* Selects nodes by their IDs after paste/duplicate operations.
|
||||
* Defers selection to next animation frame to allow displayNodes to sync from store first.
|
||||
* This is necessary because the component uses controlled state (nodes={displayNodes})
|
||||
* and newly added blocks need time to propagate through the store → derivedNodes → displayNodes cycle.
|
||||
*/
|
||||
export function selectNodesDeferred(
|
||||
nodeIds: string[],
|
||||
setDisplayNodes: (updater: (nodes: Node[]) => Node[]) => void
|
||||
): void {
|
||||
const idsSet = new Set(nodeIds)
|
||||
requestAnimationFrame(() => {
|
||||
setDisplayNodes((nodes) =>
|
||||
nodes.map((node) => ({
|
||||
...node,
|
||||
selected: idsSet.has(node.id),
|
||||
}))
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
interface BlockData {
|
||||
height?: number
|
||||
data?: {
|
||||
@@ -179,3 +165,26 @@ export function computeParentUpdateEntries(
|
||||
}
|
||||
})
|
||||
}

/**
 * Resolves parent-child selection conflicts by deselecting children whose parent is also selected.
 */
export function resolveParentChildSelectionConflicts(
  nodes: Node[],
  blocks: Record<string, { data?: { parentId?: string } }>
): Node[] {
  const selectedIds = new Set(nodes.filter((n) => n.selected).map((n) => n.id))

  let hasConflict = false
  const resolved = nodes.map((n) => {
    if (!n.selected) return n
    const parentId = n.parentId || blocks[n.id]?.data?.parentId
    if (parentId && selectedIds.has(parentId)) {
      hasConflict = true
      return { ...n, selected: false }
    }
    return n
  })

  return hasConflict ? resolved : nodes
}

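Not part of the diff: a simplified sketch of the conflict rule above, using an assumed minimal node shape rather than the real ReactFlow `Node` type.

```ts
// Minimal illustration: a child is deselected whenever its parent is part of
// the same selection, so dragging the parent does not double-move the child.
type MiniNode = { id: string; selected?: boolean; parentId?: string }

function dropChildrenOfSelectedParents(nodes: MiniNode[]): MiniNode[] {
  const selected = new Set(nodes.filter((n) => n.selected).map((n) => n.id))
  return nodes.map((n) =>
    n.selected && n.parentId && selected.has(n.parentId) ? { ...n, selected: false } : n
  )
}

// 'loop-1' and its child 'block-a' are both selected; only the parent survives.
console.log(
  dropChildrenOfSelectedParents([
    { id: 'loop-1', selected: true },
    { id: 'block-a', selected: true, parentId: 'loop-1' },
    { id: 'block-b', selected: true },
  ])
)
```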
@@ -47,7 +47,7 @@ import {
|
||||
computeClampedPositionUpdates,
|
||||
getClampedPositionForNode,
|
||||
isInEditableElement,
|
||||
selectNodesDeferred,
|
||||
resolveParentChildSelectionConflicts,
|
||||
useAutoLayout,
|
||||
useCurrentWorkflow,
|
||||
useNodeUtilities,
|
||||
@@ -55,6 +55,7 @@ import {
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import { useCanvasContextMenu } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-canvas-context-menu'
|
||||
import {
|
||||
calculateContainerDimensions,
|
||||
clampPositionToContainer,
|
||||
estimateBlockDimensions,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities'
|
||||
@@ -356,6 +357,9 @@ const WorkflowContent = React.memo(() => {
|
||||
new Map()
|
||||
)
|
||||
|
||||
/** Stores node IDs to select on next derivedNodes sync (for paste/duplicate operations). */
|
||||
const pendingSelectionRef = useRef<Set<string> | null>(null)
|
||||
|
||||
/** Re-applies diff markers when blocks change after socket rehydration. */
|
||||
const blocksRef = useRef(blocks)
|
||||
useEffect(() => {
|
||||
@@ -687,6 +691,12 @@ const WorkflowContent = React.memo(() => {
|
||||
return
|
||||
}
|
||||
|
||||
// Set pending selection before adding blocks - sync effect will apply it (accumulates for rapid pastes)
|
||||
pendingSelectionRef.current = new Set([
|
||||
...(pendingSelectionRef.current ?? []),
|
||||
...pastedBlocksArray.map((b) => b.id),
|
||||
])
|
||||
|
||||
collaborativeBatchAddBlocks(
|
||||
pastedBlocksArray,
|
||||
pastedEdges,
|
||||
@@ -694,11 +704,6 @@ const WorkflowContent = React.memo(() => {
|
||||
pastedParallels,
|
||||
pastedSubBlockValues
|
||||
)
|
||||
|
||||
selectNodesDeferred(
|
||||
pastedBlocksArray.map((b) => b.id),
|
||||
setDisplayNodes
|
||||
)
|
||||
}, [
|
||||
hasClipboard,
|
||||
clipboard,
|
||||
@@ -735,6 +740,12 @@ const WorkflowContent = React.memo(() => {
|
||||
return
|
||||
}
|
||||
|
||||
// Set pending selection before adding blocks - sync effect will apply it (accumulates for rapid pastes)
|
||||
pendingSelectionRef.current = new Set([
|
||||
...(pendingSelectionRef.current ?? []),
|
||||
...pastedBlocksArray.map((b) => b.id),
|
||||
])
|
||||
|
||||
collaborativeBatchAddBlocks(
|
||||
pastedBlocksArray,
|
||||
pastedEdges,
|
||||
@@ -742,11 +753,6 @@ const WorkflowContent = React.memo(() => {
|
||||
pastedParallels,
|
||||
pastedSubBlockValues
|
||||
)
|
||||
|
||||
selectNodesDeferred(
|
||||
pastedBlocksArray.map((b) => b.id),
|
||||
setDisplayNodes
|
||||
)
|
||||
}, [
|
||||
contextMenuBlocks,
|
||||
copyBlocks,
|
||||
@@ -880,6 +886,12 @@ const WorkflowContent = React.memo(() => {
|
||||
return
|
||||
}
|
||||
|
||||
// Set pending selection before adding blocks - sync effect will apply it (accumulates for rapid pastes)
|
||||
pendingSelectionRef.current = new Set([
|
||||
...(pendingSelectionRef.current ?? []),
|
||||
...pastedBlocks.map((b) => b.id),
|
||||
])
|
||||
|
||||
collaborativeBatchAddBlocks(
|
||||
pastedBlocks,
|
||||
pasteData.edges,
|
||||
@@ -887,11 +899,6 @@ const WorkflowContent = React.memo(() => {
|
||||
pasteData.parallels,
|
||||
pasteData.subBlockValues
|
||||
)
|
||||
|
||||
selectNodesDeferred(
|
||||
pastedBlocks.map((b) => b.id),
|
||||
setDisplayNodes
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -987,6 +994,14 @@ const WorkflowContent = React.memo(() => {
|
||||
const handleId = conditionHandles[0].getAttribute('data-handleid')
|
||||
if (handleId) return handleId
|
||||
}
|
||||
} else if (block.type === 'router_v2') {
|
||||
const routerHandles = document.querySelectorAll(
|
||||
`[data-nodeid^="${block.id}"][data-handleid^="router-"]`
|
||||
)
|
||||
if (routerHandles.length > 0) {
|
||||
const handleId = routerHandles[0].getAttribute('data-handleid')
|
||||
if (handleId) return handleId
|
||||
}
|
||||
} else if (block.type === 'loop') {
|
||||
return 'loop-end-source'
|
||||
} else if (block.type === 'parallel') {
|
||||
@@ -1121,17 +1136,18 @@ const WorkflowContent = React.memo(() => {
|
||||
)
|
||||
|
||||
/**
|
||||
* Checks if adding a trigger block would violate constraints and shows notification if so.
|
||||
* Checks if adding a block would violate constraints (triggers or single-instance blocks)
|
||||
* and shows notification if so.
|
||||
* @returns true if validation failed (caller should return early), false if ok to proceed
|
||||
*/
|
||||
const checkTriggerConstraints = useCallback(
|
||||
(blockType: string): boolean => {
|
||||
const issue = TriggerUtils.getTriggerAdditionIssue(blocks, blockType)
|
||||
if (issue) {
|
||||
const triggerIssue = TriggerUtils.getTriggerAdditionIssue(blocks, blockType)
|
||||
if (triggerIssue) {
|
||||
const message =
|
||||
issue.issue === 'legacy'
|
||||
triggerIssue.issue === 'legacy'
|
||||
? 'Cannot add new trigger blocks when a legacy Start block exists. Available in newer workflows.'
|
||||
: `A workflow can only have one ${issue.triggerName} trigger block. Please remove the existing one before adding a new one.`
|
||||
: `A workflow can only have one ${triggerIssue.triggerName} trigger block. Please remove the existing one before adding a new one.`
|
||||
addNotification({
|
||||
level: 'error',
|
||||
message,
|
||||
@@ -1139,6 +1155,17 @@ const WorkflowContent = React.memo(() => {
|
||||
})
|
||||
return true
|
||||
}
|
||||
|
||||
const singleInstanceIssue = TriggerUtils.getSingleInstanceBlockIssue(blocks, blockType)
|
||||
if (singleInstanceIssue) {
|
||||
addNotification({
|
||||
level: 'error',
|
||||
message: `A workflow can only have one ${singleInstanceIssue.blockName} block. Please remove the existing one before adding a new one.`,
|
||||
workflowId: activeWorkflowId || undefined,
|
||||
})
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
},
|
||||
[blocks, addNotification, activeWorkflowId]
|
||||
@@ -1934,15 +1961,27 @@ const WorkflowContent = React.memo(() => {
|
||||
}, [isShiftPressed])
|
||||
|
||||
useEffect(() => {
|
||||
// Preserve selection state when syncing from derivedNodes
|
||||
// Check for pending selection (from paste/duplicate), otherwise preserve existing selection
|
||||
const pendingSelection = pendingSelectionRef.current
|
||||
pendingSelectionRef.current = null
|
||||
|
||||
setDisplayNodes((currentNodes) => {
|
||||
if (pendingSelection) {
|
||||
// Apply pending selection and resolve parent-child conflicts
|
||||
const withSelection = derivedNodes.map((node) => ({
|
||||
...node,
|
||||
selected: pendingSelection.has(node.id),
|
||||
}))
|
||||
return resolveParentChildSelectionConflicts(withSelection, blocks)
|
||||
}
|
||||
// Preserve existing selection state
|
||||
const selectedIds = new Set(currentNodes.filter((n) => n.selected).map((n) => n.id))
|
||||
return derivedNodes.map((node) => ({
|
||||
...node,
|
||||
selected: selectedIds.has(node.id),
|
||||
}))
|
||||
})
|
||||
}, [derivedNodes])
|
||||
}, [derivedNodes, blocks])
|
||||
|
||||
/** Handles ActionBar remove-from-subflow events. */
|
||||
useEffect(() => {
|
||||
@@ -2017,10 +2056,17 @@ const WorkflowContent = React.memo(() => {
|
||||
window.removeEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener)
|
||||
}, [blocks, edgesForDisplay, getNodeAbsolutePosition, collaborativeBatchUpdateParent])
|
||||
|
||||
/** Handles node position changes - updates local state for smooth drag, syncs to store only on drag end. */
|
||||
const onNodesChange = useCallback((changes: NodeChange[]) => {
|
||||
setDisplayNodes((nds) => applyNodeChanges(changes, nds))
|
||||
}, [])
|
||||
/** Handles node changes - applies changes and resolves parent-child selection conflicts. */
|
||||
const onNodesChange = useCallback(
|
||||
(changes: NodeChange[]) => {
|
||||
setDisplayNodes((nds) => {
|
||||
const updated = applyNodeChanges(changes, nds)
|
||||
const hasSelectionChange = changes.some((c) => c.type === 'select')
|
||||
return hasSelectionChange ? resolveParentChildSelectionConflicts(updated, blocks) : updated
|
||||
})
|
||||
},
|
||||
[blocks]
|
||||
)
|
||||
|
||||
/**
|
||||
* Updates container dimensions in displayNodes during drag.
|
||||
@@ -2035,28 +2081,13 @@ const WorkflowContent = React.memo(() => {
|
||||
const childNodes = currentNodes.filter((n) => n.parentId === parentId)
|
||||
if (childNodes.length === 0) return currentNodes
|
||||
|
||||
let maxRight = 0
|
||||
let maxBottom = 0
|
||||
|
||||
childNodes.forEach((node) => {
|
||||
const childPositions = childNodes.map((node) => {
|
||||
const nodePosition = node.id === draggedNodeId ? draggedNodePosition : node.position
|
||||
const { width: nodeWidth, height: nodeHeight } = getBlockDimensions(node.id)
|
||||
|
||||
maxRight = Math.max(maxRight, nodePosition.x + nodeWidth)
|
||||
maxBottom = Math.max(maxBottom, nodePosition.y + nodeHeight)
|
||||
const { width, height } = getBlockDimensions(node.id)
|
||||
return { x: nodePosition.x, y: nodePosition.y, width, height }
|
||||
})
|
||||
|
||||
const newWidth = Math.max(
|
||||
CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
|
||||
CONTAINER_DIMENSIONS.LEFT_PADDING + maxRight + CONTAINER_DIMENSIONS.RIGHT_PADDING
|
||||
)
|
||||
const newHeight = Math.max(
|
||||
CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
|
||||
CONTAINER_DIMENSIONS.HEADER_HEIGHT +
|
||||
CONTAINER_DIMENSIONS.TOP_PADDING +
|
||||
maxBottom +
|
||||
CONTAINER_DIMENSIONS.BOTTOM_PADDING
|
||||
)
|
||||
const { width: newWidth, height: newHeight } = calculateContainerDimensions(childPositions)
|
||||
|
||||
return currentNodes.map((node) => {
|
||||
if (node.id === parentId) {
|
||||
@@ -2824,30 +2855,42 @@ const WorkflowContent = React.memo(() => {
|
||||
}, [isShiftPressed])
|
||||
|
||||
const onSelectionEnd = useCallback(() => {
|
||||
requestAnimationFrame(() => setIsSelectionDragActive(false))
|
||||
}, [])
|
||||
requestAnimationFrame(() => {
|
||||
setIsSelectionDragActive(false)
|
||||
setDisplayNodes((nodes) => resolveParentChildSelectionConflicts(nodes, blocks))
|
||||
})
|
||||
}, [blocks])
|
||||
|
||||
/** Captures initial positions when selection drag starts (for marquee-selected nodes). */
|
||||
const onSelectionDragStart = useCallback(
|
||||
(_event: React.MouseEvent, nodes: Node[]) => {
|
||||
// Capture the parent ID of the first node as reference (they should all be in the same context)
|
||||
if (nodes.length > 0) {
|
||||
const firstNodeParentId = blocks[nodes[0].id]?.data?.parentId || null
|
||||
setDragStartParentId(firstNodeParentId)
|
||||
}
|
||||
|
||||
// Capture all selected nodes' positions for undo/redo
|
||||
// Filter to nodes that won't be deselected (exclude children whose parent is selected)
|
||||
const nodeIds = new Set(nodes.map((n) => n.id))
|
||||
const effectiveNodes = nodes.filter((n) => {
|
||||
const parentId = blocks[n.id]?.data?.parentId
|
||||
return !parentId || !nodeIds.has(parentId)
|
||||
})
|
||||
|
||||
// Capture positions for undo/redo before applying display changes
|
||||
multiNodeDragStartRef.current.clear()
|
||||
nodes.forEach((n) => {
|
||||
const block = blocks[n.id]
|
||||
if (block) {
|
||||
effectiveNodes.forEach((n) => {
|
||||
const blk = blocks[n.id]
|
||||
if (blk) {
|
||||
multiNodeDragStartRef.current.set(n.id, {
|
||||
x: n.position.x,
|
||||
y: n.position.y,
|
||||
parentId: block.data?.parentId,
|
||||
parentId: blk.data?.parentId,
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// Apply visual deselection of children
|
||||
setDisplayNodes((allNodes) => resolveParentChildSelectionConflicts(allNodes, blocks))
|
||||
},
|
||||
[blocks]
|
||||
)
|
||||
@@ -2883,7 +2926,6 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
eligibleNodes.forEach((node) => {
|
||||
const absolutePos = getNodeAbsolutePosition(node.id)
|
||||
const block = blocks[node.id]
|
||||
const width = BLOCK_DIMENSIONS.FIXED_WIDTH
|
||||
const height = Math.max(
|
||||
node.height || BLOCK_DIMENSIONS.MIN_HEIGHT,
|
||||
@@ -3109,13 +3151,11 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
/**
|
||||
* Handles node click to select the node in ReactFlow.
|
||||
* This ensures clicking anywhere on a block (not just the drag handle)
|
||||
* selects it for delete/backspace and multi-select operations.
|
||||
* Parent-child conflict resolution happens automatically in onNodesChange.
|
||||
*/
|
||||
const handleNodeClick = useCallback(
|
||||
(event: React.MouseEvent, node: Node) => {
|
||||
const isMultiSelect = event.shiftKey || event.metaKey || event.ctrlKey
|
||||
|
||||
setNodes((nodes) =>
|
||||
nodes.map((n) => ({
|
||||
...n,
|
||||
@@ -3342,8 +3382,6 @@ const WorkflowContent = React.memo(() => {
|
||||
<LazyChat />
|
||||
</Suspense>
|
||||
|
||||
<DiffControls />
|
||||
|
||||
{/* Context Menus */}
|
||||
<BlockContextMenu
|
||||
isOpen={isBlockMenuOpen}
|
||||
@@ -3399,6 +3437,8 @@ const WorkflowContent = React.memo(() => {
|
||||
<Panel />
|
||||
</div>
|
||||
|
||||
<DiffControls />
|
||||
|
||||
<Terminal />
|
||||
|
||||
{oauthModal && (
|
||||
|
||||
@@ -38,7 +38,7 @@ function WorkflowPreviewSubflowInner({ data }: NodeProps<WorkflowPreviewSubflowD
|
||||
|
||||
return (
|
||||
<div
|
||||
className='relative select-none rounded-[8px] border border-[var(--border)]'
|
||||
className='relative select-none rounded-[8px] border border-[var(--border-1)]'
|
||||
style={{
|
||||
width,
|
||||
height,
|
||||
|
||||
@@ -163,7 +163,7 @@ function AddMembersModal({
|
||||
className='flex items-center gap-[10px] rounded-[4px] px-[8px] py-[6px] hover:bg-[var(--surface-2)]'
|
||||
>
|
||||
<Checkbox checked={isSelected} />
|
||||
<Avatar size='xs'>
|
||||
<Avatar size='sm'>
|
||||
{member.user?.image && (
|
||||
<AvatarImage src={member.user.image} alt={name} />
|
||||
)}
|
||||
@@ -663,7 +663,7 @@ export function AccessControl() {
|
||||
return (
|
||||
<div key={member.id} className='flex items-center justify-between'>
|
||||
<div className='flex flex-1 items-center gap-[12px]'>
|
||||
<Avatar size='sm'>
|
||||
<Avatar size='md'>
|
||||
{member.userImage && <AvatarImage src={member.userImage} alt={name} />}
|
||||
<AvatarFallback
|
||||
style={{
|
||||
|
||||
@@ -434,12 +434,10 @@ export function CredentialSets() {
|
||||
filteredOwnedSets.length === 0 &&
|
||||
!hasNoContent
|
||||
|
||||
// Early returns AFTER all hooks
|
||||
if (membershipsLoading || invitationsLoading) {
|
||||
return <CredentialSetsSkeleton />
|
||||
}
|
||||
|
||||
// Detail view for a polling group
|
||||
if (viewingSet) {
|
||||
const activeMembers = members.filter((m) => m.status === 'active')
|
||||
const totalCount = activeMembers.length + pendingInvitations.length
|
||||
@@ -529,7 +527,7 @@ export function CredentialSets() {
|
||||
return (
|
||||
<div key={member.id} className='flex items-center justify-between'>
|
||||
<div className='flex flex-1 items-center gap-[12px]'>
|
||||
<Avatar size='sm'>
|
||||
<Avatar size='md'>
|
||||
{member.userImage && (
|
||||
<AvatarImage src={member.userImage} alt={name} />
|
||||
)}
|
||||
@@ -583,7 +581,7 @@ export function CredentialSets() {
|
||||
return (
|
||||
<div key={invitation.id} className='flex items-center justify-between'>
|
||||
<div className='flex flex-1 items-center gap-[12px]'>
|
||||
<Avatar size='sm'>
|
||||
<Avatar size='md'>
|
||||
<AvatarFallback
|
||||
style={{ background: getUserColor(email) }}
|
||||
className='border-0 text-white'
|
||||
|
||||
@@ -1,12 +1,41 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { Check } from 'lucide-react'
|
||||
import {
|
||||
Popover,
|
||||
PopoverAnchor,
|
||||
PopoverBackButton,
|
||||
PopoverContent,
|
||||
PopoverDivider,
|
||||
PopoverFolder,
|
||||
PopoverItem,
|
||||
} from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { WORKFLOW_COLORS } from '@/lib/workflows/colors'

/**
 * Validates a hex color string.
 * Accepts 3 or 6 character hex codes with or without #.
 */
function isValidHex(hex: string): boolean {
  const cleaned = hex.replace('#', '')
  return /^[0-9A-Fa-f]{3}$|^[0-9A-Fa-f]{6}$/.test(cleaned)
}

/**
 * Normalizes a hex color to lowercase 6-character format with #.
 */
function normalizeHex(hex: string): string {
  let cleaned = hex.replace('#', '').toLowerCase()
  if (cleaned.length === 3) {
    cleaned = cleaned
      .split('')
      .map((c) => c + c)
      .join('')
  }
  return `#${cleaned}`
}

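Not part of the diff: a quick standalone illustration of the two helpers above (copied into local functions so the snippet runs on its own).

```ts
// Standalone copies of the validation and normalization logic, for illustration.
const isHex = (hex: string) => /^[0-9A-Fa-f]{3}$|^[0-9A-Fa-f]{6}$/.test(hex.replace('#', ''))
const toHex6 = (hex: string) => {
  let c = hex.replace('#', '').toLowerCase()
  if (c.length === 3) c = c.split('').map((ch) => ch + ch).join('')
  return `#${c}`
}

console.log(isHex('#ABC'), isHex('12345'))    // true, false (5 digits is rejected)
console.log(toHex6('#ABC'), toHex6('6366F1')) // '#aabbcc', '#6366f1'
```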
interface ContextMenuProps {
|
||||
/**
|
||||
@@ -53,6 +82,14 @@ interface ContextMenuProps {
|
||||
* Callback when delete is clicked
|
||||
*/
|
||||
onDelete: () => void
|
||||
/**
|
||||
* Callback when color is changed
|
||||
*/
|
||||
onColorChange?: (color: string) => void
|
||||
/**
|
||||
* Current workflow color (for showing selected state)
|
||||
*/
|
||||
currentColor?: string
|
||||
/**
|
||||
* Whether to show the open in new tab option (default: false)
|
||||
* Set to true for items that can be opened in a new tab
|
||||
@@ -83,11 +120,21 @@ interface ContextMenuProps {
|
||||
* Set to true for items that can be exported (like workspaces)
|
||||
*/
|
||||
showExport?: boolean
|
||||
/**
|
||||
* Whether to show the change color option (default: false)
|
||||
* Set to true for workflows to allow color customization
|
||||
*/
|
||||
showColorChange?: boolean
|
||||
/**
|
||||
* Whether the export option is disabled (default: false)
|
||||
* Set to true when user lacks permissions
|
||||
*/
|
||||
disableExport?: boolean
|
||||
/**
|
||||
* Whether the change color option is disabled (default: false)
|
||||
* Set to true when user lacks permissions
|
||||
*/
|
||||
disableColorChange?: boolean
|
||||
/**
|
||||
* Whether the rename option is disabled (default: false)
|
||||
* Set to true when user lacks permissions
|
||||
@@ -134,19 +181,76 @@ export function ContextMenu({
|
||||
onDuplicate,
|
||||
onExport,
|
||||
onDelete,
|
||||
onColorChange,
|
||||
currentColor,
|
||||
showOpenInNewTab = false,
|
||||
showRename = true,
|
||||
showCreate = false,
|
||||
showCreateFolder = false,
|
||||
showDuplicate = true,
|
||||
showExport = false,
|
||||
showColorChange = false,
|
||||
disableExport = false,
|
||||
disableColorChange = false,
|
||||
disableRename = false,
|
||||
disableDuplicate = false,
|
||||
disableDelete = false,
|
||||
disableCreate = false,
|
||||
disableCreateFolder = false,
|
||||
}: ContextMenuProps) {
|
||||
const [hexInput, setHexInput] = useState(currentColor || '#ffffff')
|
||||
|
||||
// Sync hexInput when currentColor changes (e.g., opening menu on different workflow)
|
||||
useEffect(() => {
|
||||
setHexInput(currentColor || '#ffffff')
|
||||
}, [currentColor])
|
||||
|
||||
const canSubmitHex = useMemo(() => {
|
||||
if (!isValidHex(hexInput)) return false
|
||||
const normalized = normalizeHex(hexInput)
|
||||
if (currentColor && normalized.toLowerCase() === currentColor.toLowerCase()) return false
|
||||
return true
|
||||
}, [hexInput, currentColor])
|
||||
|
||||
const handleHexSubmit = useCallback(() => {
|
||||
if (!canSubmitHex || !onColorChange) return
|
||||
|
||||
const normalized = normalizeHex(hexInput)
|
||||
onColorChange(normalized)
|
||||
setHexInput(normalized)
|
||||
}, [hexInput, canSubmitHex, onColorChange])
|
||||
|
||||
const handleHexKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent<HTMLInputElement>) => {
|
||||
if (e.key === 'Enter') {
|
||||
e.preventDefault()
|
||||
handleHexSubmit()
|
||||
}
|
||||
},
|
||||
[handleHexSubmit]
|
||||
)
|
||||
|
||||
const handleHexChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
let value = e.target.value.trim()
|
||||
if (value && !value.startsWith('#')) {
|
||||
value = `#${value}`
|
||||
}
|
||||
value = value.slice(0, 1) + value.slice(1).replace(/[^0-9a-fA-F]/g, '')
|
||||
setHexInput(value.slice(0, 7))
|
||||
}, [])
|
||||
|
||||
const handleHexFocus = useCallback((e: React.FocusEvent<HTMLInputElement>) => {
|
||||
e.target.select()
|
||||
}, [])
|
||||
|
||||
const hasNavigationSection = showOpenInNewTab && onOpenInNewTab
|
||||
const hasEditSection =
|
||||
(showRename && onRename) ||
|
||||
(showCreate && onCreate) ||
|
||||
(showCreateFolder && onCreateFolder) ||
|
||||
(showColorChange && onColorChange)
|
||||
const hasCopySection = (showDuplicate && onDuplicate) || (showExport && onExport)
|
||||
|
||||
return (
|
||||
<Popover
|
||||
open={isOpen}
|
||||
@@ -164,10 +268,21 @@ export function ContextMenu({
|
||||
height: '1px',
|
||||
}}
|
||||
/>
|
||||
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
|
||||
<PopoverContent
|
||||
ref={menuRef}
|
||||
align='start'
|
||||
side='bottom'
|
||||
sideOffset={4}
|
||||
onPointerDownOutside={(e) => e.preventDefault()}
|
||||
onInteractOutside={(e) => e.preventDefault()}
|
||||
>
|
||||
{/* Back button - shown only when in a folder */}
|
||||
<PopoverBackButton />
|
||||
|
||||
{/* Navigation actions */}
|
||||
{showOpenInNewTab && onOpenInNewTab && (
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
onClick={() => {
|
||||
onOpenInNewTab()
|
||||
onClose()
|
||||
@@ -176,11 +291,12 @@ export function ContextMenu({
|
||||
Open in new tab
|
||||
</PopoverItem>
|
||||
)}
|
||||
{showOpenInNewTab && onOpenInNewTab && <PopoverDivider />}
|
||||
{hasNavigationSection && (hasEditSection || hasCopySection) && <PopoverDivider rootOnly />}
|
||||
|
||||
{/* Edit and create actions */}
|
||||
{showRename && onRename && (
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
disabled={disableRename}
|
||||
onClick={() => {
|
||||
onRename()
|
||||
@@ -192,6 +308,7 @@ export function ContextMenu({
|
||||
)}
|
||||
{showCreate && onCreate && (
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
disabled={disableCreate}
|
||||
onClick={() => {
|
||||
onCreate()
|
||||
@@ -203,6 +320,7 @@ export function ContextMenu({
|
||||
)}
|
||||
{showCreateFolder && onCreateFolder && (
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
disabled={disableCreateFolder}
|
||||
onClick={() => {
|
||||
onCreateFolder()
|
||||
@@ -212,11 +330,72 @@ export function ContextMenu({
|
||||
Create folder
|
||||
</PopoverItem>
|
||||
)}
|
||||
{showColorChange && onColorChange && (
|
||||
<PopoverFolder
|
||||
id='color-picker'
|
||||
title='Change color'
|
||||
expandOnHover
|
||||
className={disableColorChange ? 'pointer-events-none opacity-50' : ''}
|
||||
>
|
||||
<div className='flex w-[140px] flex-col gap-[8px] p-[2px]'>
|
||||
{/* Preset colors */}
|
||||
<div className='grid grid-cols-6 gap-[4px]'>
|
||||
{WORKFLOW_COLORS.map(({ color, name }) => (
|
||||
<button
|
||||
key={color}
|
||||
type='button'
|
||||
title={name}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
setHexInput(color)
|
||||
}}
|
||||
className={cn(
|
||||
'h-[20px] w-[20px] rounded-[4px]',
|
||||
hexInput.toLowerCase() === color.toLowerCase() && 'ring-1 ring-white'
|
||||
)}
|
||||
style={{ backgroundColor: color }}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
|
||||
{/* Hex input */}
|
||||
<div className='flex items-center gap-[4px]'>
|
||||
<div
|
||||
className='h-[20px] w-[20px] flex-shrink-0 rounded-[4px]'
|
||||
style={{
|
||||
backgroundColor: isValidHex(hexInput) ? normalizeHex(hexInput) : '#ffffff',
|
||||
}}
|
||||
/>
|
||||
<input
|
||||
type='text'
|
||||
value={hexInput}
|
||||
onChange={handleHexChange}
|
||||
onKeyDown={handleHexKeyDown}
|
||||
onFocus={handleHexFocus}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
className='h-[20px] min-w-0 flex-1 rounded-[4px] bg-[#363636] px-[6px] text-[11px] text-white uppercase caret-white focus:outline-none'
|
||||
/>
|
||||
<button
|
||||
type='button'
|
||||
disabled={!canSubmitHex}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
handleHexSubmit()
|
||||
}}
|
||||
className='flex h-[20px] w-[20px] flex-shrink-0 items-center justify-center rounded-[4px] bg-[var(--brand-tertiary-2)] text-white disabled:opacity-40'
|
||||
>
|
||||
<Check className='h-[12px] w-[12px]' />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</PopoverFolder>
|
||||
)}
|
||||
|
||||
{/* Copy and export actions */}
|
||||
{(showDuplicate || showExport) && <PopoverDivider />}
|
||||
{hasEditSection && hasCopySection && <PopoverDivider rootOnly />}
|
||||
{showDuplicate && onDuplicate && (
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
disabled={disableDuplicate}
|
||||
onClick={() => {
|
||||
onDuplicate()
|
||||
@@ -228,6 +407,7 @@ export function ContextMenu({
|
||||
)}
|
||||
{showExport && onExport && (
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
disabled={disableExport}
|
||||
onClick={() => {
|
||||
onExport()
|
||||
@@ -239,8 +419,9 @@ export function ContextMenu({
|
||||
)}
|
||||
|
||||
{/* Destructive action */}
|
||||
<PopoverDivider />
|
||||
{(hasNavigationSection || hasEditSection || hasCopySection) && <PopoverDivider rootOnly />}
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
disabled={disableDelete}
|
||||
onClick={() => {
|
||||
onDelete()
|
||||
|
||||
@@ -3,8 +3,9 @@
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import clsx from 'clsx'
|
||||
import { ChevronRight, Folder, FolderOpen } from 'lucide-react'
|
||||
import { ChevronRight, Folder, FolderOpen, MoreHorizontal } from 'lucide-react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { getNextWorkflowColor } from '@/lib/workflows/colors'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { ContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/context-menu/context-menu'
|
||||
import { DeleteModal } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/delete-modal/delete-modal'
|
||||
@@ -19,14 +20,12 @@ import {
|
||||
useCanDelete,
|
||||
useDeleteFolder,
|
||||
useDuplicateFolder,
|
||||
useExportFolder,
|
||||
} from '@/app/workspace/[workspaceId]/w/hooks'
|
||||
import { useCreateFolder, useUpdateFolder } from '@/hooks/queries/folders'
|
||||
import { useCreateWorkflow } from '@/hooks/queries/workflows'
|
||||
import type { FolderTreeNode } from '@/stores/folders/types'
|
||||
import {
|
||||
generateCreativeWorkflowName,
|
||||
getNextWorkflowColor,
|
||||
} from '@/stores/workflows/registry/utils'
|
||||
import { generateCreativeWorkflowName } from '@/stores/workflows/registry/utils'
|
||||
|
||||
const logger = createLogger('FolderItem')
|
||||
|
||||
@@ -59,23 +58,24 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
const { canDeleteFolder } = useCanDelete({ workspaceId })
|
||||
const canDelete = useMemo(() => canDeleteFolder(folder.id), [canDeleteFolder, folder.id])
|
||||
|
||||
// Delete modal state
|
||||
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
|
||||
|
||||
// Delete folder hook
|
||||
const { isDeleting, handleDeleteFolder } = useDeleteFolder({
|
||||
workspaceId,
|
||||
getFolderIds: () => folder.id,
|
||||
folderIds: folder.id,
|
||||
onSuccess: () => setIsDeleteModalOpen(false),
|
||||
})
|
||||
|
||||
// Duplicate folder hook
|
||||
const { handleDuplicateFolder } = useDuplicateFolder({
|
||||
workspaceId,
|
||||
getFolderIds: () => folder.id,
|
||||
folderIds: folder.id,
|
||||
})
|
||||
|
||||
const { isExporting, hasWorkflows, handleExportFolder } = useExportFolder({
|
||||
workspaceId,
|
||||
folderId: folder.id,
|
||||
})
|
||||
|
||||
// Folder expand hook - must be declared before callbacks that use expandFolder
|
||||
const {
|
||||
isExpanded,
|
||||
handleToggleExpanded,
|
||||
@@ -92,7 +92,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
*/
|
||||
const handleCreateWorkflowInFolder = useCallback(async () => {
|
||||
try {
|
||||
// Generate name and color upfront for optimistic updates
|
||||
const name = generateCreativeWorkflowName()
|
||||
const color = getNextWorkflowColor()
|
||||
|
||||
@@ -105,15 +104,12 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
|
||||
if (result.id) {
|
||||
router.push(`/workspace/${workspaceId}/w/${result.id}`)
|
||||
// Expand the parent folder so the new workflow is visible
|
||||
expandFolder()
|
||||
// Scroll to the newly created workflow
|
||||
window.dispatchEvent(
|
||||
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
// Error already handled by mutation's onError callback
|
||||
logger.error('Failed to create workflow in folder:', error)
|
||||
}
|
||||
}, [createWorkflowMutation, workspaceId, folder.id, router, expandFolder])
|
||||
@@ -130,9 +126,7 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
parentId: folder.id,
|
||||
})
|
||||
if (result.id) {
|
||||
// Expand the parent folder so the new folder is visible
|
||||
expandFolder()
|
||||
// Scroll to the newly created folder
|
||||
window.dispatchEvent(
|
||||
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
|
||||
)
|
||||
@@ -149,7 +143,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
*/
|
||||
const onDragStart = useCallback(
|
||||
(e: React.DragEvent) => {
|
||||
// Don't start drag if editing
|
||||
if (isEditing) {
|
||||
e.preventDefault()
|
||||
return
|
||||
@@ -161,21 +154,19 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
[folder.id]
|
||||
)
|
||||
|
||||
// Item drag hook
|
||||
const { isDragging, shouldPreventClickRef, handleDragStart, handleDragEnd } = useItemDrag({
|
||||
onDragStart,
|
||||
})
|
||||
|
||||
// Context menu hook
|
||||
const {
|
||||
isOpen: isContextMenuOpen,
|
||||
position,
|
||||
menuRef,
|
||||
handleContextMenu,
|
||||
closeMenu,
|
||||
preventDismiss,
|
||||
} = useContextMenu()
|
||||
|
||||
// Rename hook
|
||||
const {
|
||||
isEditing,
|
||||
editValue,
|
||||
@@ -242,6 +233,39 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
[isEditing, handleRenameKeyDown, handleExpandKeyDown]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handle more button pointerdown - prevents click-outside dismissal when toggling
|
||||
*/
|
||||
const handleMorePointerDown = useCallback(() => {
|
||||
if (isContextMenuOpen) {
|
||||
preventDismiss()
|
||||
}
|
||||
}, [isContextMenuOpen, preventDismiss])
|
||||
|
||||
/**
|
||||
* Handle more button click - toggles context menu at button position
|
||||
*/
|
||||
const handleMoreClick = useCallback(
|
||||
(e: React.MouseEvent<HTMLButtonElement>) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
|
||||
if (isContextMenuOpen) {
|
||||
closeMenu()
|
||||
return
|
||||
}
|
||||
|
||||
const rect = e.currentTarget.getBoundingClientRect()
|
||||
handleContextMenu({
|
||||
preventDefault: () => {},
|
||||
stopPropagation: () => {},
|
||||
clientX: rect.right,
|
||||
clientY: rect.top,
|
||||
} as React.MouseEvent)
|
||||
},
|
||||
[isContextMenuOpen, closeMenu, handleContextMenu]
|
||||
)
|
||||
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
@@ -303,12 +327,22 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
spellCheck='false'
|
||||
/>
|
||||
) : (
|
||||
<span
|
||||
className='truncate font-medium text-[var(--text-tertiary)] group-hover:text-[var(--text-primary)]'
|
||||
onDoubleClick={handleDoubleClick}
|
||||
>
|
||||
{folder.name}
|
||||
</span>
|
||||
<>
|
||||
<span
|
||||
className='min-w-0 flex-1 truncate font-medium text-[var(--text-tertiary)] group-hover:text-[var(--text-primary)]'
|
||||
onDoubleClick={handleDoubleClick}
|
||||
>
|
||||
{folder.name}
|
||||
</span>
|
||||
<button
|
||||
type='button'
|
||||
onPointerDown={handleMorePointerDown}
|
||||
onClick={handleMoreClick}
|
||||
className='flex h-[18px] w-[18px] flex-shrink-0 items-center justify-center rounded-[4px] opacity-0 transition-opacity hover:bg-[var(--surface-7)] group-hover:opacity-100'
|
||||
>
|
||||
<MoreHorizontal className='h-[14px] w-[14px] text-[var(--text-tertiary)]' />
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -322,13 +356,16 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
|
||||
onCreate={handleCreateWorkflowInFolder}
|
||||
onCreateFolder={handleCreateFolderInFolder}
|
||||
onDuplicate={handleDuplicateFolder}
|
||||
onExport={handleExportFolder}
|
||||
onDelete={() => setIsDeleteModalOpen(true)}
|
||||
showCreate={true}
|
||||
showCreateFolder={true}
|
||||
showExport={true}
|
||||
disableRename={!userPermissions.canEdit}
|
||||
disableCreate={!userPermissions.canEdit || createWorkflowMutation.isPending}
|
||||
disableCreateFolder={!userPermissions.canEdit || createFolderMutation.isPending}
|
||||
disableDuplicate={!userPermissions.canEdit}
|
||||
disableDuplicate={!userPermissions.canEdit || !hasWorkflows}
|
||||
disableExport={!userPermissions.canEdit || isExporting || !hasWorkflows}
|
||||
disableDelete={!userPermissions.canEdit || !canDelete}
|
||||
/>
|
||||
|
||||
|
||||
@@ -1,15 +1,24 @@
|
||||
'use client'
|
||||
|
||||
import { type CSSProperties, useEffect, useMemo, useState } from 'react'
|
||||
import Image from 'next/image'
|
||||
import { Tooltip } from '@/components/emcn'
|
||||
import { type CSSProperties, useEffect, useMemo } from 'react'
|
||||
import { Avatar, AvatarFallback, AvatarImage, Tooltip } from '@/components/emcn'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { getUserColor } from '@/lib/workspaces/colors'
|
||||
import { useSocket } from '@/app/workspace/providers/socket-provider'
|
||||
import { SIDEBAR_WIDTH } from '@/stores/constants'
|
||||
import { useSidebarStore } from '@/stores/sidebar/store'

/**
 * Avatar display configuration for responsive layout.
 */
const AVATAR_CONFIG = {
  MIN_COUNT: 3,
  MAX_COUNT: 12,
  WIDTH_PER_AVATAR: 20,
} as const

interface AvatarsProps {
|
||||
workflowId: string
|
||||
maxVisible?: number
|
||||
/**
|
||||
* Callback fired when the presence visibility changes.
|
||||
* Used by parent components to adjust layout (e.g., text truncation spacing).
|
||||
@@ -30,45 +39,29 @@ interface UserAvatarProps {
|
||||
}
|
||||
|
||||
/**
|
||||
* Individual user avatar with error handling for image loading.
|
||||
* Individual user avatar using emcn Avatar component.
|
||||
* Falls back to colored circle with initials if image fails to load.
|
||||
*/
|
||||
function UserAvatar({ user, index }: UserAvatarProps) {
|
||||
const [imageError, setImageError] = useState(false)
|
||||
const color = getUserColor(user.userId)
|
||||
const initials = user.userName ? user.userName.charAt(0).toUpperCase() : '?'
|
||||
const hasAvatar = Boolean(user.avatarUrl) && !imageError
|
||||
|
||||
// Reset error state when avatar URL changes
|
||||
useEffect(() => {
|
||||
setImageError(false)
|
||||
}, [user.avatarUrl])
|
||||
|
||||
const avatarElement = (
|
||||
<div
|
||||
className='relative flex h-[14px] w-[14px] flex-shrink-0 cursor-default items-center justify-center overflow-hidden rounded-full font-semibold text-[7px] text-white'
|
||||
style={
|
||||
{
|
||||
background: hasAvatar ? undefined : color,
|
||||
zIndex: 10 - index,
|
||||
} as CSSProperties
|
||||
}
|
||||
>
|
||||
{hasAvatar && user.avatarUrl ? (
|
||||
<Image
|
||||
<Avatar size='xs' style={{ zIndex: index + 1 } as CSSProperties}>
|
||||
{user.avatarUrl && (
|
||||
<AvatarImage
|
||||
src={user.avatarUrl}
|
||||
alt={user.userName ? `${user.userName}'s avatar` : 'User avatar'}
|
||||
fill
|
||||
sizes='14px'
|
||||
className='object-cover'
|
||||
referrerPolicy='no-referrer'
|
||||
unoptimized
|
||||
onError={() => setImageError(true)}
|
||||
/>
|
||||
) : (
|
||||
initials
|
||||
)}
|
||||
</div>
|
||||
<AvatarFallback
|
||||
style={{ background: color }}
|
||||
className='border-0 font-semibold text-[7px] text-white'
|
||||
>
|
||||
{initials}
|
||||
</AvatarFallback>
|
||||
</Avatar>
|
||||
)
|
||||
|
||||
if (user.userName) {
|
||||
@@ -92,14 +85,26 @@ function UserAvatar({ user, index }: UserAvatarProps) {
|
||||
* @param props - Component props
|
||||
* @returns Avatar stack for workflow presence
|
||||
*/
|
||||
export function Avatars({ workflowId, maxVisible = 3, onPresenceChange }: AvatarsProps) {
|
||||
export function Avatars({ workflowId, onPresenceChange }: AvatarsProps) {
|
||||
const { presenceUsers, currentWorkflowId } = useSocket()
|
||||
const { data: session } = useSession()
|
||||
const currentUserId = session?.user?.id
|
||||
const sidebarWidth = useSidebarStore((state) => state.sidebarWidth)
|
||||
|
||||
/**
|
||||
* Only show presence for the currently active workflow
|
||||
* Filter out the current user from the list
|
||||
* Calculate max visible avatars based on sidebar width.
|
||||
* Scales between MIN_COUNT and MAX_COUNT as sidebar expands.
|
||||
*/
|
||||
const maxVisible = useMemo(() => {
|
||||
const widthDelta = sidebarWidth - SIDEBAR_WIDTH.MIN
|
||||
const additionalAvatars = Math.floor(widthDelta / AVATAR_CONFIG.WIDTH_PER_AVATAR)
|
||||
const calculated = AVATAR_CONFIG.MIN_COUNT + additionalAvatars
|
||||
return Math.max(AVATAR_CONFIG.MIN_COUNT, Math.min(AVATAR_CONFIG.MAX_COUNT, calculated))
|
||||
}, [sidebarWidth])
|
||||
|
||||
/**
|
||||
* Only show presence for the currently active workflow.
|
||||
* Filter out the current user from the list.
|
||||
*/
|
||||
const workflowUsers = useMemo(() => {
|
||||
if (currentWorkflowId !== workflowId) {
|
||||
@@ -122,7 +127,6 @@ export function Avatars({ workflowId, maxVisible = 3, onPresenceChange }: Avatar
|
||||
return { visibleUsers: visible, overflowCount: overflow }
|
||||
}, [workflowUsers, maxVisible])
|
||||
|
||||
// Notify parent when avatars are present or not
|
||||
useEffect(() => {
|
||||
const hasAnyAvatars = visibleUsers.length > 0
|
||||
if (typeof onPresenceChange === 'function') {
|
||||
@@ -135,26 +139,25 @@ export function Avatars({ workflowId, maxVisible = 3, onPresenceChange }: Avatar
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='-space-x-1 ml-[-8px] flex items-center'>
|
||||
{visibleUsers.map((user, index) => (
|
||||
<UserAvatar key={user.socketId} user={user} index={index} />
|
||||
))}
|
||||
|
||||
<div className='-space-x-1 flex items-center'>
|
||||
{overflowCount > 0 && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<div
|
||||
className='relative flex h-[14px] w-[14px] flex-shrink-0 cursor-default items-center justify-center overflow-hidden rounded-full bg-[#404040] font-semibold text-[7px] text-white'
|
||||
style={{ zIndex: 10 - visibleUsers.length } as CSSProperties}
|
||||
>
|
||||
+{overflowCount}
|
||||
</div>
|
||||
<Avatar size='xs' style={{ zIndex: 0 } as CSSProperties}>
|
||||
<AvatarFallback className='border-0 bg-[#404040] font-semibold text-[7px] text-white'>
|
||||
+{overflowCount}
|
||||
</AvatarFallback>
|
||||
</Avatar>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='bottom'>
|
||||
{overflowCount} more user{overflowCount > 1 ? 's' : ''}
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
|
||||
{visibleUsers.map((user, index) => (
|
||||
<UserAvatar key={user.socketId} user={user} index={overflowCount > 0 ? index + 1 : index} />
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import clsx from 'clsx'
|
||||
import { MoreHorizontal } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
@@ -45,19 +46,15 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const isSelected = selectedWorkflows.has(workflow.id)
|
||||
|
||||
// Can delete check hook
|
||||
const { canDeleteWorkflows } = useCanDelete({ workspaceId })
|
||||
|
||||
// Delete modal state
|
||||
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
|
||||
const [workflowIdsToDelete, setWorkflowIdsToDelete] = useState<string[]>([])
|
||||
const [deleteModalNames, setDeleteModalNames] = useState<string | string[]>('')
|
||||
const [canDeleteCaptured, setCanDeleteCaptured] = useState(true)
|
||||
|
||||
// Presence avatars state
|
||||
const [hasAvatars, setHasAvatars] = useState(false)
|
||||
|
||||
// Capture selection at right-click time (using ref to persist across renders)
|
||||
const capturedSelectionRef = useRef<{
|
||||
workflowIds: string[]
|
||||
workflowNames: string | string[]
|
||||
@@ -67,7 +64,6 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
|
||||
* Handle opening the delete modal - uses pre-captured selection state
|
||||
*/
|
||||
const handleOpenDeleteModal = useCallback(() => {
|
||||
// Use the selection captured at right-click time
|
||||
if (capturedSelectionRef.current) {
|
||||
setWorkflowIdsToDelete(capturedSelectionRef.current.workflowIds)
|
||||
setDeleteModalNames(capturedSelectionRef.current.workflowNames)
|
||||
@@ -75,39 +71,39 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
|
||||
}
|
||||
}, [])
|
||||
|
||||
// Delete workflow hook
|
||||
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
|
||||
workspaceId,
|
||||
getWorkflowIds: () => workflowIdsToDelete,
|
||||
workflowIds: workflowIdsToDelete,
|
||||
isActive: (workflowIds) => workflowIds.includes(params.workflowId as string),
|
||||
onSuccess: () => setIsDeleteModalOpen(false),
|
||||
})
|
||||
|
||||
// Duplicate workflow hook
|
||||
const { handleDuplicateWorkflow } = useDuplicateWorkflow({
|
||||
workspaceId,
|
||||
getWorkflowIds: () => {
|
||||
// Use the selection captured at right-click time
|
||||
return capturedSelectionRef.current?.workflowIds || []
|
||||
},
|
||||
})
|
||||
const { handleDuplicateWorkflow: duplicateWorkflow } = useDuplicateWorkflow({ workspaceId })
|
||||
|
||||
// Export workflow hook
|
||||
const { handleExportWorkflow } = useExportWorkflow({
|
||||
workspaceId,
|
||||
getWorkflowIds: () => {
|
||||
// Use the selection captured at right-click time
|
||||
return capturedSelectionRef.current?.workflowIds || []
|
||||
},
|
||||
})
|
||||
const { handleExportWorkflow: exportWorkflow } = useExportWorkflow({ workspaceId })
|
||||
const handleDuplicateWorkflow = useCallback(() => {
|
||||
const workflowIds = capturedSelectionRef.current?.workflowIds || []
|
||||
if (workflowIds.length === 0) return
|
||||
duplicateWorkflow(workflowIds)
|
||||
}, [duplicateWorkflow])
|
||||
|
||||
const handleExportWorkflow = useCallback(() => {
|
||||
const workflowIds = capturedSelectionRef.current?.workflowIds || []
|
||||
if (workflowIds.length === 0) return
|
||||
exportWorkflow(workflowIds)
|
||||
}, [exportWorkflow])
|
||||
|
||||
/**
|
||||
* Opens the workflow in a new browser tab
|
||||
*/
|
||||
const handleOpenInNewTab = useCallback(() => {
|
||||
window.open(`/workspace/${workspaceId}/w/${workflow.id}`, '_blank')
|
||||
}, [workspaceId, workflow.id])
|
||||
|
||||
const handleColorChange = useCallback(
|
||||
(color: string) => {
|
||||
updateWorkflow(workflow.id, { color })
|
||||
},
|
||||
[workflow.id, updateWorkflow]
|
||||
)
|
||||
|
||||
/**
|
||||
* Drag start handler - handles workflow dragging with multi-selection support
|
||||
*
|
||||
@@ -115,7 +111,6 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
|
||||
*/
|
||||
const onDragStart = useCallback(
|
||||
(e: React.DragEvent) => {
|
||||
// Don't start drag if editing
|
||||
if (isEditing) {
|
||||
e.preventDefault()
|
||||
return
|
||||
@@ -130,20 +125,48 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
|
||||
[isSelected, selectedWorkflows, workflow.id]
|
||||
)
|
||||
|
||||
// Item drag hook
|
||||
const { isDragging, shouldPreventClickRef, handleDragStart, handleDragEnd } = useItemDrag({
|
||||
onDragStart,
|
||||
})
|
||||
|
||||
// Context menu hook
|
||||
const {
|
||||
isOpen: isContextMenuOpen,
|
||||
position,
|
||||
menuRef,
|
||||
handleContextMenu: handleContextMenuBase,
|
||||
closeMenu,
|
||||
preventDismiss,
|
||||
} = useContextMenu()
|
||||
|
||||
/**
|
||||
* Captures selection state for context menu operations
|
||||
*/
|
||||
const captureSelectionState = useCallback(() => {
|
||||
const { selectedWorkflows: currentSelection, selectOnly } = useFolderStore.getState()
|
||||
const isCurrentlySelected = currentSelection.has(workflow.id)
|
||||
|
||||
if (!isCurrentlySelected) {
|
||||
selectOnly(workflow.id)
|
||||
}
|
||||
|
||||
const finalSelection = useFolderStore.getState().selectedWorkflows
|
||||
const finalIsSelected = finalSelection.has(workflow.id)
|
||||
|
||||
const workflowIds =
|
||||
finalIsSelected && finalSelection.size > 1 ? Array.from(finalSelection) : [workflow.id]
|
||||
|
||||
const workflowNames = workflowIds
|
||||
.map((id) => workflows[id]?.name)
|
||||
.filter((name): name is string => !!name)
|
||||
|
||||
capturedSelectionRef.current = {
|
||||
workflowIds,
|
||||
workflowNames: workflowNames.length > 1 ? workflowNames : workflowNames[0],
|
||||
}
|
||||
|
||||
setCanDeleteCaptured(canDeleteWorkflows(workflowIds))
|
||||
}, [workflow.id, workflows, canDeleteWorkflows])
|
||||
|
||||
/**
|
||||
* Handle right-click - ensure proper selection behavior and capture selection state
|
||||
* If right-clicking on an unselected workflow, select only that workflow
|
||||
@@ -151,42 +174,46 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
|
||||
*/
|
||||
const handleContextMenu = useCallback(
|
||||
(e: React.MouseEvent) => {
|
||||
// Check current selection state at time of right-click
|
||||
const { selectedWorkflows: currentSelection, selectOnly } = useFolderStore.getState()
|
||||
const isCurrentlySelected = currentSelection.has(workflow.id)
|
||||
|
||||
// If this workflow is not in the current selection, select only this workflow
|
||||
if (!isCurrentlySelected) {
|
||||
selectOnly(workflow.id)
|
||||
}
|
||||
|
||||
// Capture the selection state at right-click time
|
||||
const finalSelection = useFolderStore.getState().selectedWorkflows
|
||||
const finalIsSelected = finalSelection.has(workflow.id)
|
||||
|
||||
const workflowIds =
|
||||
finalIsSelected && finalSelection.size > 1 ? Array.from(finalSelection) : [workflow.id]
|
||||
|
||||
const workflowNames = workflowIds
|
||||
.map((id) => workflows[id]?.name)
|
||||
.filter((name): name is string => !!name)
|
||||
|
||||
// Store in ref so it persists even if selection changes
|
||||
capturedSelectionRef.current = {
|
||||
workflowIds,
|
||||
workflowNames: workflowNames.length > 1 ? workflowNames : workflowNames[0],
|
||||
}
|
||||
|
||||
// Check if the captured selection can be deleted
|
||||
setCanDeleteCaptured(canDeleteWorkflows(workflowIds))
|
||||
|
||||
// If already selected with multiple selections, keep all selections
|
||||
captureSelectionState()
|
||||
handleContextMenuBase(e)
|
||||
},
|
||||
[workflow.id, workflows, handleContextMenuBase, canDeleteWorkflows]
|
||||
[captureSelectionState, handleContextMenuBase]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handle more button pointerdown - prevents click-outside dismissal when toggling
|
||||
*/
|
||||
const handleMorePointerDown = useCallback(() => {
|
||||
if (isContextMenuOpen) {
|
||||
preventDismiss()
|
||||
}
|
||||
}, [isContextMenuOpen, preventDismiss])
|
||||
|
||||
/**
|
||||
* Handle more button click - toggles context menu at button position
|
||||
*/
|
||||
const handleMoreClick = useCallback(
|
||||
(e: React.MouseEvent<HTMLButtonElement>) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
|
||||
if (isContextMenuOpen) {
|
||||
closeMenu()
|
||||
return
|
||||
}
|
||||
|
||||
captureSelectionState()
|
||||
const rect = e.currentTarget.getBoundingClientRect()
|
||||
handleContextMenuBase({
|
||||
preventDefault: () => {},
|
||||
stopPropagation: () => {},
|
||||
clientX: rect.right,
|
||||
clientY: rect.top,
|
||||
} as React.MouseEvent)
|
||||
},
|
||||
[isContextMenuOpen, closeMenu, captureSelectionState, handleContextMenuBase]
|
||||
)
|
||||
|
||||
// Rename hook
|
||||
const {
|
||||
isEditing,
|
||||
editValue,
|
||||
@@ -233,12 +260,10 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
|
||||
|
||||
const isModifierClick = e.shiftKey || e.metaKey || e.ctrlKey
|
||||
|
||||
// Prevent default link behavior when using modifier keys
|
||||
if (isModifierClick) {
|
||||
e.preventDefault()
|
||||
}
|
||||
|
||||
// Use metaKey (Cmd on Mac) or ctrlKey (Ctrl on Windows/Linux)
|
||||
onWorkflowClick(workflow.id, e.shiftKey, e.metaKey || e.ctrlKey)
|
||||
},
|
||||
[shouldPreventClickRef, workflow.id, onWorkflowClick, isEditing]
|
||||
@@ -309,7 +334,17 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
|
||||
)}
|
||||
</div>
|
||||
{!isEditing && (
|
||||
<Avatars workflowId={workflow.id} maxVisible={3} onPresenceChange={setHasAvatars} />
|
||||
<>
|
||||
<Avatars workflowId={workflow.id} onPresenceChange={setHasAvatars} />
|
||||
<button
|
||||
type='button'
|
||||
onPointerDown={handleMorePointerDown}
|
||||
onClick={handleMoreClick}
|
||||
className='flex h-[18px] w-[18px] flex-shrink-0 items-center justify-center rounded-[4px] opacity-0 transition-opacity hover:bg-[var(--surface-7)] group-hover:opacity-100'
|
||||
>
|
||||
<MoreHorizontal className='h-[14px] w-[14px] text-[var(--text-tertiary)]' />
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
</Link>
|
||||
|
||||
@@ -324,13 +359,17 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
|
||||
onDuplicate={handleDuplicateWorkflow}
|
||||
onExport={handleExportWorkflow}
|
||||
onDelete={handleOpenDeleteModal}
|
||||
onColorChange={handleColorChange}
|
||||
currentColor={workflow.color}
|
||||
showOpenInNewTab={selectedWorkflows.size <= 1}
|
||||
showRename={selectedWorkflows.size <= 1}
|
||||
showDuplicate={true}
|
||||
showExport={true}
|
||||
showColorChange={selectedWorkflows.size <= 1}
|
||||
disableRename={!userPermissions.canEdit}
|
||||
disableDuplicate={!userPermissions.canEdit}
|
||||
disableExport={!userPermissions.canEdit}
|
||||
disableColorChange={!userPermissions.canEdit}
|
||||
disableDelete={!userPermissions.canEdit || !canDeleteCaptured}
|
||||
/>
|
||||
|
||||
|
||||
@@ -9,7 +9,6 @@ import {
|
||||
useDragDrop,
|
||||
useWorkflowSelection,
|
||||
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
import { useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks/use-import-workflow'
|
||||
import { useFolders } from '@/hooks/queries/folders'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import type { FolderTreeNode } from '@/stores/folders/types'
|
||||
@@ -25,15 +24,13 @@ const TREE_SPACING = {
|
||||
interface WorkflowListProps {
|
||||
regularWorkflows: WorkflowMetadata[]
|
||||
isLoading?: boolean
|
||||
isImporting: boolean
|
||||
setIsImporting: (value: boolean) => void
|
||||
handleFileChange: (event: React.ChangeEvent<HTMLInputElement>) => void
|
||||
fileInputRef: React.RefObject<HTMLInputElement | null>
|
||||
scrollContainerRef: React.RefObject<HTMLDivElement | null>
|
||||
}
|
||||
|
||||
/**
|
||||
* WorkflowList component displays workflows organized by folders with drag-and-drop support.
|
||||
* Uses the workflow import hook for handling JSON imports.
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Workflow list with folders and drag-drop support
|
||||
@@ -41,8 +38,7 @@ interface WorkflowListProps {
|
||||
export function WorkflowList({
|
||||
regularWorkflows,
|
||||
isLoading = false,
|
||||
isImporting,
|
||||
setIsImporting,
|
||||
handleFileChange,
|
||||
fileInputRef,
|
||||
scrollContainerRef,
|
||||
}: WorkflowListProps) {
|
||||
@@ -65,9 +61,6 @@ export function WorkflowList({
|
||||
createFolderHeaderHoverHandlers,
|
||||
} = useDragDrop()
|
||||
|
||||
// Workflow import hook
|
||||
const { handleFileChange } = useImportWorkflow({ workspaceId })
|
||||
|
||||
// Set scroll container when ref changes
|
||||
useEffect(() => {
|
||||
if (scrollContainerRef.current) {
|
||||
|
||||
@@ -657,6 +657,7 @@ export function InviteModal({ open, onOpenChange, workspaceName }: InviteModalPr
items={emailItems}
onAdd={(value) => addEmail(value)}
onRemove={removeEmailItem}
onInputChange={() => setErrorMessage(null)}
placeholder={
!userPerms.canAdmin
? 'Only administrators can invite new members'

@@ -27,6 +27,8 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
const [isOpen, setIsOpen] = useState(false)
const [position, setPosition] = useState<ContextMenuPosition>({ x: 0, y: 0 })
const menuRef = useRef<HTMLDivElement>(null)
// Used to prevent click-outside dismissal when trigger is clicked
const dismissPreventedRef = useRef(false)

/**
* Handle right-click event
@@ -55,6 +57,14 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
setIsOpen(false)
}, [])

/**
* Prevent the next click-outside from dismissing the menu.
* Call this on pointerdown of a toggle trigger to allow proper toggle behavior.
*/
const preventDismiss = useCallback(() => {
dismissPreventedRef.current = true
}, [])

/**
* Handle clicks outside the menu to close it
*/
@@ -62,6 +72,11 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
if (!isOpen) return

const handleClickOutside = (e: MouseEvent) => {
// Check if dismissal was prevented (e.g., by toggle trigger's pointerdown)
if (dismissPreventedRef.current) {
dismissPreventedRef.current = false
return
}
if (menuRef.current && !menuRef.current.contains(e.target as Node)) {
closeMenu()
}
@@ -84,5 +99,6 @@ export function useContextMenu({ onContextMenu }: UseContextMenuProps = {}) {
menuRef,
handleContextMenu,
closeMenu,
preventDismiss,
}
}

@@ -1,4 +1,4 @@
import { useCallback, useEffect, useState } from 'react'
import { useCallback, useEffect } from 'react'
import { SIDEBAR_WIDTH } from '@/stores/constants'
import { useSidebarStore } from '@/stores/sidebar/store'

@@ -10,8 +10,7 @@ import { useSidebarStore } from '@/stores/sidebar/store'
* @returns Resize state and handlers
*/
export function useSidebarResize() {
const { setSidebarWidth } = useSidebarStore()
const [isResizing, setIsResizing] = useState(false)
const { setSidebarWidth, isResizing, setIsResizing } = useSidebarStore()

/**
* Handles mouse down on resize handle

@@ -1,13 +1,11 @@
import { useCallback } from 'react'
import { createLogger } from '@sim/logger'
import { useRouter } from 'next/navigation'
import { getNextWorkflowColor } from '@/lib/workflows/colors'
import { useCreateWorkflow, useWorkflows } from '@/hooks/queries/workflows'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import {
generateCreativeWorkflowName,
getNextWorkflowColor,
} from '@/stores/workflows/registry/utils'
import { generateCreativeWorkflowName } from '@/stores/workflows/registry/utils'

const logger = createLogger('useWorkflowOperations')


@@ -2,10 +2,10 @@
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ArrowDown, Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
|
||||
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useParams, usePathname, useRouter } from 'next/navigation'
|
||||
import { Button, FolderPlus, Library, Tooltip } from '@/components/emcn'
|
||||
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
|
||||
@@ -30,6 +30,7 @@ import {
|
||||
import {
|
||||
useDuplicateWorkspace,
|
||||
useExportWorkspace,
|
||||
useImportWorkflow,
|
||||
useImportWorkspace,
|
||||
} from '@/app/workspace/[workspaceId]/w/hooks'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
@@ -85,9 +86,11 @@ export function Sidebar() {
|
||||
const isCollapsed = hasHydrated ? isCollapsedStore : false
|
||||
const isOnWorkflowPage = !!workflowId
|
||||
|
||||
const [isImporting, setIsImporting] = useState(false)
|
||||
const workspaceFileInputRef = useRef<HTMLInputElement>(null)
|
||||
|
||||
const { isImporting, handleFileChange: handleImportFileChange } = useImportWorkflow({
|
||||
workspaceId,
|
||||
})
|
||||
const { isImporting: isImportingWorkspace, handleImportWorkspace: importWorkspace } =
|
||||
useImportWorkspace()
|
||||
const { handleExportWorkspace: exportWorkspace } = useExportWorkspace()
|
||||
@@ -213,7 +216,7 @@ export function Sidebar() {
|
||||
}, [activeNavItemHref])
|
||||
|
||||
const { handleDuplicateWorkspace: duplicateWorkspace } = useDuplicateWorkspace({
|
||||
getWorkspaceId: () => workspaceId,
|
||||
workspaceId,
|
||||
})
|
||||
|
||||
const searchModalWorkflows = useMemo(
|
||||
@@ -565,21 +568,31 @@ export function Sidebar() {
|
||||
Workflows
|
||||
</div>
|
||||
<div className='flex items-center justify-center gap-[10px]'>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='translate-y-[-0.25px] p-[1px]'
|
||||
onClick={handleImportWorkflow}
|
||||
disabled={isImporting || !canEdit}
|
||||
>
|
||||
<ArrowDown className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
<p>{isImporting ? 'Importing workflow...' : 'Import workflow'}</p>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
{isImporting ? (
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='translate-y-[-0.25px] p-[1px]'
|
||||
disabled={!canEdit || isImporting}
|
||||
>
|
||||
<Loader className='h-[14px] w-[14px]' animate />
|
||||
</Button>
|
||||
) : (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='translate-y-[-0.25px] p-[1px]'
|
||||
onClick={handleImportWorkflow}
|
||||
disabled={!canEdit}
|
||||
>
|
||||
<Download className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
<p>Import workflows</p>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
@@ -622,8 +635,7 @@ export function Sidebar() {
|
||||
<WorkflowList
|
||||
regularWorkflows={regularWorkflows}
|
||||
isLoading={isLoading}
|
||||
isImporting={isImporting}
|
||||
setIsImporting={setIsImporting}
|
||||
handleFileChange={handleImportFileChange}
|
||||
fileInputRef={fileInputRef}
|
||||
scrollContainerRef={scrollContainerRef}
|
||||
/>
|
||||
|
||||
@@ -4,6 +4,7 @@ export { useDeleteWorkflow } from './use-delete-workflow'
export { useDuplicateFolder } from './use-duplicate-folder'
export { useDuplicateWorkflow } from './use-duplicate-workflow'
export { useDuplicateWorkspace } from './use-duplicate-workspace'
export { useExportFolder } from './use-export-folder'
export { useExportWorkflow } from './use-export-workflow'
export { useExportWorkspace } from './use-export-workspace'
export { useImportWorkflow } from './use-import-workflow'

@@ -11,10 +11,9 @@ interface UseDeleteFolderProps {
|
||||
*/
|
||||
workspaceId: string
|
||||
/**
|
||||
* Function that returns the folder ID(s) to delete
|
||||
* This function is called when deletion occurs to get fresh selection state
|
||||
* The folder ID(s) to delete
|
||||
*/
|
||||
getFolderIds: () => string | string[]
|
||||
folderIds: string | string[]
|
||||
/**
|
||||
* Optional callback after successful deletion
|
||||
*/
|
||||
@@ -24,17 +23,10 @@ interface UseDeleteFolderProps {
|
||||
/**
|
||||
* Hook for managing folder deletion.
|
||||
*
|
||||
* Handles:
|
||||
* - Single or bulk folder deletion
|
||||
* - Calling delete API for each folder
|
||||
* - Loading state management
|
||||
* - Error handling and logging
|
||||
* - Clearing selection after deletion
|
||||
*
|
||||
* @param props - Hook configuration
|
||||
* @returns Delete folder handlers and state
|
||||
*/
|
||||
export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDeleteFolderProps) {
|
||||
export function useDeleteFolder({ workspaceId, folderIds, onSuccess }: UseDeleteFolderProps) {
|
||||
const deleteFolderMutation = useDeleteFolderMutation()
|
||||
const [isDeleting, setIsDeleting] = useState(false)
|
||||
|
||||
@@ -46,23 +38,18 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
|
||||
return
|
||||
}
|
||||
|
||||
if (!folderIds) {
|
||||
return
|
||||
}
|
||||
|
||||
setIsDeleting(true)
|
||||
try {
|
||||
// Get fresh folder IDs at deletion time
|
||||
const folderIdsOrId = getFolderIds()
|
||||
if (!folderIdsOrId) {
|
||||
return
|
||||
}
|
||||
const folderIdsToDelete = Array.isArray(folderIds) ? folderIds : [folderIds]
|
||||
|
||||
// Normalize to array for consistent handling
|
||||
const folderIdsToDelete = Array.isArray(folderIdsOrId) ? folderIdsOrId : [folderIdsOrId]
|
||||
|
||||
// Delete each folder sequentially
|
||||
for (const folderId of folderIdsToDelete) {
|
||||
await deleteFolderMutation.mutateAsync({ id: folderId, workspaceId })
|
||||
}
|
||||
|
||||
// Clear selection after successful deletion
|
||||
const { clearSelection } = useFolderStore.getState()
|
||||
clearSelection()
|
||||
|
||||
@@ -74,7 +61,7 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
|
||||
} finally {
|
||||
setIsDeleting(false)
|
||||
}
|
||||
}, [getFolderIds, isDeleting, deleteFolderMutation, workspaceId, onSuccess])
|
||||
}, [folderIds, isDeleting, deleteFolderMutation, workspaceId, onSuccess])
|
||||
|
||||
return {
|
||||
isDeleting,
|
||||
|
||||
@@ -12,10 +12,9 @@ interface UseDeleteWorkflowProps {
|
||||
*/
|
||||
workspaceId: string
|
||||
/**
|
||||
* Function that returns the workflow ID(s) to delete
|
||||
* This function is called when deletion occurs to get fresh selection state
|
||||
* Workflow ID(s) to delete
|
||||
*/
|
||||
getWorkflowIds: () => string | string[]
|
||||
workflowIds: string | string[]
|
||||
/**
|
||||
* Whether the active workflow is being deleted
|
||||
* Can be a boolean or a function that receives the workflow IDs
|
||||
@@ -30,20 +29,12 @@ interface UseDeleteWorkflowProps {
|
||||
/**
|
||||
* Hook for managing workflow deletion with navigation logic.
|
||||
*
|
||||
* Handles:
|
||||
* - Single or bulk workflow deletion
|
||||
* - Finding next workflow to navigate to
|
||||
* - Navigating before deletion (if active workflow)
|
||||
* - Removing workflow(s) from registry
|
||||
* - Loading state management
|
||||
* - Error handling and logging
|
||||
*
|
||||
* @param props - Hook configuration
|
||||
* @returns Delete workflow handlers and state
|
||||
*/
|
||||
export function useDeleteWorkflow({
|
||||
workspaceId,
|
||||
getWorkflowIds,
|
||||
workflowIds,
|
||||
isActive = false,
|
||||
onSuccess,
|
||||
}: UseDeleteWorkflowProps) {
|
||||
@@ -59,30 +50,21 @@ export function useDeleteWorkflow({
|
||||
return
|
||||
}
|
||||
|
||||
if (!workflowIds) {
|
||||
return
|
||||
}
|
||||
|
||||
setIsDeleting(true)
|
||||
try {
|
||||
// Get fresh workflow IDs at deletion time
|
||||
const workflowIdsOrId = getWorkflowIds()
|
||||
if (!workflowIdsOrId) {
|
||||
return
|
||||
}
|
||||
const workflowIdsToDelete = Array.isArray(workflowIds) ? workflowIds : [workflowIds]
|
||||
|
||||
// Normalize to array for consistent handling
|
||||
const workflowIdsToDelete = Array.isArray(workflowIdsOrId)
|
||||
? workflowIdsOrId
|
||||
: [workflowIdsOrId]
|
||||
|
||||
// Determine if active workflow is being deleted
|
||||
const isActiveWorkflowBeingDeleted =
|
||||
typeof isActive === 'function' ? isActive(workflowIdsToDelete) : isActive
|
||||
|
||||
// Find next workflow to navigate to (if active workflow is being deleted)
|
||||
const sidebarWorkflows = Object.values(workflows).filter((w) => w.workspaceId === workspaceId)
|
||||
|
||||
// Find which specific workflow is the active one (if any in the deletion list)
|
||||
let activeWorkflowId: string | null = null
|
||||
if (isActiveWorkflowBeingDeleted && typeof isActive === 'function') {
|
||||
// Check each workflow being deleted to find which one is active
|
||||
activeWorkflowId =
|
||||
workflowIdsToDelete.find((id) => isActive([id])) || workflowIdsToDelete[0]
|
||||
} else {
|
||||
@@ -93,13 +75,11 @@ export function useDeleteWorkflow({
|
||||
|
||||
let nextWorkflowId: string | null = null
|
||||
if (isActiveWorkflowBeingDeleted && sidebarWorkflows.length > workflowIdsToDelete.length) {
|
||||
// Find the first workflow that's not being deleted
|
||||
const remainingWorkflows = sidebarWorkflows.filter(
|
||||
(w) => !workflowIdsToDelete.includes(w.id)
|
||||
)
|
||||
|
||||
if (remainingWorkflows.length > 0) {
|
||||
// Try to find the next workflow after the current one
|
||||
const workflowsAfterCurrent = remainingWorkflows.filter((w) => {
|
||||
const idx = sidebarWorkflows.findIndex((sw) => sw.id === w.id)
|
||||
return idx > currentIndex
|
||||
@@ -108,13 +88,11 @@ export function useDeleteWorkflow({
|
||||
if (workflowsAfterCurrent.length > 0) {
|
||||
nextWorkflowId = workflowsAfterCurrent[0].id
|
||||
} else {
|
||||
// Otherwise, use the first remaining workflow
|
||||
nextWorkflowId = remainingWorkflows[0].id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Navigate first if this is the active workflow
|
||||
if (isActiveWorkflowBeingDeleted) {
|
||||
if (nextWorkflowId) {
|
||||
router.push(`/workspace/${workspaceId}/w/${nextWorkflowId}`)
|
||||
@@ -123,10 +101,8 @@ export function useDeleteWorkflow({
|
||||
}
|
||||
}
|
||||
|
||||
// Delete all workflows
|
||||
await Promise.all(workflowIdsToDelete.map((id) => removeWorkflow(id)))
|
||||
|
||||
// Clear selection after successful deletion
|
||||
const { clearSelection } = useFolderStore.getState()
|
||||
clearSelection()
|
||||
|
||||
@@ -138,16 +114,7 @@ export function useDeleteWorkflow({
|
||||
} finally {
|
||||
setIsDeleting(false)
|
||||
}
|
||||
}, [
|
||||
getWorkflowIds,
|
||||
isDeleting,
|
||||
workflows,
|
||||
workspaceId,
|
||||
isActive,
|
||||
router,
|
||||
removeWorkflow,
|
||||
onSuccess,
|
||||
])
|
||||
}, [workflowIds, isDeleting, workflows, workspaceId, isActive, router, removeWorkflow, onSuccess])
|
||||
|
||||
return {
|
||||
isDeleting,
|
||||
|
||||
@@ -7,7 +7,10 @@ const logger = createLogger('useDuplicateFolder')
|
||||
|
||||
interface UseDuplicateFolderProps {
|
||||
workspaceId: string
|
||||
getFolderIds: () => string | string[]
|
||||
/**
|
||||
* The folder ID(s) to duplicate
|
||||
*/
|
||||
folderIds: string | string[]
|
||||
onSuccess?: () => void
|
||||
}
|
||||
|
||||
@@ -17,11 +20,7 @@ interface UseDuplicateFolderProps {
|
||||
* @param props - Hook configuration
|
||||
* @returns Duplicate folder handlers and state
|
||||
*/
|
||||
export function useDuplicateFolder({
|
||||
workspaceId,
|
||||
getFolderIds,
|
||||
onSuccess,
|
||||
}: UseDuplicateFolderProps) {
|
||||
export function useDuplicateFolder({ workspaceId, folderIds, onSuccess }: UseDuplicateFolderProps) {
|
||||
const duplicateFolderMutation = useDuplicateFolderMutation()
|
||||
const [isDuplicating, setIsDuplicating] = useState(false)
|
||||
|
||||
@@ -46,21 +45,17 @@ export function useDuplicateFolder({
|
||||
return
|
||||
}
|
||||
|
||||
if (!folderIds) {
|
||||
return
|
||||
}
|
||||
|
||||
setIsDuplicating(true)
|
||||
try {
|
||||
// Get fresh folder IDs at duplication time
|
||||
const folderIdsOrId = getFolderIds()
|
||||
if (!folderIdsOrId) {
|
||||
return
|
||||
}
|
||||
|
||||
// Normalize to array for consistent handling
|
||||
const folderIdsToDuplicate = Array.isArray(folderIdsOrId) ? folderIdsOrId : [folderIdsOrId]
|
||||
const folderIdsToDuplicate = Array.isArray(folderIds) ? folderIds : [folderIds]
|
||||
|
||||
const duplicatedIds: string[] = []
|
||||
const folderStore = useFolderStore.getState()
|
||||
|
||||
// Duplicate each folder sequentially
|
||||
for (const folderId of folderIdsToDuplicate) {
|
||||
const folder = folderStore.getFolderById(folderId)
|
||||
|
||||
@@ -72,7 +67,6 @@ export function useDuplicateFolder({
|
||||
const siblingNames = new Set(
|
||||
folderStore.getChildFolders(folder.parentId).map((sibling) => sibling.name)
|
||||
)
|
||||
// Avoid colliding with the original folder name
|
||||
siblingNames.add(folder.name)
|
||||
|
||||
const duplicateName = generateDuplicateName(folder.name, siblingNames)
|
||||
@@ -90,7 +84,6 @@ export function useDuplicateFolder({
|
||||
}
|
||||
}
|
||||
|
||||
// Clear selection after successful duplication
|
||||
const { clearSelection } = useFolderStore.getState()
|
||||
clearSelection()
|
||||
|
||||
@@ -107,7 +100,7 @@ export function useDuplicateFolder({
|
||||
setIsDuplicating(false)
|
||||
}
|
||||
}, [
|
||||
getFolderIds,
|
||||
folderIds,
|
||||
generateDuplicateName,
|
||||
isDuplicating,
|
||||
duplicateFolderMutation,
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { useCallback } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import { getNextWorkflowColor } from '@/lib/workflows/colors'
|
||||
import { useDuplicateWorkflowMutation } from '@/hooks/queries/workflows'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { getNextWorkflowColor } from '@/stores/workflows/registry/utils'
|
||||
|
||||
const logger = createLogger('useDuplicateWorkflow')
|
||||
|
||||
@@ -13,11 +13,6 @@ interface UseDuplicateWorkflowProps {
|
||||
* Current workspace ID
|
||||
*/
|
||||
workspaceId: string
|
||||
/**
|
||||
* Function that returns the workflow ID(s) to duplicate
|
||||
* This function is called when duplication occurs to get fresh selection state
|
||||
*/
|
||||
getWorkflowIds: () => string | string[]
|
||||
/**
|
||||
* Optional callback after successful duplication
|
||||
*/
|
||||
@@ -27,89 +22,72 @@ interface UseDuplicateWorkflowProps {
|
||||
/**
|
||||
* Hook for managing workflow duplication with optimistic updates.
|
||||
*
|
||||
* Handles:
|
||||
* - Single or bulk workflow duplication
|
||||
* - Optimistic UI updates (shows new workflow immediately)
|
||||
* - Automatic rollback on failure
|
||||
* - Loading state management
|
||||
* - Error handling and logging
|
||||
* - Clearing selection after duplication
|
||||
* - Navigation to duplicated workflow (single only)
|
||||
*
|
||||
* @param props - Hook configuration
|
||||
* @returns Duplicate workflow handlers and state
|
||||
*/
|
||||
export function useDuplicateWorkflow({
|
||||
workspaceId,
|
||||
getWorkflowIds,
|
||||
onSuccess,
|
||||
}: UseDuplicateWorkflowProps) {
|
||||
export function useDuplicateWorkflow({ workspaceId, onSuccess }: UseDuplicateWorkflowProps) {
|
||||
const router = useRouter()
|
||||
const { workflows } = useWorkflowRegistry()
|
||||
const duplicateMutation = useDuplicateWorkflowMutation()
|
||||
|
||||
/**
|
||||
* Duplicate the workflow(s)
|
||||
* @param workflowIds - The workflow ID(s) to duplicate
|
||||
*/
|
||||
const handleDuplicateWorkflow = useCallback(async () => {
|
||||
if (duplicateMutation.isPending) {
|
||||
return
|
||||
}
|
||||
const handleDuplicateWorkflow = useCallback(
|
||||
async (workflowIds: string | string[]) => {
|
||||
if (!workflowIds || (Array.isArray(workflowIds) && workflowIds.length === 0)) {
|
||||
return
|
||||
}
|
||||
|
||||
// Get fresh workflow IDs at duplication time
|
||||
const workflowIdsOrId = getWorkflowIds()
|
||||
if (!workflowIdsOrId) {
|
||||
return
|
||||
}
|
||||
if (duplicateMutation.isPending) {
|
||||
return
|
||||
}
|
||||
|
||||
// Normalize to array for consistent handling
|
||||
const workflowIdsToDuplicate = Array.isArray(workflowIdsOrId)
|
||||
? workflowIdsOrId
|
||||
: [workflowIdsOrId]
|
||||
const workflowIdsToDuplicate = Array.isArray(workflowIds) ? workflowIds : [workflowIds]
|
||||
|
||||
const duplicatedIds: string[] = []
|
||||
const duplicatedIds: string[] = []
|
||||
|
||||
try {
|
||||
// Duplicate each workflow sequentially
|
||||
for (const sourceId of workflowIdsToDuplicate) {
|
||||
const sourceWorkflow = workflows[sourceId]
|
||||
if (!sourceWorkflow) {
|
||||
logger.warn(`Workflow ${sourceId} not found, skipping`)
|
||||
continue
|
||||
try {
|
||||
for (const sourceId of workflowIdsToDuplicate) {
|
||||
const sourceWorkflow = workflows[sourceId]
|
||||
if (!sourceWorkflow) {
|
||||
logger.warn(`Workflow ${sourceId} not found, skipping`)
|
||||
continue
|
||||
}
|
||||
|
||||
const result = await duplicateMutation.mutateAsync({
|
||||
workspaceId,
|
||||
sourceId,
|
||||
name: `${sourceWorkflow.name} (Copy)`,
|
||||
description: sourceWorkflow.description,
|
||||
color: getNextWorkflowColor(),
|
||||
folderId: sourceWorkflow.folderId,
|
||||
})
|
||||
|
||||
duplicatedIds.push(result.id)
|
||||
}
|
||||
|
||||
const result = await duplicateMutation.mutateAsync({
|
||||
workspaceId,
|
||||
sourceId,
|
||||
name: `${sourceWorkflow.name} (Copy)`,
|
||||
description: sourceWorkflow.description,
|
||||
color: getNextWorkflowColor(),
|
||||
folderId: sourceWorkflow.folderId,
|
||||
const { clearSelection } = useFolderStore.getState()
|
||||
clearSelection()
|
||||
|
||||
logger.info('Workflow(s) duplicated successfully', {
|
||||
workflowIds: workflowIdsToDuplicate,
|
||||
duplicatedIds,
|
||||
})
|
||||
|
||||
duplicatedIds.push(result.id)
|
||||
if (duplicatedIds.length === 1) {
|
||||
router.push(`/workspace/${workspaceId}/w/${duplicatedIds[0]}`)
|
||||
}
|
||||
|
||||
onSuccess?.()
|
||||
} catch (error) {
|
||||
logger.error('Error duplicating workflow(s):', { error })
|
||||
throw error
|
||||
}
|
||||
|
||||
// Clear selection after successful duplication
|
||||
const { clearSelection } = useFolderStore.getState()
|
||||
clearSelection()
|
||||
|
||||
logger.info('Workflow(s) duplicated successfully', {
|
||||
workflowIds: workflowIdsToDuplicate,
|
||||
duplicatedIds,
|
||||
})
|
||||
|
||||
// Navigate to duplicated workflow if single duplication
|
||||
if (duplicatedIds.length === 1) {
|
||||
router.push(`/workspace/${workspaceId}/w/${duplicatedIds[0]}`)
|
||||
}
|
||||
|
||||
onSuccess?.()
|
||||
} catch (error) {
|
||||
logger.error('Error duplicating workflow(s):', { error })
|
||||
throw error
|
||||
}
|
||||
}, [getWorkflowIds, duplicateMutation, workflows, workspaceId, router, onSuccess])
|
||||
},
|
||||
[duplicateMutation, workflows, workspaceId, router, onSuccess]
|
||||
)
|
||||
|
||||
return {
|
||||
isDuplicating: duplicateMutation.isPending,
|
||||
|
||||
@@ -6,10 +6,9 @@ const logger = createLogger('useDuplicateWorkspace')
|
||||
|
||||
interface UseDuplicateWorkspaceProps {
|
||||
/**
|
||||
* Function that returns the workspace ID to duplicate
|
||||
* This function is called when duplication occurs to get fresh state
|
||||
* The workspace ID to duplicate
|
||||
*/
|
||||
getWorkspaceId: () => string | null
|
||||
workspaceId: string | null
|
||||
/**
|
||||
* Optional callback after successful duplication
|
||||
*/
|
||||
@@ -19,17 +18,10 @@ interface UseDuplicateWorkspaceProps {
|
||||
/**
|
||||
* Hook for managing workspace duplication.
|
||||
*
|
||||
* Handles:
|
||||
* - Workspace duplication
|
||||
* - Calling duplicate API
|
||||
* - Loading state management
|
||||
* - Error handling and logging
|
||||
* - Navigation to duplicated workspace
|
||||
*
|
||||
* @param props - Hook configuration
|
||||
* @returns Duplicate workspace handlers and state
|
||||
*/
|
||||
export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicateWorkspaceProps) {
|
||||
export function useDuplicateWorkspace({ workspaceId, onSuccess }: UseDuplicateWorkspaceProps) {
|
||||
const router = useRouter()
|
||||
const [isDuplicating, setIsDuplicating] = useState(false)
|
||||
|
||||
@@ -38,18 +30,12 @@ export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicat
|
||||
*/
|
||||
const handleDuplicateWorkspace = useCallback(
|
||||
async (workspaceName: string) => {
|
||||
if (isDuplicating) {
|
||||
if (isDuplicating || !workspaceId) {
|
||||
return
|
||||
}
|
||||
|
||||
setIsDuplicating(true)
|
||||
try {
|
||||
// Get fresh workspace ID at duplication time
|
||||
const workspaceId = getWorkspaceId()
|
||||
if (!workspaceId) {
|
||||
return
|
||||
}
|
||||
|
||||
const response = await fetch(`/api/workspaces/${workspaceId}/duplicate`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
@@ -70,7 +56,6 @@ export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicat
|
||||
workflowsCount: duplicatedWorkspace.workflowsCount,
|
||||
})
|
||||
|
||||
// Navigate to duplicated workspace
|
||||
router.push(`/workspace/${duplicatedWorkspace.id}/w`)
|
||||
|
||||
onSuccess?.()
|
||||
@@ -83,7 +68,7 @@ export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicat
|
||||
setIsDuplicating(false)
|
||||
}
|
||||
},
|
||||
[getWorkspaceId, isDuplicating, router, onSuccess]
|
||||
[workspaceId, isDuplicating, router, onSuccess]
|
||||
)
|
||||
|
||||
return {
|
||||
|
||||
@@ -0,0 +1,237 @@
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import JSZip from 'jszip'
|
||||
import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer'
|
||||
import { useFolderStore } from '@/stores/folders/store'
|
||||
import type { WorkflowFolder } from '@/stores/folders/types'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
|
||||
import type { Variable } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('useExportFolder')
|
||||
|
||||
interface UseExportFolderProps {
|
||||
/**
|
||||
* Current workspace ID
|
||||
*/
|
||||
workspaceId: string
|
||||
/**
|
||||
* The folder ID to export
|
||||
*/
|
||||
folderId: string
|
||||
/**
|
||||
* Optional callback after successful export
|
||||
*/
|
||||
onSuccess?: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively collects all workflow IDs within a folder and its subfolders.
|
||||
*
|
||||
* @param folderId - The folder ID to collect workflows from
|
||||
* @param workflows - All workflows in the workspace
|
||||
* @param folders - All folders in the workspace
|
||||
* @returns Array of workflow IDs
|
||||
*/
|
||||
function collectWorkflowsInFolder(
|
||||
folderId: string,
|
||||
workflows: Record<string, WorkflowMetadata>,
|
||||
folders: Record<string, WorkflowFolder>
|
||||
): string[] {
|
||||
const workflowIds: string[] = []
|
||||
|
||||
for (const workflow of Object.values(workflows)) {
|
||||
if (workflow.folderId === folderId) {
|
||||
workflowIds.push(workflow.id)
|
||||
}
|
||||
}
|
||||
|
||||
for (const folder of Object.values(folders)) {
|
||||
if (folder.parentId === folderId) {
|
||||
const childWorkflowIds = collectWorkflowsInFolder(folder.id, workflows, folders)
|
||||
workflowIds.push(...childWorkflowIds)
|
||||
}
|
||||
}
|
||||
|
||||
return workflowIds
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for managing folder export to ZIP.
|
||||
*
|
||||
* @param props - Hook configuration
|
||||
* @returns Export folder handlers and state
|
||||
*/
|
||||
export function useExportFolder({ workspaceId, folderId, onSuccess }: UseExportFolderProps) {
|
||||
const { workflows } = useWorkflowRegistry()
|
||||
const { folders } = useFolderStore()
|
||||
const [isExporting, setIsExporting] = useState(false)
|
||||
|
||||
/**
|
||||
* Check if the folder has any workflows (recursively)
|
||||
*/
|
||||
const hasWorkflows = useMemo(() => {
|
||||
if (!folderId) return false
|
||||
return collectWorkflowsInFolder(folderId, workflows, folders).length > 0
|
||||
}, [folderId, workflows, folders])
|
||||
|
||||
/**
|
||||
* Download file helper
|
||||
*/
|
||||
const downloadFile = (content: Blob, filename: string, mimeType = 'application/zip') => {
|
||||
try {
|
||||
const blob = content instanceof Blob ? content : new Blob([content], { type: mimeType })
|
||||
const url = URL.createObjectURL(blob)
|
||||
const a = document.createElement('a')
|
||||
a.href = url
|
||||
a.download = filename
|
||||
document.body.appendChild(a)
|
||||
a.click()
|
||||
document.body.removeChild(a)
|
||||
URL.revokeObjectURL(url)
|
||||
} catch (error) {
|
||||
logger.error('Failed to download file:', error)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Export all workflows in the folder (including nested subfolders) to ZIP
|
||||
*/
|
||||
const handleExportFolder = useCallback(async () => {
|
||||
if (isExporting) {
|
||||
return
|
||||
}
|
||||
|
||||
if (!folderId) {
|
||||
logger.warn('No folder ID provided for export')
|
||||
return
|
||||
}
|
||||
|
||||
setIsExporting(true)
|
||||
try {
|
||||
const folderStore = useFolderStore.getState()
|
||||
const folder = folderStore.getFolderById(folderId)
|
||||
|
||||
if (!folder) {
|
||||
logger.warn('Folder not found for export', { folderId })
|
||||
return
|
||||
}
|
||||
|
||||
const workflowIdsToExport = collectWorkflowsInFolder(folderId, workflows, folderStore.folders)
|
||||
|
||||
if (workflowIdsToExport.length === 0) {
|
||||
logger.warn('No workflows found in folder to export', { folderId, folderName: folder.name })
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('Starting folder export', {
|
||||
folderId,
|
||||
folderName: folder.name,
|
||||
workflowCount: workflowIdsToExport.length,
|
||||
})
|
||||
|
||||
const exportedWorkflows: Array<{ name: string; content: string }> = []
|
||||
|
||||
for (const workflowId of workflowIdsToExport) {
|
||||
try {
|
||||
const workflow = workflows[workflowId]
|
||||
if (!workflow) {
|
||||
logger.warn(`Workflow ${workflowId} not found in registry`)
|
||||
continue
|
||||
}
|
||||
|
||||
const workflowResponse = await fetch(`/api/workflows/${workflowId}`)
|
||||
if (!workflowResponse.ok) {
|
||||
logger.error(`Failed to fetch workflow ${workflowId}`)
|
||||
continue
|
||||
}
|
||||
|
||||
const { data: workflowData } = await workflowResponse.json()
|
||||
if (!workflowData?.state) {
|
||||
logger.warn(`Workflow ${workflowId} has no state`)
|
||||
continue
|
||||
}
|
||||
|
||||
const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`)
|
||||
let workflowVariables: Record<string, Variable> | undefined
|
||||
if (variablesResponse.ok) {
|
||||
const variablesData = await variablesResponse.json()
|
||||
workflowVariables = variablesData?.data
|
||||
}
|
||||
|
||||
const workflowState = {
|
||||
...workflowData.state,
|
||||
metadata: {
|
||||
name: workflow.name,
|
||||
description: workflow.description,
|
||||
color: workflow.color,
|
||||
exportedAt: new Date().toISOString(),
|
||||
},
|
||||
variables: workflowVariables,
|
||||
}
|
||||
|
||||
const exportState = sanitizeForExport(workflowState)
|
||||
const jsonString = JSON.stringify(exportState, null, 2)
|
||||
|
||||
exportedWorkflows.push({
|
||||
name: workflow.name,
|
||||
content: jsonString,
|
||||
})
|
||||
|
||||
logger.info(`Workflow ${workflowId} exported successfully`)
|
||||
} catch (error) {
|
||||
logger.error(`Failed to export workflow ${workflowId}:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
if (exportedWorkflows.length === 0) {
|
||||
logger.warn('No workflows were successfully exported from folder', {
|
||||
folderId,
|
||||
folderName: folder.name,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const zip = new JSZip()
|
||||
const seenFilenames = new Set<string>()
|
||||
|
||||
for (const exportedWorkflow of exportedWorkflows) {
|
||||
const baseName = exportedWorkflow.name.replace(/[^a-z0-9]/gi, '-')
|
||||
let filename = `${baseName}.json`
|
||||
let counter = 1
|
||||
while (seenFilenames.has(filename.toLowerCase())) {
|
||||
filename = `${baseName}-${counter}.json`
|
||||
counter++
|
||||
}
|
||||
seenFilenames.add(filename.toLowerCase())
|
||||
zip.file(filename, exportedWorkflow.content)
|
||||
}
|
||||
|
||||
const zipBlob = await zip.generateAsync({ type: 'blob' })
|
||||
const zipFilename = `${folder.name.replace(/[^a-z0-9]/gi, '-')}-export.zip`
|
||||
downloadFile(zipBlob, zipFilename, 'application/zip')
|
||||
|
||||
const { clearSelection } = useFolderStore.getState()
|
||||
clearSelection()
|
||||
|
||||
logger.info('Folder exported successfully', {
|
||||
folderId,
|
||||
folderName: folder.name,
|
||||
workflowCount: exportedWorkflows.length,
|
||||
})
|
||||
|
||||
onSuccess?.()
|
||||
} catch (error) {
|
||||
logger.error('Error exporting folder:', { error })
|
||||
throw error
|
||||
} finally {
|
||||
setIsExporting(false)
|
||||
}
|
||||
}, [folderId, isExporting, workflows, folders, onSuccess])
|
||||
|
||||
return {
|
||||
isExporting,
|
||||
hasWorkflows,
|
||||
handleExportFolder,
|
||||
}
|
||||
}
|
||||
@@ -13,11 +13,6 @@ interface UseExportWorkflowProps {
|
||||
* Current workspace ID
|
||||
*/
|
||||
workspaceId: string
|
||||
/**
|
||||
* Function that returns the workflow ID(s) to export
|
||||
* This function is called when export occurs to get fresh selection state
|
||||
*/
|
||||
getWorkflowIds: () => string | string[]
|
||||
/**
|
||||
* Optional callback after successful export
|
||||
*/
|
||||
@@ -27,23 +22,10 @@ interface UseExportWorkflowProps {
|
||||
/**
|
||||
* Hook for managing workflow export to JSON.
|
||||
*
|
||||
* Handles:
|
||||
* - Single or bulk workflow export
|
||||
* - Fetching workflow data and variables from API
|
||||
* - Sanitizing workflow state for export
|
||||
* - Downloading as JSON file(s)
|
||||
* - Loading state management
|
||||
* - Error handling and logging
|
||||
* - Clearing selection after export
|
||||
*
|
||||
* @param props - Hook configuration
|
||||
* @returns Export workflow handlers and state
|
||||
*/
|
||||
export function useExportWorkflow({
|
||||
workspaceId,
|
||||
getWorkflowIds,
|
||||
onSuccess,
|
||||
}: UseExportWorkflowProps) {
|
||||
export function useExportWorkflow({ workspaceId, onSuccess }: UseExportWorkflowProps) {
|
||||
const { workflows } = useWorkflowRegistry()
|
||||
const [isExporting, setIsExporting] = useState(false)
|
||||
|
||||
@@ -75,130 +57,129 @@ export function useExportWorkflow({
|
||||
* - Single workflow: exports as JSON file
|
||||
* - Multiple workflows: exports as ZIP file containing all JSON files
|
||||
* Fetches workflow data from API to support bulk export of non-active workflows
|
||||
* @param workflowIds - The workflow ID(s) to export
|
||||
*/
|
||||
const handleExportWorkflow = useCallback(async () => {
|
||||
if (isExporting) {
|
||||
return
|
||||
}
|
||||
|
||||
setIsExporting(true)
|
||||
try {
|
||||
// Get fresh workflow IDs at export time
|
||||
const workflowIdsOrId = getWorkflowIds()
|
||||
if (!workflowIdsOrId) {
|
||||
const handleExportWorkflow = useCallback(
|
||||
async (workflowIds: string | string[]) => {
|
||||
if (isExporting) {
|
||||
return
|
||||
}
|
||||
|
||||
// Normalize to array for consistent handling
|
||||
const workflowIdsToExport = Array.isArray(workflowIdsOrId)
|
||||
? workflowIdsOrId
|
||||
: [workflowIdsOrId]
|
||||
if (!workflowIds || (Array.isArray(workflowIds) && workflowIds.length === 0)) {
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('Starting workflow export', {
|
||||
workflowIdsToExport,
|
||||
count: workflowIdsToExport.length,
|
||||
})
|
||||
setIsExporting(true)
|
||||
try {
|
||||
const workflowIdsToExport = Array.isArray(workflowIds) ? workflowIds : [workflowIds]
|
||||
|
||||
const exportedWorkflows: Array<{ name: string; content: string }> = []
|
||||
      logger.info('Starting workflow export', {
        workflowIdsToExport,
        count: workflowIdsToExport.length,
      })

      const exportedWorkflows: Array<{ name: string; content: string }> = []

      // Export each workflow
      for (const workflowId of workflowIdsToExport) {
        try {
          const workflow = workflows[workflowId]
          if (!workflow) {
            logger.warn(`Workflow ${workflowId} not found in registry`)
            continue
          }

          // Fetch workflow state from API
          const workflowResponse = await fetch(`/api/workflows/${workflowId}`)
          if (!workflowResponse.ok) {
            logger.error(`Failed to fetch workflow ${workflowId}`)
            continue
          }

          const { data: workflowData } = await workflowResponse.json()
          if (!workflowData?.state) {
            logger.warn(`Workflow ${workflowId} has no state`)
            continue
          }

          // Fetch workflow variables (API returns Record format directly)
          const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`)
          let workflowVariables: Record<string, Variable> | undefined
          if (variablesResponse.ok) {
            const variablesData = await variablesResponse.json()
            workflowVariables = variablesData?.data
          }

          // Prepare export state
          const workflowState = {
            ...workflowData.state,
            metadata: {
              name: workflow.name,
              description: workflow.description,
              color: workflow.color,
              exportedAt: new Date().toISOString(),
            },
            variables: workflowVariables,
          }

          const exportState = sanitizeForExport(workflowState)
          const jsonString = JSON.stringify(exportState, null, 2)

          exportedWorkflows.push({
            name: workflow.name,
            content: jsonString,
          })

          logger.info(`Workflow ${workflowId} exported successfully`)
        } catch (error) {
          logger.error(`Failed to export workflow ${workflowId}:`, error)
        }
      }

      if (exportedWorkflows.length === 0) {
        logger.warn('No workflows were successfully exported')
        return
      }

      // Download as single JSON or ZIP depending on count
      if (exportedWorkflows.length === 1) {
        // Single workflow - download as JSON
        const filename = `${exportedWorkflows[0].name.replace(/[^a-z0-9]/gi, '-')}.json`
        downloadFile(exportedWorkflows[0].content, filename, 'application/json')
      } else {
        // Multiple workflows - download as ZIP
        const zip = new JSZip()
        const seenFilenames = new Set<string>()

        for (const exportedWorkflow of exportedWorkflows) {
          const baseName = exportedWorkflow.name.replace(/[^a-z0-9]/gi, '-')
          let filename = `${baseName}.json`
          let counter = 1
          while (seenFilenames.has(filename.toLowerCase())) {
            filename = `${baseName}-${counter}.json`
            counter++
          }
          seenFilenames.add(filename.toLowerCase())
          zip.file(filename, exportedWorkflow.content)
        }

        const zipBlob = await zip.generateAsync({ type: 'blob' })
        const zipFilename = `workflows-export-${Date.now()}.zip`
        downloadFile(zipBlob, zipFilename, 'application/zip')
      }

      // Clear selection after successful export
      const { clearSelection } = useFolderStore.getState()
      clearSelection()

      logger.info('Workflow(s) exported successfully', {
        workflowIds: workflowIdsToExport,
        count: exportedWorkflows.length,
        format: exportedWorkflows.length === 1 ? 'JSON' : 'ZIP',
      })

      onSuccess?.()
    } catch (error) {
      logger.error('Error exporting workflow(s):', { error })
      throw error
    } finally {
      setIsExporting(false)
    }
  }, [getWorkflowIds, isExporting, workflows, onSuccess])

  return {
    isExporting,
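For context, a minimal sketch of how the export logic above might be triggered from a toolbar button. Only `isExporting` is visible in the hook's return value in this diff, so the hook import path and the `exportWorkflows` handler name are assumptions for illustration only:

// Hypothetical usage sketch - hook path and `exportWorkflows` handler name are assumed.
import { useExportWorkflow } from '@/hooks/use-export-workflow'

export function ExportButton({ onSuccess }: { onSuccess?: () => void }) {
  const { isExporting, exportWorkflows } = useExportWorkflow({ onSuccess })

  return (
    <button type='button' disabled={isExporting} onClick={() => exportWorkflows()}>
      {isExporting ? 'Exporting…' : 'Export selected workflows'}
    </button>
  )
}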
@@ -44,21 +44,18 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
    try {
      logger.info('Exporting workspace', { workspaceId })

      // Fetch all workflows in workspace
      const workflowsResponse = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
      if (!workflowsResponse.ok) {
        throw new Error('Failed to fetch workflows')
      }
      const { data: workflows } = await workflowsResponse.json()

      // Fetch all folders in workspace
      const foldersResponse = await fetch(`/api/folders?workspaceId=${workspaceId}`)
      if (!foldersResponse.ok) {
        throw new Error('Failed to fetch folders')
      }
      const foldersData = await foldersResponse.json()

      // Export each workflow
      const workflowsToExport: WorkflowExportData[] = []

      for (const workflow of workflows) {
@@ -33,6 +33,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
  const createWorkflowMutation = useCreateWorkflow()
  const queryClient = useQueryClient()
  const createFolderMutation = useCreateFolder()
  const clearDiff = useWorkflowDiffStore((state) => state.clearDiff)
  const [isImporting, setIsImporting] = useState(false)

  /**
@@ -48,9 +49,8 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
      }

      const workflowName = extractWorkflowName(content, filename)
      useWorkflowDiffStore.getState().clearDiff()
      clearDiff()

      // Extract color from metadata
      const parsedContent = JSON.parse(content)
      const workflowColor =
        parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'
@@ -63,7 +63,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
      })
      const newWorkflowId = result.id

      // Update workflow color if we extracted one
      if (workflowColor !== '#3972F6') {
        await fetch(`/api/workflows/${newWorkflowId}`, {
          method: 'PATCH',
@@ -72,16 +71,13 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
        })
      }

      // Save workflow state
      await fetch(`/api/workflows/${newWorkflowId}/state`, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(workflowData),
      })

      // Save variables if any (handle both legacy Array and current Record formats)
      if (workflowData.variables) {
        // Convert to Record format for API (handles backwards compatibility with old Array exports)
        const variablesArray = Array.isArray(workflowData.variables)
          ? workflowData.variables
          : Object.values(workflowData.variables)
@@ -114,7 +110,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
      logger.info(`Imported workflow: ${workflowName}`)
      return newWorkflowId
    },
    [createWorkflowMutation, workspaceId]
    [clearDiff, createWorkflowMutation, workspaceId]
  )

  /**
@@ -134,7 +130,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
      const importedWorkflowIds: string[] = []

      if (hasZip && fileArray.length === 1) {
        // Import from ZIP - preserves folder structure
        const zipFile = fileArray[0]
        const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)

@@ -149,7 +144,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
          try {
            let targetFolderId = importFolder.id

            // Recreate nested folder structure
            if (workflow.folderPath.length > 0) {
              const folderPathKey = workflow.folderPath.join('/')

@@ -187,7 +181,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
          }
        }
      } else if (jsonFiles.length > 0) {
        // Import multiple JSON files or single JSON
        const extractedWorkflows = await extractWorkflowsFromFiles(jsonFiles)

        for (const workflow of extractedWorkflows) {
@@ -200,22 +193,21 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
        }
      }

      // Reload workflows and folders to show newly imported ones
      await queryClient.invalidateQueries({ queryKey: workflowKeys.list(workspaceId) })
      await queryClient.invalidateQueries({ queryKey: folderKeys.list(workspaceId) })

      logger.info(`Import complete. Imported ${importedWorkflowIds.length} workflow(s)`)

      // Navigate to first imported workflow if any
      if (importedWorkflowIds.length > 0) {
        router.push(`/workspace/${workspaceId}/w/${importedWorkflowIds[0]}`)
        router.push(
          `/workspace/${workspaceId}/w/${importedWorkflowIds[importedWorkflowIds.length - 1]}`
        )
      }
    } catch (error) {
      logger.error('Failed to import workflows:', error)
    } finally {
      setIsImporting(false)

      // Reset file input
      if (event.target) {
        event.target.value = ''
      }
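As a point of reference, a minimal sketch of how the import handler above might be attached to a file input. The hook name follows the hunk headers above, but its exact returned API is not shown in this diff, so the `handleFileSelect` name and import path are assumptions:

// Hypothetical wiring sketch - the hook's returned handler name and path are assumed.
import { useImportWorkflow } from '@/hooks/use-import-workflow'

export function ImportInput({ workspaceId }: { workspaceId: string }) {
  const { isImporting, handleFileSelect } = useImportWorkflow({ workspaceId })

  return (
    <input
      type='file'
      // The hook handles one or more JSON files, or a single ZIP archive with folder structure.
      accept='.json,.zip'
      multiple
      disabled={isImporting}
      onChange={handleFileSelect}
    />
  )
}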
@@ -21,15 +21,6 @@ interface UseImportWorkspaceProps {
/**
 * Hook for managing workspace import from ZIP files.
 *
 * Handles:
 * - Extracting workflows from ZIP file
 * - Creating new workspace
 * - Recreating folder structure
 * - Importing all workflows with states and variables
 * - Navigation to imported workspace
 * - Loading state management
 * - Error handling and logging
 *
 * @param props - Hook configuration
 * @returns Import workspace handlers and state
 */
@@ -37,6 +28,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
  const router = useRouter()
  const [isImporting, setIsImporting] = useState(false)
  const createFolderMutation = useCreateFolder()
  const clearDiff = useWorkflowDiffStore((state) => state.clearDiff)

  /**
   * Handle workspace import from ZIP file
@@ -56,7 +48,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
    try {
      logger.info('Importing workspace from ZIP')

      // Extract workflows from ZIP
      const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)

      if (extractedWorkflows.length === 0) {
@@ -64,7 +55,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
        return
      }

      // Create new workspace
      const workspaceName = metadata?.workspaceName || zipFile.name.replace(/\.zip$/i, '')
      const createResponse = await fetch('/api/workspaces', {
        method: 'POST',
@@ -81,7 +71,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})

      const folderMap = new Map<string, string>()

      // Import workflows
      for (const workflow of extractedWorkflows) {
        try {
          const { data: workflowData, errors: parseErrors } = parseWorkflowJson(workflow.content)
@@ -91,7 +80,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
            continue
          }

          // Recreate folder structure
          let targetFolderId: string | null = null
          if (workflow.folderPath.length > 0) {
            const folderPathKey = workflow.folderPath.join('/')
@@ -120,14 +108,12 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
          }

          const workflowName = extractWorkflowName(workflow.content, workflow.name)
          useWorkflowDiffStore.getState().clearDiff()
          clearDiff()

          // Extract color from workflow metadata
          const parsedContent = JSON.parse(workflow.content)
          const workflowColor =
            parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'

          // Create workflow
          const createWorkflowResponse = await fetch('/api/workflows', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
@@ -147,7 +133,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})

          const newWorkflow = await createWorkflowResponse.json()

          // Save workflow state
          const stateResponse = await fetch(`/api/workflows/${newWorkflow.id}/state`, {
            method: 'PUT',
            headers: { 'Content-Type': 'application/json' },
@@ -159,9 +144,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
            continue
          }

          // Save variables if any (handle both legacy Array and current Record formats)
          if (workflowData.variables) {
            // Convert to Record format for API (handles backwards compatibility with old Array exports)
            const variablesArray = Array.isArray(workflowData.variables)
              ? workflowData.variables
              : Object.values(workflowData.variables)
@@ -199,7 +182,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})

      logger.info(`Workspace import complete. Imported ${extractedWorkflows.length} workflows`)

      // Navigate to new workspace
      router.push(`/workspace/${newWorkspace.id}/w`)

      onSuccess?.()
@@ -210,7 +192,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
      setIsImporting(false)
    }
  },
  [isImporting, router, onSuccess, createFolderMutation]
  [isImporting, router, onSuccess, createFolderMutation, clearDiff]
)

return {
@@ -1,13 +1,13 @@
import { DynamoDBIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { DynamoDBResponse } from '@/tools/dynamodb/types'
import type { DynamoDBIntrospectResponse, DynamoDBResponse } from '@/tools/dynamodb/types'

export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
export const DynamoDBBlock: BlockConfig<DynamoDBResponse | DynamoDBIntrospectResponse> = {
  type: 'dynamodb',
  name: 'Amazon DynamoDB',
  description: 'Connect to Amazon DynamoDB',
  longDescription:
    'Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, and Delete operations on DynamoDB tables.',
    'Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, Delete, and Introspect operations on DynamoDB tables.',
  docsLink: 'https://docs.sim.ai/tools/dynamodb',
  category: 'tools',
  bgColor: 'linear-gradient(45deg, #2E27AD 0%, #527FFF 100%)',
@@ -24,6 +24,7 @@ export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
        { label: 'Scan', id: 'scan' },
        { label: 'Update Item', id: 'update' },
        { label: 'Delete Item', id: 'delete' },
        { label: 'Introspect', id: 'introspect' },
      ],
      value: () => 'get',
    },
@@ -56,6 +57,19 @@ export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
      type: 'short-input',
      placeholder: 'my-table',
      required: true,
      condition: {
        field: 'operation',
        value: 'introspect',
        not: true,
      },
    },
    {
      id: 'tableName',
      title: 'Table Name (Optional)',
      type: 'short-input',
      placeholder: 'Leave empty to list all tables',
      required: false,
      condition: { field: 'operation', value: 'introspect' },
    },
    // Key field for get, update, delete operations
    {
@@ -420,6 +434,7 @@ Return ONLY the expression - no explanations.`,
      'dynamodb_scan',
      'dynamodb_update',
      'dynamodb_delete',
      'dynamodb_introspect',
    ],
    config: {
      tool: (params) => {
@@ -436,6 +451,8 @@ Return ONLY the expression - no explanations.`,
            return 'dynamodb_update'
          case 'delete':
            return 'dynamodb_delete'
          case 'introspect':
            return 'dynamodb_introspect'
          default:
            throw new Error(`Invalid DynamoDB operation: ${params.operation}`)
        }
@@ -552,5 +569,13 @@ Return ONLY the expression - no explanations.`,
      type: 'number',
      description: 'Number of items returned',
    },
    tables: {
      type: 'array',
      description: 'List of table names from introspect operation',
    },
    tableDetails: {
      type: 'json',
      description: 'Detailed schema information for a specific table from introspect operation',
    },
  },
}
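For reference, a minimal sketch of the two call shapes the new Introspect operation routes to (`dynamodb_introspect` with and without `tableName`), matching the parameters documented for the tool. The `executeTool` helper below is an assumption used only for illustration and is not part of this diff:

// Hypothetical illustration - `executeTool` is an assumed helper, not part of this diff.
declare function executeTool(toolId: string, params: Record<string, unknown>): Promise<unknown>

async function introspectExamples() {
  // Without tableName: lists all tables in the region.
  await executeTool('dynamodb_introspect', {
    region: 'us-east-1',
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  })

  // With tableName: returns detailed schema information for that table.
  await executeTool('dynamodb_introspect', {
    region: 'us-east-1',
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    tableName: 'my-table',
  })
}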
@@ -33,6 +33,7 @@ export const ElasticsearchBlock: BlockConfig<ElasticsearchResponse> = {
        { label: 'Create Index', id: 'elasticsearch_create_index' },
        { label: 'Delete Index', id: 'elasticsearch_delete_index' },
        { label: 'Get Index Info', id: 'elasticsearch_get_index' },
        { label: 'List Indices', id: 'elasticsearch_list_indices' },
        // Cluster Operations
        { label: 'Cluster Health', id: 'elasticsearch_cluster_health' },
        { label: 'Cluster Stats', id: 'elasticsearch_cluster_stats' },
@@ -452,6 +453,7 @@ Return ONLY valid JSON - no explanations, no markdown code blocks.`,
      'elasticsearch_get_index',
      'elasticsearch_cluster_health',
      'elasticsearch_cluster_stats',
      'elasticsearch_list_indices',
    ],
    config: {
      tool: (params) => {
@@ -1,8 +1,8 @@
import { MongoDBIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { MongoDBResponse } from '@/tools/mongodb/types'
import type { MongoDBIntrospectResponse, MongoDBResponse } from '@/tools/mongodb/types'

export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
export const MongoDBBlock: BlockConfig<MongoDBResponse | MongoDBIntrospectResponse> = {
  type: 'mongodb',
  name: 'MongoDB',
  description: 'Connect to MongoDB database',
@@ -23,6 +23,7 @@ export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
        { label: 'Update Documents', id: 'update' },
        { label: 'Delete Documents', id: 'delete' },
        { label: 'Aggregate Pipeline', id: 'execute' },
        { label: 'Introspect Database', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -86,6 +87,7 @@ export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
      type: 'short-input',
      placeholder: 'users',
      required: true,
      condition: { field: 'operation', value: 'introspect', not: true },
    },
    {
      id: 'query',
@@ -803,6 +805,7 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
      'mongodb_update',
      'mongodb_delete',
      'mongodb_execute',
      'mongodb_introspect',
    ],
    config: {
      tool: (params) => {
@@ -817,6 +820,8 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
            return 'mongodb_delete'
          case 'execute':
            return 'mongodb_execute'
          case 'introspect':
            return 'mongodb_introspect'
          default:
            throw new Error(`Invalid MongoDB operation: ${params.operation}`)
        }
@@ -936,5 +941,14 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
      type: 'number',
      description: 'Number of documents matched (update operations)',
    },
    databases: {
      type: 'array',
      description: 'Array of database names (introspect operation)',
    },
    collections: {
      type: 'array',
      description:
        'Array of collection info with name, type, document count, and indexes (introspect operation)',
    },
  },
}
@@ -23,6 +23,7 @@ export const MySQLBlock: BlockConfig<MySQLResponse> = {
        { label: 'Update Data', id: 'update' },
        { label: 'Delete Data', id: 'delete' },
        { label: 'Execute Raw SQL', id: 'execute' },
        { label: 'Introspect Schema', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -285,7 +286,14 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
    },
  ],
  tools: {
    access: ['mysql_query', 'mysql_insert', 'mysql_update', 'mysql_delete', 'mysql_execute'],
    access: [
      'mysql_query',
      'mysql_insert',
      'mysql_update',
      'mysql_delete',
      'mysql_execute',
      'mysql_introspect',
    ],
    config: {
      tool: (params) => {
        switch (params.operation) {
@@ -299,6 +307,8 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
            return 'mysql_delete'
          case 'execute':
            return 'mysql_execute'
          case 'introspect':
            return 'mysql_introspect'
          default:
            throw new Error(`Invalid MySQL operation: ${params.operation}`)
        }
@@ -1,8 +1,8 @@
import { Neo4jIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { Neo4jResponse } from '@/tools/neo4j/types'
import type { Neo4jIntrospectResponse, Neo4jResponse } from '@/tools/neo4j/types'

export const Neo4jBlock: BlockConfig<Neo4jResponse> = {
export const Neo4jBlock: BlockConfig<Neo4jResponse | Neo4jIntrospectResponse> = {
  type: 'neo4j',
  name: 'Neo4j',
  description: 'Connect to Neo4j graph database',
@@ -24,6 +24,7 @@ export const Neo4jBlock: BlockConfig<Neo4jResponse> = {
        { label: 'Update Properties (SET)', id: 'update' },
        { label: 'Delete Nodes/Relationships', id: 'delete' },
        { label: 'Execute Cypher', id: 'execute' },
        { label: 'Introspect Schema', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -589,6 +590,7 @@ Return ONLY valid JSON.`,
      'neo4j_update',
      'neo4j_delete',
      'neo4j_execute',
      'neo4j_introspect',
    ],
    config: {
      tool: (params) => {
@@ -605,6 +607,8 @@ Return ONLY valid JSON.`,
            return 'neo4j_delete'
          case 'execute':
            return 'neo4j_execute'
          case 'introspect':
            return 'neo4j_introspect'
          default:
            throw new Error(`Invalid Neo4j operation: ${params.operation}`)
        }
@@ -23,6 +23,7 @@ export const PostgreSQLBlock: BlockConfig<PostgresResponse> = {
        { label: 'Update Data', id: 'update' },
        { label: 'Delete Data', id: 'delete' },
        { label: 'Execute Raw SQL', id: 'execute' },
        { label: 'Introspect Schema', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -285,6 +286,14 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
      condition: { field: 'operation', value: 'delete' },
      required: true,
    },
    {
      id: 'schema',
      title: 'Schema Name',
      type: 'short-input',
      placeholder: 'public',
      value: () => 'public',
      condition: { field: 'operation', value: 'introspect' },
    },
  ],
  tools: {
    access: [
@@ -293,6 +302,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
      'postgresql_update',
      'postgresql_delete',
      'postgresql_execute',
      'postgresql_introspect',
    ],
    config: {
      tool: (params) => {
@@ -307,6 +317,8 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
            return 'postgresql_delete'
          case 'execute':
            return 'postgresql_execute'
          case 'introspect':
            return 'postgresql_introspect'
          default:
            throw new Error(`Invalid PostgreSQL operation: ${params.operation}`)
        }
@@ -343,6 +355,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
        if (rest.table) result.table = rest.table
        if (rest.query) result.query = rest.query
        if (rest.where) result.where = rest.where
        if (rest.schema) result.schema = rest.schema
        if (parsedData !== undefined) result.data = parsedData

        return result
@@ -361,6 +374,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
    query: { type: 'string', description: 'SQL query to execute' },
    data: { type: 'json', description: 'Data for insert/update operations' },
    where: { type: 'string', description: 'WHERE clause for update/delete' },
    schema: { type: 'string', description: 'Schema name for introspection' },
  },
  outputs: {
    message: {
@@ -375,5 +389,13 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
      type: 'number',
      description: 'Number of rows affected by the operation',
    },
    tables: {
      type: 'array',
      description: 'Array of table schemas with columns, keys, and indexes (introspect operation)',
    },
    schemas: {
      type: 'array',
      description: 'List of available schemas in the database (introspect operation)',
    },
  },
}
@@ -1,8 +1,8 @@
import { RDSIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { RdsResponse } from '@/tools/rds/types'
import type { RdsIntrospectResponse, RdsResponse } from '@/tools/rds/types'

export const RDSBlock: BlockConfig<RdsResponse> = {
export const RDSBlock: BlockConfig<RdsResponse | RdsIntrospectResponse> = {
  type: 'rds',
  name: 'Amazon RDS',
  description: 'Connect to Amazon RDS via Data API',
@@ -23,6 +23,7 @@ export const RDSBlock: BlockConfig<RdsResponse> = {
        { label: 'Update Data', id: 'update' },
        { label: 'Delete Data', id: 'delete' },
        { label: 'Execute Raw SQL', id: 'execute' },
        { label: 'Introspect Schema', id: 'introspect' },
      ],
      value: () => 'query',
    },
@@ -340,9 +341,36 @@ Return ONLY the JSON object.`,
        generationType: 'json-object',
      },
    },
    {
      id: 'schema',
      title: 'Schema Name',
      type: 'short-input',
      placeholder: 'public (PostgreSQL) or database name (MySQL)',
      condition: { field: 'operation', value: 'introspect' },
      required: false,
    },
    {
      id: 'engine',
      title: 'Database Engine',
      type: 'dropdown',
      options: [
        { label: 'Auto-detect', id: '' },
        { label: 'Aurora PostgreSQL', id: 'aurora-postgresql' },
        { label: 'Aurora MySQL', id: 'aurora-mysql' },
      ],
      condition: { field: 'operation', value: 'introspect' },
      value: () => '',
    },
  ],
  tools: {
    access: ['rds_query', 'rds_insert', 'rds_update', 'rds_delete', 'rds_execute'],
    access: [
      'rds_query',
      'rds_insert',
      'rds_update',
      'rds_delete',
      'rds_execute',
      'rds_introspect',
    ],
    config: {
      tool: (params) => {
        switch (params.operation) {
@@ -356,12 +384,14 @@ Return ONLY the JSON object.`,
            return 'rds_delete'
          case 'execute':
            return 'rds_execute'
          case 'introspect':
            return 'rds_introspect'
          default:
            throw new Error(`Invalid RDS operation: ${params.operation}`)
        }
      },
      params: (params) => {
        const { operation, data, conditions, ...rest } = params
        const { operation, data, conditions, schema, engine, ...rest } = params

        // Parse JSON fields
        const parseJson = (value: unknown, fieldName: string) => {
@@ -399,6 +429,8 @@ Return ONLY the JSON object.`,
        if (rest.query) result.query = rest.query
        if (parsedConditions !== undefined) result.conditions = parsedConditions
        if (parsedData !== undefined) result.data = parsedData
        if (schema) result.schema = schema
        if (engine) result.engine = engine

        return result
      },
@@ -416,6 +448,11 @@ Return ONLY the JSON object.`,
    query: { type: 'string', description: 'SQL query to execute' },
    data: { type: 'json', description: 'Data for insert/update operations' },
    conditions: { type: 'json', description: 'Conditions for update/delete (e.g., {"id": 1})' },
    schema: { type: 'string', description: 'Schema to introspect (for introspect operation)' },
    engine: {
      type: 'string',
      description: 'Database engine (aurora-postgresql or aurora-mysql, auto-detected if not set)',
    },
  },
  outputs: {
    message: {
@@ -430,5 +467,18 @@ Return ONLY the JSON object.`,
      type: 'number',
      description: 'Number of rows affected by the operation',
    },
    engine: {
      type: 'string',
      description: 'Detected database engine type (for introspect operation)',
    },
    tables: {
      type: 'array',
      description:
        'Array of table schemas with columns, keys, and indexes (for introspect operation)',
    },
    schemas: {
      type: 'array',
      description: 'List of available schemas in the database (for introspect operation)',
    },
  },
}
@@ -17,6 +17,7 @@ export const ResponseBlock: BlockConfig<ResponseBlockOutput> = {
  category: 'blocks',
  bgColor: '#2F55FF',
  icon: ResponseIcon,
  singleInstance: true,
  subBlocks: [
    {
      id: 'dataMode',
@@ -115,25 +115,26 @@ Description: ${route.value || 'No description provided'}
    )
    .join('\n')

  return `You are an intelligent routing agent. Your task is to analyze the provided context and select the most appropriate route from the available options.
  return `You are a DETERMINISTIC routing agent. You MUST select exactly ONE option.

Available Routes:
${routesInfo}

Context to analyze:
Context to route:
${context}

Instructions:
1. Carefully analyze the context against each route's description
2. Select the route that best matches the context's intent and requirements
3. Consider the semantic meaning, not just keyword matching
4. If multiple routes could match, choose the most specific one
ROUTING RULES:
1. ALWAYS prefer selecting a route over NO_MATCH
2. Pick the route whose description BEST matches the context, even if it's not a perfect match
3. If the context is even partially related to a route's description, select that route
4. ONLY output NO_MATCH if the context is completely unrelated to ALL route descriptions

Response Format:
Return ONLY the route ID as a single string, no punctuation, no explanation.
Example: "route-abc123"
OUTPUT FORMAT:
- Output EXACTLY one route ID (copied exactly as shown above) OR "NO_MATCH"
- No explanation, no punctuation, no additional text
- Just the route ID or NO_MATCH

Remember: Your response must be ONLY the route ID - no additional text, formatting, or explanation.`
Your response:`
}

/**
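A minimal sketch of how a caller might interpret the model output that this prompt now requests (a bare route ID or the literal NO_MATCH, which feeds the Router's error path). The `selectRoute` helper and the route shape below are assumptions for illustration, not code from this diff:

// Hypothetical helper - illustrates parsing the "route ID or NO_MATCH" contract above.
interface RouteOption {
  id: string
  title: string
}

function selectRoute(modelOutput: string, routes: RouteOption[]): RouteOption | null {
  const normalized = modelOutput.trim().replace(/^"|"$/g, '')

  // NO_MATCH means no route description applied; callers can fall back to the error path.
  if (normalized === 'NO_MATCH') return null

  // Otherwise the output must be exactly one of the route IDs listed in the prompt.
  return routes.find((route) => route.id === normalized) ?? null
}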
@@ -34,6 +34,7 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
        { label: 'Full-Text Search', id: 'text_search' },
        { label: 'Vector Search', id: 'vector_search' },
        { label: 'Call RPC Function', id: 'rpc' },
        { label: 'Introspect Schema', id: 'introspect' },
        // Storage - File Operations
        { label: 'Storage: Upload File', id: 'storage_upload' },
        { label: 'Storage: Download File', id: 'storage_download' },
@@ -490,6 +491,14 @@ Return ONLY the order by expression - no explanations, no extra text.`,
      placeholder: '{\n "param1": "value1",\n "param2": "value2"\n}',
      condition: { field: 'operation', value: 'rpc' },
    },
    // Introspect operation fields
    {
      id: 'schema',
      title: 'Schema',
      type: 'short-input',
      placeholder: 'public (leave empty for all user schemas)',
      condition: { field: 'operation', value: 'introspect' },
    },
    // Text Search operation fields
    {
      id: 'column',
@@ -876,6 +885,7 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
      'supabase_text_search',
      'supabase_vector_search',
      'supabase_rpc',
      'supabase_introspect',
      'supabase_storage_upload',
      'supabase_storage_download',
      'supabase_storage_list',
@@ -911,6 +921,8 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
            return 'supabase_vector_search'
          case 'rpc':
            return 'supabase_rpc'
          case 'introspect':
            return 'supabase_introspect'
          case 'storage_upload':
            return 'supabase_storage_upload'
          case 'storage_download':
@@ -1085,7 +1097,6 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
    operation: { type: 'string', description: 'Operation to perform' },
    projectId: { type: 'string', description: 'Supabase project identifier' },
    table: { type: 'string', description: 'Database table name' },
    schema: { type: 'string', description: 'Database schema (default: public)' },
    select: { type: 'string', description: 'Columns to return (comma-separated, defaults to *)' },
    apiKey: { type: 'string', description: 'Service role secret key' },
    // Data for insert/update operations
@@ -1113,6 +1124,8 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
    language: { type: 'string', description: 'Language for text search' },
    // Count operation inputs
    countType: { type: 'string', description: 'Count type: exact, planned, or estimated' },
    // Introspect operation inputs
    schema: { type: 'string', description: 'Database schema to introspect (e.g., public)' },
    // Storage operation inputs
    bucket: { type: 'string', description: 'Storage bucket name' },
    path: { type: 'string', description: 'File or folder path in storage' },
@@ -1158,5 +1171,13 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
      type: 'string',
      description: 'Temporary signed URL for storage file',
    },
    tables: {
      type: 'json',
      description: 'Array of table schemas for introspect operation',
    },
    schemas: {
      type: 'json',
      description: 'Array of schema names found in the database',
    },
  },
}
@@ -320,6 +320,7 @@ export interface BlockConfig<T extends ToolResponse = ToolResponse> {
  subBlocks: SubBlockConfig[]
  triggerAllowed?: boolean
  authMode?: AuthMode
  singleInstance?: boolean
  tools: {
    access: string[]
    config?: {
@@ -12,11 +12,10 @@ import { cn } from '@/lib/core/utils/cn'
const avatarVariants = cva('relative flex shrink-0 overflow-hidden rounded-full', {
  variants: {
    size: {
      xs: 'h-6 w-6',
      sm: 'h-8 w-8',
      md: 'h-10 w-10',
      lg: 'h-12 w-12',
      xl: 'h-16 w-16',
      xs: 'h-3.5 w-3.5',
      sm: 'h-6 w-6',
      md: 'h-8 w-8',
      lg: 'h-10 w-10',
    },
  },
  defaultVariants: {
@@ -38,11 +37,10 @@ const avatarStatusVariants = cva(
      away: 'bg-[#f59e0b]',
    },
    size: {
      xs: 'h-2 w-2',
      sm: 'h-2.5 w-2.5',
      md: 'h-3 w-3',
      lg: 'h-3.5 w-3.5',
      xl: 'h-4 w-4',
      xs: 'h-1.5 w-1.5 border',
      sm: 'h-2 w-2',
      md: 'h-2.5 w-2.5',
      lg: 'h-3 w-3',
    },
  },
  defaultVariants: {
@@ -52,6 +52,7 @@
import * as React from 'react'
import * as PopoverPrimitive from '@radix-ui/react-popover'
import { Check, ChevronLeft, ChevronRight, Search } from 'lucide-react'
import { createPortal } from 'react-dom'
import { cn } from '@/lib/core/utils/cn'

type PopoverSize = 'sm' | 'md'
@@ -166,6 +167,9 @@ interface PopoverContextValue {
  colorScheme: PopoverColorScheme
  searchQuery: string
  setSearchQuery: (query: string) => void
  /** ID of the last hovered item (for hover submenus) */
  lastHoveredItem: string | null
  setLastHoveredItem: (id: string | null) => void
}

const PopoverContext = React.createContext<PopoverContextValue | null>(null)
@@ -208,12 +212,24 @@ const Popover: React.FC<PopoverProps> = ({
  variant = 'default',
  size = 'md',
  colorScheme = 'default',
  open,
  ...props
}) => {
  const [currentFolder, setCurrentFolder] = React.useState<string | null>(null)
  const [folderTitle, setFolderTitle] = React.useState<string | null>(null)
  const [onFolderSelect, setOnFolderSelect] = React.useState<(() => void) | null>(null)
  const [searchQuery, setSearchQuery] = React.useState<string>('')
  const [lastHoveredItem, setLastHoveredItem] = React.useState<string | null>(null)

  React.useEffect(() => {
    if (open === false) {
      setCurrentFolder(null)
      setFolderTitle(null)
      setOnFolderSelect(null)
      setSearchQuery('')
      setLastHoveredItem(null)
    }
  }, [open])

  const openFolder = React.useCallback(
    (id: string, title: string, onLoad?: () => void | Promise<void>, onSelect?: () => void) => {
@@ -246,6 +262,8 @@ const Popover: React.FC<PopoverProps> = ({
      colorScheme,
      searchQuery,
      setSearchQuery,
      lastHoveredItem,
      setLastHoveredItem,
    }),
    [
      openFolder,
@@ -257,12 +275,15 @@ const Popover: React.FC<PopoverProps> = ({
      size,
      colorScheme,
      searchQuery,
      lastHoveredItem,
    ]
  )

  return (
    <PopoverContext.Provider value={contextValue}>
      <PopoverPrimitive.Root {...props}>{children}</PopoverPrimitive.Root>
      <PopoverPrimitive.Root open={open} {...props}>
        {children}
      </PopoverPrimitive.Root>
    </PopoverContext.Provider>
  )
}
@@ -496,7 +517,17 @@ export interface PopoverItemProps extends React.HTMLAttributes<HTMLDivElement> {
 */
const PopoverItem = React.forwardRef<HTMLDivElement, PopoverItemProps>(
  (
    { className, active, rootOnly, disabled, showCheck = false, children, onClick, ...props },
    {
      className,
      active,
      rootOnly,
      disabled,
      showCheck = false,
      children,
      onClick,
      onMouseEnter,
      ...props
    },
    ref
  ) => {
    const context = React.useContext(PopoverContext)
@@ -514,6 +545,12 @@ const PopoverItem = React.forwardRef<HTMLDivElement, PopoverItemProps>(
      onClick?.(e)
    }

    const handleMouseEnter = (e: React.MouseEvent<HTMLDivElement>) => {
      // Clear last hovered item to close any open hover submenus
      context?.setLastHoveredItem(null)
      onMouseEnter?.(e)
    }

    return (
      <div
        className={cn(
@@ -529,6 +566,7 @@ const PopoverItem = React.forwardRef<HTMLDivElement, PopoverItemProps>(
        aria-selected={active}
        aria-disabled={disabled}
        onClick={handleClick}
        onMouseEnter={handleMouseEnter}
        {...props}
      >
        {children}
@@ -589,44 +627,150 @@ export interface PopoverFolderProps extends Omit<React.HTMLAttributes<HTMLDivEle
  children?: React.ReactNode
  /** Whether currently active/selected */
  active?: boolean
  /**
   * Expand folder on hover to show submenu alongside parent
   * When true, hovering shows a floating submenu; clicking still uses inline navigation
   * @default false
   */
  expandOnHover?: boolean
}

/**
 * Expandable folder that shows nested content.
 * Supports two modes:
 * - Click mode (default): Replaces parent content, shows back button
 * - Hover mode (expandOnHover): Shows floating submenu alongside parent
 */
const PopoverFolder = React.forwardRef<HTMLDivElement, PopoverFolderProps>(
  ({ className, id, title, icon, onOpen, onSelect, children, active, ...props }, ref) => {
    const { openFolder, currentFolder, isInFolder, variant, size, colorScheme } =
      usePopoverContext()
  (
    {
      className,
      id,
      title,
      icon,
      onOpen,
      onSelect,
      children,
      active,
      expandOnHover = false,
      ...props
    },
    ref
  ) => {
    const {
      openFolder,
      currentFolder,
      isInFolder,
      variant,
      size,
      colorScheme,
      lastHoveredItem,
      setLastHoveredItem,
    } = usePopoverContext()
    const [submenuPosition, setSubmenuPosition] = React.useState<{ top: number; left: number }>({
      top: 0,
      left: 0,
    })
    const triggerRef = React.useRef<HTMLDivElement>(null)

    // Submenu is open when this folder is the last hovered item (for expandOnHover mode)
    const isHoverOpen = expandOnHover && lastHoveredItem === id

    // Merge refs
    const mergedRef = React.useCallback(
      (node: HTMLDivElement | null) => {
        triggerRef.current = node
        if (typeof ref === 'function') {
          ref(node)
        } else if (ref) {
          ref.current = node
        }
      },
      [ref]
    )

    // If we're in a folder and this isn't the current one, hide
    if (isInFolder && currentFolder !== id) return null
    // If this folder is open via click (inline mode), render children directly
    if (currentFolder === id) return <>{children}</>

    const handleClick = (e: React.MouseEvent) => {
      e.stopPropagation()
    const handleClickOpen = () => {
      openFolder(id, title, onOpen, onSelect)
    }

    const handleClick = (e: React.MouseEvent) => {
      e.stopPropagation()
      if (expandOnHover) {
        // In hover mode, clicking opens inline and clears hover state
        setLastHoveredItem(null)
      }
      handleClickOpen()
    }

    const handleMouseEnter = () => {
      if (!expandOnHover) return

      // Calculate position for submenu
      if (triggerRef.current) {
        const rect = triggerRef.current.getBoundingClientRect()
        const parentPopover = triggerRef.current.closest('[data-radix-popper-content-wrapper]')
        const parentRect = parentPopover?.getBoundingClientRect()

        // Position to the right of the parent popover with a small gap
        setSubmenuPosition({
          top: rect.top,
          left: parentRect ? parentRect.right + 4 : rect.right + 4,
        })
      }

      setLastHoveredItem(id)
      onOpen?.()
    }

    return (
      <div
        ref={ref}
        className={cn(
          STYLES.itemBase,
          STYLES.colorScheme[colorScheme].text,
          STYLES.size[size].item,
          getItemStateClasses(variant, colorScheme, !!active),
          className
        )}
        role='menuitem'
        aria-haspopup='true'
        aria-expanded={false}
        onClick={handleClick}
        {...props}
      >
        {icon}
        <span className='flex-1'>{title}</span>
        <ChevronRight className={STYLES.size[size].icon} />
      </div>
      <>
        <div
          ref={mergedRef}
          className={cn(
            STYLES.itemBase,
            STYLES.colorScheme[colorScheme].text,
            STYLES.size[size].item,
            getItemStateClasses(variant, colorScheme, !!active || isHoverOpen),
            className
          )}
          role='menuitem'
          aria-haspopup='true'
          aria-expanded={isHoverOpen}
          onClick={handleClick}
          onMouseEnter={handleMouseEnter}
          {...props}
        >
          {icon}
          <span className='flex-1'>{title}</span>
          <ChevronRight className={STYLES.size[size].icon} />
        </div>

        {/* Hover submenu - rendered as a portal to escape overflow clipping */}
        {isHoverOpen &&
          typeof document !== 'undefined' &&
          createPortal(
            <div
              className={cn(
                'fixed z-[10000201] min-w-[120px]',
                STYLES.content,
                STYLES.colorScheme[colorScheme].content,
                'shadow-lg'
              )}
              style={{
                top: submenuPosition.top,
                left: submenuPosition.left,
              }}
            >
              {children}
            </div>,
            document.body
          )}
      </>
    )
  }
)
@@ -665,7 +809,10 @@ const PopoverBackButton = React.forwardRef<HTMLDivElement, PopoverBackButtonProp
        className
      )}
      role='button'
      onClick={closeFolder}
      onClick={(e) => {
        e.stopPropagation()
        closeFolder()
      }}
      {...props}
    >
      <ChevronLeft className={STYLES.size[size].icon} />

@@ -166,6 +166,8 @@ export interface TagInputProps extends VariantProps<typeof tagInputVariants> {
  onAdd: (value: string) => boolean
  /** Callback when a tag is removed (receives value, index, and isValid) */
  onRemove: (value: string, index: number, isValid: boolean) => void
  /** Callback when the input value changes (useful for clearing errors) */
  onInputChange?: (value: string) => void
  /** Placeholder text for the input */
  placeholder?: string
  /** Placeholder text when there are existing tags */
@@ -207,6 +209,7 @@ const TagInput = React.forwardRef<HTMLInputElement, TagInputProps>(
      items,
      onAdd,
      onRemove,
      onInputChange,
      placeholder = 'Enter values',
      placeholderWithTags = 'Add another',
      disabled = false,
@@ -344,10 +347,12 @@ const TagInput = React.forwardRef<HTMLInputElement, TagInputProps>(
      })

      if (addedCount === 0 && pastedValues.length === 1) {
        setInputValue(inputValue + pastedValues[0])
        const newValue = inputValue + pastedValues[0]
        setInputValue(newValue)
        onInputChange?.(newValue)
      }
    },
    [onAdd, inputValue]
    [onAdd, inputValue, onInputChange]
  )

  const handleBlur = React.useCallback(() => {
@@ -422,7 +427,10 @@ const TagInput = React.forwardRef<HTMLInputElement, TagInputProps>(
        name={name}
        type='text'
        value={inputValue}
        onChange={(e) => setInputValue(e.target.value)}
        onChange={(e) => {
          setInputValue(e.target.value)
          onInputChange?.(e.target.value)
        }}
        onKeyDown={handleKeyDown}
        onPaste={handlePaste}
        onBlur={handleBlur}
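For reference, a small usage sketch of the new `expandOnHover` mode on `PopoverFolder`. The surrounding trigger/content composition and the `handleRename` / `moveTo` callbacks are assumptions based on the component names in this file, not code from this diff:

// Hypothetical usage sketch - the surrounding Popover composition and handlers are assumed.
<Popover>
  <PopoverTrigger>Options</PopoverTrigger>
  <PopoverContent>
    <PopoverItem onClick={handleRename}>Rename</PopoverItem>
    {/* Hovering shows a floating submenu beside the parent; clicking still opens inline. */}
    <PopoverFolder id='move-to' title='Move to…' expandOnHover>
      <PopoverItem onClick={() => moveTo('folder-a')}>Folder A</PopoverItem>
      <PopoverItem onClick={() => moveTo('folder-b')}>Folder B</PopoverItem>
    </PopoverFolder>
  </PopoverContent>
</Popover>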
apps/sim/components/emcn/icons/animate/download.module.css (new file, 22 lines)
@@ -0,0 +1,22 @@
/**
 * Download icon animation
 * Subtle continuous animation for import/download states
 * Arrow gently pulses down to suggest downloading motion
 */

@keyframes arrow-pulse {
  0%,
  100% {
    transform: translateY(0);
    opacity: 1;
  }
  50% {
    transform: translateY(1.5px);
    opacity: 0.7;
  }
}

.animated-download-svg {
  animation: arrow-pulse 1.5s ease-in-out infinite;
  transform-origin: center center;
}

apps/sim/components/emcn/icons/download.tsx (new file, 42 lines)
@@ -0,0 +1,42 @@
import type { SVGProps } from 'react'
import styles from '@/components/emcn/icons/animate/download.module.css'

export interface DownloadProps extends SVGProps<SVGSVGElement> {
  /**
   * Enable animation on the download icon
   * @default false
   */
  animate?: boolean
}

/**
 * Download icon component with optional CSS-based animation
 * Based on lucide arrow-down icon structure.
 * When animate is false, this is a lightweight static icon with no animation overhead.
 * When animate is true, CSS module animations are applied for a subtle pulsing effect.
 * @param props - SVG properties including className, animate, etc.
 */
export function Download({ animate = false, className, ...props }: DownloadProps) {
  const svgClassName = animate
    ? `${styles['animated-download-svg']} ${className || ''}`.trim()
    : className

  return (
    <svg
      xmlns='http://www.w3.org/2000/svg'
      width='24'
      height='24'
      viewBox='0 0 24 24'
      fill='none'
      stroke='currentColor'
      strokeWidth='2'
      strokeLinecap='round'
      strokeLinejoin='round'
      className={svgClassName}
      {...props}
    >
      <path d='M12 5v14' />
      <path d='m19 12-7 7-7-7' />
    </svg>
  )
}
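A short usage sketch of the new `Download` icon. The import path mirrors the file location above; the `isImporting` flag is an assumed piece of caller state used only for illustration:

// Hypothetical usage - `isImporting` is assumed caller state.
import { Download } from '@/components/emcn/icons/download'

export function ImportStatusIcon({ isImporting }: { isImporting: boolean }) {
  // Static arrow by default; subtle pulsing animation while an import is in flight.
  return <Download animate={isImporting} className='h-4 w-4' />
}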
Some files were not shown because too many files have changed in this diff.