mirror of https://github.com/simstudioai/sim.git (synced 2026-01-13 17:08:01 -05:00)
Compare commits
19 Commits
v0.5.57...feat/copil
| SHA1 |
|---|
| 9c27950f0a |
| 6fdb45a6b1 |
| c8865fa395 |
| d40e34bbfe |
| 936d2bd729 |
| 3b925c807f |
| 87a7162212 |
| f2a9bf49d5 |
| 2c250edb6a |
| acb696207d |
| 4ee863a9ce |
| 23f4305bc0 |
| 42e496f5ff |
| 23b3dacd1a |
| d55072a45f |
| 684ad5aeec |
| a3dff1027f |
| 0aec9ef571 |
| cb4db20a5f |
@@ -37,7 +37,7 @@ This integration empowers Sim agents to automate data management tasks within yo

## Usage Instructions

-Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, and Delete operations on DynamoDB tables.
+Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, Delete, and Introspect operations on DynamoDB tables.

@@ -185,6 +185,27 @@ Delete an item from a DynamoDB table

| --------- | ---- | ----------- |
| `message` | string | Operation status message |

### `dynamodb_introspect`

Introspect DynamoDB to list tables or get detailed schema information for a specific table

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `region` | string | Yes | AWS region (e.g., us-east-1) |
| `accessKeyId` | string | Yes | AWS access key ID |
| `secretAccessKey` | string | Yes | AWS secret access key |
| `tableName` | string | No | Optional table name to get detailed schema. If not provided, lists all tables. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | List of table names in the region |
| `tableDetails` | object | Detailed schema information for a specific table |
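For orientation, a minimal sketch of calling this tool, assuming the endpoint mirrors the new file path `apps/sim/app/api/tools/dynamodb/introspect/route.ts` added later in this diff (credentials and table name are placeholders):

```ts
// Hypothetical call against the new introspect route; body fields follow
// the Input table above / the IntrospectSchema in this diff.
const res = await fetch('/api/tools/dynamodb/introspect', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    region: 'us-east-1',
    accessKeyId: 'AKIA...', // placeholder credential
    secretAccessKey: '...', // placeholder credential
    tableName: 'users', // omit to list every table in the region instead
  }),
})
const { message, tables, tableDetails } = await res.json()
```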
## Notes
@@ -362,6 +362,29 @@ Get comprehensive statistics about the Elasticsearch cluster.

| `nodes` | object | Node statistics including count and versions |
| `indices` | object | Index statistics including document count and store size |

### `elasticsearch_list_indices`

List all indices in the Elasticsearch cluster with their health, status, and statistics.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deploymentType` | string | Yes | Deployment type: self_hosted or cloud |
| `host` | string | No | Elasticsearch host URL (for self-hosted) |
| `cloudId` | string | No | Elastic Cloud ID (for cloud deployments) |
| `authMethod` | string | Yes | Authentication method: api_key or basic_auth |
| `apiKey` | string | No | Elasticsearch API key |
| `username` | string | No | Username for basic auth |
| `password` | string | No | Password for basic auth |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Summary message about the indices |
| `indices` | json | Array of index information objects |
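Since `host`/`cloudId` and `apiKey`/`username`+`password` are alternatives selected by `deploymentType` and `authMethod`, a sketch of the two documented combinations may help (all values are placeholders):

```ts
// Self-hosted deployment with basic auth (placeholder values)
const selfHostedInput = {
  deploymentType: 'self_hosted',
  host: 'https://es.internal.example:9200',
  authMethod: 'basic_auth',
  username: 'elastic',
  password: '...',
}

// Elastic Cloud deployment with an API key (placeholder values)
const cloudInput = {
  deploymentType: 'cloud',
  cloudId: 'my-deployment:...',
  authMethod: 'api_key',
  apiKey: '...',
}
```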
## Notes
@@ -96,13 +96,13 @@ Download a file from Google Drive with complete metadata (exports Google Workspa

| `fileId` | string | Yes | The ID of the file to download |
| `mimeType` | string | No | The MIME type to export Google Workspace files to (optional) |
| `fileName` | string | No | Optional filename override |
-| `includeRevisions` | boolean | No | Whether to include revision history in the metadata (default: true) |
+| `includeRevisions` | boolean | No | Whether to include revision history in the metadata (default: true, returns first 100 revisions) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `file` | object | Downloaded file stored in execution files |
+| `file` | object | Downloaded file data |

### `google_drive_list`
@@ -172,6 +172,30 @@ Execute MongoDB aggregation pipeline

| `documents` | array | Array of documents returned from aggregation |
| `documentCount` | number | Number of documents returned |

### `mongodb_introspect`

Introspect MongoDB database to list databases, collections, and indexes

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MongoDB server hostname or IP address |
| `port` | number | Yes | MongoDB server port (default: 27017) |
| `database` | string | No | Database name to introspect (optional - if not provided, lists all databases) |
| `username` | string | No | MongoDB username |
| `password` | string | No | MongoDB password |
| `authSource` | string | No | Authentication database |
| `ssl` | string | No | SSL connection mode (disabled, required, preferred) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `databases` | array | Array of database names |
| `collections` | array | Array of collection info with name, type, document count, and indexes |
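Judging from `executeIntrospect` in the MongoDB utils later in this diff, each `collections` entry has roughly this shape:

```ts
// Approximate shape of one `collections` element, as assembled by executeIntrospect
interface CollectionEntry {
  name: string
  type: string // 'collection' unless the server reports another type (e.g. a view)
  documentCount: number // from estimatedDocumentCount(), so approximate
  indexes: Array<{
    name: string
    key: Record<string, number> // e.g. { _id: 1 }
    unique: boolean
    sparse?: boolean
  }>
}
```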
## Notes
@@ -157,6 +157,29 @@ Execute raw SQL query on MySQL database

| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |

### `mysql_introspect`

Introspect MySQL database schema to retrieve table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port (default: 3306) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode (disabled, required, preferred) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `databases` | array | List of available databases on the server |
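A minimal request body for this tool, assuming the endpoint mirrors the new file `apps/sim/app/api/tools/mysql/introspect/route.ts` below (host and credentials are placeholders):

```ts
// Placeholder connection details; `ssl` defaults to 'preferred' per the schema in this diff
await fetch('/api/tools/mysql/introspect', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    host: 'db.internal.example',
    port: 3306,
    database: 'app',
    username: 'readonly',
    password: '...',
  }),
})
```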
## Notes
@@ -168,6 +168,33 @@ Execute arbitrary Cypher queries on Neo4j graph database for complex operations

| `recordCount` | number | Number of records returned |
| `summary` | json | Execution summary with timing and counters |

### `neo4j_introspect`

Introspect a Neo4j database to discover its schema including node labels, relationship types, properties, constraints, and indexes.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | Neo4j server hostname or IP address |
| `port` | number | Yes | Neo4j server port (default: 7687 for Bolt protocol) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Neo4j username |
| `password` | string | Yes | Neo4j password |
| `encryption` | string | No | Connection encryption mode (enabled, disabled) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `labels` | array | Array of node labels in the database |
| `relationshipTypes` | array | Array of relationship types in the database |
| `nodeSchemas` | array | Array of node schemas with their properties |
| `relationshipSchemas` | array | Array of relationship schemas with their properties |
| `constraints` | array | Array of database constraints |
| `indexes` | array | Array of database indexes |
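Based on how the new route assembles its response (see `apps/sim/app/api/tools/neo4j/introspect/route.ts` later in this diff), the two schema arrays look roughly like:

```ts
// Approximate element shapes, per the route's construction of nodeSchemas/relationshipSchemas
interface NodeSchemaEntry {
  label: string // joined label key, e.g. 'User' or 'User:Admin' for multi-label nodes
  properties: Array<{ name: string; types: string[] }>
}
interface RelationshipSchemaEntry {
  type: string // relationship type as reported by db.schema.relTypeProperties()
  properties: Array<{ name: string; types: string[] }>
}
```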
## Notes
@@ -157,6 +157,30 @@ Execute raw SQL query on PostgreSQL database

| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |

### `postgresql_introspect`

Introspect PostgreSQL database schema to retrieve table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port (default: 5432) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode (disabled, required, preferred) |
| `schema` | string | No | Schema to introspect (default: public) |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `schemas` | array | List of available schemas in the database |
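The only parameter beyond the shared connection set is `schema`. A body sketch, assuming the same route-path convention as the other introspect endpoints in this diff (connection details are placeholders):

```ts
await fetch('/api/tools/postgresql/introspect', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    host: 'pg.internal.example', // placeholder
    port: 5432,
    database: 'app',
    username: 'readonly',
    password: '...',
    schema: 'public', // optional; defaults to 'public'
  }),
})
```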
## Notes
@@ -165,6 +165,32 @@ Execute raw SQL on Amazon RDS using the Data API

| `rows` | array | Array of rows returned or affected |
| `rowCount` | number | Number of rows affected |

### `rds_introspect`

Introspect Amazon RDS Aurora database schema to retrieve table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `region` | string | Yes | AWS region (e.g., us-east-1) |
| `accessKeyId` | string | Yes | AWS access key ID |
| `secretAccessKey` | string | Yes | AWS secret access key |
| `resourceArn` | string | Yes | ARN of the Aurora DB cluster |
| `secretArn` | string | Yes | ARN of the Secrets Manager secret containing DB credentials |
| `database` | string | No | Database name (optional) |
| `schema` | string | No | Schema to introspect (default: public for PostgreSQL, database name for MySQL) |
| `engine` | string | No | Database engine (aurora-postgresql or aurora-mysql). Auto-detected if not provided. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `engine` | string | Detected database engine type |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `schemas` | array | List of available schemas in the database |
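When `engine` is omitted, the utils detect it by running `SELECT VERSION()` (see `detectEngine` at the end of this diff). A body sketch with placeholder ARNs and credentials:

```ts
const rdsIntrospectBody = {
  region: 'us-east-1',
  accessKeyId: 'AKIA...', // placeholder
  secretAccessKey: '...', // placeholder
  resourceArn: 'arn:aws:rds:us-east-1:123456789012:cluster:example-cluster', // placeholder ARN
  secretArn: 'arn:aws:secretsmanager:us-east-1:123456789012:secret:example', // placeholder ARN
  // engine omitted on purpose: detectEngine() falls back to SELECT VERSION()
}
```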
## Notes
@@ -261,6 +261,25 @@ Call a PostgreSQL function in Supabase

| `message` | string | Operation status message |
| `results` | json | Result returned from the function |

### `supabase_introspect`

Introspect Supabase database schema to get table structures, columns, and relationships

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | Your Supabase project ID (e.g., jdrkgepadsdopsntdlom) |
| `schema` | string | No | Database schema to introspect (defaults to all user schemas, commonly "public") |
| `apiKey` | string | Yes | Your Supabase service role secret key |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
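A minimal body sketch; the project ID is the doc's own example, and the key is a placeholder:

```ts
const supabaseIntrospectBody = {
  projectId: 'jdrkgepadsdopsntdlom', // example ID from the table above
  schema: 'public',
  apiKey: '...', // placeholder service role secret key
}
```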
### `supabase_storage_upload`

Upload a file to a Supabase storage bucket
@@ -96,6 +96,7 @@ const ChatMessageSchema = z.object({
       })
     )
     .optional(),
+  commands: z.array(z.string()).optional(),
 })

 /**
@@ -131,6 +132,7 @@ export async function POST(req: NextRequest) {
     provider,
     conversationId,
     contexts,
+    commands,
   } = ChatMessageSchema.parse(body)
   // Ensure we have a consistent user message ID for this request
   const userMessageIdToUse = userMessageId || crypto.randomUUID()
@@ -458,6 +460,7 @@ export async function POST(req: NextRequest) {
     ...(integrationTools.length > 0 && { tools: integrationTools }),
     ...(baseTools.length > 0 && { baseTools }),
     ...(credentials && { credentials }),
+    ...(commands && commands.length > 0 && { commands }),
   }

   try {
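Net effect of these three hunks: a chat request may now carry an optional `commands` array of strings, which is validated by the schema, destructured from the parsed body, and forwarded on the downstream options object only when non-empty. A hedged fragment of a request body:

```ts
// Only the new field is shown; the command strings themselves are hypothetical,
// the schema merely requires string[].
const chatRequestFragment = {
  commands: ['/run-workflow', '/explain'],
}
```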
73 apps/sim/app/api/tools/dynamodb/introspect/route.ts Normal file
@@ -0,0 +1,73 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createRawDynamoDBClient, describeTable, listTables } from '@/app/api/tools/dynamodb/utils'

const logger = createLogger('DynamoDBIntrospectAPI')

const IntrospectSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  tableName: z.string().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(`[${requestId}] Introspecting DynamoDB in region ${params.region}`)

    const client = createRawDynamoDBClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    try {
      const { tables } = await listTables(client)

      if (params.tableName) {
        logger.info(`[${requestId}] Describing table: ${params.tableName}`)
        const { tableDetails } = await describeTable(client, params.tableName)

        logger.info(`[${requestId}] Table description completed for '${params.tableName}'`)

        return NextResponse.json({
          message: `Table '${params.tableName}' described successfully.`,
          tables,
          tableDetails,
        })
      }

      logger.info(`[${requestId}] Listed ${tables.length} tables`)

      return NextResponse.json({
        message: `Found ${tables.length} table(s) in region '${params.region}'.`,
        tables,
      })
    } finally {
      client.destroy()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] DynamoDB introspection failed:`, error)

    return NextResponse.json(
      { error: `DynamoDB introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
@@ -1,4 +1,4 @@
-import { DynamoDBClient } from '@aws-sdk/client-dynamodb'
+import { DescribeTableCommand, DynamoDBClient, ListTablesCommand } from '@aws-sdk/client-dynamodb'
 import {
   DeleteCommand,
   DynamoDBDocumentClient,
@@ -8,7 +8,7 @@ import {
   ScanCommand,
   UpdateCommand,
 } from '@aws-sdk/lib-dynamodb'
-import type { DynamoDBConnectionConfig } from '@/tools/dynamodb/types'
+import type { DynamoDBConnectionConfig, DynamoDBTableSchema } from '@/tools/dynamodb/types'

 export function createDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBDocumentClient {
   const client = new DynamoDBClient({
@@ -172,3 +172,99 @@ export async function deleteItem(
   await client.send(command)
   return { success: true }
 }

/**
 * Creates a raw DynamoDB client for operations that don't require DocumentClient
 */
export function createRawDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBClient {
  return new DynamoDBClient({
    region: config.region,
    credentials: {
      accessKeyId: config.accessKeyId,
      secretAccessKey: config.secretAccessKey,
    },
  })
}

/**
 * Lists all DynamoDB tables in the configured region
 */
export async function listTables(client: DynamoDBClient): Promise<{ tables: string[] }> {
  const tables: string[] = []
  let exclusiveStartTableName: string | undefined

  do {
    const command = new ListTablesCommand({
      ExclusiveStartTableName: exclusiveStartTableName,
    })

    const response = await client.send(command)
    if (response.TableNames) {
      tables.push(...response.TableNames)
    }
    exclusiveStartTableName = response.LastEvaluatedTableName
  } while (exclusiveStartTableName)

  return { tables }
}

/**
 * Describes a specific DynamoDB table and returns its schema information
 */
export async function describeTable(
  client: DynamoDBClient,
  tableName: string
): Promise<{ tableDetails: DynamoDBTableSchema }> {
  const command = new DescribeTableCommand({
    TableName: tableName,
  })

  const response = await client.send(command)
  const table = response.Table

  if (!table) {
    throw new Error(`Table '${tableName}' not found`)
  }

  const tableDetails: DynamoDBTableSchema = {
    tableName: table.TableName || tableName,
    tableStatus: table.TableStatus || 'UNKNOWN',
    keySchema:
      table.KeySchema?.map((key) => ({
        attributeName: key.AttributeName || '',
        keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
      })) || [],
    attributeDefinitions:
      table.AttributeDefinitions?.map((attr) => ({
        attributeName: attr.AttributeName || '',
        attributeType: (attr.AttributeType as 'S' | 'N' | 'B') || 'S',
      })) || [],
    globalSecondaryIndexes:
      table.GlobalSecondaryIndexes?.map((gsi) => ({
        indexName: gsi.IndexName || '',
        keySchema:
          gsi.KeySchema?.map((key) => ({
            attributeName: key.AttributeName || '',
            keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
          })) || [],
        projectionType: gsi.Projection?.ProjectionType || 'ALL',
        indexStatus: gsi.IndexStatus || 'UNKNOWN',
      })) || [],
    localSecondaryIndexes:
      table.LocalSecondaryIndexes?.map((lsi) => ({
        indexName: lsi.IndexName || '',
        keySchema:
          lsi.KeySchema?.map((key) => ({
            attributeName: key.AttributeName || '',
            keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
          })) || [],
        projectionType: lsi.Projection?.ProjectionType || 'ALL',
        indexStatus: 'ACTIVE',
      })) || [],
    itemCount: Number(table.ItemCount) || 0,
    tableSizeBytes: Number(table.TableSizeBytes) || 0,
    billingMode: table.BillingModeSummary?.BillingMode || 'PROVISIONED',
  }

  return { tableDetails }
}
73 apps/sim/app/api/tools/mongodb/introspect/route.ts Normal file
@@ -0,0 +1,73 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createMongoDBConnection, executeIntrospect } from '../utils'

const logger = createLogger('MongoDBIntrospectAPI')

const IntrospectSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().optional(),
  username: z.string().optional(),
  password: z.string().optional(),
  authSource: z.string().optional(),
  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)
  let client = null

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting MongoDB at ${params.host}:${params.port}${params.database ? `/${params.database}` : ''}`
    )

    client = await createMongoDBConnection({
      host: params.host,
      port: params.port,
      database: params.database || 'admin',
      username: params.username,
      password: params.password,
      authSource: params.authSource,
      ssl: params.ssl,
    })

    const result = await executeIntrospect(client, params.database)

    logger.info(
      `[${requestId}] Introspection completed: ${result.databases.length} databases, ${result.collections.length} collections`
    )

    return NextResponse.json({
      message: result.message,
      databases: result.databases,
      collections: result.collections,
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] MongoDB introspect failed:`, error)

    return NextResponse.json(
      { error: `MongoDB introspect failed: ${errorMessage}` },
      { status: 500 }
    )
  } finally {
    if (client) {
      await client.close()
    }
  }
}
@@ -1,5 +1,5 @@
 import { MongoClient } from 'mongodb'
-import type { MongoDBConnectionConfig } from '@/tools/mongodb/types'
+import type { MongoDBCollectionInfo, MongoDBConnectionConfig } from '@/tools/mongodb/types'

 export async function createMongoDBConnection(config: MongoDBConnectionConfig) {
   const credentials =
@@ -129,3 +129,59 @@ export function sanitizeCollectionName(name: string): string {
   }
   return name
 }

/**
 * Introspect MongoDB to get databases, collections, and indexes
 */
export async function executeIntrospect(
  client: MongoClient,
  database?: string
): Promise<{
  message: string
  databases: string[]
  collections: MongoDBCollectionInfo[]
}> {
  const databases: string[] = []
  const collections: MongoDBCollectionInfo[] = []

  if (database) {
    databases.push(database)
    const db = client.db(database)
    const collectionList = await db.listCollections().toArray()

    for (const collInfo of collectionList) {
      const coll = db.collection(collInfo.name)
      const indexes = await coll.indexes()
      const documentCount = await coll.estimatedDocumentCount()

      collections.push({
        name: collInfo.name,
        type: collInfo.type || 'collection',
        documentCount,
        indexes: indexes.map((idx) => ({
          name: idx.name || '',
          key: idx.key as Record<string, number>,
          unique: idx.unique || false,
          sparse: idx.sparse,
        })),
      })
    }
  } else {
    const admin = client.db().admin()
    const dbList = await admin.listDatabases()

    for (const dbInfo of dbList.databases) {
      databases.push(dbInfo.name)
    }
  }

  const message = database
    ? `Found ${collections.length} collections in database '${database}'`
    : `Found ${databases.length} databases`

  return {
    message,
    databases,
    collections,
  }
}
70 apps/sim/app/api/tools/mysql/introspect/route.ts Normal file
@@ -0,0 +1,70 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createMySQLConnection, executeIntrospect } from '@/app/api/tools/mysql/utils'

const logger = createLogger('MySQLIntrospectAPI')

const IntrospectSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().min(1, 'Database name is required'),
  username: z.string().min(1, 'Username is required'),
  password: z.string().min(1, 'Password is required'),
  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting MySQL schema on ${params.host}:${params.port}/${params.database}`
    )

    const connection = await createMySQLConnection({
      host: params.host,
      port: params.port,
      database: params.database,
      username: params.username,
      password: params.password,
      ssl: params.ssl,
    })

    try {
      const result = await executeIntrospect(connection, params.database)

      logger.info(
        `[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
      )

      return NextResponse.json({
        message: `Schema introspection completed. Found ${result.tables.length} table(s) in database '${params.database}'.`,
        tables: result.tables,
        databases: result.databases,
      })
    } finally {
      await connection.end()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] MySQL introspection failed:`, error)

    return NextResponse.json(
      { error: `MySQL introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
@@ -166,3 +166,146 @@ function sanitizeSingleIdentifier(identifier: string): string {

   return `\`${cleaned}\``
 }

export interface MySQLIntrospectionResult {
  tables: Array<{
    name: string
    database: string
    columns: Array<{
      name: string
      type: string
      nullable: boolean
      default: string | null
      isPrimaryKey: boolean
      isForeignKey: boolean
      autoIncrement: boolean
      references?: {
        table: string
        column: string
      }
    }>
    primaryKey: string[]
    foreignKeys: Array<{
      column: string
      referencesTable: string
      referencesColumn: string
    }>
    indexes: Array<{
      name: string
      columns: string[]
      unique: boolean
    }>
  }>
  databases: string[]
}

export async function executeIntrospect(
  connection: mysql.Connection,
  databaseName: string
): Promise<MySQLIntrospectionResult> {
  const [databasesRows] = await connection.execute<mysql.RowDataPacket[]>(
    `SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA
     WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
     ORDER BY SCHEMA_NAME`
  )
  const databases = databasesRows.map((row) => row.SCHEMA_NAME)

  const [tablesRows] = await connection.execute<mysql.RowDataPacket[]>(
    `SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES
     WHERE TABLE_SCHEMA = ? AND TABLE_TYPE = 'BASE TABLE'
     ORDER BY TABLE_NAME`,
    [databaseName]
  )

  const tables = []

  for (const tableRow of tablesRows) {
    const tableName = tableRow.TABLE_NAME

    const [columnsRows] = await connection.execute<mysql.RowDataPacket[]>(
      `SELECT COLUMN_NAME, DATA_TYPE, COLUMN_TYPE, IS_NULLABLE, COLUMN_DEFAULT, EXTRA
       FROM INFORMATION_SCHEMA.COLUMNS
       WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?
       ORDER BY ORDINAL_POSITION`,
      [databaseName, tableName]
    )

    const [pkRows] = await connection.execute<mysql.RowDataPacket[]>(
      `SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
       WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND CONSTRAINT_NAME = 'PRIMARY'
       ORDER BY ORDINAL_POSITION`,
      [databaseName, tableName]
    )
    const primaryKeyColumns = pkRows.map((row) => row.COLUMN_NAME)

    const [fkRows] = await connection.execute<mysql.RowDataPacket[]>(
      `SELECT kcu.COLUMN_NAME, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME
       FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu
       WHERE kcu.TABLE_SCHEMA = ? AND kcu.TABLE_NAME = ? AND kcu.REFERENCED_TABLE_NAME IS NOT NULL`,
      [databaseName, tableName]
    )

    const foreignKeys = fkRows.map((row) => ({
      column: row.COLUMN_NAME,
      referencesTable: row.REFERENCED_TABLE_NAME,
      referencesColumn: row.REFERENCED_COLUMN_NAME,
    }))

    const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))

    const [indexRows] = await connection.execute<mysql.RowDataPacket[]>(
      `SELECT INDEX_NAME, COLUMN_NAME, SEQ_IN_INDEX, NON_UNIQUE
       FROM INFORMATION_SCHEMA.STATISTICS
       WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND INDEX_NAME != 'PRIMARY'
       ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
      [databaseName, tableName]
    )

    const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
    for (const row of indexRows) {
      const indexName = row.INDEX_NAME
      if (!indexMap.has(indexName)) {
        indexMap.set(indexName, {
          name: indexName,
          columns: [],
          unique: row.NON_UNIQUE === 0,
        })
      }
      indexMap.get(indexName)!.columns.push(row.COLUMN_NAME)
    }
    const indexes = Array.from(indexMap.values())

    const columns = columnsRows.map((col) => {
      const columnName = col.COLUMN_NAME
      const fk = foreignKeys.find((f) => f.column === columnName)
      const isAutoIncrement = col.EXTRA?.toLowerCase().includes('auto_increment') || false

      return {
        name: columnName,
        type: col.COLUMN_TYPE || col.DATA_TYPE,
        nullable: col.IS_NULLABLE === 'YES',
        default: col.COLUMN_DEFAULT,
        isPrimaryKey: primaryKeyColumns.includes(columnName),
        isForeignKey: fkColumnSet.has(columnName),
        autoIncrement: isAutoIncrement,
        ...(fk && {
          references: {
            table: fk.referencesTable,
            column: fk.referencesColumn,
          },
        }),
      }
    })

    tables.push({
      name: tableName,
      database: databaseName,
      columns,
      primaryKey: primaryKeyColumns,
      foreignKeys,
      indexes,
    })
  }

  return { tables, databases }
}
199 apps/sim/app/api/tools/neo4j/introspect/route.ts Normal file
@@ -0,0 +1,199 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createNeo4jDriver } from '@/app/api/tools/neo4j/utils'
import type { Neo4jNodeSchema, Neo4jRelationshipSchema } from '@/tools/neo4j/types'

const logger = createLogger('Neo4jIntrospectAPI')

const IntrospectSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().min(1, 'Database name is required'),
  username: z.string().min(1, 'Username is required'),
  password: z.string().min(1, 'Password is required'),
  encryption: z.enum(['enabled', 'disabled']).default('disabled'),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)
  let driver = null
  let session = null

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting Neo4j database at ${params.host}:${params.port}/${params.database}`
    )

    driver = await createNeo4jDriver({
      host: params.host,
      port: params.port,
      database: params.database,
      username: params.username,
      password: params.password,
      encryption: params.encryption,
    })

    session = driver.session({ database: params.database })

    const labelsResult = await session.run(
      'CALL db.labels() YIELD label RETURN label ORDER BY label'
    )
    const labels: string[] = labelsResult.records.map((record) => record.get('label') as string)

    const relationshipTypesResult = await session.run(
      'CALL db.relationshipTypes() YIELD relationshipType RETURN relationshipType ORDER BY relationshipType'
    )
    const relationshipTypes: string[] = relationshipTypesResult.records.map(
      (record) => record.get('relationshipType') as string
    )

    const nodeSchemas: Neo4jNodeSchema[] = []
    try {
      const nodePropertiesResult = await session.run(
        'CALL db.schema.nodeTypeProperties() YIELD nodeLabels, propertyName, propertyTypes RETURN nodeLabels, propertyName, propertyTypes'
      )

      const nodePropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()

      for (const record of nodePropertiesResult.records) {
        const nodeLabels = record.get('nodeLabels') as string[]
        const propertyName = record.get('propertyName') as string
        const propertyTypes = record.get('propertyTypes') as string[]

        const labelKey = nodeLabels.join(':')
        if (!nodePropertiesMap.has(labelKey)) {
          nodePropertiesMap.set(labelKey, [])
        }
        nodePropertiesMap.get(labelKey)!.push({ name: propertyName, types: propertyTypes })
      }

      for (const [labelKey, properties] of nodePropertiesMap) {
        nodeSchemas.push({
          label: labelKey,
          properties,
        })
      }
    } catch (nodePropsError) {
      logger.warn(
        `[${requestId}] Could not fetch node properties (may not be supported in this Neo4j version): ${nodePropsError}`
      )
    }

    const relationshipSchemas: Neo4jRelationshipSchema[] = []
    try {
      const relPropertiesResult = await session.run(
        'CALL db.schema.relTypeProperties() YIELD relationshipType, propertyName, propertyTypes RETURN relationshipType, propertyName, propertyTypes'
      )

      const relPropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()

      for (const record of relPropertiesResult.records) {
        const relType = record.get('relationshipType') as string
        const propertyName = record.get('propertyName') as string | null
        const propertyTypes = record.get('propertyTypes') as string[]

        if (!relPropertiesMap.has(relType)) {
          relPropertiesMap.set(relType, [])
        }
        if (propertyName) {
          relPropertiesMap.get(relType)!.push({ name: propertyName, types: propertyTypes })
        }
      }

      for (const [relType, properties] of relPropertiesMap) {
        relationshipSchemas.push({
          type: relType,
          properties,
        })
      }
    } catch (relPropsError) {
      logger.warn(
        `[${requestId}] Could not fetch relationship properties (may not be supported in this Neo4j version): ${relPropsError}`
      )
    }

    const constraints: Array<{
      name: string
      type: string
      entityType: string
      properties: string[]
    }> = []
    try {
      const constraintsResult = await session.run('SHOW CONSTRAINTS')

      for (const record of constraintsResult.records) {
        const name = record.get('name') as string
        const type = record.get('type') as string
        const entityType = record.get('entityType') as string
        const properties = (record.get('properties') as string[]) || []

        constraints.push({ name, type, entityType, properties })
      }
    } catch (constraintsError) {
      logger.warn(
        `[${requestId}] Could not fetch constraints (may not be supported in this Neo4j version): ${constraintsError}`
      )
    }

    const indexes: Array<{ name: string; type: string; entityType: string; properties: string[] }> =
      []
    try {
      const indexesResult = await session.run('SHOW INDEXES')

      for (const record of indexesResult.records) {
        const name = record.get('name') as string
        const type = record.get('type') as string
        const entityType = record.get('entityType') as string
        const properties = (record.get('properties') as string[]) || []

        indexes.push({ name, type, entityType, properties })
      }
    } catch (indexesError) {
      logger.warn(
        `[${requestId}] Could not fetch indexes (may not be supported in this Neo4j version): ${indexesError}`
      )
    }

    logger.info(
      `[${requestId}] Introspection completed: ${labels.length} labels, ${relationshipTypes.length} relationship types, ${constraints.length} constraints, ${indexes.length} indexes`
    )

    return NextResponse.json({
      message: `Database introspection completed: found ${labels.length} labels, ${relationshipTypes.length} relationship types, ${nodeSchemas.length} node schemas, ${relationshipSchemas.length} relationship schemas, ${constraints.length} constraints, ${indexes.length} indexes`,
      labels,
      relationshipTypes,
      nodeSchemas,
      relationshipSchemas,
      constraints,
      indexes,
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] Neo4j introspection failed:`, error)

    return NextResponse.json(
      { error: `Neo4j introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  } finally {
    if (session) {
      await session.close()
    }
    if (driver) {
      await driver.close()
    }
  }
}
71 apps/sim/app/api/tools/postgresql/introspect/route.ts Normal file
@@ -0,0 +1,71 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createPostgresConnection, executeIntrospect } from '@/app/api/tools/postgresql/utils'

const logger = createLogger('PostgreSQLIntrospectAPI')

const IntrospectSchema = z.object({
  host: z.string().min(1, 'Host is required'),
  port: z.coerce.number().int().positive('Port must be a positive integer'),
  database: z.string().min(1, 'Database name is required'),
  username: z.string().min(1, 'Username is required'),
  password: z.string().min(1, 'Password is required'),
  ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
  schema: z.string().default('public'),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting PostgreSQL schema on ${params.host}:${params.port}/${params.database}`
    )

    const sql = createPostgresConnection({
      host: params.host,
      port: params.port,
      database: params.database,
      username: params.username,
      password: params.password,
      ssl: params.ssl,
    })

    try {
      const result = await executeIntrospect(sql, params.schema)

      logger.info(
        `[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
      )

      return NextResponse.json({
        message: `Schema introspection completed. Found ${result.tables.length} table(s) in schema '${params.schema}'.`,
        tables: result.tables,
        schemas: result.schemas,
      })
    } finally {
      await sql.end()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] PostgreSQL introspection failed:`, error)

    return NextResponse.json(
      { error: `PostgreSQL introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
@@ -187,3 +187,184 @@ export async function executeDelete(
     rowCount,
   }
 }

export interface IntrospectionResult {
  tables: Array<{
    name: string
    schema: string
    columns: Array<{
      name: string
      type: string
      nullable: boolean
      default: string | null
      isPrimaryKey: boolean
      isForeignKey: boolean
      references?: {
        table: string
        column: string
      }
    }>
    primaryKey: string[]
    foreignKeys: Array<{
      column: string
      referencesTable: string
      referencesColumn: string
    }>
    indexes: Array<{
      name: string
      columns: string[]
      unique: boolean
    }>
  }>
  schemas: string[]
}

export async function executeIntrospect(
  sql: any,
  schemaName = 'public'
): Promise<IntrospectionResult> {
  const schemasResult = await sql`
    SELECT schema_name
    FROM information_schema.schemata
    WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
    ORDER BY schema_name
  `
  const schemas = schemasResult.map((row: { schema_name: string }) => row.schema_name)

  const tablesResult = await sql`
    SELECT table_name, table_schema
    FROM information_schema.tables
    WHERE table_schema = ${schemaName}
      AND table_type = 'BASE TABLE'
    ORDER BY table_name
  `

  const tables = []

  for (const tableRow of tablesResult) {
    const tableName = tableRow.table_name
    const tableSchema = tableRow.table_schema

    const columnsResult = await sql`
      SELECT
        c.column_name,
        c.data_type,
        c.is_nullable,
        c.column_default,
        c.udt_name
      FROM information_schema.columns c
      WHERE c.table_schema = ${tableSchema}
        AND c.table_name = ${tableName}
      ORDER BY c.ordinal_position
    `

    const pkResult = await sql`
      SELECT kcu.column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu
        ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
      WHERE tc.constraint_type = 'PRIMARY KEY'
        AND tc.table_schema = ${tableSchema}
        AND tc.table_name = ${tableName}
    `
    const primaryKeyColumns = pkResult.map((row: { column_name: string }) => row.column_name)

    const fkResult = await sql`
      SELECT
        kcu.column_name,
        ccu.table_name AS foreign_table_name,
        ccu.column_name AS foreign_column_name
      FROM information_schema.table_constraints tc
      JOIN information_schema.key_column_usage kcu
        ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
      JOIN information_schema.constraint_column_usage ccu
        ON ccu.constraint_name = tc.constraint_name
        AND ccu.table_schema = tc.table_schema
      WHERE tc.constraint_type = 'FOREIGN KEY'
        AND tc.table_schema = ${tableSchema}
        AND tc.table_name = ${tableName}
    `

    const foreignKeys = fkResult.map(
      (row: { column_name: string; foreign_table_name: string; foreign_column_name: string }) => ({
        column: row.column_name,
        referencesTable: row.foreign_table_name,
        referencesColumn: row.foreign_column_name,
      })
    )

    const fkColumnSet = new Set(foreignKeys.map((fk: { column: string }) => fk.column))

    const indexesResult = await sql`
      SELECT
        i.relname AS index_name,
        a.attname AS column_name,
        ix.indisunique AS is_unique
      FROM pg_class t
      JOIN pg_index ix ON t.oid = ix.indrelid
      JOIN pg_class i ON i.oid = ix.indexrelid
      JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
      JOIN pg_namespace n ON n.oid = t.relnamespace
      WHERE t.relkind = 'r'
        AND n.nspname = ${tableSchema}
        AND t.relname = ${tableName}
        AND NOT ix.indisprimary
      ORDER BY i.relname, a.attnum
    `

    const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
    for (const row of indexesResult) {
      const indexName = row.index_name
      if (!indexMap.has(indexName)) {
        indexMap.set(indexName, {
          name: indexName,
          columns: [],
          unique: row.is_unique,
        })
      }
      indexMap.get(indexName)!.columns.push(row.column_name)
    }
    const indexes = Array.from(indexMap.values())

    const columns = columnsResult.map(
      (col: {
        column_name: string
        data_type: string
        is_nullable: string
        column_default: string | null
        udt_name: string
      }) => {
        const columnName = col.column_name
        const fk = foreignKeys.find((f: { column: string }) => f.column === columnName)

        return {
          name: columnName,
          type: col.data_type === 'USER-DEFINED' ? col.udt_name : col.data_type,
          nullable: col.is_nullable === 'YES',
          default: col.column_default,
          isPrimaryKey: primaryKeyColumns.includes(columnName),
          isForeignKey: fkColumnSet.has(columnName),
          ...(fk && {
            references: {
              table: fk.referencesTable,
              column: fk.referencesColumn,
            },
          }),
        }
      }
    )

    tables.push({
      name: tableName,
      schema: tableSchema,
      columns,
      primaryKey: primaryKeyColumns,
      foreignKeys,
      indexes,
    })
  }

  return { tables, schemas }
}
80 apps/sim/app/api/tools/rds/introspect/route.ts Normal file
@@ -0,0 +1,80 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createRdsClient, executeIntrospect, type RdsEngine } from '@/app/api/tools/rds/utils'

const logger = createLogger('RDSIntrospectAPI')

const IntrospectSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  resourceArn: z.string().min(1, 'Resource ARN is required'),
  secretArn: z.string().min(1, 'Secret ARN is required'),
  database: z.string().optional(),
  schema: z.string().optional(),
  engine: z.enum(['aurora-postgresql', 'aurora-mysql']).optional(),
})

export async function POST(request: NextRequest) {
  const requestId = randomUUID().slice(0, 8)

  try {
    const body = await request.json()
    const params = IntrospectSchema.parse(body)

    logger.info(
      `[${requestId}] Introspecting RDS Aurora database${params.database ? ` (${params.database})` : ''}`
    )

    const client = createRdsClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      resourceArn: params.resourceArn,
      secretArn: params.secretArn,
      database: params.database,
    })

    try {
      const result = await executeIntrospect(
        client,
        params.resourceArn,
        params.secretArn,
        params.database,
        params.schema,
        params.engine as RdsEngine | undefined
      )

      logger.info(
        `[${requestId}] Introspection completed successfully. Engine: ${result.engine}, found ${result.tables.length} tables`
      )

      return NextResponse.json({
        message: `Schema introspection completed. Engine: ${result.engine}. Found ${result.tables.length} table(s).`,
        engine: result.engine,
        tables: result.tables,
        schemas: result.schemas,
      })
    } finally {
      client.destroy()
    }
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        { error: 'Invalid request data', details: error.errors },
        { status: 400 }
      )
    }

    const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
    logger.error(`[${requestId}] RDS introspection failed:`, error)

    return NextResponse.json(
      { error: `RDS introspection failed: ${errorMessage}` },
      { status: 500 }
    )
  }
}
@@ -241,3 +241,487 @@ export async function executeDelete(
|
||||
|
||||
return executeStatement(client, resourceArn, secretArn, database, sql, parameters)
|
||||
}
|
||||
|
||||
export type RdsEngine = 'aurora-postgresql' | 'aurora-mysql'
|
||||
|
||||
export interface RdsIntrospectionResult {
|
||||
engine: RdsEngine
|
||||
tables: Array<{
|
||||
name: string
|
||||
schema: string
|
||||
columns: Array<{
|
||||
name: string
|
||||
type: string
|
||||
nullable: boolean
|
||||
default: string | null
|
||||
isPrimaryKey: boolean
|
||||
isForeignKey: boolean
|
||||
references?: {
|
||||
table: string
|
||||
column: string
|
||||
}
|
||||
}>
|
||||
primaryKey: string[]
|
||||
foreignKeys: Array<{
|
||||
column: string
|
||||
referencesTable: string
|
||||
referencesColumn: string
|
||||
}>
|
||||
indexes: Array<{
|
||||
name: string
|
||||
columns: string[]
|
||||
unique: boolean
|
||||
}>
|
||||
}>
|
||||
schemas: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Detects the database engine by querying SELECT VERSION()
|
||||
*/
|
||||
export async function detectEngine(
|
||||
client: RDSDataClient,
|
||||
resourceArn: string,
|
||||
secretArn: string,
|
||||
database: string | undefined
|
||||
): Promise<RdsEngine> {
|
||||
const result = await executeStatement(
|
||||
client,
|
||||
resourceArn,
|
||||
secretArn,
|
||||
database,
|
||||
'SELECT VERSION()'
|
||||
)
|
||||
|
||||
if (result.rows.length > 0) {
|
||||
const versionRow = result.rows[0] as Record<string, unknown>
|
||||
const versionValue = Object.values(versionRow)[0]
|
||||
const versionString = String(versionValue).toLowerCase()
|
||||
|
||||
if (versionString.includes('postgresql') || versionString.includes('postgres')) {
|
||||
return 'aurora-postgresql'
|
||||
}
|
||||
if (versionString.includes('mysql') || versionString.includes('mariadb')) {
|
||||
return 'aurora-mysql'
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error('Unable to detect database engine. Please specify the engine parameter.')
|
||||
}

/**
 * Introspects PostgreSQL schema using INFORMATION_SCHEMA
 */
async function introspectPostgresql(
  client: RDSDataClient,
  resourceArn: string,
  secretArn: string,
  database: string | undefined,
  schemaName: string
): Promise<RdsIntrospectionResult> {
  const schemasResult = await executeStatement(
    client,
    resourceArn,
    secretArn,
    database,
    `SELECT schema_name FROM information_schema.schemata
     WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
     ORDER BY schema_name`
  )
  const schemas = schemasResult.rows.map((row) => (row as { schema_name: string }).schema_name)

  const tablesResult = await executeStatement(
    client,
    resourceArn,
    secretArn,
    database,
    `SELECT table_name, table_schema
     FROM information_schema.tables
     WHERE table_schema = :schemaName
       AND table_type = 'BASE TABLE'
     ORDER BY table_name`,
    [{ name: 'schemaName', value: { stringValue: schemaName } }]
  )

  const tables = []

  for (const tableRow of tablesResult.rows) {
    const row = tableRow as { table_name: string; table_schema: string }
    const tableName = row.table_name
    const tableSchema = row.table_schema

    const columnsResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         c.column_name,
         c.data_type,
         c.is_nullable,
         c.column_default,
         c.udt_name
       FROM information_schema.columns c
       WHERE c.table_schema = :tableSchema
         AND c.table_name = :tableName
       ORDER BY c.ordinal_position`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const pkResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT kcu.column_name
       FROM information_schema.table_constraints tc
       JOIN information_schema.key_column_usage kcu
         ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
       WHERE tc.constraint_type = 'PRIMARY KEY'
         AND tc.table_schema = :tableSchema
         AND tc.table_name = :tableName`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )
    const primaryKeyColumns = pkResult.rows.map((r) => (r as { column_name: string }).column_name)

    const fkResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         kcu.column_name,
         ccu.table_name AS foreign_table_name,
         ccu.column_name AS foreign_column_name
       FROM information_schema.table_constraints tc
       JOIN information_schema.key_column_usage kcu
         ON tc.constraint_name = kcu.constraint_name
        AND tc.table_schema = kcu.table_schema
       JOIN information_schema.constraint_column_usage ccu
         ON ccu.constraint_name = tc.constraint_name
        AND ccu.table_schema = tc.table_schema
       WHERE tc.constraint_type = 'FOREIGN KEY'
         AND tc.table_schema = :tableSchema
         AND tc.table_name = :tableName`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const foreignKeys = fkResult.rows.map((r) => {
      const fkRow = r as {
        column_name: string
        foreign_table_name: string
        foreign_column_name: string
      }
      return {
        column: fkRow.column_name,
        referencesTable: fkRow.foreign_table_name,
        referencesColumn: fkRow.foreign_column_name,
      }
    })

    const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))

    const indexesResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         i.relname AS index_name,
         a.attname AS column_name,
         ix.indisunique AS is_unique
       FROM pg_class t
       JOIN pg_index ix ON t.oid = ix.indrelid
       JOIN pg_class i ON i.oid = ix.indexrelid
       JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
       JOIN pg_namespace n ON n.oid = t.relnamespace
       WHERE t.relkind = 'r'
         AND n.nspname = :tableSchema
         AND t.relname = :tableName
         AND NOT ix.indisprimary
       ORDER BY i.relname, a.attnum`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
    for (const idxRow of indexesResult.rows) {
      const idx = idxRow as { index_name: string; column_name: string; is_unique: boolean }
      const indexName = idx.index_name
      if (!indexMap.has(indexName)) {
        indexMap.set(indexName, {
          name: indexName,
          columns: [],
          unique: idx.is_unique,
        })
      }
      indexMap.get(indexName)!.columns.push(idx.column_name)
    }
    const indexes = Array.from(indexMap.values())

    const columns = columnsResult.rows.map((colRow) => {
      const col = colRow as {
        column_name: string
        data_type: string
        is_nullable: string
        column_default: string | null
        udt_name: string
      }
      const columnName = col.column_name
      const fk = foreignKeys.find((f) => f.column === columnName)

      return {
        name: columnName,
        type: col.data_type === 'USER-DEFINED' ? col.udt_name : col.data_type,
        nullable: col.is_nullable === 'YES',
        default: col.column_default,
        isPrimaryKey: primaryKeyColumns.includes(columnName),
        isForeignKey: fkColumnSet.has(columnName),
        ...(fk && {
          references: {
            table: fk.referencesTable,
            column: fk.referencesColumn,
          },
        }),
      }
    })

    tables.push({
      name: tableName,
      schema: tableSchema,
      columns,
      primaryKey: primaryKeyColumns,
      foreignKeys,
      indexes,
    })
  }

  return { engine: 'aurora-postgresql', tables, schemas }
}
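
To make the assembled shape concrete, here is a hedged sketch of one introspected table as built above (all field values are invented for illustration; `RdsIntrospectionResult` is the type this module already returns):

```ts
// Invented example data - shows the structure assembled above, not real output.
const example: RdsIntrospectionResult = {
  engine: 'aurora-postgresql',
  schemas: ['public'],
  tables: [
    {
      name: 'orders',
      schema: 'public',
      columns: [
        { name: 'id', type: 'integer', nullable: false, default: null, isPrimaryKey: true, isForeignKey: false },
        {
          name: 'user_id',
          type: 'integer',
          nullable: false,
          default: null,
          isPrimaryKey: false,
          isForeignKey: true,
          references: { table: 'users', column: 'id' },
        },
      ],
      primaryKey: ['id'],
      foreignKeys: [{ column: 'user_id', referencesTable: 'users', referencesColumn: 'id' }],
      indexes: [{ name: 'orders_user_id_idx', columns: ['user_id'], unique: false }],
    },
  ],
}
```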

/**
 * Introspects MySQL schema using INFORMATION_SCHEMA
 */
async function introspectMysql(
  client: RDSDataClient,
  resourceArn: string,
  secretArn: string,
  database: string | undefined,
  schemaName: string
): Promise<RdsIntrospectionResult> {
  const schemasResult = await executeStatement(
    client,
    resourceArn,
    secretArn,
    database,
    `SELECT SCHEMA_NAME as schema_name FROM information_schema.SCHEMATA
     WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
     ORDER BY SCHEMA_NAME`
  )
  const schemas = schemasResult.rows.map((row) => (row as { schema_name: string }).schema_name)

  const tablesResult = await executeStatement(
    client,
    resourceArn,
    secretArn,
    database,
    `SELECT TABLE_NAME as table_name, TABLE_SCHEMA as table_schema
     FROM information_schema.TABLES
     WHERE TABLE_SCHEMA = :schemaName
       AND TABLE_TYPE = 'BASE TABLE'
     ORDER BY TABLE_NAME`,
    [{ name: 'schemaName', value: { stringValue: schemaName } }]
  )

  const tables = []

  for (const tableRow of tablesResult.rows) {
    const row = tableRow as { table_name: string; table_schema: string }
    const tableName = row.table_name
    const tableSchema = row.table_schema

    const columnsResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         COLUMN_NAME as column_name,
         DATA_TYPE as data_type,
         IS_NULLABLE as is_nullable,
         COLUMN_DEFAULT as column_default,
         COLUMN_TYPE as column_type,
         COLUMN_KEY as column_key
       FROM information_schema.COLUMNS
       WHERE TABLE_SCHEMA = :tableSchema
         AND TABLE_NAME = :tableName
       ORDER BY ORDINAL_POSITION`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const pkResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT COLUMN_NAME as column_name
       FROM information_schema.KEY_COLUMN_USAGE
       WHERE TABLE_SCHEMA = :tableSchema
         AND TABLE_NAME = :tableName
         AND CONSTRAINT_NAME = 'PRIMARY'
       ORDER BY ORDINAL_POSITION`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )
    const primaryKeyColumns = pkResult.rows.map((r) => (r as { column_name: string }).column_name)

    const fkResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         kcu.COLUMN_NAME as column_name,
         kcu.REFERENCED_TABLE_NAME as foreign_table_name,
         kcu.REFERENCED_COLUMN_NAME as foreign_column_name
       FROM information_schema.KEY_COLUMN_USAGE kcu
       WHERE kcu.TABLE_SCHEMA = :tableSchema
         AND kcu.TABLE_NAME = :tableName
         AND kcu.REFERENCED_TABLE_NAME IS NOT NULL`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const foreignKeys = fkResult.rows.map((r) => {
      const fkRow = r as {
        column_name: string
        foreign_table_name: string
        foreign_column_name: string
      }
      return {
        column: fkRow.column_name,
        referencesTable: fkRow.foreign_table_name,
        referencesColumn: fkRow.foreign_column_name,
      }
    })

    const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))

    const indexesResult = await executeStatement(
      client,
      resourceArn,
      secretArn,
      database,
      `SELECT
         INDEX_NAME as index_name,
         COLUMN_NAME as column_name,
         NON_UNIQUE as non_unique
       FROM information_schema.STATISTICS
       WHERE TABLE_SCHEMA = :tableSchema
         AND TABLE_NAME = :tableName
         AND INDEX_NAME != 'PRIMARY'
       ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
      [
        { name: 'tableSchema', value: { stringValue: tableSchema } },
        { name: 'tableName', value: { stringValue: tableName } },
      ]
    )

    const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
    for (const idxRow of indexesResult.rows) {
      const idx = idxRow as { index_name: string; column_name: string; non_unique: number }
      const indexName = idx.index_name
      if (!indexMap.has(indexName)) {
        indexMap.set(indexName, {
          name: indexName,
          columns: [],
          unique: idx.non_unique === 0,
        })
      }
      indexMap.get(indexName)!.columns.push(idx.column_name)
    }
    const indexes = Array.from(indexMap.values())

    const columns = columnsResult.rows.map((colRow) => {
      const col = colRow as {
        column_name: string
        data_type: string
        is_nullable: string
        column_default: string | null
        column_type: string
        column_key: string
      }
      const columnName = col.column_name
      const fk = foreignKeys.find((f) => f.column === columnName)

      return {
        name: columnName,
        type: col.column_type || col.data_type,
        nullable: col.is_nullable === 'YES',
        default: col.column_default,
        isPrimaryKey: col.column_key === 'PRI',
        isForeignKey: fkColumnSet.has(columnName),
        ...(fk && {
          references: {
            table: fk.referencesTable,
            column: fk.referencesColumn,
          },
        }),
      }
    })

    tables.push({
      name: tableName,
      schema: tableSchema,
      columns,
      primaryKey: primaryKeyColumns,
      foreignKeys,
      indexes,
    })
  }

  return { engine: 'aurora-mysql', tables, schemas }
}

/**
 * Introspects RDS Aurora database schema with auto-detection of engine type
 */
export async function executeIntrospect(
  client: RDSDataClient,
  resourceArn: string,
  secretArn: string,
  database: string | undefined,
  schemaName?: string,
  engine?: RdsEngine
): Promise<RdsIntrospectionResult> {
  const detectedEngine = engine || (await detectEngine(client, resourceArn, secretArn, database))

  if (detectedEngine === 'aurora-postgresql') {
    const schema = schemaName || 'public'
    return introspectPostgresql(client, resourceArn, secretArn, database, schema)
  }
  const schema = schemaName || database || ''
  if (!schema) {
    throw new Error('Schema or database name is required for MySQL introspection')
  }
  return introspectMysql(client, resourceArn, secretArn, database, schema)
}
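
A minimal calling sketch for `executeIntrospect`, assuming credentials are resolved by the SDK's default chain (the ARN values below are placeholders, not real resources; the client construction is the standard `@aws-sdk/client-rds-data` pattern):

```ts
import { RDSDataClient } from '@aws-sdk/client-rds-data'

const client = new RDSDataClient({ region: 'us-east-1' })

// Placeholders - substitute real cluster and secret ARNs.
const result = await executeIntrospect(
  client,
  'arn:aws:rds:us-east-1:123456789012:cluster:my-cluster',
  'arn:aws:secretsmanager:us-east-1:123456789012:secret:my-secret',
  'mydb'
)
console.log(result.engine, result.tables.map((t) => t.name))
```
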
@@ -1,6 +1,6 @@
'use client'

import React, { useEffect, useMemo, useState } from 'react'
import React, { memo, useCallback, useState } from 'react'
import { Check, Copy } from 'lucide-react'
import ReactMarkdown from 'react-markdown'
import remarkGfm from 'remark-gfm'
@@ -28,55 +28,95 @@ const getTextContent = (element: React.ReactNode): string => {
  return ''
}

// Global layout fixes for markdown content inside the copilot panel
if (typeof document !== 'undefined') {
  const styleId = 'copilot-markdown-fix'
  if (!document.getElementById(styleId)) {
    const style = document.createElement('style')
    style.id = styleId
    style.textContent = `
      /* Prevent any markdown content from expanding beyond the panel */
      .copilot-markdown-wrapper,
      .copilot-markdown-wrapper * {
        max-width: 100% !important;
      }

      .copilot-markdown-wrapper p,
      .copilot-markdown-wrapper li {
        overflow-wrap: anywhere !important;
        word-break: break-word !important;
      }

      .copilot-markdown-wrapper a {
        overflow-wrap: anywhere !important;
        word-break: break-all !important;
      }

      .copilot-markdown-wrapper code:not(pre code) {
        white-space: normal !important;
        overflow-wrap: anywhere !important;
        word-break: break-word !important;
      }

      /* Reduce top margin for first heading (e.g., right after thinking block) */
      .copilot-markdown-wrapper > h1:first-child,
      .copilot-markdown-wrapper > h2:first-child,
      .copilot-markdown-wrapper > h3:first-child,
      .copilot-markdown-wrapper > h4:first-child {
        margin-top: 0.25rem !important;
      }
    `
    document.head.appendChild(style)
  }
/**
 * Maps common language aliases to supported viewer languages
 */
const LANGUAGE_MAP: Record<string, 'javascript' | 'json' | 'python'> = {
  js: 'javascript',
  javascript: 'javascript',
  jsx: 'javascript',
  ts: 'javascript',
  typescript: 'javascript',
  tsx: 'javascript',
  json: 'json',
  python: 'python',
  py: 'python',
  code: 'javascript',
}

/**
 * Link component with hover preview tooltip
 * Displays full URL on hover for better UX
 * @param props - Component props with href and children
 * @returns Link element with tooltip preview
 * Normalizes a language string to a supported viewer language
 */
function LinkWithPreview({ href, children }: { href: string; children: React.ReactNode }) {
function normalizeLanguage(lang: string): 'javascript' | 'json' | 'python' {
  const normalized = (lang || '').toLowerCase()
  return LANGUAGE_MAP[normalized] || 'javascript'
}
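
The net effect of the alias map plus fallback, shown with a few sample inputs:

```ts
normalizeLanguage('tsx')  // 'javascript'
normalizeLanguage('PY')   // 'python' (case-insensitive)
normalizeLanguage('rust') // 'javascript' (fallback for unmapped languages)
```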

/**
 * Props for the CodeBlock component
 */
interface CodeBlockProps {
  /** Code content to display */
  code: string
  /** Language identifier from markdown */
  language: string
}

/**
 * CodeBlock component with isolated copy state
 * Prevents full markdown re-renders when copy button is clicked
 */
const CodeBlock = memo(function CodeBlock({ code, language }: CodeBlockProps) {
  const [copied, setCopied] = useState(false)

  const handleCopy = useCallback(() => {
    if (code) {
      navigator.clipboard.writeText(code)
      setCopied(true)
      setTimeout(() => setCopied(false), 2000)
    }
  }, [code])

  const viewerLanguage = normalizeLanguage(language)
  const displayLanguage = language === 'code' ? viewerLanguage : language

  return (
    <div className='mt-2.5 mb-2.5 w-0 min-w-full overflow-hidden rounded-md border border-[var(--border-1)] bg-[var(--surface-1)] text-sm'>
      <div className='flex items-center justify-between border-[var(--border-1)] border-b px-3 py-1'>
        <span className='font-season text-[var(--text-muted)] text-xs'>{displayLanguage}</span>
        <button
          onClick={handleCopy}
          className='text-[var(--text-muted)] transition-colors hover:text-[var(--text-tertiary)]'
          title='Copy'
          type='button'
        >
          {copied ? (
            <Check className='h-3 w-3' strokeWidth={2} />
          ) : (
            <Copy className='h-3 w-3' strokeWidth={2} />
          )}
        </button>
      </div>
      <Code.Viewer
        code={code.replace(/\n+$/, '')}
        showGutter
        language={viewerLanguage}
        className='m-0 min-h-0 rounded-none border-0 bg-transparent'
      />
    </div>
  )
})

/**
 * Link component with hover preview tooltip
 */
const LinkWithPreview = memo(function LinkWithPreview({
  href,
  children,
}: {
  href: string
  children: React.ReactNode
}) {
  return (
    <Tooltip.Root delayDuration={300}>
      <Tooltip.Trigger asChild>
@@ -94,7 +134,7 @@ function LinkWithPreview({ href, children }: { href: string; children: React.Rea
        </Tooltip.Content>
    </Tooltip.Root>
  )
}
})

/**
 * Props for the CopilotMarkdownRenderer component
@@ -104,275 +144,197 @@ interface CopilotMarkdownRendererProps {
  content: string
}

/**
 * Static markdown component definitions - optimized for LLM chat spacing
 * Tighter spacing compared to traditional prose for better chat UX
 */
const markdownComponents = {
  // Paragraphs - tight spacing, no margin on last
  p: ({ children }: React.HTMLAttributes<HTMLParagraphElement>) => (
    <p className='mb-1.5 font-base font-season text-[var(--text-primary)] text-sm leading-[1.4] last:mb-0 dark:font-[470]'>
      {children}
    </p>
  ),

  // Headings - minimal margins for chat context
  h1: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
    <h1 className='mt-2 mb-1 font-season font-semibold text-[var(--text-primary)] text-base first:mt-0'>
      {children}
    </h1>
  ),
  h2: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
    <h2 className='mt-2 mb-1 font-season font-semibold text-[15px] text-[var(--text-primary)] first:mt-0'>
      {children}
    </h2>
  ),
  h3: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
    <h3 className='mt-1.5 mb-0.5 font-season font-semibold text-[var(--text-primary)] text-sm first:mt-0'>
      {children}
    </h3>
  ),
  h4: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
    <h4 className='mt-1.5 mb-0.5 font-season font-semibold text-[var(--text-primary)] text-sm first:mt-0'>
      {children}
    </h4>
  ),

  // Lists - compact spacing
  ul: ({ children }: React.HTMLAttributes<HTMLUListElement>) => (
    <ul
      className='my-1 space-y-0.5 pl-5 font-base font-season text-[var(--text-primary)] dark:font-[470]'
      style={{ listStyleType: 'disc' }}
    >
      {children}
    </ul>
  ),
  ol: ({ children }: React.HTMLAttributes<HTMLOListElement>) => (
    <ol
      className='my-1 space-y-0.5 pl-5 font-base font-season text-[var(--text-primary)] dark:font-[470]'
      style={{ listStyleType: 'decimal' }}
    >
      {children}
    </ol>
  ),
  li: ({ children }: React.LiHTMLAttributes<HTMLLIElement>) => (
    <li
      className='font-base font-season text-[var(--text-primary)] text-sm leading-[1.4] dark:font-[470]'
      style={{ display: 'list-item' }}
    >
      {children}
    </li>
  ),

  // Code blocks - handled by CodeBlock component
  pre: ({ children }: React.HTMLAttributes<HTMLPreElement>) => {
    let codeContent: React.ReactNode = children
    let language = 'code'

    if (
      React.isValidElement<{ className?: string; children?: React.ReactNode }>(children) &&
      children.type === 'code'
    ) {
      const childElement = children as React.ReactElement<{
        className?: string
        children?: React.ReactNode
      }>
      codeContent = childElement.props.children
      language = childElement.props.className?.replace('language-', '') || 'code'
    }

    let actualCodeText = ''
    if (typeof codeContent === 'string') {
      actualCodeText = codeContent
    } else if (React.isValidElement(codeContent)) {
      actualCodeText = getTextContent(codeContent)
    } else if (Array.isArray(codeContent)) {
      actualCodeText = codeContent
        .map((child) =>
          typeof child === 'string'
            ? child
            : React.isValidElement(child)
              ? getTextContent(child)
              : ''
        )
        .join('')
    } else {
      actualCodeText = String(codeContent || '')
    }

    return <CodeBlock code={actualCodeText} language={language} />
  },

  // Inline code
  code: ({
    className,
    children,
    ...props
  }: React.HTMLAttributes<HTMLElement> & { className?: string }) => (
    <code
      className='whitespace-normal break-all rounded border border-[var(--border-1)] bg-[var(--surface-1)] px-1 py-0.5 font-mono text-[0.85em] text-[var(--text-primary)]'
      {...props}
    >
      {children}
    </code>
  ),

  // Text formatting
  strong: ({ children }: React.HTMLAttributes<HTMLElement>) => (
    <strong className='font-semibold text-[var(--text-primary)]'>{children}</strong>
  ),
  b: ({ children }: React.HTMLAttributes<HTMLElement>) => (
    <b className='font-semibold text-[var(--text-primary)]'>{children}</b>
  ),
  em: ({ children }: React.HTMLAttributes<HTMLElement>) => (
    <em className='text-[var(--text-primary)] italic'>{children}</em>
  ),
  i: ({ children }: React.HTMLAttributes<HTMLElement>) => (
    <i className='text-[var(--text-primary)] italic'>{children}</i>
  ),

  // Blockquote - compact
  blockquote: ({ children }: React.HTMLAttributes<HTMLQuoteElement>) => (
    <blockquote className='my-1.5 border-[var(--border-1)] border-l-2 py-0.5 pl-3 font-season text-[var(--text-secondary)] text-sm italic'>
      {children}
    </blockquote>
  ),

  // Horizontal rule
  hr: () => <hr className='my-3 border-[var(--divider)] border-t' />,

  // Links
  a: ({ href, children }: React.AnchorHTMLAttributes<HTMLAnchorElement>) => (
    <LinkWithPreview href={href || '#'}>{children}</LinkWithPreview>
  ),

  // Tables - compact
  table: ({ children }: React.TableHTMLAttributes<HTMLTableElement>) => (
    <div className='my-2 max-w-full overflow-x-auto'>
      <table className='min-w-full table-auto border border-[var(--border-1)] font-season text-xs'>
        {children}
      </table>
    </div>
  ),
  thead: ({ children }: React.HTMLAttributes<HTMLTableSectionElement>) => (
    <thead className='bg-[var(--surface-5)] text-left dark:bg-[var(--surface-4)]'>{children}</thead>
  ),
  tbody: ({ children }: React.HTMLAttributes<HTMLTableSectionElement>) => (
    <tbody className='divide-y divide-[var(--border-1)]'>{children}</tbody>
  ),
  tr: ({ children }: React.HTMLAttributes<HTMLTableRowElement>) => (
    <tr className='border-[var(--border-1)] border-b'>{children}</tr>
  ),
  th: ({ children }: React.ThHTMLAttributes<HTMLTableCellElement>) => (
    <th className='border-[var(--border-1)] border-r px-2 py-1 align-top font-base text-[var(--text-secondary)] last:border-r-0 dark:font-[470]'>
      {children}
    </th>
  ),
  td: ({ children }: React.TdHTMLAttributes<HTMLTableCellElement>) => (
    <td className='break-words border-[var(--border-1)] border-r px-2 py-1 align-top font-base text-[var(--text-primary)] last:border-r-0 dark:font-[470]'>
      {children}
    </td>
  ),

  // Images
  img: ({ src, alt, ...props }: React.ImgHTMLAttributes<HTMLImageElement>) => (
    <img src={src} alt={alt || 'Image'} className='my-2 h-auto max-w-full rounded-md' {...props} />
  ),
}

/**
 * CopilotMarkdownRenderer renders markdown content with custom styling
 * Supports GitHub-flavored markdown, code blocks with syntax highlighting,
 * tables, links with preview, and more
 * Optimized for LLM chat: tight spacing, memoized components, isolated state
 *
 * @param props - Component props
 * @returns Rendered markdown content
 */
export default function CopilotMarkdownRenderer({ content }: CopilotMarkdownRendererProps) {
  const [copiedCodeBlocks, setCopiedCodeBlocks] = useState<Record<string, boolean>>({})

  useEffect(() => {
    const timers: Record<string, NodeJS.Timeout> = {}

    Object.keys(copiedCodeBlocks).forEach((key) => {
      if (copiedCodeBlocks[key]) {
        timers[key] = setTimeout(() => {
          setCopiedCodeBlocks((prev) => ({ ...prev, [key]: false }))
        }, 2000)
      }
    })

    return () => {
      Object.values(timers).forEach(clearTimeout)
    }
  }, [copiedCodeBlocks])

  const markdownComponents = useMemo(
    () => ({
      p: ({ children }: React.HTMLAttributes<HTMLParagraphElement>) => (
        <p className='mb-2 font-base font-season text-[var(--text-primary)] text-sm leading-[1.25rem] last:mb-0 dark:font-[470]'>
          {children}
        </p>
      ),

      h1: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
        <h1 className='mt-3 mb-3 font-season font-semibold text-2xl text-[var(--text-primary)]'>
          {children}
        </h1>
      ),
      h2: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
        <h2 className='mt-2.5 mb-2.5 font-season font-semibold text-[var(--text-primary)] text-xl'>
          {children}
        </h2>
      ),
      h3: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
        <h3 className='mt-2 mb-2 font-season font-semibold text-[var(--text-primary)] text-lg'>
          {children}
        </h3>
      ),
      h4: ({ children }: React.HTMLAttributes<HTMLHeadingElement>) => (
        <h4 className='mt-2 mb-2 font-season font-semibold text-[var(--text-primary)] text-base'>
          {children}
        </h4>
      ),

      ul: ({ children }: React.HTMLAttributes<HTMLUListElement>) => (
        <ul
          className='mt-1 mb-1 space-y-1.5 pl-6 font-base font-season text-[var(--text-primary)] dark:font-[470]'
          style={{ listStyleType: 'disc' }}
        >
          {children}
        </ul>
      ),
      ol: ({ children }: React.HTMLAttributes<HTMLOListElement>) => (
        <ol
          className='mt-1 mb-1 space-y-1.5 pl-6 font-base font-season text-[var(--text-primary)] dark:font-[470]'
          style={{ listStyleType: 'decimal' }}
        >
          {children}
        </ol>
      ),
      li: ({
        children,
        ordered,
      }: React.LiHTMLAttributes<HTMLLIElement> & { ordered?: boolean }) => (
        <li
          className='font-base font-season text-[var(--text-primary)] dark:font-[470]'
          style={{ display: 'list-item' }}
        >
          {children}
        </li>
      ),

      pre: ({ children }: React.HTMLAttributes<HTMLPreElement>) => {
        let codeContent: React.ReactNode = children
        let language = 'code'

        if (
          React.isValidElement<{ className?: string; children?: React.ReactNode }>(children) &&
          children.type === 'code'
        ) {
          const childElement = children as React.ReactElement<{
            className?: string
            children?: React.ReactNode
          }>
          codeContent = childElement.props.children
          language = childElement.props.className?.replace('language-', '') || 'code'
        }

        let actualCodeText = ''
        if (typeof codeContent === 'string') {
          actualCodeText = codeContent
        } else if (React.isValidElement(codeContent)) {
          actualCodeText = getTextContent(codeContent)
        } else if (Array.isArray(codeContent)) {
          actualCodeText = codeContent
            .map((child) =>
              typeof child === 'string'
                ? child
                : React.isValidElement(child)
                  ? getTextContent(child)
                  : ''
            )
            .join('')
        } else {
          actualCodeText = String(codeContent || '')
        }

        const codeText = actualCodeText || 'code'
        const codeBlockKey = `${language}-${codeText.substring(0, 30).replace(/\s/g, '-')}-${codeText.length}`

        const showCopySuccess = copiedCodeBlocks[codeBlockKey] || false

        const handleCopy = () => {
          const textToCopy = actualCodeText
          if (textToCopy) {
            navigator.clipboard.writeText(textToCopy)
            setCopiedCodeBlocks((prev) => ({ ...prev, [codeBlockKey]: true }))
          }
        }

        const normalizedLanguage = (language || '').toLowerCase()
        const viewerLanguage: 'javascript' | 'json' | 'python' =
          normalizedLanguage === 'json'
            ? 'json'
            : normalizedLanguage === 'python' || normalizedLanguage === 'py'
              ? 'python'
              : 'javascript'

        return (
          <div className='mt-6 mb-6 w-0 min-w-full overflow-hidden rounded-md border border-[var(--border-1)] bg-[var(--surface-1)] text-sm'>
            <div className='flex items-center justify-between border-[var(--border-1)] border-b px-4 py-1.5'>
              <span className='font-season text-[var(--text-muted)] text-xs'>
                {language === 'code' ? viewerLanguage : language}
              </span>
              <button
                onClick={handleCopy}
                className='text-[var(--text-muted)] transition-colors hover:text-[var(--text-tertiary)]'
                title='Copy'
              >
                {showCopySuccess ? (
                  <Check className='h-3 w-3' strokeWidth={2} />
                ) : (
                  <Copy className='h-3 w-3' strokeWidth={2} />
                )}
              </button>
            </div>
            <Code.Viewer
              code={actualCodeText.replace(/\n+$/, '')}
              showGutter
              language={viewerLanguage}
              className='m-0 min-h-0 rounded-none border-0 bg-transparent'
            />
          </div>
        )
      },

      code: ({
        inline,
        className,
        children,
        ...props
      }: React.HTMLAttributes<HTMLElement> & { className?: string; inline?: boolean }) => {
        if (inline) {
          return (
            <code
              className='whitespace-normal break-all rounded border border-[var(--border-1)] bg-[var(--surface-1)] px-1 py-0.5 font-mono text-[0.9em] text-[var(--text-primary)]'
              {...props}
            >
              {children}
            </code>
          )
        }
        return (
          <code className={className} {...props}>
            {children}
          </code>
        )
      },

      strong: ({ children }: React.HTMLAttributes<HTMLElement>) => (
        <strong className='font-semibold text-[var(--text-primary)]'>{children}</strong>
      ),

      b: ({ children }: React.HTMLAttributes<HTMLElement>) => (
        <b className='font-semibold text-[var(--text-primary)]'>{children}</b>
      ),

      em: ({ children }: React.HTMLAttributes<HTMLElement>) => (
        <em className='text-[var(--text-primary)] italic'>{children}</em>
      ),

      i: ({ children }: React.HTMLAttributes<HTMLElement>) => (
        <i className='text-[var(--text-primary)] italic'>{children}</i>
      ),

      blockquote: ({ children }: React.HTMLAttributes<HTMLQuoteElement>) => (
        <blockquote className='my-4 border-[var(--border-1)] border-l-4 py-1 pl-4 font-season text-[var(--text-secondary)] italic'>
          {children}
        </blockquote>
      ),

      hr: () => <hr className='my-8 border-[var(--divider)] border-t' />,

      a: ({ href, children, ...props }: React.AnchorHTMLAttributes<HTMLAnchorElement>) => (
        <LinkWithPreview href={href || '#'} {...props}>
          {children}
        </LinkWithPreview>
      ),

      table: ({ children }: React.TableHTMLAttributes<HTMLTableElement>) => (
        <div className='my-4 max-w-full overflow-x-auto'>
          <table className='min-w-full table-auto border border-[var(--border-1)] font-season text-sm'>
            {children}
          </table>
        </div>
      ),
      thead: ({ children }: React.HTMLAttributes<HTMLTableSectionElement>) => (
        <thead className='bg-[var(--surface-5)] text-left dark:bg-[var(--surface-4)]'>
          {children}
        </thead>
      ),
      tbody: ({ children }: React.HTMLAttributes<HTMLTableSectionElement>) => (
        <tbody className='divide-y divide-[var(--border-1)]'>{children}</tbody>
      ),
      tr: ({ children }: React.HTMLAttributes<HTMLTableRowElement>) => (
        <tr className='border-[var(--border-1)] border-b transition-colors hover:bg-[var(--surface-5)] dark:hover:bg-[var(--surface-4)]/60'>
          {children}
        </tr>
      ),
      th: ({ children }: React.ThHTMLAttributes<HTMLTableCellElement>) => (
        <th className='border-[var(--border-1)] border-r px-4 py-2 align-top font-base text-[var(--text-secondary)] last:border-r-0 dark:font-[470]'>
          {children}
        </th>
      ),
      td: ({ children }: React.TdHTMLAttributes<HTMLTableCellElement>) => (
        <td className='break-words border-[var(--border-1)] border-r px-4 py-2 align-top font-base text-[var(--text-primary)] last:border-r-0 dark:font-[470]'>
          {children}
        </td>
      ),

      img: ({ src, alt, ...props }: React.ImgHTMLAttributes<HTMLImageElement>) => (
        <img
          src={src}
          alt={alt || 'Image'}
          className='my-3 h-auto max-w-full rounded-md'
          {...props}
        />
      ),
    }),
    [copiedCodeBlocks]
  )

function CopilotMarkdownRenderer({ content }: CopilotMarkdownRendererProps) {
  return (
    <div className='copilot-markdown-wrapper max-w-full space-y-3 break-words font-base font-season text-[var(--text-primary)] text-sm leading-[1.25rem] dark:font-[470]'>
    <div className='max-w-full break-words font-base font-season text-[var(--text-primary)] text-sm leading-[1.4] dark:font-[470] [&_*]:max-w-full [&_a]:break-all [&_code:not(pre_code)]:break-words [&_li]:break-words [&_p]:break-words'>
      <ReactMarkdown remarkPlugins={[remarkGfm]} components={markdownComponents}>
        {content}
      </ReactMarkdown>
    </div>
  )
}

export default memo(CopilotMarkdownRenderer)

@@ -2,18 +2,38 @@ import { memo, useEffect, useRef, useState } from 'react'
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'

/**
 * Character animation delay in milliseconds
 * Minimum delay between characters (fast catch-up mode)
 */
const CHARACTER_DELAY = 3
const MIN_DELAY = 1

/**
 * Maximum delay between characters (when waiting for content)
 */
const MAX_DELAY = 12

/**
 * Default delay when streaming normally
 */
const DEFAULT_DELAY = 4

/**
 * How far behind (in characters) before we speed up
 */
const CATCH_UP_THRESHOLD = 20

/**
 * How close to content before we slow down
 */
const SLOW_DOWN_THRESHOLD = 5

/**
 * StreamingIndicator shows animated dots during message streaming
 * Uses CSS classes for animations to follow best practices
 * Used as a standalone indicator when no content has arrived yet
 *
 * @returns Animated loading indicator
 */
export const StreamingIndicator = memo(() => (
  <div className='flex items-center py-1 text-muted-foreground transition-opacity duration-200 ease-in-out'>
    <div className='flex h-[1.25rem] items-center text-muted-foreground'>
      <div className='flex space-x-0.5'>
        <div className='h-1 w-1 animate-bounce rounded-full bg-muted-foreground [animation-delay:0ms] [animation-duration:1.2s]' />
        <div className='h-1 w-1 animate-bounce rounded-full bg-muted-foreground [animation-delay:150ms] [animation-duration:1.2s]' />
@@ -34,9 +54,39 @@ interface SmoothStreamingTextProps {
  isStreaming: boolean
}

/**
 * Calculates adaptive delay based on how far behind animation is from actual content
 *
 * @param displayedLength - Current displayed content length
 * @param totalLength - Total available content length
 * @returns Delay in milliseconds
 */
function calculateAdaptiveDelay(displayedLength: number, totalLength: number): number {
  const charsRemaining = totalLength - displayedLength

  if (charsRemaining > CATCH_UP_THRESHOLD) {
    // Far behind - speed up to catch up
    // Scale from MIN_DELAY to DEFAULT_DELAY based on how far behind
    const catchUpFactor = Math.min(1, (charsRemaining - CATCH_UP_THRESHOLD) / 50)
    return MIN_DELAY + (DEFAULT_DELAY - MIN_DELAY) * (1 - catchUpFactor)
  }

  if (charsRemaining <= SLOW_DOWN_THRESHOLD) {
    // Close to content edge - slow down to feel natural
    // The closer we are, the slower we go (up to MAX_DELAY)
    const slowFactor = 1 - charsRemaining / SLOW_DOWN_THRESHOLD
    return DEFAULT_DELAY + (MAX_DELAY - DEFAULT_DELAY) * slowFactor
  }

  // Normal streaming speed
  return DEFAULT_DELAY
}
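
Worked values for the three regimes, computed from the constants above:

```ts
// charsRemaining = 70 -> catchUpFactor = 1.0 -> delay = 1 ms   (MIN_DELAY, far behind)
// charsRemaining = 45 -> catchUpFactor = 0.5 -> delay = 2.5 ms
// charsRemaining = 10 -> neither branch      -> delay = 4 ms   (DEFAULT_DELAY)
// charsRemaining = 2  -> slowFactor = 0.6    -> delay = 8.8 ms
// charsRemaining = 0  -> slowFactor = 1.0    -> delay = 12 ms  (MAX_DELAY)
```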

/**
 * SmoothStreamingText component displays text with character-by-character animation
 * Creates a smooth streaming effect for AI responses
 * Creates a smooth streaming effect for AI responses with adaptive speed
 *
 * Uses adaptive pacing: speeds up when catching up, slows down near content edge
 *
 * @param props - Component props
 * @returns Streaming text with smooth animation
@@ -45,74 +95,73 @@ export const SmoothStreamingText = memo(
  ({ content, isStreaming }: SmoothStreamingTextProps) => {
    const [displayedContent, setDisplayedContent] = useState('')
    const contentRef = useRef(content)
    const timeoutRef = useRef<NodeJS.Timeout | null>(null)
    const rafRef = useRef<number | null>(null)
    const indexRef = useRef(0)
    const streamingStartTimeRef = useRef<number | null>(null)
    const lastFrameTimeRef = useRef<number>(0)
    const isAnimatingRef = useRef(false)

    /**
     * Handles content streaming animation
     * Updates displayed content character by character during streaming
     */
    useEffect(() => {
      contentRef.current = content

      if (content.length === 0) {
        setDisplayedContent('')
        indexRef.current = 0
        streamingStartTimeRef.current = null
        return
      }

      if (isStreaming) {
        if (streamingStartTimeRef.current === null) {
          streamingStartTimeRef.current = Date.now()
        }
        if (indexRef.current < content.length && !isAnimatingRef.current) {
          isAnimatingRef.current = true
          lastFrameTimeRef.current = performance.now()

        if (indexRef.current < content.length) {
          const animateText = () => {
          const animateText = (timestamp: number) => {
            const currentContent = contentRef.current
            const currentIndex = indexRef.current
            const elapsed = timestamp - lastFrameTimeRef.current

            if (currentIndex < currentContent.length) {
              const chunkSize = 1
              const newDisplayed = currentContent.slice(0, currentIndex + chunkSize)
            // Calculate adaptive delay based on how far behind we are
            const delay = calculateAdaptiveDelay(currentIndex, currentContent.length)

              setDisplayedContent(newDisplayed)
              indexRef.current = currentIndex + chunkSize
            if (elapsed >= delay) {
              if (currentIndex < currentContent.length) {
                const newDisplayed = currentContent.slice(0, currentIndex + 1)
                setDisplayedContent(newDisplayed)
                indexRef.current = currentIndex + 1
                lastFrameTimeRef.current = timestamp
              }
            }

            timeoutRef.current = setTimeout(animateText, CHARACTER_DELAY)
            if (indexRef.current < currentContent.length) {
              rafRef.current = requestAnimationFrame(animateText)
            } else {
              isAnimatingRef.current = false
            }
          }

          if (!isAnimatingRef.current) {
            if (timeoutRef.current) {
              clearTimeout(timeoutRef.current)
            }

            isAnimatingRef.current = true
            animateText()
          }
          rafRef.current = requestAnimationFrame(animateText)
        } else if (indexRef.current < content.length && isAnimatingRef.current) {
          // Animation already running, it will pick up new content automatically
        }
      } else {
        // Streaming ended - show full content immediately
        if (rafRef.current) {
          cancelAnimationFrame(rafRef.current)
        }
        setDisplayedContent(content)
        indexRef.current = content.length
        isAnimatingRef.current = false
        streamingStartTimeRef.current = null
      }

      return () => {
        if (timeoutRef.current) {
          clearTimeout(timeoutRef.current)
        if (rafRef.current) {
          cancelAnimationFrame(rafRef.current)
        }
        isAnimatingRef.current = false
      }
    }, [content, isStreaming])

    return (
      <div className='relative min-h-[1.25rem] max-w-full overflow-hidden'>
      <div className='min-h-[1.25rem] max-w-full'>
        <CopilotMarkdownRenderer content={displayedContent} />
      </div>
    )
@@ -121,7 +170,6 @@ export const SmoothStreamingText = memo(
    // Prevent re-renders during streaming unless content actually changed
    return (
      prevProps.content === nextProps.content && prevProps.isStreaming === nextProps.isStreaming
      // markdownComponents is now memoized so no need to compare
    )
  }
)

@@ -1,6 +1,6 @@
'use client'

import { useEffect, useRef, useState } from 'react'
import { memo, useEffect, useRef, useState } from 'react'
import clsx from 'clsx'
import { ChevronUp } from 'lucide-react'
import CopilotMarkdownRenderer from './markdown-renderer'
@@ -8,18 +8,151 @@ import CopilotMarkdownRenderer from './markdown-renderer'
/**
 * Max height for thinking content before internal scrolling kicks in
 */
const THINKING_MAX_HEIGHT = 200
const THINKING_MAX_HEIGHT = 150

/**
 * Height threshold before gradient fade kicks in
 */
const GRADIENT_THRESHOLD = 100

/**
 * Interval for auto-scroll during streaming (ms)
 */
const SCROLL_INTERVAL = 100
const SCROLL_INTERVAL = 50

/**
 * Timer update interval in milliseconds
 */
const TIMER_UPDATE_INTERVAL = 100

/**
 * Thinking text streaming - much faster than main text
 * Essentially instant with minimal delay
 */
const THINKING_DELAY = 0.5
const THINKING_CHARS_PER_FRAME = 3

/**
 * Props for the SmoothThinkingText component
 */
interface SmoothThinkingTextProps {
  content: string
  isStreaming: boolean
}

/**
 * SmoothThinkingText renders thinking content with fast streaming animation
 * Uses gradient fade at top when content is tall enough
 */
const SmoothThinkingText = memo(
  ({ content, isStreaming }: SmoothThinkingTextProps) => {
    const [displayedContent, setDisplayedContent] = useState('')
    const [showGradient, setShowGradient] = useState(false)
    const contentRef = useRef(content)
    const textRef = useRef<HTMLDivElement>(null)
    const rafRef = useRef<number | null>(null)
    const indexRef = useRef(0)
    const lastFrameTimeRef = useRef<number>(0)
    const isAnimatingRef = useRef(false)

    useEffect(() => {
      contentRef.current = content

      if (content.length === 0) {
        setDisplayedContent('')
        indexRef.current = 0
        return
      }

      if (isStreaming) {
        if (indexRef.current < content.length && !isAnimatingRef.current) {
          isAnimatingRef.current = true
          lastFrameTimeRef.current = performance.now()

          const animateText = (timestamp: number) => {
            const currentContent = contentRef.current
            const currentIndex = indexRef.current
            const elapsed = timestamp - lastFrameTimeRef.current

            if (elapsed >= THINKING_DELAY) {
              if (currentIndex < currentContent.length) {
                // Reveal multiple characters per frame for faster streaming
                const newIndex = Math.min(
                  currentIndex + THINKING_CHARS_PER_FRAME,
                  currentContent.length
                )
                const newDisplayed = currentContent.slice(0, newIndex)
                setDisplayedContent(newDisplayed)
                indexRef.current = newIndex
                lastFrameTimeRef.current = timestamp
              }
            }

            if (indexRef.current < currentContent.length) {
              rafRef.current = requestAnimationFrame(animateText)
            } else {
              isAnimatingRef.current = false
            }
          }

          rafRef.current = requestAnimationFrame(animateText)
        }
      } else {
        // Streaming ended - show full content immediately
        if (rafRef.current) {
          cancelAnimationFrame(rafRef.current)
        }
        setDisplayedContent(content)
        indexRef.current = content.length
        isAnimatingRef.current = false
      }

      return () => {
        if (rafRef.current) {
          cancelAnimationFrame(rafRef.current)
        }
        isAnimatingRef.current = false
      }
    }, [content, isStreaming])

    // Check if content height exceeds threshold for gradient
    useEffect(() => {
      if (textRef.current && isStreaming) {
        const height = textRef.current.scrollHeight
        setShowGradient(height > GRADIENT_THRESHOLD)
      } else {
        setShowGradient(false)
      }
    }, [displayedContent, isStreaming])

    // Apply vertical gradient fade at the top only when content is tall enough
    const gradientStyle =
      isStreaming && showGradient
        ? {
            maskImage: 'linear-gradient(to bottom, transparent 0%, black 30%, black 100%)',
            WebkitMaskImage: 'linear-gradient(to bottom, transparent 0%, black 30%, black 100%)',
          }
        : undefined

    return (
      <div
        ref={textRef}
        className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'
        style={gradientStyle}
      >
        <CopilotMarkdownRenderer content={displayedContent} />
      </div>
    )
  },
  (prevProps, nextProps) => {
    return (
      prevProps.content === nextProps.content && prevProps.isStreaming === nextProps.isStreaming
    )
  }
)

SmoothThinkingText.displayName = 'SmoothThinkingText'
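
Rough throughput of the loop above (approximate, derived from the constants; the actual rate depends on the display's refresh rate): with `THINKING_DELAY` at 0.5 ms the elapsed check passes on essentially every frame, so at a typical 60 fps display the component reveals about 180 characters per second.

```ts
// Approximate reveal rate at 60 fps (illustrative):
const fps = 60
const charsPerSecond = THINKING_CHARS_PER_FRAME * fps // 3 * 60 = 180
```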

/**
 * Props for the ThinkingBlock component
 */
@@ -66,8 +199,8 @@ export function ThinkingBlock({
   * Auto-collapses when streaming ends OR when following content arrives
   */
  useEffect(() => {
    // Collapse if streaming ended or if there's following content (like a tool call)
    if (!isStreaming || hasFollowingContent) {
    // Collapse if streaming ended, there's following content, or special tags arrived
    if (!isStreaming || hasFollowingContent || hasSpecialTags) {
      setIsExpanded(false)
      userCollapsedRef.current = false
      setUserHasScrolledAway(false)
@@ -77,7 +210,7 @@ export function ThinkingBlock({
    if (!userCollapsedRef.current && content && content.trim().length > 0) {
      setIsExpanded(true)
    }
  }, [isStreaming, content, hasFollowingContent])
  }, [isStreaming, content, hasFollowingContent, hasSpecialTags])

  // Reset start time when streaming begins
  useEffect(() => {
@@ -113,14 +246,14 @@ export function ThinkingBlock({
      const isNearBottom = distanceFromBottom <= 20

      const delta = scrollTop - lastScrollTopRef.current
      const movedUp = delta < -2
      const movedUp = delta < -1

      if (movedUp && !isNearBottom) {
        setUserHasScrolledAway(true)
      }

      // Re-stick if user scrolls back to bottom
      if (userHasScrolledAway && isNearBottom) {
      // Re-stick if user scrolls back to bottom with intent
      if (userHasScrolledAway && isNearBottom && delta > 10) {
        setUserHasScrolledAway(false)
      }

@@ -133,7 +266,7 @@ export function ThinkingBlock({
    return () => container.removeEventListener('scroll', handleScroll)
  }, [isExpanded, userHasScrolledAway])

  // Smart auto-scroll: only scroll if user hasn't scrolled away
  // Smart auto-scroll: always scroll to bottom while streaming unless user scrolled away
  useEffect(() => {
    if (!isStreaming || !isExpanded || userHasScrolledAway) return

@@ -141,20 +274,14 @@ export function ThinkingBlock({
      const container = scrollContainerRef.current
      if (!container) return

      const { scrollTop, scrollHeight, clientHeight } = container
      const distanceFromBottom = scrollHeight - scrollTop - clientHeight
      const isNearBottom = distanceFromBottom <= 50

      if (isNearBottom) {
        programmaticScrollRef.current = true
        container.scrollTo({
          top: container.scrollHeight,
          behavior: 'smooth',
        })
        window.setTimeout(() => {
          programmaticScrollRef.current = false
        }, 150)
      }
      programmaticScrollRef.current = true
      container.scrollTo({
        top: container.scrollHeight,
        behavior: 'auto',
      })
      window.setTimeout(() => {
        programmaticScrollRef.current = false
      }, 16)
    }, SCROLL_INTERVAL)

    return () => window.clearInterval(intervalId)
@@ -241,15 +368,11 @@ export function ThinkingBlock({
        <div
          ref={scrollContainerRef}
          className={clsx(
            'overflow-y-auto transition-all duration-300 ease-in-out',
            isExpanded ? 'mt-1.5 max-h-[200px] opacity-100' : 'max-h-0 opacity-0'
            'overflow-y-auto transition-all duration-150 ease-out',
            isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
          )}
        >
          {/* Render markdown during streaming with thinking text styling */}
          <div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-none [&_*]:!m-0 [&_*]:!p-0 [&_*]:!mb-0 [&_*]:!mt-0 [&_p]:!m-0 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_code]:!text-[11px] [&_ul]:!pl-4 [&_ul]:!my-0 [&_ol]:!pl-4 [&_ol]:!my-0 [&_li]:!my-0 [&_li]:!py-0 [&_br]:!leading-[0.5] whitespace-pre-wrap font-[470] font-season text-[12px] text-[var(--text-muted)] leading-none'>
            <CopilotMarkdownRenderer content={content} />
            <span className='ml-1 inline-block h-2 w-1 animate-pulse bg-[var(--text-muted)]' />
          </div>
          <SmoothThinkingText content={content} isStreaming={isStreaming && !hasFollowingContent} />
        </div>
      </div>
    )
@@ -281,12 +404,12 @@ export function ThinkingBlock({
        <div
          ref={scrollContainerRef}
          className={clsx(
            'overflow-y-auto transition-all duration-300 ease-in-out',
            isExpanded ? 'mt-1.5 max-h-[200px] opacity-100' : 'max-h-0 opacity-0'
            'overflow-y-auto transition-all duration-150 ease-out',
            isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
          )}
        >
          {/* Use markdown renderer for completed content */}
          <div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-none [&_*]:!m-0 [&_*]:!p-0 [&_*]:!mb-0 [&_*]:!mt-0 [&_p]:!m-0 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_code]:!text-[11px] [&_ul]:!pl-4 [&_ul]:!my-0 [&_ol]:!pl-4 [&_ol]:!my-0 [&_li]:!my-0 [&_li]:!py-0 [&_br]:!leading-[0.5] whitespace-pre-wrap font-[470] font-season text-[12px] text-[var(--text-muted)] leading-none'>
          {/* Completed thinking text - dimmed with markdown */}
          <div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
            <CopilotMarkdownRenderer content={content} />
          </div>
        </div>

@@ -187,6 +187,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
    )

    // Memoize content blocks to avoid re-rendering unchanged blocks
    // No entrance animations to prevent layout shift
    const memoizedContentBlocks = useMemo(() => {
      if (!message.contentBlocks || message.contentBlocks.length === 0) {
        return null
@@ -205,14 +206,10 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(

          // Use smooth streaming for the last text block if we're streaming
          const shouldUseSmoothing = isStreaming && isLastTextBlock
          const blockKey = `text-${index}-${block.timestamp || index}`

          return (
            <div
              key={`text-${index}-${block.timestamp || index}`}
              className={`w-full max-w-full overflow-hidden transition-opacity duration-200 ease-in-out ${
                cleanBlockContent.length > 0 ? 'opacity-100' : 'opacity-70'
              } ${shouldUseSmoothing ? 'translate-y-0 transition-transform duration-100 ease-out' : ''}`}
            >
            <div key={blockKey} className='w-full max-w-full'>
              {shouldUseSmoothing ? (
                <SmoothStreamingText content={cleanBlockContent} isStreaming={isStreaming} />
              ) : (
@@ -224,29 +221,33 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
        if (block.type === 'thinking') {
          // Check if there are any blocks after this one (tool calls, text, etc.)
          const hasFollowingContent = index < message.contentBlocks!.length - 1
          // Check if special tags (options, plan) are present - should also close thinking
          const hasSpecialTags = !!(parsedTags?.options || parsedTags?.plan)
          const blockKey = `thinking-${index}-${block.timestamp || index}`

          return (
            <div key={`thinking-${index}-${block.timestamp || index}`} className='w-full'>
            <div key={blockKey} className='w-full'>
              <ThinkingBlock
                content={block.content}
                isStreaming={isStreaming}
                hasFollowingContent={hasFollowingContent}
                hasSpecialTags={hasSpecialTags}
              />
            </div>
          )
        }
        if (block.type === 'tool_call') {
          const blockKey = `tool-${block.toolCall.id}`

          return (
            <div
              key={`tool-${block.toolCall.id}`}
              className='opacity-100 transition-opacity duration-300 ease-in-out'
            >
            <div key={blockKey}>
              <ToolCall toolCallId={block.toolCall.id} toolCall={block.toolCall} />
            </div>
          )
        }
        return null
      })
    }, [message.contentBlocks, isStreaming])
    }, [message.contentBlocks, isStreaming, parsedTags])

    if (isUser) {
      return (
@@ -279,6 +280,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
              onModeChange={setMode}
              panelWidth={panelWidth}
              clearOnSubmit={false}
              initialContexts={message.contexts}
            />

            {/* Inline Checkpoint Discard Confirmation - shown below input in edit mode */}
@@ -346,14 +348,18 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
      const contexts: any[] = Array.isArray((message as any).contexts)
        ? ((message as any).contexts as any[])
        : []
      const labels = contexts
        .filter((c) => c?.kind !== 'current_workflow')
        .map((c) => c?.label)
        .filter(Boolean) as string[]
      if (!labels.length) return text

      // Build tokens with their prefixes (@ for mentions, / for commands)
      const tokens = contexts
        .filter((c) => c?.kind !== 'current_workflow' && c?.label)
        .map((c) => {
          const prefix = c?.kind === 'slash_command' ? '/' : '@'
          return `${prefix}${c.label}`
        })
      if (!tokens.length) return text

      const escapeRegex = (s: string) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
      const pattern = new RegExp(`@(${labels.map(escapeRegex).join('|')})`, 'g')
      const pattern = new RegExp(`(${tokens.map(escapeRegex).join('|')})`, 'g')

      const nodes: React.ReactNode[] = []
      let lastIndex = 0
@@ -460,17 +466,29 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
      )
    }

    // Check if there's any visible content in the blocks
    const hasVisibleContent = useMemo(() => {
      if (!message.contentBlocks || message.contentBlocks.length === 0) return false
      return message.contentBlocks.some((block) => {
        if (block.type === 'text') {
          const parsed = parseSpecialTags(block.content)
          return parsed.cleanContent.trim().length > 0
        }
        return block.type === 'thinking' || block.type === 'tool_call'
      })
    }, [message.contentBlocks])

    if (isAssistant) {
      return (
        <div
          className={`w-full max-w-full overflow-hidden transition-opacity duration-200 [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
          className={`w-full max-w-full overflow-hidden [max-width:var(--panel-max-width)] ${isDimmed ? 'opacity-40' : 'opacity-100'}`}
          style={{ '--panel-max-width': `${panelWidth - 16}px` } as React.CSSProperties}
        >
          <div className='max-w-full space-y-1.5 px-[2px] transition-all duration-200 ease-in-out'>
          <div className='max-w-full space-y-1 px-[2px]'>
            {/* Content blocks in chronological order */}
            {memoizedContentBlocks}

            {/* Always show streaming indicator at the end while streaming */}
            {/* Streaming indicator always at bottom during streaming */}
            {isStreaming && <StreamingIndicator />}

            {message.errorType === 'usage_limit' && (

@@ -497,6 +497,11 @@ const ACTION_VERBS = [
  'Accessed',
  'Managing',
  'Managed',
  'Scraping',
  'Scraped',
  'Crawling',
  'Crawled',
  'Getting',
] as const

/**
@@ -1061,7 +1066,7 @@ function SubAgentContent({
      <div
        ref={scrollContainerRef}
        className={clsx(
          'overflow-y-auto transition-all duration-300 ease-in-out',
          'overflow-y-auto transition-all duration-150 ease-out',
          isExpanded ? 'mt-1.5 max-h-[200px] opacity-100' : 'max-h-0 opacity-0'
        )}
      >
@@ -1160,7 +1165,7 @@ function SubAgentThinkingContent({
 * Default behavior is to NOT collapse (stay expanded like edit).
 * Only these specific subagents collapse into "Planned for Xs >" style headers.
 */
const COLLAPSIBLE_SUBAGENTS = new Set(['plan', 'debug', 'research', 'info'])
const COLLAPSIBLE_SUBAGENTS = new Set(['plan', 'debug', 'research', 'info', 'superagent'])

/**
 * SubagentContentRenderer handles the rendering of subagent content.
@@ -1321,7 +1326,7 @@ function SubagentContentRenderer({

      <div
        className={clsx(
          'overflow-hidden transition-all duration-300 ease-in-out',
          'overflow-hidden transition-all duration-150 ease-out',
          isExpanded ? 'mt-1.5 max-h-[5000px] opacity-100' : 'max-h-0 opacity-0'
        )}
      >
@@ -1968,6 +1973,7 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
    'tour',
    'info',
    'workflow',
    'superagent',
  ]
  const isSubagentTool = SUBAGENT_TOOLS.includes(toolCall.name)

@@ -2595,16 +2601,23 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
    }
  }

  // For edit_workflow, hide text display when we have operations (WorkflowEditSummary replaces it)
  const isEditWorkflow = toolCall.name === 'edit_workflow'
  const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
|
||||
const hideTextForEditWorkflow = isEditWorkflow && hasOperations
|
||||
|
||||
return (
|
||||
<div className='w-full'>
|
||||
<div className={isToolNameClickable ? 'cursor-pointer' : ''} onClick={handleToolNameClick}>
|
||||
<ShimmerOverlayText
|
||||
text={displayName}
|
||||
active={isLoadingState}
|
||||
isSpecial={isSpecial}
|
||||
className='font-[470] font-season text-[var(--text-secondary)] text-sm dark:text-[var(--text-muted)]'
|
||||
/>
|
||||
</div>
|
||||
{!hideTextForEditWorkflow && (
|
||||
<div className={isToolNameClickable ? 'cursor-pointer' : ''} onClick={handleToolNameClick}>
|
||||
<ShimmerOverlayText
|
||||
text={displayName}
|
||||
active={isLoadingState}
|
||||
isSpecial={isSpecial}
|
||||
className='font-[470] font-season text-[var(--text-secondary)] text-sm dark:text-[var(--text-muted)]'
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{isExpandableTool && expanded && <div className='mt-1.5'>{renderPendingDetails()}</div>}
|
||||
{showRemoveAutoAllow && isAutoAllowed && (
|
||||
<div className='mt-1.5'>
|
||||
|
||||
@@ -3,3 +3,4 @@ export { ContextPills } from './context-pills/context-pills'
|
||||
export { MentionMenu } from './mention-menu/mention-menu'
|
||||
export { ModeSelector } from './mode-selector/mode-selector'
|
||||
export { ModelSelector } from './model-selector/model-selector'
|
||||
export { SlashMenu } from './slash-menu/slash-menu'
|
||||
|
||||
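The `tokens`/`pattern` change above swaps a hard-coded `@` prefix for per-context prefixes, and `escapeRegex` keeps labels containing regex metacharacters from breaking the alternation. A minimal standalone sketch of the same idea (the labels here are hypothetical):

```ts
// Sketch: build one alternation over already-prefixed tokens, then split the
// message so matched tokens land at the odd indices of the result.
const escapeRegex = (s: string) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')

const tokens = ['@Current Workflow', '/Plan'] // assumed labels, prefix included
const pattern = new RegExp(`(${tokens.map(escapeRegex).join('|')})`, 'g')

const text = 'Use /Plan to outline changes to @Current Workflow first'
const parts = text.split(pattern)
// ['Use ', '/Plan', ' to outline changes to ', '@Current Workflow', ' first']
```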
@@ -0,0 +1,249 @@
'use client'

import { useMemo } from 'react'
import {
Popover,
PopoverAnchor,
PopoverBackButton,
PopoverContent,
PopoverFolder,
PopoverItem,
PopoverScrollArea,
} from '@/components/emcn'
import type { useMentionMenu } from '../../hooks/use-mention-menu'

/**
* Top-level slash command options
*/
const TOP_LEVEL_COMMANDS = [
{ id: 'fast', label: 'fast' },
{ id: 'plan', label: 'plan' },
{ id: 'debug', label: 'debug' },
{ id: 'research', label: 'research' },
{ id: 'deploy', label: 'deploy' },
{ id: 'superagent', label: 'superagent' },
] as const

/**
* Web submenu commands
*/
const WEB_COMMANDS = [
{ id: 'search', label: 'search' },
{ id: 'read', label: 'read' },
{ id: 'scrape', label: 'scrape' },
{ id: 'crawl', label: 'crawl' },
] as const

/**
* All command labels for filtering
*/
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]

interface SlashMenuProps {
mentionMenu: ReturnType<typeof useMentionMenu>
message: string
onSelectCommand: (command: string) => void
}

/**
* SlashMenu component for slash command dropdown.
* Shows command options when user types '/'.
*
* @param props - Component props
* @returns Rendered slash menu
*/
export function SlashMenu({ mentionMenu, message, onSelectCommand }: SlashMenuProps) {
const {
mentionMenuRef,
menuListRef,
getActiveSlashQueryAtPosition,
getCaretPos,
submenuActiveIndex,
mentionActiveIndex,
openSubmenuFor,
setOpenSubmenuFor,
} = mentionMenu

/**
* Get the current query string after /
*/
const currentQuery = useMemo(() => {
const caretPos = getCaretPos()
const active = getActiveSlashQueryAtPosition(caretPos, message)
return active?.query.trim().toLowerCase() || ''
}, [message, getCaretPos, getActiveSlashQueryAtPosition])

/**
* Filter commands based on query (search across all commands when there's a query)
*/
const filteredCommands = useMemo(() => {
if (!currentQuery) return null // Show folder view when no query
return ALL_COMMANDS.filter((cmd) => cmd.label.toLowerCase().includes(currentQuery))
}, [currentQuery])

// Show aggregated view when there's a query
const showAggregatedView = currentQuery.length > 0

// Compute caret viewport position via mirror technique for precise anchoring
const textareaEl = mentionMenu.textareaRef.current
if (!textareaEl) return null

const getCaretViewport = (textarea: HTMLTextAreaElement, caretPosition: number, text: string) => {
const textareaRect = textarea.getBoundingClientRect()
const style = window.getComputedStyle(textarea)

const mirrorDiv = document.createElement('div')
mirrorDiv.style.position = 'absolute'
mirrorDiv.style.visibility = 'hidden'
mirrorDiv.style.whiteSpace = 'pre-wrap'
mirrorDiv.style.wordWrap = 'break-word'
mirrorDiv.style.font = style.font
mirrorDiv.style.padding = style.padding
mirrorDiv.style.border = style.border
mirrorDiv.style.width = style.width
mirrorDiv.style.lineHeight = style.lineHeight
mirrorDiv.style.boxSizing = style.boxSizing
mirrorDiv.style.letterSpacing = style.letterSpacing
mirrorDiv.style.textTransform = style.textTransform
mirrorDiv.style.textIndent = style.textIndent
mirrorDiv.style.textAlign = style.textAlign

mirrorDiv.textContent = text.substring(0, caretPosition)

const caretMarker = document.createElement('span')
caretMarker.style.display = 'inline-block'
caretMarker.style.width = '0px'
caretMarker.style.padding = '0'
caretMarker.style.border = '0'
mirrorDiv.appendChild(caretMarker)

document.body.appendChild(mirrorDiv)
const markerRect = caretMarker.getBoundingClientRect()
const mirrorRect = mirrorDiv.getBoundingClientRect()
document.body.removeChild(mirrorDiv)

const leftOffset = markerRect.left - mirrorRect.left - textarea.scrollLeft
const topOffset = markerRect.top - mirrorRect.top - textarea.scrollTop

return {
left: textareaRect.left + leftOffset,
top: textareaRect.top + topOffset,
}
}

const caretPos = getCaretPos()
const caretViewport = getCaretViewport(textareaEl, caretPos, message)

// Decide preferred side based on available space
const margin = 8
const spaceAbove = caretViewport.top - margin
const spaceBelow = window.innerHeight - caretViewport.top - margin
const side: 'top' | 'bottom' = spaceBelow >= spaceAbove ? 'bottom' : 'top'

// Check if we're in folder navigation mode (no query, not in submenu)
const isInFolderNavigationMode = !openSubmenuFor && !showAggregatedView

return (
<Popover
open={true}
onOpenChange={() => {
/* controlled externally */
}}
>
<PopoverAnchor asChild>
<div
style={{
position: 'fixed',
top: `${caretViewport.top}px`,
left: `${caretViewport.left}px`,
width: '1px',
height: '1px',
pointerEvents: 'none',
}}
/>
</PopoverAnchor>
<PopoverContent
ref={mentionMenuRef}
side={side}
align='start'
collisionPadding={6}
maxHeight={360}
className='pointer-events-auto'
style={{
width: `180px`,
}}
onOpenAutoFocus={(e) => e.preventDefault()}
onCloseAutoFocus={(e) => e.preventDefault()}
>
<PopoverBackButton />
<PopoverScrollArea ref={menuListRef} className='space-y-[2px]'>
{openSubmenuFor === 'Web' ? (
// Web submenu view
<>
{WEB_COMMANDS.map((cmd, index) => (
<PopoverItem
key={cmd.id}
onClick={() => onSelectCommand(cmd.label)}
data-idx={index}
active={index === submenuActiveIndex}
>
<span className='truncate capitalize'>{cmd.label}</span>
</PopoverItem>
))}
</>
) : showAggregatedView ? (
// Aggregated filtered view
<>
{filteredCommands && filteredCommands.length === 0 ? (
<div className='px-[8px] py-[8px] text-[12px] text-[var(--text-muted)]'>
No commands found
</div>
) : (
filteredCommands?.map((cmd, index) => (
<PopoverItem
key={cmd.id}
onClick={() => onSelectCommand(cmd.label)}
data-idx={index}
active={index === submenuActiveIndex}
>
<span className='truncate capitalize'>{cmd.label}</span>
</PopoverItem>
))
)}
</>
) : (
// Folder navigation view
<>
{TOP_LEVEL_COMMANDS.map((cmd, index) => (
<PopoverItem
key={cmd.id}
onClick={() => onSelectCommand(cmd.label)}
data-idx={index}
active={isInFolderNavigationMode && index === mentionActiveIndex}
>
<span className='truncate capitalize'>{cmd.label}</span>
</PopoverItem>
))}

<PopoverFolder
id='web'
title='Web'
onOpen={() => setOpenSubmenuFor('Web')}
active={
isInFolderNavigationMode && mentionActiveIndex === TOP_LEVEL_COMMANDS.length
}
data-idx={TOP_LEVEL_COMMANDS.length}
>
{WEB_COMMANDS.map((cmd) => (
<PopoverItem key={cmd.id} onClick={() => onSelectCommand(cmd.label)}>
<span className='truncate capitalize'>{cmd.label}</span>
</PopoverItem>
))}
</PopoverFolder>
</>
)}
</PopoverScrollArea>
</PopoverContent>
</Popover>
)
}
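`getCaretViewport` above is the classic mirror-div measurement: copy the text before the caret into a hidden element with matching typography, append a zero-width marker, and read the marker's rect. A condensed sketch of the same technique, assuming a plain `<textarea>` and only the most load-bearing styles:

```ts
// Sketch: viewport coordinates of the caret in a textarea via a hidden mirror.
function measureCaret(textarea: HTMLTextAreaElement, caret: number) {
  const style = window.getComputedStyle(textarea)
  const mirror = document.createElement('div')
  Object.assign(mirror.style, {
    position: 'absolute',
    visibility: 'hidden',
    whiteSpace: 'pre-wrap',
    wordWrap: 'break-word',
    font: style.font,
    padding: style.padding,
    width: style.width,
    boxSizing: style.boxSizing,
  })
  mirror.textContent = textarea.value.slice(0, caret)
  const marker = document.createElement('span') // zero-width caret stand-in
  mirror.appendChild(marker)
  document.body.appendChild(mirror)
  const markerRect = marker.getBoundingClientRect()
  const mirrorRect = mirror.getBoundingClientRect()
  document.body.removeChild(mirror)
  const box = textarea.getBoundingClientRect()
  // offsets are mirror-relative, corrected for the textarea's own scroll
  return {
    left: box.left + (markerRect.left - mirrorRect.left) - textarea.scrollLeft,
    top: box.top + (markerRect.top - mirrorRect.top) - textarea.scrollTop,
  }
}
```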
@@ -1,9 +1,11 @@
import { useCallback, useEffect, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import type { ChatContext } from '@/stores/panel'

interface UseContextManagementProps {
/** Current message text */
message: string
/** Initial contexts to populate when editing a message */
initialContexts?: ChatContext[]
}

/**
@@ -13,8 +15,17 @@ interface UseContextManagementProps {
* @param props - Configuration object
* @returns Context state and management functions
*/
export function useContextManagement({ message }: UseContextManagementProps) {
const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>([])
export function useContextManagement({ message, initialContexts }: UseContextManagementProps) {
const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>(initialContexts ?? [])
const initializedRef = useRef(false)

// Initialize with initial contexts when they're first provided (for edit mode)
useEffect(() => {
if (initialContexts && initialContexts.length > 0 && !initializedRef.current) {
setSelectedContexts(initialContexts)
initializedRef.current = true
}
}, [initialContexts])

/**
* Adds a context to the selected contexts list, avoiding duplicates
@@ -63,6 +74,9 @@ export function useContextManagement({ message }: UseContextManagementProps) {
if (c.kind === 'docs') {
return true // Only one docs context allowed
}
if (c.kind === 'slash_command' && 'command' in context && 'command' in c) {
return c.command === (context as any).command
}
}

return false
@@ -103,6 +117,8 @@ export function useContextManagement({ message }: UseContextManagementProps) {
return (c as any).executionId !== (contextToRemove as any).executionId
case 'docs':
return false // Remove docs (only one docs context)
case 'slash_command':
return (c as any).command !== (contextToRemove as any).command
default:
return c.label !== contextToRemove.label
}
@@ -118,7 +134,7 @@ export function useContextManagement({ message }: UseContextManagementProps) {
}, [])

/**
* Synchronizes selected contexts with inline @label tokens in the message.
* Synchronizes selected contexts with inline @label or /label tokens in the message.
* Removes contexts whose labels are no longer present in the message.
*/
useEffect(() => {
@@ -130,17 +146,16 @@ export function useContextManagement({ message }: UseContextManagementProps) {
setSelectedContexts((prev) => {
if (prev.length === 0) return prev

const presentLabels = new Set<string>()
const labels = prev.map((c) => c.label).filter(Boolean)

for (const label of labels) {
const token = ` @${label} `
if (message.includes(token)) {
presentLabels.add(label)
}
}

const filtered = prev.filter((c) => !!c.label && presentLabels.has(c.label))
const filtered = prev.filter((c) => {
if (!c.label) return false
// Check for slash command tokens or mention tokens based on kind
const isSlashCommand = c.kind === 'slash_command'
const prefix = isSlashCommand ? '/' : '@'
const tokenWithSpaces = ` ${prefix}${c.label} `
const tokenAtStart = `${prefix}${c.label} `
// Token can appear with leading space OR at the start of the message
return message.includes(tokenWithSpaces) || message.startsWith(tokenAtStart)
})
return filtered.length === prev.length ? prev : filtered
})
}, [message])

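The rewritten sync filter above decides, per context, whether its inline token is still present in the message. Extracted as a pure predicate (a sketch with a simplified `kind`, not the hook's exported API):

```ts
// Sketch: a token survives if it sits at the start of the message or is
// space-wrapped; the prefix depends on the context kind.
function tokenPresent(
  message: string,
  label: string,
  kind: 'slash_command' | 'mention'
): boolean {
  const prefix = kind === 'slash_command' ? '/' : '@'
  const token = `${prefix}${label} `
  return message.startsWith(token) || message.includes(` ${token}`)
}

tokenPresent('/Plan refactor the loop', 'Plan', 'slash_command') // true
tokenPresent('see a@Docs now', 'Docs', 'mention') // false: '@' is mid-word
```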
@@ -70,11 +70,25 @@ export function useMentionMenu({
// Ensure '@' starts a token (start or whitespace before)
if (atIndex > 0 && !/\s/.test(before.charAt(atIndex - 1))) return null

// Check if this '@' is part of a completed mention token ( @label )
// Check if this '@' is part of a completed mention token
if (selectedContexts.length > 0) {
const labels = selectedContexts.map((c) => c.label).filter(Boolean) as string[]
for (const label of labels) {
// Space-wrapped token: " @label "
// Only check non-slash_command contexts for mentions
const mentionLabels = selectedContexts
.filter((c) => c.kind !== 'slash_command')
.map((c) => c.label)
.filter(Boolean) as string[]

for (const label of mentionLabels) {
// Check for token at start of text: "@label "
if (atIndex === 0) {
const startToken = `@${label} `
if (text.startsWith(startToken)) {
// This @ is part of a completed token
return null
}
}

// Check for space-wrapped token: " @label "
const token = ` @${label} `
let fromIndex = 0
while (fromIndex <= text.length) {
@@ -88,7 +102,6 @@ export function useMentionMenu({
// Check if the @ we found is the @ of this completed token
if (atIndex === atPositionInToken) {
// The @ we found is part of a completed mention
// Don't show menu - user is typing after the completed mention
return null
}

@@ -113,6 +126,76 @@ export function useMentionMenu({
[message, selectedContexts]
)

/**
* Finds active slash command query at the given position
*
* @param pos - Position in the text to check
* @param textOverride - Optional text override (for checking during input)
* @returns Active slash query object or null if no active slash command
*/
const getActiveSlashQueryAtPosition = useCallback(
(pos: number, textOverride?: string) => {
const text = textOverride ?? message
const before = text.slice(0, pos)
const slashIndex = before.lastIndexOf('/')
if (slashIndex === -1) return null

// Ensure '/' starts a token (start or whitespace before)
if (slashIndex > 0 && !/\s/.test(before.charAt(slashIndex - 1))) return null

// Check if this '/' is part of a completed slash token
if (selectedContexts.length > 0) {
// Only check slash_command contexts
const slashLabels = selectedContexts
.filter((c) => c.kind === 'slash_command')
.map((c) => c.label)
.filter(Boolean) as string[]

for (const label of slashLabels) {
// Check for token at start of text: "/label "
if (slashIndex === 0) {
const startToken = `/${label} `
if (text.startsWith(startToken)) {
// This slash is part of a completed token
return null
}
}

// Check for space-wrapped token: " /label "
const token = ` /${label} `
let fromIndex = 0
while (fromIndex <= text.length) {
const idx = text.indexOf(token, fromIndex)
if (idx === -1) break

const tokenStart = idx
const tokenEnd = idx + token.length
const slashPositionInToken = idx + 1 // position of / in " /label "

if (slashIndex === slashPositionInToken) {
return null
}

if (pos > tokenStart && pos < tokenEnd) {
return null
}

fromIndex = tokenEnd
}
}
}

const segment = before.slice(slashIndex + 1)
// Close the popup if user types space immediately after /
if (segment.length > 0 && /^\s/.test(segment)) {
return null
}

return { query: segment, start: slashIndex, end: pos }
},
[message, selectedContexts]
)

/**
* Gets the submenu query text
*
@@ -200,9 +283,10 @@ export function useMentionMenu({
const before = message.slice(0, active.start)
const after = message.slice(active.end)

// Always include leading space, avoid duplicate if one exists
const needsLeadingSpace = !before.endsWith(' ')
const insertion = `${needsLeadingSpace ? ' ' : ''}@${label} `
// Add leading space only if not at start and previous char isn't whitespace
const needsLeadingSpace = before.length > 0 && !before.endsWith(' ')
// Always add trailing space for easy continued typing
const insertion = `${needsLeadingSpace ? ' ' : ''}@${label} `

const next = `${before}${insertion}${after}`
onMessageChange(next)
@@ -217,6 +301,41 @@ export function useMentionMenu({
[message, getActiveMentionQueryAtPosition, onMessageChange]
)

/**
* Replaces active slash command with a label
*
* @param label - Label to replace the slash command with
* @returns True if replacement was successful, false if no active slash command found
*/
const replaceActiveSlashWith = useCallback(
(label: string) => {
const textarea = textareaRef.current
if (!textarea) return false
const pos = textarea.selectionStart ?? message.length
const active = getActiveSlashQueryAtPosition(pos)
if (!active) return false

const before = message.slice(0, active.start)
const after = message.slice(active.end)

// Add leading space only if not at start and previous char isn't whitespace
const needsLeadingSpace = before.length > 0 && !before.endsWith(' ')
// Always add trailing space for easy continued typing
const insertion = `${needsLeadingSpace ? ' ' : ''}/${label} `

const next = `${before}${insertion}${after}`
onMessageChange(next)

setTimeout(() => {
const cursorPos = before.length + insertion.length
textarea.setSelectionRange(cursorPos, cursorPos)
textarea.focus()
}, 0)
return true
},
[message, getActiveSlashQueryAtPosition, onMessageChange]
)

/**
* Scrolls active item into view in the menu
*
@@ -304,10 +423,12 @@ export function useMentionMenu({
// Operations
getCaretPos,
getActiveMentionQueryAtPosition,
getActiveSlashQueryAtPosition,
getSubmenuQuery,
resetActiveMentionQuery,
insertAtCursor,
replaceActiveMentionWith,
replaceActiveSlashWith,
scrollActiveItemIntoView,
closeMentionMenu,
}

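`getActiveSlashQueryAtPosition` above applies three rules: the `/` must start a token, it must not belong to an already-completed command, and the menu closes when a space is typed immediately after the `/`. A trimmed pure-function sketch of the first and third rules (the completed-token scan is omitted):

```ts
// Sketch: the open slash query at a caret position, or null.
function activeSlashQuery(text: string, pos: number) {
  const before = text.slice(0, pos)
  const slash = before.lastIndexOf('/')
  if (slash === -1) return null
  // '/' must start a token: either at position 0 or right after whitespace
  if (slash > 0 && !/\s/.test(before.charAt(slash - 1))) return null
  const segment = before.slice(slash + 1)
  // a space typed directly after '/' dismisses the menu
  if (segment.length > 0 && /^\s/.test(segment)) return null
  return { query: segment, start: slash, end: pos }
}

activeSlashQuery('try /pla', 8) // { query: 'pla', start: 4, end: 8 }
activeSlashQuery('a/b', 3) // null: the '/' does not start a token
```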
@@ -39,7 +39,7 @@ export function useMentionTokens({
setSelectedContexts,
}: UseMentionTokensProps) {
/**
* Computes all mention ranges in the message
* Computes all mention ranges in the message (both @mentions and /commands)
*
* @returns Array of mention ranges sorted by start position
*/
@@ -55,8 +55,19 @@ export function useMentionTokens({
const uniqueLabels = Array.from(new Set(labels))

for (const label of uniqueLabels) {
// Space-wrapped token: " @label " (search from start)
const token = ` @${label} `
// Find matching context to determine if it's a slash command
const matchingContext = selectedContexts.find((c) => c.label === label)
const isSlashCommand = matchingContext?.kind === 'slash_command'
const prefix = isSlashCommand ? '/' : '@'

// Check for token at the very start of the message (no leading space)
const tokenAtStart = `${prefix}${label} `
if (message.startsWith(tokenAtStart)) {
ranges.push({ start: 0, end: tokenAtStart.length, label })
}

// Space-wrapped token: " @label " or " /label " (search from start)
const token = ` ${prefix}${label} `
let fromIndex = 0
while (fromIndex <= message.length) {
const idx = message.indexOf(token, fromIndex)

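The widened range computation above feeds token highlighting for both prefixes. A compact sketch for a single label (the range convention for wrapped tokens is assumed, since the diff cuts off before that push):

```ts
// Sketch: [start, end) ranges of a completed token, covering the
// start-of-message form and every space-wrapped occurrence.
function tokenRanges(message: string, label: string, prefix: '@' | '/') {
  const ranges: Array<{ start: number; end: number }> = []
  const atStart = `${prefix}${label} `
  if (message.startsWith(atStart)) ranges.push({ start: 0, end: atStart.length })
  const wrapped = ` ${prefix}${label} `
  let from = 0
  while (from <= message.length) {
    const idx = message.indexOf(wrapped, from)
    if (idx === -1) break
    ranges.push({ start: idx + 1, end: idx + wrapped.length }) // skip lead space
    from = idx + wrapped.length
  }
  return ranges
}

tokenRanges('/Plan then @Docs please', 'Docs', '@') // [{ start: 11, end: 17 }]
```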
@@ -21,6 +21,7 @@ import {
MentionMenu,
ModelSelector,
ModeSelector,
SlashMenu,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/components'
import { NEAR_TOP_THRESHOLD } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/constants'
import {
@@ -67,6 +68,8 @@ interface UserInputProps {
hideModeSelector?: boolean
/** Disable @mention functionality */
disableMentions?: boolean
/** Initial contexts for editing a message with existing context mentions */
initialContexts?: ChatContext[]
}

interface UserInputRef {
@@ -103,6 +106,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
onModelChangeOverride,
hideModeSelector = false,
disableMentions = false,
initialContexts,
},
ref
) => {
@@ -123,6 +127,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const [isNearTop, setIsNearTop] = useState(false)
const [containerRef, setContainerRef] = useState<HTMLDivElement | null>(null)
const [inputContainerRef, setInputContainerRef] = useState<HTMLDivElement | null>(null)
const [showSlashMenu, setShowSlashMenu] = useState(false)

// Controlled vs uncontrolled message state
const message = controlledValue !== undefined ? controlledValue : internalMessage
@@ -140,7 +145,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(

// Custom hooks - order matters for ref sharing
// Context management (manages selectedContexts state)
const contextManagement = useContextManagement({ message })
const contextManagement = useContextManagement({ message, initialContexts })

// Mention menu
const mentionMenu = useMentionMenu({
@@ -370,20 +375,131 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
}, [onAbort, isLoading])

const handleSlashCommandSelect = useCallback(
(command: string) => {
// Capitalize the command for display
const capitalizedCommand = command.charAt(0).toUpperCase() + command.slice(1)

// Replace the active slash query with the capitalized command
mentionMenu.replaceActiveSlashWith(capitalizedCommand)

// Add as a context so it gets highlighted
contextManagement.addContext({
kind: 'slash_command',
command,
label: capitalizedCommand,
})

setShowSlashMenu(false)
mentionMenu.textareaRef.current?.focus()
},
[mentionMenu, contextManagement]
)

const handleKeyDown = useCallback(
(e: KeyboardEvent<HTMLTextAreaElement>) => {
// Escape key handling
if (e.key === 'Escape' && mentionMenu.showMentionMenu) {
if (e.key === 'Escape' && (mentionMenu.showMentionMenu || showSlashMenu)) {
e.preventDefault()
if (mentionMenu.openSubmenuFor) {
mentionMenu.setOpenSubmenuFor(null)
mentionMenu.setSubmenuQueryStart(null)
} else {
mentionMenu.closeMentionMenu()
setShowSlashMenu(false)
}
return
}

// Arrow navigation in slash menu
if (showSlashMenu) {
const TOP_LEVEL_COMMANDS = ['fast', 'plan', 'debug', 'research', 'deploy', 'superagent']
const WEB_COMMANDS = ['search', 'read', 'scrape', 'crawl']
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]

const caretPos = mentionMenu.getCaretPos()
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caretPos, message)
const query = activeSlash?.query.trim().toLowerCase() || ''
const showAggregatedView = query.length > 0

if (e.key === 'ArrowDown' || e.key === 'ArrowUp') {
e.preventDefault()

if (mentionMenu.openSubmenuFor === 'Web') {
// Navigate in Web submenu
const last = WEB_COMMANDS.length - 1
mentionMenu.setSubmenuActiveIndex((prev) => {
const next =
e.key === 'ArrowDown'
? prev >= last
? 0
: prev + 1
: prev <= 0
? last
: prev - 1
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
return next
})
} else if (showAggregatedView) {
// Navigate in filtered view
const filtered = ALL_COMMANDS.filter((cmd) => cmd.includes(query))
const last = Math.max(0, filtered.length - 1)
mentionMenu.setSubmenuActiveIndex((prev) => {
if (filtered.length === 0) return 0
const next =
e.key === 'ArrowDown'
? prev >= last
? 0
: prev + 1
: prev <= 0
? last
: prev - 1
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
return next
})
} else {
// Navigate in folder view (top-level + Web folder)
const totalItems = TOP_LEVEL_COMMANDS.length + 1 // +1 for Web folder
const last = totalItems - 1
mentionMenu.setMentionActiveIndex((prev) => {
const next =
e.key === 'ArrowDown'
? prev >= last
? 0
: prev + 1
: prev <= 0
? last
: prev - 1
requestAnimationFrame(() => mentionMenu.scrollActiveItemIntoView(next))
return next
})
}
return
}

// Arrow right to enter Web submenu
if (e.key === 'ArrowRight') {
e.preventDefault()
if (!showAggregatedView && !mentionMenu.openSubmenuFor) {
// Check if Web folder is selected (it's after all top-level commands)
if (mentionMenu.mentionActiveIndex === TOP_LEVEL_COMMANDS.length) {
mentionMenu.setOpenSubmenuFor('Web')
mentionMenu.setSubmenuActiveIndex(0)
}
}
return
}

// Arrow left to exit submenu
if (e.key === 'ArrowLeft') {
e.preventDefault()
if (mentionMenu.openSubmenuFor) {
mentionMenu.setOpenSubmenuFor(null)
}
return
}
}

// Arrow navigation in mention menu
if (mentionKeyboard.handleArrowNavigation(e)) return
if (mentionKeyboard.handleArrowRight(e)) return
@@ -392,6 +508,42 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
// Enter key handling
if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
e.preventDefault()
if (showSlashMenu) {
const TOP_LEVEL_COMMANDS = ['fast', 'plan', 'debug', 'research', 'deploy', 'superagent']
const WEB_COMMANDS = ['search', 'read', 'scrape', 'crawl']
const ALL_COMMANDS = [...TOP_LEVEL_COMMANDS, ...WEB_COMMANDS]

const caretPos = mentionMenu.getCaretPos()
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caretPos, message)
const query = activeSlash?.query.trim().toLowerCase() || ''
const showAggregatedView = query.length > 0

if (mentionMenu.openSubmenuFor === 'Web') {
// Select from Web submenu
const selectedCommand =
WEB_COMMANDS[mentionMenu.submenuActiveIndex] || WEB_COMMANDS[0]
handleSlashCommandSelect(selectedCommand)
} else if (showAggregatedView) {
// Select from filtered view
const filtered = ALL_COMMANDS.filter((cmd) => cmd.includes(query))
if (filtered.length > 0) {
const selectedCommand = filtered[mentionMenu.submenuActiveIndex] || filtered[0]
handleSlashCommandSelect(selectedCommand)
}
} else {
// Folder navigation view
const selectedIndex = mentionMenu.mentionActiveIndex
if (selectedIndex < TOP_LEVEL_COMMANDS.length) {
// Top-level command selected
handleSlashCommandSelect(TOP_LEVEL_COMMANDS[selectedIndex])
} else if (selectedIndex === TOP_LEVEL_COMMANDS.length) {
// Web folder selected - open it
mentionMenu.setOpenSubmenuFor('Web')
mentionMenu.setSubmenuActiveIndex(0)
}
}
return
}
if (!mentionMenu.showMentionMenu) {
handleSubmit()
} else {
@@ -469,7 +621,15 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
}
},
[mentionMenu, mentionKeyboard, handleSubmit, message.length, mentionTokensWithContext]
[
mentionMenu,
mentionKeyboard,
handleSubmit,
handleSlashCommandSelect,
message,
mentionTokensWithContext,
showSlashMenu,
]
)

const handleInputChange = useCallback(
@@ -481,9 +641,14 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
if (disableMentions) return

const caret = e.target.selectionStart ?? newValue.length
const active = mentionMenu.getActiveMentionQueryAtPosition(caret, newValue)

if (active) {
// Check for @ mention trigger
const activeMention = mentionMenu.getActiveMentionQueryAtPosition(caret, newValue)
// Check for / slash command trigger
const activeSlash = mentionMenu.getActiveSlashQueryAtPosition(caret, newValue)

if (activeMention) {
setShowSlashMenu(false)
mentionMenu.setShowMentionMenu(true)
mentionMenu.setInAggregated(false)
if (mentionMenu.openSubmenuFor) {
@@ -492,10 +657,17 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
mentionMenu.setMentionActiveIndex(0)
mentionMenu.setSubmenuActiveIndex(0)
}
} else if (activeSlash) {
mentionMenu.setShowMentionMenu(false)
mentionMenu.setOpenSubmenuFor(null)
mentionMenu.setSubmenuQueryStart(null)
setShowSlashMenu(true)
mentionMenu.setSubmenuActiveIndex(0)
} else {
mentionMenu.setShowMentionMenu(false)
mentionMenu.setOpenSubmenuFor(null)
mentionMenu.setSubmenuQueryStart(null)
setShowSlashMenu(false)
}
},
[setMessage, mentionMenu, disableMentions]
@@ -542,6 +714,32 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
mentionMenu.setSubmenuActiveIndex(0)
}, [disabled, isLoading, mentionMenu, message, setMessage])

const handleOpenSlashMenu = useCallback(() => {
if (disabled || isLoading) return
const textarea = mentionMenu.textareaRef.current
if (!textarea) return
textarea.focus()
const pos = textarea.selectionStart ?? message.length
const needsSpaceBefore = pos > 0 && !/\s/.test(message.charAt(pos - 1))

const insertText = needsSpaceBefore ? ' /' : '/'
const start = textarea.selectionStart ?? message.length
const end = textarea.selectionEnd ?? message.length
const before = message.slice(0, start)
const after = message.slice(end)
const next = `${before}${insertText}${after}`
setMessage(next)

setTimeout(() => {
const newPos = before.length + insertText.length
textarea.setSelectionRange(newPos, newPos)
textarea.focus()
}, 0)

setShowSlashMenu(true)
mentionMenu.setSubmenuActiveIndex(0)
}, [disabled, isLoading, mentionMenu, message, setMessage])

const canSubmit = message.trim().length > 0 && !disabled && !isLoading
const showAbortButton = isLoading && onAbort

@@ -643,6 +841,20 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
<AtSign className='h-3 w-3' strokeWidth={1.75} />
</Badge>

<Badge
variant='outline'
onClick={handleOpenSlashMenu}
title='Insert /'
className={cn(
'cursor-pointer rounded-[6px] p-[4.5px]',
(disabled || isLoading) && 'cursor-not-allowed'
)}
>
<span className='flex h-3 w-3 items-center justify-center font-medium text-[11px] leading-none'>
/
</span>
</Badge>

{/* Selected Context Pills */}
<ContextPills
contexts={contextManagement.selectedContexts}
@@ -717,6 +929,18 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
/>,
document.body
)}

{/* Slash Menu Portal */}
{!disableMentions &&
showSlashMenu &&
createPortal(
<SlashMenu
mentionMenu={mentionMenu}
message={message}
onSelectCommand={handleSlashCommandSelect}
/>,
document.body
)}
</div>

{/* Bottom Row: Mode Selector + Model Selector + Attach Button + Send Button */}

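Each arrow-key branch above repeats the same wrap-around arithmetic over a different list. The ternary chains reduce to one helper (a refactoring sketch, not code from this change):

```ts
// Sketch: wrap an active index across n items for ArrowDown / ArrowUp.
function wrapIndex(prev: number, n: number, key: 'ArrowDown' | 'ArrowUp'): number {
  if (n === 0) return 0
  const last = n - 1
  if (key === 'ArrowDown') return prev >= last ? 0 : prev + 1
  return prev <= 0 ? last : prev - 1
}

wrapIndex(5, 6, 'ArrowDown') // 0: down from the last item wraps to the top
wrapIndex(0, 6, 'ArrowUp') // 5: up from the first item wraps to the bottom
```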
@@ -16,7 +16,7 @@ import {
Switch,
Tooltip,
} from '@/components/emcn'
import { McpIcon } from '@/components/icons'
import { McpIcon, WorkflowIcon } from '@/components/icons'
import { cn } from '@/lib/core/utils/cn'
import {
getIssueBadgeLabel,
@@ -30,6 +30,7 @@ import {
type OAuthProvider,
type OAuthService,
} from '@/lib/oauth'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import {
CheckboxList,
Code,
@@ -769,9 +770,10 @@ function WorkflowToolDeployBadge({
}) {
const { isDeployed, needsRedeploy, isLoading, refetch } = useChildDeployment(workflowId)
const [isDeploying, setIsDeploying] = useState(false)
const userPermissions = useUserPermissionsContext()

const deployWorkflow = useCallback(async () => {
if (isDeploying || !workflowId) return
if (isDeploying || !workflowId || !userPermissions.canAdmin) return

try {
setIsDeploying(true)
@@ -796,7 +798,7 @@ function WorkflowToolDeployBadge({
} finally {
setIsDeploying(false)
}
}, [isDeploying, workflowId, refetch, onDeploySuccess])
}, [isDeploying, workflowId, refetch, onDeploySuccess, userPermissions.canAdmin])

if (isLoading || (isDeployed && !needsRedeploy)) {
return null
@@ -811,13 +813,13 @@ function WorkflowToolDeployBadge({
<Tooltip.Trigger asChild>
<Badge
variant={!isDeployed ? 'red' : 'amber'}
className='cursor-pointer'
className={userPermissions.canAdmin ? 'cursor-pointer' : 'cursor-not-allowed'}
size='sm'
dot
onClick={(e: React.MouseEvent) => {
e.stopPropagation()
e.preventDefault()
if (!isDeploying) {
if (!isDeploying && userPermissions.canAdmin) {
deployWorkflow()
}
}}
@@ -826,7 +828,13 @@ function WorkflowToolDeployBadge({
</Badge>
</Tooltip.Trigger>
<Tooltip.Content>
<span className='text-sm'>{!isDeployed ? 'Click to deploy' : 'Click to redeploy'}</span>
<span className='text-sm'>
{!userPermissions.canAdmin
? 'Admin permission required to deploy'
: !isDeployed
? 'Click to deploy'
: 'Click to redeploy'}
</span>
</Tooltip.Content>
</Tooltip.Root>
)
@@ -933,6 +941,13 @@ export function ToolInput({
const forceRefreshMcpTools = useForceRefreshMcpTools()
const openSettingsModal = useSettingsModalStore((state) => state.openModal)
const mcpDataLoading = mcpLoading || mcpServersLoading

// Fetch workflows for the Workflows section in the dropdown
const { data: workflowsList = [] } = useWorkflows(workspaceId, { syncRegistry: false })
const availableWorkflows = useMemo(
() => workflowsList.filter((w) => w.id !== workflowId),
[workflowsList, workflowId]
)
const hasRefreshedRef = useRef(false)

const hasMcpTools = selectedTools.some((tool) => tool.type === 'mcp')
@@ -1735,6 +1750,36 @@ export function ToolInput({
})
}

// Workflows section - shows available workflows that can be executed as tools
if (availableWorkflows.length > 0) {
groups.push({
section: 'Workflows',
items: availableWorkflows.map((workflow) => ({
label: workflow.name,
value: `workflow-${workflow.id}`,
iconElement: createToolIcon('#6366F1', WorkflowIcon),
onSelect: () => {
const newTool: StoredTool = {
type: 'workflow',
title: 'Workflow',
toolId: 'workflow_executor',
params: {
workflowId: workflow.id,
},
isExpanded: true,
usageControl: 'auto',
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
disabled: isPreview || disabled,
})),
})
}

return groups
}, [
customTools,
@@ -1749,6 +1794,7 @@ export function ToolInput({
handleSelectTool,
permissionConfig.disableCustomTools,
permissionConfig.disableMcpTools,
availableWorkflows,
])

const toolRequiresOAuth = (toolId: string): boolean => {

@@ -108,7 +108,7 @@ export function Panel() {
// Delete workflow hook
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
workspaceId,
getWorkflowIds: () => activeWorkflowId || '',
workflowIds: activeWorkflowId || '',
isActive: true,
onSuccess: () => setIsDeleteModalOpen(false),
})

@@ -1021,11 +1021,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({
<Tooltip.Trigger asChild>
<Badge
variant={!childIsDeployed ? 'red' : 'amber'}
className='cursor-pointer'
className={userPermissions.canAdmin ? 'cursor-pointer' : 'cursor-not-allowed'}
dot
onClick={(e) => {
e.stopPropagation()
if (childWorkflowId && !isDeploying) {
if (childWorkflowId && !isDeploying && userPermissions.canAdmin) {
deployWorkflow(childWorkflowId)
}
}}
@@ -1035,7 +1035,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({
</Tooltip.Trigger>
<Tooltip.Content>
<span className='text-sm'>
{!childIsDeployed ? 'Click to deploy' : 'Click to redeploy'}
{!userPermissions.canAdmin
? 'Admin permission required to deploy'
: !childIsDeployed
? 'Click to deploy'
: 'Click to redeploy'}
</span>
</Tooltip.Content>
</Tooltip.Root>

@@ -50,7 +50,7 @@ export function getBlockRingStyles(options: BlockRingOptions): {
!isPending &&
!isDeletedBlock &&
diffStatus === 'new' &&
'ring-[var(--brand-tertiary)]',
'ring-[var(--brand-tertiary-2)]',
!isActive &&
!isPending &&
!isDeletedBlock &&

@@ -347,11 +347,11 @@ export function ContextMenu({
title={name}
onClick={(e) => {
e.stopPropagation()
onColorChange(color)
setHexInput(color)
}}
className={cn(
'h-[20px] w-[20px] rounded-[4px]',
currentColor?.toLowerCase() === color.toLowerCase() && 'ring-1 ring-white'
hexInput.toLowerCase() === color.toLowerCase() && 'ring-1 ring-white'
)}
style={{ backgroundColor: color }}
/>
@@ -373,7 +373,7 @@ export function ContextMenu({
onKeyDown={handleHexKeyDown}
onFocus={handleHexFocus}
onClick={(e) => e.stopPropagation()}
className='h-[20px] min-w-0 flex-1 rounded-[4px] bg-[#363636] px-[6px] text-[11px] text-white uppercase focus:outline-none'
className='h-[20px] min-w-0 flex-1 rounded-[4px] bg-[#363636] px-[6px] text-[11px] text-white uppercase caret-white focus:outline-none'
/>
<button
type='button'

@@ -20,6 +20,7 @@ import {
useCanDelete,
useDeleteFolder,
useDuplicateFolder,
useExportFolder,
} from '@/app/workspace/[workspaceId]/w/hooks'
import { useCreateFolder, useUpdateFolder } from '@/hooks/queries/folders'
import { useCreateWorkflow } from '@/hooks/queries/workflows'
@@ -57,23 +58,24 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
const { canDeleteFolder } = useCanDelete({ workspaceId })
const canDelete = useMemo(() => canDeleteFolder(folder.id), [canDeleteFolder, folder.id])

// Delete modal state
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)

// Delete folder hook
const { isDeleting, handleDeleteFolder } = useDeleteFolder({
workspaceId,
getFolderIds: () => folder.id,
folderIds: folder.id,
onSuccess: () => setIsDeleteModalOpen(false),
})

// Duplicate folder hook
const { handleDuplicateFolder } = useDuplicateFolder({
workspaceId,
getFolderIds: () => folder.id,
folderIds: folder.id,
})

const { isExporting, hasWorkflows, handleExportFolder } = useExportFolder({
workspaceId,
folderId: folder.id,
})

// Folder expand hook - must be declared before callbacks that use expandFolder
const {
isExpanded,
handleToggleExpanded,
@@ -90,7 +92,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
*/
const handleCreateWorkflowInFolder = useCallback(async () => {
try {
// Generate name and color upfront for optimistic updates
const name = generateCreativeWorkflowName()
const color = getNextWorkflowColor()

@@ -103,15 +104,12 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {

if (result.id) {
router.push(`/workspace/${workspaceId}/w/${result.id}`)
// Expand the parent folder so the new workflow is visible
expandFolder()
// Scroll to the newly created workflow
window.dispatchEvent(
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
)
}
} catch (error) {
// Error already handled by mutation's onError callback
logger.error('Failed to create workflow in folder:', error)
}
}, [createWorkflowMutation, workspaceId, folder.id, router, expandFolder])
@@ -128,9 +126,7 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
parentId: folder.id,
})
if (result.id) {
// Expand the parent folder so the new folder is visible
expandFolder()
// Scroll to the newly created folder
window.dispatchEvent(
new CustomEvent(SIDEBAR_SCROLL_EVENT, { detail: { itemId: result.id } })
)
@@ -147,7 +143,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
*/
const onDragStart = useCallback(
(e: React.DragEvent) => {
// Don't start drag if editing
if (isEditing) {
e.preventDefault()
return
@@ -159,12 +154,10 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
[folder.id]
)

// Item drag hook
const { isDragging, shouldPreventClickRef, handleDragStart, handleDragEnd } = useItemDrag({
onDragStart,
})

// Context menu hook
const {
isOpen: isContextMenuOpen,
position,
@@ -174,7 +167,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
preventDismiss,
} = useContextMenu()

// Rename hook
const {
isEditing,
editValue,
@@ -258,7 +250,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
e.preventDefault()
e.stopPropagation()

// Toggle: close if open, open if closed
if (isContextMenuOpen) {
closeMenu()
return
@@ -365,13 +356,16 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
onCreate={handleCreateWorkflowInFolder}
onCreateFolder={handleCreateFolderInFolder}
onDuplicate={handleDuplicateFolder}
onExport={handleExportFolder}
onDelete={() => setIsDeleteModalOpen(true)}
showCreate={true}
showCreateFolder={true}
showExport={true}
disableRename={!userPermissions.canEdit}
disableCreate={!userPermissions.canEdit || createWorkflowMutation.isPending}
disableCreateFolder={!userPermissions.canEdit || createFolderMutation.isPending}
disableDuplicate={!userPermissions.canEdit}
disableDuplicate={!userPermissions.canEdit || !hasWorkflows}
disableExport={!userPermissions.canEdit || isExporting || !hasWorkflows}
disableDelete={!userPermissions.canEdit || !canDelete}
/>

@@ -46,19 +46,15 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
const userPermissions = useUserPermissionsContext()
const isSelected = selectedWorkflows.has(workflow.id)

// Can delete check hook
const { canDeleteWorkflows } = useCanDelete({ workspaceId })

// Delete modal state
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
const [workflowIdsToDelete, setWorkflowIdsToDelete] = useState<string[]>([])
const [deleteModalNames, setDeleteModalNames] = useState<string | string[]>('')
const [canDeleteCaptured, setCanDeleteCaptured] = useState(true)

// Presence avatars state
const [hasAvatars, setHasAvatars] = useState(false)

// Capture selection at right-click time (using ref to persist across renders)
const capturedSelectionRef = useRef<{
workflowIds: string[]
workflowNames: string | string[]
@@ -68,7 +64,6 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
* Handle opening the delete modal - uses pre-captured selection state
*/
const handleOpenDeleteModal = useCallback(() => {
// Use the selection captured at right-click time
if (capturedSelectionRef.current) {
setWorkflowIdsToDelete(capturedSelectionRef.current.workflowIds)
setDeleteModalNames(capturedSelectionRef.current.workflowNames)
@@ -76,42 +71,32 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
}
}, [])

// Delete workflow hook
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
workspaceId,
getWorkflowIds: () => workflowIdsToDelete,
workflowIds: workflowIdsToDelete,
isActive: (workflowIds) => workflowIds.includes(params.workflowId as string),
onSuccess: () => setIsDeleteModalOpen(false),
})

// Duplicate workflow hook
const { handleDuplicateWorkflow } = useDuplicateWorkflow({
workspaceId,
getWorkflowIds: () => {
// Use the selection captured at right-click time
return capturedSelectionRef.current?.workflowIds || []
},
})
const { handleDuplicateWorkflow: duplicateWorkflow } = useDuplicateWorkflow({ workspaceId })

// Export workflow hook
const { handleExportWorkflow } = useExportWorkflow({
workspaceId,
getWorkflowIds: () => {
// Use the selection captured at right-click time
return capturedSelectionRef.current?.workflowIds || []
},
})
const { handleExportWorkflow: exportWorkflow } = useExportWorkflow({ workspaceId })
const handleDuplicateWorkflow = useCallback(() => {
const workflowIds = capturedSelectionRef.current?.workflowIds || []
if (workflowIds.length === 0) return
duplicateWorkflow(workflowIds)
}, [duplicateWorkflow])

const handleExportWorkflow = useCallback(() => {
const workflowIds = capturedSelectionRef.current?.workflowIds || []
if (workflowIds.length === 0) return
exportWorkflow(workflowIds)
}, [exportWorkflow])

/**
* Opens the workflow in a new browser tab
*/
const handleOpenInNewTab = useCallback(() => {
window.open(`/workspace/${workspaceId}/w/${workflow.id}`, '_blank')
}, [workspaceId, workflow.id])

/**
* Changes the workflow color
*/
const handleColorChange = useCallback(
(color: string) => {
updateWorkflow(workflow.id, { color })
@@ -126,7 +111,6 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
*/
const onDragStart = useCallback(
(e: React.DragEvent) => {
// Don't start drag if editing
if (isEditing) {
e.preventDefault()
return
@@ -141,12 +125,10 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
[isSelected, selectedWorkflows, workflow.id]
)

// Item drag hook
const { isDragging, shouldPreventClickRef, handleDragStart, handleDragEnd } = useItemDrag({
onDragStart,
})

// Context menu hook
const {
isOpen: isContextMenuOpen,
position,
@@ -215,14 +197,12 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
e.preventDefault()
e.stopPropagation()

// Toggle: close if open, open if closed
if (isContextMenuOpen) {
closeMenu()
return
}

captureSelectionState()
// Open context menu aligned with the button
const rect = e.currentTarget.getBoundingClientRect()
handleContextMenuBase({
preventDefault: () => {},
@@ -234,7 +214,6 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
[isContextMenuOpen, closeMenu, captureSelectionState, handleContextMenuBase]
)

// Rename hook
const {
isEditing,
editValue,
@@ -281,12 +260,10 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf

const isModifierClick = e.shiftKey || e.metaKey || e.ctrlKey

// Prevent default link behavior when using modifier keys
if (isModifierClick) {
e.preventDefault()
}

// Use metaKey (Cmd on Mac) or ctrlKey (Ctrl on Windows/Linux)
onWorkflowClick(workflow.id, e.shiftKey, e.metaKey || e.ctrlKey)
},
[shouldPreventClickRef, workflow.id, onWorkflowClick, isEditing]

@@ -9,7 +9,6 @@ import {
useDragDrop,
useWorkflowSelection,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks/use-import-workflow'
import { useFolders } from '@/hooks/queries/folders'
import { useFolderStore } from '@/stores/folders/store'
import type { FolderTreeNode } from '@/stores/folders/types'
@@ -25,15 +24,13 @@ const TREE_SPACING = {
interface WorkflowListProps {
regularWorkflows: WorkflowMetadata[]
isLoading?: boolean
isImporting: boolean
setIsImporting: (value: boolean) => void
handleFileChange: (event: React.ChangeEvent<HTMLInputElement>) => void
fileInputRef: React.RefObject<HTMLInputElement | null>
scrollContainerRef: React.RefObject<HTMLDivElement | null>
}

/**
* WorkflowList component displays workflows organized by folders with drag-and-drop support.
* Uses the workflow import hook for handling JSON imports.
*
* @param props - Component props
* @returns Workflow list with folders and drag-drop support
@@ -41,8 +38,7 @@ interface WorkflowListProps {
export function WorkflowList({
regularWorkflows,
isLoading = false,
isImporting,
setIsImporting,
handleFileChange,
fileInputRef,
scrollContainerRef,
}: WorkflowListProps) {
@@ -65,9 +61,6 @@ export function WorkflowList({
createFolderHeaderHoverHandlers,
} = useDragDrop()

// Workflow import hook
const { handleFileChange } = useImportWorkflow({ workspaceId })

// Set scroll container when ref changes
useEffect(() => {
if (scrollContainerRef.current) {

@@ -2,10 +2,10 @@
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ArrowDown, Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
|
||||
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useParams, usePathname, useRouter } from 'next/navigation'
|
||||
import { Button, FolderPlus, Library, Tooltip } from '@/components/emcn'
|
||||
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
|
||||
import { useSession } from '@/lib/auth/auth-client'
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
|
||||
@@ -30,6 +30,7 @@ import {
|
||||
import {
|
||||
useDuplicateWorkspace,
|
||||
useExportWorkspace,
|
||||
useImportWorkflow,
|
||||
useImportWorkspace,
|
||||
} from '@/app/workspace/[workspaceId]/w/hooks'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
@@ -85,9 +86,11 @@ export function Sidebar() {
|
||||
const isCollapsed = hasHydrated ? isCollapsedStore : false
|
||||
const isOnWorkflowPage = !!workflowId
|
||||
|
||||
const [isImporting, setIsImporting] = useState(false)
|
||||
const workspaceFileInputRef = useRef<HTMLInputElement>(null)
|
||||
|
||||
const { isImporting, handleFileChange: handleImportFileChange } = useImportWorkflow({
|
||||
workspaceId,
|
||||
})
|
||||
const { isImporting: isImportingWorkspace, handleImportWorkspace: importWorkspace } =
|
||||
useImportWorkspace()
|
||||
const { handleExportWorkspace: exportWorkspace } = useExportWorkspace()
|
||||
@@ -213,7 +216,7 @@ export function Sidebar() {
|
||||
}, [activeNavItemHref])
|
||||
|
||||
const { handleDuplicateWorkspace: duplicateWorkspace } = useDuplicateWorkspace({
|
||||
getWorkspaceId: () => workspaceId,
|
||||
workspaceId,
|
||||
})
|
||||
|
||||
const searchModalWorkflows = useMemo(
|
||||
@@ -565,21 +568,31 @@ export function Sidebar() {
|
||||
Workflows
|
||||
</div>
|
||||
<div className='flex items-center justify-center gap-[10px]'>
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='translate-y-[-0.25px] p-[1px]'
|
||||
onClick={handleImportWorkflow}
|
||||
disabled={isImporting || !canEdit}
|
||||
>
|
||||
<ArrowDown className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
<p>{isImporting ? 'Importing workflow...' : 'Import workflow'}</p>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
{isImporting ? (
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='translate-y-[-0.25px] p-[1px]'
|
||||
disabled={!canEdit || isImporting}
|
||||
>
|
||||
<Loader className='h-[14px] w-[14px]' animate />
|
||||
</Button>
|
||||
) : (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='translate-y-[-0.25px] p-[1px]'
|
||||
onClick={handleImportWorkflow}
|
||||
disabled={!canEdit}
|
||||
>
|
||||
<Download className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content>
|
||||
<p>Import workflows</p>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
@@ -622,8 +635,7 @@ export function Sidebar() {
|
||||
<WorkflowList
|
||||
regularWorkflows={regularWorkflows}
|
||||
isLoading={isLoading}
|
||||
isImporting={isImporting}
|
||||
setIsImporting={setIsImporting}
|
||||
handleFileChange={handleImportFileChange}
|
||||
fileInputRef={fileInputRef}
|
||||
scrollContainerRef={scrollContainerRef}
|
||||
/>
|
||||
|
||||
@@ -4,6 +4,7 @@ export { useDeleteWorkflow } from './use-delete-workflow'
|
||||
export { useDuplicateFolder } from './use-duplicate-folder'
|
||||
export { useDuplicateWorkflow } from './use-duplicate-workflow'
|
||||
export { useDuplicateWorkspace } from './use-duplicate-workspace'
|
||||
export { useExportFolder } from './use-export-folder'
|
||||
export { useExportWorkflow } from './use-export-workflow'
|
||||
export { useExportWorkspace } from './use-export-workspace'
|
||||
export { useImportWorkflow } from './use-import-workflow'
|
||||
|
||||
@@ -11,10 +11,9 @@ interface UseDeleteFolderProps {
|
||||
*/
|
||||
workspaceId: string
|
||||
/**
|
||||
* Function that returns the folder ID(s) to delete
|
||||
* This function is called when deletion occurs to get fresh selection state
|
||||
* The folder ID(s) to delete
|
||||
*/
|
||||
getFolderIds: () => string | string[]
|
||||
folderIds: string | string[]
|
||||
/**
|
||||
* Optional callback after successful deletion
|
||||
*/
|
||||
@@ -24,17 +23,10 @@ interface UseDeleteFolderProps {
|
||||
/**
|
||||
* Hook for managing folder deletion.
|
||||
*
|
||||
* Handles:
|
||||
* - Single or bulk folder deletion
|
||||
* - Calling delete API for each folder
|
||||
* - Loading state management
|
||||
* - Error handling and logging
|
||||
* - Clearing selection after deletion
|
||||
*
|
||||
* @param props - Hook configuration
|
||||
* @returns Delete folder handlers and state
|
||||
*/
|
||||
export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDeleteFolderProps) {
|
||||
export function useDeleteFolder({ workspaceId, folderIds, onSuccess }: UseDeleteFolderProps) {
|
||||
const deleteFolderMutation = useDeleteFolderMutation()
|
||||
const [isDeleting, setIsDeleting] = useState(false)
|
||||
|
||||
@@ -46,23 +38,18 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
|
||||
return
|
||||
}
|
||||
|
||||
if (!folderIds) {
|
||||
return
|
||||
}
|
||||
|
||||
setIsDeleting(true)
|
||||
try {
|
||||
// Get fresh folder IDs at deletion time
|
||||
const folderIdsOrId = getFolderIds()
|
||||
if (!folderIdsOrId) {
|
||||
return
|
||||
}
|
||||
const folderIdsToDelete = Array.isArray(folderIds) ? folderIds : [folderIds]
|
||||
|
||||
// Normalize to array for consistent handling
|
||||
const folderIdsToDelete = Array.isArray(folderIdsOrId) ? folderIdsOrId : [folderIdsOrId]
|
||||
|
||||
// Delete each folder sequentially
|
||||
for (const folderId of folderIdsToDelete) {
|
||||
await deleteFolderMutation.mutateAsync({ id: folderId, workspaceId })
|
||||
}
|
||||
|
||||
// Clear selection after successful deletion
|
||||
const { clearSelection } = useFolderStore.getState()
|
||||
clearSelection()
|
||||
|
||||
@@ -74,7 +61,7 @@ export function useDeleteFolder({ workspaceId, getFolderIds, onSuccess }: UseDel
|
||||
} finally {
|
||||
setIsDeleting(false)
|
||||
}
|
||||
}, [getFolderIds, isDeleting, deleteFolderMutation, workspaceId, onSuccess])
|
||||
}, [folderIds, isDeleting, deleteFolderMutation, workspaceId, onSuccess])
|
||||
|
||||
return {
|
||||
isDeleting,
|
||||
|
||||
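
The hunks above swap `useDeleteFolder`'s deferred `getFolderIds` callback for a plain `folderIds` value, so the memoized handler simply re-binds whenever the selection changes. A minimal call-site sketch under the new API, assuming the hook also returns a `handleDeleteFolder` handler (the return statement is truncated in this diff); `selectedFolderIds` and the `onSuccess` callback are illustrative, not taken from the diff:

```ts
const { isDeleting, handleDeleteFolder } = useDeleteFolder({
  workspaceId,
  // Pass the current selection directly; the hook normalizes string | string[] internally.
  folderIds: selectedFolderIds,
  onSuccess: closeContextMenu, // hypothetical callback
})
```
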
@@ -12,10 +12,9 @@ interface UseDeleteWorkflowProps {
*/
workspaceId: string
/**
* Function that returns the workflow ID(s) to delete
* This function is called when deletion occurs to get fresh selection state
* Workflow ID(s) to delete
*/
getWorkflowIds: () => string | string[]
workflowIds: string | string[]
/**
* Whether the active workflow is being deleted
* Can be a boolean or a function that receives the workflow IDs
@@ -30,20 +29,12 @@ interface UseDeleteWorkflowProps {
/**
* Hook for managing workflow deletion with navigation logic.
*
* Handles:
* - Single or bulk workflow deletion
* - Finding next workflow to navigate to
* - Navigating before deletion (if active workflow)
* - Removing workflow(s) from registry
* - Loading state management
* - Error handling and logging
*
* @param props - Hook configuration
* @returns Delete workflow handlers and state
*/
export function useDeleteWorkflow({
workspaceId,
getWorkflowIds,
workflowIds,
isActive = false,
onSuccess,
}: UseDeleteWorkflowProps) {
@@ -59,30 +50,21 @@ export function useDeleteWorkflow({
return
}

if (!workflowIds) {
return
}

setIsDeleting(true)
try {
// Get fresh workflow IDs at deletion time
const workflowIdsOrId = getWorkflowIds()
if (!workflowIdsOrId) {
return
}
const workflowIdsToDelete = Array.isArray(workflowIds) ? workflowIds : [workflowIds]

// Normalize to array for consistent handling
const workflowIdsToDelete = Array.isArray(workflowIdsOrId)
? workflowIdsOrId
: [workflowIdsOrId]

// Determine if active workflow is being deleted
const isActiveWorkflowBeingDeleted =
typeof isActive === 'function' ? isActive(workflowIdsToDelete) : isActive

// Find next workflow to navigate to (if active workflow is being deleted)
const sidebarWorkflows = Object.values(workflows).filter((w) => w.workspaceId === workspaceId)

// Find which specific workflow is the active one (if any in the deletion list)
let activeWorkflowId: string | null = null
if (isActiveWorkflowBeingDeleted && typeof isActive === 'function') {
// Check each workflow being deleted to find which one is active
activeWorkflowId =
workflowIdsToDelete.find((id) => isActive([id])) || workflowIdsToDelete[0]
} else {
@@ -93,13 +75,11 @@ export function useDeleteWorkflow({

let nextWorkflowId: string | null = null
if (isActiveWorkflowBeingDeleted && sidebarWorkflows.length > workflowIdsToDelete.length) {
// Find the first workflow that's not being deleted
const remainingWorkflows = sidebarWorkflows.filter(
(w) => !workflowIdsToDelete.includes(w.id)
)

if (remainingWorkflows.length > 0) {
// Try to find the next workflow after the current one
const workflowsAfterCurrent = remainingWorkflows.filter((w) => {
const idx = sidebarWorkflows.findIndex((sw) => sw.id === w.id)
return idx > currentIndex
@@ -108,13 +88,11 @@ export function useDeleteWorkflow({
if (workflowsAfterCurrent.length > 0) {
nextWorkflowId = workflowsAfterCurrent[0].id
} else {
// Otherwise, use the first remaining workflow
nextWorkflowId = remainingWorkflows[0].id
}
}
}

// Navigate first if this is the active workflow
if (isActiveWorkflowBeingDeleted) {
if (nextWorkflowId) {
router.push(`/workspace/${workspaceId}/w/${nextWorkflowId}`)
@@ -123,10 +101,8 @@ export function useDeleteWorkflow({
}
}

// Delete all workflows
await Promise.all(workflowIdsToDelete.map((id) => removeWorkflow(id)))

// Clear selection after successful deletion
const { clearSelection } = useFolderStore.getState()
clearSelection()

@@ -138,16 +114,7 @@ export function useDeleteWorkflow({
} finally {
setIsDeleting(false)
}
}, [
getWorkflowIds,
isDeleting,
workflows,
workspaceId,
isActive,
router,
removeWorkflow,
onSuccess,
])
}, [workflowIds, isDeleting, workflows, workspaceId, isActive, router, removeWorkflow, onSuccess])

return {
isDeleting,
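
`useDeleteWorkflow` gets the same treatment: `workflowIds` is now a direct prop, and `isActive` may be either a boolean or a predicate over the IDs being deleted. A sketch of a plausible call site, again assuming a `handleDeleteWorkflow` handler in the truncated return; `selectedIds` and `activeWorkflowId` are illustrative:

```ts
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
  workspaceId,
  workflowIds: selectedIds, // illustrative selection state
  // Predicate form: true when the active workflow is among the IDs being deleted.
  isActive: (ids) => ids.includes(activeWorkflowId),
})
```
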
@@ -7,7 +7,10 @@ const logger = createLogger('useDuplicateFolder')

interface UseDuplicateFolderProps {
workspaceId: string
getFolderIds: () => string | string[]
/**
* The folder ID(s) to duplicate
*/
folderIds: string | string[]
onSuccess?: () => void
}

@@ -17,11 +20,7 @@ interface UseDuplicateFolderProps {
* @param props - Hook configuration
* @returns Duplicate folder handlers and state
*/
export function useDuplicateFolder({
workspaceId,
getFolderIds,
onSuccess,
}: UseDuplicateFolderProps) {
export function useDuplicateFolder({ workspaceId, folderIds, onSuccess }: UseDuplicateFolderProps) {
const duplicateFolderMutation = useDuplicateFolderMutation()
const [isDuplicating, setIsDuplicating] = useState(false)

@@ -46,21 +45,17 @@ export function useDuplicateFolder({
return
}

if (!folderIds) {
return
}

setIsDuplicating(true)
try {
// Get fresh folder IDs at duplication time
const folderIdsOrId = getFolderIds()
if (!folderIdsOrId) {
return
}

// Normalize to array for consistent handling
const folderIdsToDuplicate = Array.isArray(folderIdsOrId) ? folderIdsOrId : [folderIdsOrId]
const folderIdsToDuplicate = Array.isArray(folderIds) ? folderIds : [folderIds]

const duplicatedIds: string[] = []
const folderStore = useFolderStore.getState()

// Duplicate each folder sequentially
for (const folderId of folderIdsToDuplicate) {
const folder = folderStore.getFolderById(folderId)

@@ -72,7 +67,6 @@ export function useDuplicateFolder({
const siblingNames = new Set(
folderStore.getChildFolders(folder.parentId).map((sibling) => sibling.name)
)
// Avoid colliding with the original folder name
siblingNames.add(folder.name)

const duplicateName = generateDuplicateName(folder.name, siblingNames)
@@ -90,7 +84,6 @@ export function useDuplicateFolder({
}
}

// Clear selection after successful duplication
const { clearSelection } = useFolderStore.getState()
clearSelection()

@@ -107,7 +100,7 @@ export function useDuplicateFolder({
setIsDuplicating(false)
}
}, [
getFolderIds,
folderIds,
generateDuplicateName,
isDuplicating,
duplicateFolderMutation,

@@ -13,11 +13,6 @@ interface UseDuplicateWorkflowProps {
* Current workspace ID
*/
workspaceId: string
/**
* Function that returns the workflow ID(s) to duplicate
* This function is called when duplication occurs to get fresh selection state
*/
getWorkflowIds: () => string | string[]
/**
* Optional callback after successful duplication
*/
@@ -27,89 +22,72 @@ interface UseDuplicateWorkflowProps {
/**
* Hook for managing workflow duplication with optimistic updates.
*
* Handles:
* - Single or bulk workflow duplication
* - Optimistic UI updates (shows new workflow immediately)
* - Automatic rollback on failure
* - Loading state management
* - Error handling and logging
* - Clearing selection after duplication
* - Navigation to duplicated workflow (single only)
*
* @param props - Hook configuration
* @returns Duplicate workflow handlers and state
*/
export function useDuplicateWorkflow({
workspaceId,
getWorkflowIds,
onSuccess,
}: UseDuplicateWorkflowProps) {
export function useDuplicateWorkflow({ workspaceId, onSuccess }: UseDuplicateWorkflowProps) {
const router = useRouter()
const { workflows } = useWorkflowRegistry()
const duplicateMutation = useDuplicateWorkflowMutation()

/**
* Duplicate the workflow(s)
* @param workflowIds - The workflow ID(s) to duplicate
*/
const handleDuplicateWorkflow = useCallback(async () => {
if (duplicateMutation.isPending) {
return
}
const handleDuplicateWorkflow = useCallback(
async (workflowIds: string | string[]) => {
if (!workflowIds || (Array.isArray(workflowIds) && workflowIds.length === 0)) {
return
}

// Get fresh workflow IDs at duplication time
const workflowIdsOrId = getWorkflowIds()
if (!workflowIdsOrId) {
return
}
if (duplicateMutation.isPending) {
return
}

// Normalize to array for consistent handling
const workflowIdsToDuplicate = Array.isArray(workflowIdsOrId)
? workflowIdsOrId
: [workflowIdsOrId]
const workflowIdsToDuplicate = Array.isArray(workflowIds) ? workflowIds : [workflowIds]

const duplicatedIds: string[] = []
const duplicatedIds: string[] = []

try {
// Duplicate each workflow sequentially
for (const sourceId of workflowIdsToDuplicate) {
const sourceWorkflow = workflows[sourceId]
if (!sourceWorkflow) {
logger.warn(`Workflow ${sourceId} not found, skipping`)
continue
try {
for (const sourceId of workflowIdsToDuplicate) {
const sourceWorkflow = workflows[sourceId]
if (!sourceWorkflow) {
logger.warn(`Workflow ${sourceId} not found, skipping`)
continue
}

const result = await duplicateMutation.mutateAsync({
workspaceId,
sourceId,
name: `${sourceWorkflow.name} (Copy)`,
description: sourceWorkflow.description,
color: getNextWorkflowColor(),
folderId: sourceWorkflow.folderId,
})

duplicatedIds.push(result.id)
}

const result = await duplicateMutation.mutateAsync({
workspaceId,
sourceId,
name: `${sourceWorkflow.name} (Copy)`,
description: sourceWorkflow.description,
color: getNextWorkflowColor(),
folderId: sourceWorkflow.folderId,
const { clearSelection } = useFolderStore.getState()
clearSelection()

logger.info('Workflow(s) duplicated successfully', {
workflowIds: workflowIdsToDuplicate,
duplicatedIds,
})

duplicatedIds.push(result.id)
if (duplicatedIds.length === 1) {
router.push(`/workspace/${workspaceId}/w/${duplicatedIds[0]}`)
}

onSuccess?.()
} catch (error) {
logger.error('Error duplicating workflow(s):', { error })
throw error
}

// Clear selection after successful duplication
const { clearSelection } = useFolderStore.getState()
clearSelection()

logger.info('Workflow(s) duplicated successfully', {
workflowIds: workflowIdsToDuplicate,
duplicatedIds,
})

// Navigate to duplicated workflow if single duplication
if (duplicatedIds.length === 1) {
router.push(`/workspace/${workspaceId}/w/${duplicatedIds[0]}`)
}

onSuccess?.()
} catch (error) {
logger.error('Error duplicating workflow(s):', { error })
throw error
}
}, [getWorkflowIds, duplicateMutation, workflows, workspaceId, router, onSuccess])
},
[duplicateMutation, workflows, workspaceId, router, onSuccess]
)

return {
isDuplicating: duplicateMutation.isPending,
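
For duplication the IDs moved out of the props entirely: `handleDuplicateWorkflow` now takes them as a call-time argument, which is why `getWorkflowIds` disappears from the dependency array. A usage sketch; `selectedWorkflowIds` is illustrative:

```ts
const { isDuplicating, handleDuplicateWorkflow } = useDuplicateWorkflow({ workspaceId })

// Single ID or an array both work; a lone duplicate also triggers navigation to the copy.
await handleDuplicateWorkflow(selectedWorkflowIds)
```
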
@@ -6,10 +6,9 @@ const logger = createLogger('useDuplicateWorkspace')

interface UseDuplicateWorkspaceProps {
/**
* Function that returns the workspace ID to duplicate
* This function is called when duplication occurs to get fresh state
* The workspace ID to duplicate
*/
getWorkspaceId: () => string | null
workspaceId: string | null
/**
* Optional callback after successful duplication
*/
@@ -19,17 +18,10 @@ interface UseDuplicateWorkspaceProps {
/**
* Hook for managing workspace duplication.
*
* Handles:
* - Workspace duplication
* - Calling duplicate API
* - Loading state management
* - Error handling and logging
* - Navigation to duplicated workspace
*
* @param props - Hook configuration
* @returns Duplicate workspace handlers and state
*/
export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicateWorkspaceProps) {
export function useDuplicateWorkspace({ workspaceId, onSuccess }: UseDuplicateWorkspaceProps) {
const router = useRouter()
const [isDuplicating, setIsDuplicating] = useState(false)

@@ -38,18 +30,12 @@ export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicat
*/
const handleDuplicateWorkspace = useCallback(
async (workspaceName: string) => {
if (isDuplicating) {
if (isDuplicating || !workspaceId) {
return
}

setIsDuplicating(true)
try {
// Get fresh workspace ID at duplication time
const workspaceId = getWorkspaceId()
if (!workspaceId) {
return
}

const response = await fetch(`/api/workspaces/${workspaceId}/duplicate`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
@@ -70,7 +56,6 @@ export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicat
workflowsCount: duplicatedWorkspace.workflowsCount,
})

// Navigate to duplicated workspace
router.push(`/workspace/${duplicatedWorkspace.id}/w`)

onSuccess?.()
@@ -83,7 +68,7 @@ export function useDuplicateWorkspace({ getWorkspaceId, onSuccess }: UseDuplicat
setIsDuplicating(false)
}
},
[getWorkspaceId, isDuplicating, router, onSuccess]
[workspaceId, isDuplicating, router, onSuccess]
)

return {

@@ -0,0 +1,237 @@
import { useCallback, useMemo, useState } from 'react'
import { createLogger } from '@sim/logger'
import JSZip from 'jszip'
import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer'
import { useFolderStore } from '@/stores/folders/store'
import type { WorkflowFolder } from '@/stores/folders/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
import type { Variable } from '@/stores/workflows/workflow/types'

const logger = createLogger('useExportFolder')

interface UseExportFolderProps {
/**
* Current workspace ID
*/
workspaceId: string
/**
* The folder ID to export
*/
folderId: string
/**
* Optional callback after successful export
*/
onSuccess?: () => void
}

/**
* Recursively collects all workflow IDs within a folder and its subfolders.
*
* @param folderId - The folder ID to collect workflows from
* @param workflows - All workflows in the workspace
* @param folders - All folders in the workspace
* @returns Array of workflow IDs
*/
function collectWorkflowsInFolder(
folderId: string,
workflows: Record<string, WorkflowMetadata>,
folders: Record<string, WorkflowFolder>
): string[] {
const workflowIds: string[] = []

for (const workflow of Object.values(workflows)) {
if (workflow.folderId === folderId) {
workflowIds.push(workflow.id)
}
}

for (const folder of Object.values(folders)) {
if (folder.parentId === folderId) {
const childWorkflowIds = collectWorkflowsInFolder(folder.id, workflows, folders)
workflowIds.push(...childWorkflowIds)
}
}

return workflowIds
}

/**
* Hook for managing folder export to ZIP.
*
* @param props - Hook configuration
* @returns Export folder handlers and state
*/
export function useExportFolder({ workspaceId, folderId, onSuccess }: UseExportFolderProps) {
const { workflows } = useWorkflowRegistry()
const { folders } = useFolderStore()
const [isExporting, setIsExporting] = useState(false)

/**
* Check if the folder has any workflows (recursively)
*/
const hasWorkflows = useMemo(() => {
if (!folderId) return false
return collectWorkflowsInFolder(folderId, workflows, folders).length > 0
}, [folderId, workflows, folders])

/**
* Download file helper
*/
const downloadFile = (content: Blob, filename: string, mimeType = 'application/zip') => {
try {
const blob = content instanceof Blob ? content : new Blob([content], { type: mimeType })
const url = URL.createObjectURL(blob)
const a = document.createElement('a')
a.href = url
a.download = filename
document.body.appendChild(a)
a.click()
document.body.removeChild(a)
URL.revokeObjectURL(url)
} catch (error) {
logger.error('Failed to download file:', error)
}
}

/**
* Export all workflows in the folder (including nested subfolders) to ZIP
*/
const handleExportFolder = useCallback(async () => {
if (isExporting) {
return
}

if (!folderId) {
logger.warn('No folder ID provided for export')
return
}

setIsExporting(true)
try {
const folderStore = useFolderStore.getState()
const folder = folderStore.getFolderById(folderId)

if (!folder) {
logger.warn('Folder not found for export', { folderId })
return
}

const workflowIdsToExport = collectWorkflowsInFolder(folderId, workflows, folderStore.folders)

if (workflowIdsToExport.length === 0) {
logger.warn('No workflows found in folder to export', { folderId, folderName: folder.name })
return
}

logger.info('Starting folder export', {
folderId,
folderName: folder.name,
workflowCount: workflowIdsToExport.length,
})

const exportedWorkflows: Array<{ name: string; content: string }> = []

for (const workflowId of workflowIdsToExport) {
try {
const workflow = workflows[workflowId]
if (!workflow) {
logger.warn(`Workflow ${workflowId} not found in registry`)
continue
}

const workflowResponse = await fetch(`/api/workflows/${workflowId}`)
if (!workflowResponse.ok) {
logger.error(`Failed to fetch workflow ${workflowId}`)
continue
}

const { data: workflowData } = await workflowResponse.json()
if (!workflowData?.state) {
logger.warn(`Workflow ${workflowId} has no state`)
continue
}

const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`)
let workflowVariables: Record<string, Variable> | undefined
if (variablesResponse.ok) {
const variablesData = await variablesResponse.json()
workflowVariables = variablesData?.data
}

const workflowState = {
...workflowData.state,
metadata: {
name: workflow.name,
description: workflow.description,
color: workflow.color,
exportedAt: new Date().toISOString(),
},
variables: workflowVariables,
}

const exportState = sanitizeForExport(workflowState)
const jsonString = JSON.stringify(exportState, null, 2)

exportedWorkflows.push({
name: workflow.name,
content: jsonString,
})

logger.info(`Workflow ${workflowId} exported successfully`)
} catch (error) {
logger.error(`Failed to export workflow ${workflowId}:`, error)
}
}

if (exportedWorkflows.length === 0) {
logger.warn('No workflows were successfully exported from folder', {
folderId,
folderName: folder.name,
})
return
}

const zip = new JSZip()
const seenFilenames = new Set<string>()

for (const exportedWorkflow of exportedWorkflows) {
const baseName = exportedWorkflow.name.replace(/[^a-z0-9]/gi, '-')
let filename = `${baseName}.json`
let counter = 1
while (seenFilenames.has(filename.toLowerCase())) {
filename = `${baseName}-${counter}.json`
counter++
}
seenFilenames.add(filename.toLowerCase())
zip.file(filename, exportedWorkflow.content)
}

const zipBlob = await zip.generateAsync({ type: 'blob' })
const zipFilename = `${folder.name.replace(/[^a-z0-9]/gi, '-')}-export.zip`
downloadFile(zipBlob, zipFilename, 'application/zip')

const { clearSelection } = useFolderStore.getState()
clearSelection()

logger.info('Folder exported successfully', {
folderId,
folderName: folder.name,
workflowCount: exportedWorkflows.length,
})

onSuccess?.()
} catch (error) {
logger.error('Error exporting folder:', { error })
throw error
} finally {
setIsExporting(false)
}
}, [folderId, isExporting, workflows, folders, onSuccess])

return {
isExporting,
hasWorkflows,
handleExportFolder,
}
}
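
The new `useExportFolder` hook above walks the folder tree recursively and zips every workflow it finds. A sketch of how a caller might consume it; the `folder` reference is illustrative, not part of this diff:

```ts
const { isExporting, hasWorkflows, handleExportFolder } = useExportFolder({
  workspaceId,
  folderId: folder.id, // illustrative folder reference
})

// hasWorkflows is computed recursively, so an empty folder tree can disable the
// export action before handleExportFolder is ever invoked.
if (hasWorkflows && !isExporting) {
  await handleExportFolder()
}
```
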
@@ -13,11 +13,6 @@ interface UseExportWorkflowProps {
* Current workspace ID
*/
workspaceId: string
/**
* Function that returns the workflow ID(s) to export
* This function is called when export occurs to get fresh selection state
*/
getWorkflowIds: () => string | string[]
/**
* Optional callback after successful export
*/
@@ -27,23 +22,10 @@ interface UseExportWorkflowProps {
/**
* Hook for managing workflow export to JSON.
*
* Handles:
* - Single or bulk workflow export
* - Fetching workflow data and variables from API
* - Sanitizing workflow state for export
* - Downloading as JSON file(s)
* - Loading state management
* - Error handling and logging
* - Clearing selection after export
*
* @param props - Hook configuration
* @returns Export workflow handlers and state
*/
export function useExportWorkflow({
workspaceId,
getWorkflowIds,
onSuccess,
}: UseExportWorkflowProps) {
export function useExportWorkflow({ workspaceId, onSuccess }: UseExportWorkflowProps) {
const { workflows } = useWorkflowRegistry()
const [isExporting, setIsExporting] = useState(false)

@@ -75,130 +57,129 @@ export function useExportWorkflow({
* - Single workflow: exports as JSON file
* - Multiple workflows: exports as ZIP file containing all JSON files
* Fetches workflow data from API to support bulk export of non-active workflows
* @param workflowIds - The workflow ID(s) to export
*/
const handleExportWorkflow = useCallback(async () => {
if (isExporting) {
return
}

setIsExporting(true)
try {
// Get fresh workflow IDs at export time
const workflowIdsOrId = getWorkflowIds()
if (!workflowIdsOrId) {
const handleExportWorkflow = useCallback(
async (workflowIds: string | string[]) => {
if (isExporting) {
return
}

// Normalize to array for consistent handling
const workflowIdsToExport = Array.isArray(workflowIdsOrId)
? workflowIdsOrId
: [workflowIdsOrId]
if (!workflowIds || (Array.isArray(workflowIds) && workflowIds.length === 0)) {
return
}

logger.info('Starting workflow export', {
workflowIdsToExport,
count: workflowIdsToExport.length,
})
setIsExporting(true)
try {
const workflowIdsToExport = Array.isArray(workflowIds) ? workflowIds : [workflowIds]

const exportedWorkflows: Array<{ name: string; content: string }> = []
logger.info('Starting workflow export', {
workflowIdsToExport,
count: workflowIdsToExport.length,
})

// Export each workflow
for (const workflowId of workflowIdsToExport) {
try {
const workflow = workflows[workflowId]
if (!workflow) {
logger.warn(`Workflow ${workflowId} not found in registry`)
continue
}
const exportedWorkflows: Array<{ name: string; content: string }> = []

// Fetch workflow state from API
const workflowResponse = await fetch(`/api/workflows/${workflowId}`)
if (!workflowResponse.ok) {
logger.error(`Failed to fetch workflow ${workflowId}`)
continue
}
for (const workflowId of workflowIdsToExport) {
try {
const workflow = workflows[workflowId]
if (!workflow) {
logger.warn(`Workflow ${workflowId} not found in registry`)
continue
}

const { data: workflowData } = await workflowResponse.json()
if (!workflowData?.state) {
logger.warn(`Workflow ${workflowId} has no state`)
continue
}
const workflowResponse = await fetch(`/api/workflows/${workflowId}`)
if (!workflowResponse.ok) {
logger.error(`Failed to fetch workflow ${workflowId}`)
continue
}

// Fetch workflow variables (API returns Record format directly)
const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`)
let workflowVariables: Record<string, Variable> | undefined
if (variablesResponse.ok) {
const variablesData = await variablesResponse.json()
workflowVariables = variablesData?.data
}
const { data: workflowData } = await workflowResponse.json()
if (!workflowData?.state) {
logger.warn(`Workflow ${workflowId} has no state`)
continue
}

// Prepare export state
const workflowState = {
...workflowData.state,
metadata: {
const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`)
let workflowVariables: Record<string, Variable> | undefined
if (variablesResponse.ok) {
const variablesData = await variablesResponse.json()
workflowVariables = variablesData?.data
}

const workflowState = {
...workflowData.state,
metadata: {
name: workflow.name,
description: workflow.description,
color: workflow.color,
exportedAt: new Date().toISOString(),
},
variables: workflowVariables,
}

const exportState = sanitizeForExport(workflowState)
const jsonString = JSON.stringify(exportState, null, 2)

exportedWorkflows.push({
name: workflow.name,
description: workflow.description,
color: workflow.color,
exportedAt: new Date().toISOString(),
},
variables: workflowVariables,
content: jsonString,
})

logger.info(`Workflow ${workflowId} exported successfully`)
} catch (error) {
logger.error(`Failed to export workflow ${workflowId}:`, error)
}
}

if (exportedWorkflows.length === 0) {
logger.warn('No workflows were successfully exported')
return
}

if (exportedWorkflows.length === 1) {
const filename = `${exportedWorkflows[0].name.replace(/[^a-z0-9]/gi, '-')}.json`
downloadFile(exportedWorkflows[0].content, filename, 'application/json')
} else {
const zip = new JSZip()
const seenFilenames = new Set<string>()

for (const exportedWorkflow of exportedWorkflows) {
const baseName = exportedWorkflow.name.replace(/[^a-z0-9]/gi, '-')
let filename = `${baseName}.json`
let counter = 1
while (seenFilenames.has(filename.toLowerCase())) {
filename = `${baseName}-${counter}.json`
counter++
}
seenFilenames.add(filename.toLowerCase())
zip.file(filename, exportedWorkflow.content)
}

const exportState = sanitizeForExport(workflowState)
const jsonString = JSON.stringify(exportState, null, 2)

exportedWorkflows.push({
name: workflow.name,
content: jsonString,
})

logger.info(`Workflow ${workflowId} exported successfully`)
} catch (error) {
logger.error(`Failed to export workflow ${workflowId}:`, error)
}
}

if (exportedWorkflows.length === 0) {
logger.warn('No workflows were successfully exported')
return
}

// Download as single JSON or ZIP depending on count
if (exportedWorkflows.length === 1) {
// Single workflow - download as JSON
const filename = `${exportedWorkflows[0].name.replace(/[^a-z0-9]/gi, '-')}.json`
downloadFile(exportedWorkflows[0].content, filename, 'application/json')
} else {
// Multiple workflows - download as ZIP
const zip = new JSZip()

for (const exportedWorkflow of exportedWorkflows) {
const filename = `${exportedWorkflow.name.replace(/[^a-z0-9]/gi, '-')}.json`
zip.file(filename, exportedWorkflow.content)
const zipBlob = await zip.generateAsync({ type: 'blob' })
const zipFilename = `workflows-export-${Date.now()}.zip`
downloadFile(zipBlob, zipFilename, 'application/zip')
}

const zipBlob = await zip.generateAsync({ type: 'blob' })
const zipFilename = `workflows-export-${Date.now()}.zip`
downloadFile(zipBlob, zipFilename, 'application/zip')
const { clearSelection } = useFolderStore.getState()
clearSelection()

logger.info('Workflow(s) exported successfully', {
workflowIds: workflowIdsToExport,
count: exportedWorkflows.length,
format: exportedWorkflows.length === 1 ? 'JSON' : 'ZIP',
})

onSuccess?.()
} catch (error) {
logger.error('Error exporting workflow(s):', { error })
throw error
} finally {
setIsExporting(false)
}

// Clear selection after successful export
const { clearSelection } = useFolderStore.getState()
clearSelection()

logger.info('Workflow(s) exported successfully', {
workflowIds: workflowIdsToExport,
count: exportedWorkflows.length,
format: exportedWorkflows.length === 1 ? 'JSON' : 'ZIP',
})

onSuccess?.()
} catch (error) {
logger.error('Error exporting workflow(s):', { error })
throw error
} finally {
setIsExporting(false)
}
}, [getWorkflowIds, isExporting, workflows, onSuccess])
},
[isExporting, workflows, onSuccess]
)

return {
isExporting,
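
`useExportWorkflow` follows the same pattern as duplication: the IDs arrive at call time, and the output format is picked by count. A sketch with illustrative IDs:

```ts
const { isExporting, handleExportWorkflow } = useExportWorkflow({ workspaceId })

// One ID downloads a single .json file; an array downloads a .zip archive.
await handleExportWorkflow(workflow.id)
await handleExportWorkflow(['wf-1', 'wf-2']) // hypothetical IDs
```
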
@@ -44,21 +44,18 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
try {
logger.info('Exporting workspace', { workspaceId })

// Fetch all workflows in workspace
const workflowsResponse = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
if (!workflowsResponse.ok) {
throw new Error('Failed to fetch workflows')
}
const { data: workflows } = await workflowsResponse.json()

// Fetch all folders in workspace
const foldersResponse = await fetch(`/api/folders?workspaceId=${workspaceId}`)
if (!foldersResponse.ok) {
throw new Error('Failed to fetch folders')
}
const foldersData = await foldersResponse.json()

// Export each workflow
const workflowsToExport: WorkflowExportData[] = []

for (const workflow of workflows) {

@@ -33,6 +33,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const createWorkflowMutation = useCreateWorkflow()
const queryClient = useQueryClient()
const createFolderMutation = useCreateFolder()
const clearDiff = useWorkflowDiffStore((state) => state.clearDiff)
const [isImporting, setIsImporting] = useState(false)

/**
@@ -48,9 +49,8 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}

const workflowName = extractWorkflowName(content, filename)
useWorkflowDiffStore.getState().clearDiff()
clearDiff()

// Extract color from metadata
const parsedContent = JSON.parse(content)
const workflowColor =
parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'
@@ -63,7 +63,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
})
const newWorkflowId = result.id

// Update workflow color if we extracted one
if (workflowColor !== '#3972F6') {
await fetch(`/api/workflows/${newWorkflowId}`, {
method: 'PATCH',
@@ -72,16 +71,13 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
})
}

// Save workflow state
await fetch(`/api/workflows/${newWorkflowId}/state`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(workflowData),
})

// Save variables if any (handle both legacy Array and current Record formats)
if (workflowData.variables) {
// Convert to Record format for API (handles backwards compatibility with old Array exports)
const variablesArray = Array.isArray(workflowData.variables)
? workflowData.variables
: Object.values(workflowData.variables)
@@ -114,7 +110,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
logger.info(`Imported workflow: ${workflowName}`)
return newWorkflowId
},
[createWorkflowMutation, workspaceId]
[clearDiff, createWorkflowMutation, workspaceId]
)

/**
@@ -134,7 +130,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const importedWorkflowIds: string[] = []

if (hasZip && fileArray.length === 1) {
// Import from ZIP - preserves folder structure
const zipFile = fileArray[0]
const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)

@@ -149,7 +144,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
try {
let targetFolderId = importFolder.id

// Recreate nested folder structure
if (workflow.folderPath.length > 0) {
const folderPathKey = workflow.folderPath.join('/')

@@ -187,7 +181,6 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}
}
} else if (jsonFiles.length > 0) {
// Import multiple JSON files or single JSON
const extractedWorkflows = await extractWorkflowsFromFiles(jsonFiles)

for (const workflow of extractedWorkflows) {
@@ -200,22 +193,21 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}
}

// Reload workflows and folders to show newly imported ones
await queryClient.invalidateQueries({ queryKey: workflowKeys.list(workspaceId) })
await queryClient.invalidateQueries({ queryKey: folderKeys.list(workspaceId) })

logger.info(`Import complete. Imported ${importedWorkflowIds.length} workflow(s)`)

// Navigate to first imported workflow if any
if (importedWorkflowIds.length > 0) {
router.push(`/workspace/${workspaceId}/w/${importedWorkflowIds[0]}`)
router.push(
`/workspace/${workspaceId}/w/${importedWorkflowIds[importedWorkflowIds.length - 1]}`
)
}
} catch (error) {
logger.error('Failed to import workflows:', error)
} finally {
setIsImporting(false)

// Reset file input
if (event.target) {
event.target.value = ''
}

@@ -21,15 +21,6 @@ interface UseImportWorkspaceProps {
/**
* Hook for managing workspace import from ZIP files.
*
* Handles:
* - Extracting workflows from ZIP file
* - Creating new workspace
* - Recreating folder structure
* - Importing all workflows with states and variables
* - Navigation to imported workspace
* - Loading state management
* - Error handling and logging
*
* @param props - Hook configuration
* @returns Import workspace handlers and state
*/
@@ -37,6 +28,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
const router = useRouter()
const [isImporting, setIsImporting] = useState(false)
const createFolderMutation = useCreateFolder()
const clearDiff = useWorkflowDiffStore((state) => state.clearDiff)

/**
* Handle workspace import from ZIP file
@@ -56,7 +48,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
try {
logger.info('Importing workspace from ZIP')

// Extract workflows from ZIP
const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)

if (extractedWorkflows.length === 0) {
@@ -64,7 +55,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
return
}

// Create new workspace
const workspaceName = metadata?.workspaceName || zipFile.name.replace(/\.zip$/i, '')
const createResponse = await fetch('/api/workspaces', {
method: 'POST',
@@ -81,7 +71,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})

const folderMap = new Map<string, string>()

// Import workflows
for (const workflow of extractedWorkflows) {
try {
const { data: workflowData, errors: parseErrors } = parseWorkflowJson(workflow.content)
@@ -91,7 +80,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
continue
}

// Recreate folder structure
let targetFolderId: string | null = null
if (workflow.folderPath.length > 0) {
const folderPathKey = workflow.folderPath.join('/')
@@ -120,14 +108,12 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
}

const workflowName = extractWorkflowName(workflow.content, workflow.name)
useWorkflowDiffStore.getState().clearDiff()
clearDiff()

// Extract color from workflow metadata
const parsedContent = JSON.parse(workflow.content)
const workflowColor =
parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'

// Create workflow
const createWorkflowResponse = await fetch('/api/workflows', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
@@ -147,7 +133,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})

const newWorkflow = await createWorkflowResponse.json()

// Save workflow state
const stateResponse = await fetch(`/api/workflows/${newWorkflow.id}/state`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
@@ -159,9 +144,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
continue
}

// Save variables if any (handle both legacy Array and current Record formats)
if (workflowData.variables) {
// Convert to Record format for API (handles backwards compatibility with old Array exports)
const variablesArray = Array.isArray(workflowData.variables)
? workflowData.variables
: Object.values(workflowData.variables)
@@ -199,7 +182,6 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})

logger.info(`Workspace import complete. Imported ${extractedWorkflows.length} workflows`)

// Navigate to new workspace
router.push(`/workspace/${newWorkspace.id}/w`)

onSuccess?.()
@@ -210,7 +192,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
setIsImporting(false)
}
},
[isImporting, router, onSuccess, createFolderMutation]
[isImporting, router, onSuccess, createFolderMutation, clearDiff]
)

return {

@@ -1,13 +1,13 @@
import { DynamoDBIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { DynamoDBResponse } from '@/tools/dynamodb/types'
import type { DynamoDBIntrospectResponse, DynamoDBResponse } from '@/tools/dynamodb/types'

export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
export const DynamoDBBlock: BlockConfig<DynamoDBResponse | DynamoDBIntrospectResponse> = {
type: 'dynamodb',
name: 'Amazon DynamoDB',
description: 'Connect to Amazon DynamoDB',
longDescription:
'Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, and Delete operations on DynamoDB tables.',
'Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, Delete, and Introspect operations on DynamoDB tables.',
docsLink: 'https://docs.sim.ai/tools/dynamodb',
category: 'tools',
bgColor: 'linear-gradient(45deg, #2E27AD 0%, #527FFF 100%)',
@@ -24,6 +24,7 @@ export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
{ label: 'Scan', id: 'scan' },
{ label: 'Update Item', id: 'update' },
{ label: 'Delete Item', id: 'delete' },
{ label: 'Introspect', id: 'introspect' },
],
value: () => 'get',
},
@@ -56,6 +57,19 @@ export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
type: 'short-input',
placeholder: 'my-table',
required: true,
condition: {
field: 'operation',
value: 'introspect',
not: true,
},
},
{
id: 'tableName',
title: 'Table Name (Optional)',
type: 'short-input',
placeholder: 'Leave empty to list all tables',
required: false,
condition: { field: 'operation', value: 'introspect' },
},
// Key field for get, update, delete operations
{
@@ -420,6 +434,7 @@ Return ONLY the expression - no explanations.`,
'dynamodb_scan',
'dynamodb_update',
'dynamodb_delete',
'dynamodb_introspect',
],
config: {
tool: (params) => {
@@ -436,6 +451,8 @@ Return ONLY the expression - no explanations.`,
return 'dynamodb_update'
case 'delete':
return 'dynamodb_delete'
case 'introspect':
return 'dynamodb_introspect'
default:
throw new Error(`Invalid DynamoDB operation: ${params.operation}`)
}
@@ -552,5 +569,13 @@ Return ONLY the expression - no explanations.`,
type: 'number',
description: 'Number of items returned',
},
tables: {
type: 'array',
description: 'List of table names from introspect operation',
},
tableDetails: {
type: 'json',
description: 'Detailed schema information for a specific table from introspect operation',
},
},
}
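
The DynamoDB block's new Introspect operation has two modes, selected by whether `tableName` is set. A sketch of the two parameter shapes the block would hand to `dynamodb_introspect`; the region and credential values are placeholders:

```ts
// List every table in the region.
const listTables = {
  operation: 'introspect',
  region: 'us-east-1',
  accessKeyId: 'AKIA...', // placeholder
  secretAccessKey: '...', // placeholder
}

// Describe one table's schema instead.
const describeTable = { ...listTables, tableName: 'my-table' }
```
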
@@ -33,6 +33,7 @@ export const ElasticsearchBlock: BlockConfig<ElasticsearchResponse> = {
{ label: 'Create Index', id: 'elasticsearch_create_index' },
{ label: 'Delete Index', id: 'elasticsearch_delete_index' },
{ label: 'Get Index Info', id: 'elasticsearch_get_index' },
{ label: 'List Indices', id: 'elasticsearch_list_indices' },
// Cluster Operations
{ label: 'Cluster Health', id: 'elasticsearch_cluster_health' },
{ label: 'Cluster Stats', id: 'elasticsearch_cluster_stats' },
@@ -452,6 +453,7 @@ Return ONLY valid JSON - no explanations, no markdown code blocks.`,
'elasticsearch_get_index',
'elasticsearch_cluster_health',
'elasticsearch_cluster_stats',
'elasticsearch_list_indices',
],
config: {
tool: (params) => {

@@ -1,8 +1,8 @@
import { MongoDBIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { MongoDBResponse } from '@/tools/mongodb/types'
import type { MongoDBIntrospectResponse, MongoDBResponse } from '@/tools/mongodb/types'

export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
export const MongoDBBlock: BlockConfig<MongoDBResponse | MongoDBIntrospectResponse> = {
type: 'mongodb',
name: 'MongoDB',
description: 'Connect to MongoDB database',
@@ -23,6 +23,7 @@ export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
{ label: 'Update Documents', id: 'update' },
{ label: 'Delete Documents', id: 'delete' },
{ label: 'Aggregate Pipeline', id: 'execute' },
{ label: 'Introspect Database', id: 'introspect' },
],
value: () => 'query',
},
@@ -86,6 +87,7 @@ export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
type: 'short-input',
placeholder: 'users',
required: true,
condition: { field: 'operation', value: 'introspect', not: true },
},
{
id: 'query',
@@ -803,6 +805,7 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
'mongodb_update',
'mongodb_delete',
'mongodb_execute',
'mongodb_introspect',
],
config: {
tool: (params) => {
@@ -817,6 +820,8 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
return 'mongodb_delete'
case 'execute':
return 'mongodb_execute'
case 'introspect':
return 'mongodb_introspect'
default:
throw new Error(`Invalid MongoDB operation: ${params.operation}`)
}
@@ -936,5 +941,14 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
type: 'number',
description: 'Number of documents matched (update operations)',
},
databases: {
type: 'array',
description: 'Array of database names (introspect operation)',
},
collections: {
type: 'array',
description:
'Array of collection info with name, type, document count, and indexes (introspect operation)',
},
},
}

@@ -23,6 +23,7 @@ export const MySQLBlock: BlockConfig<MySQLResponse> = {
{ label: 'Update Data', id: 'update' },
{ label: 'Delete Data', id: 'delete' },
{ label: 'Execute Raw SQL', id: 'execute' },
{ label: 'Introspect Schema', id: 'introspect' },
],
value: () => 'query',
},
@@ -285,7 +286,14 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
},
],
tools: {
access: ['mysql_query', 'mysql_insert', 'mysql_update', 'mysql_delete', 'mysql_execute'],
access: [
'mysql_query',
'mysql_insert',
'mysql_update',
'mysql_delete',
'mysql_execute',
'mysql_introspect',
],
config: {
tool: (params) => {
switch (params.operation) {
@@ -299,6 +307,8 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
return 'mysql_delete'
case 'execute':
return 'mysql_execute'
case 'introspect':
return 'mysql_introspect'
default:
throw new Error(`Invalid MySQL operation: ${params.operation}`)
}

@@ -1,8 +1,8 @@
import { Neo4jIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { Neo4jResponse } from '@/tools/neo4j/types'
import type { Neo4jIntrospectResponse, Neo4jResponse } from '@/tools/neo4j/types'

export const Neo4jBlock: BlockConfig<Neo4jResponse> = {
export const Neo4jBlock: BlockConfig<Neo4jResponse | Neo4jIntrospectResponse> = {
type: 'neo4j',
name: 'Neo4j',
description: 'Connect to Neo4j graph database',
@@ -24,6 +24,7 @@ export const Neo4jBlock: BlockConfig<Neo4jResponse> = {
{ label: 'Update Properties (SET)', id: 'update' },
{ label: 'Delete Nodes/Relationships', id: 'delete' },
{ label: 'Execute Cypher', id: 'execute' },
{ label: 'Introspect Schema', id: 'introspect' },
],
value: () => 'query',
},
@@ -589,6 +590,7 @@ Return ONLY valid JSON.`,
'neo4j_update',
'neo4j_delete',
'neo4j_execute',
'neo4j_introspect',
],
config: {
tool: (params) => {
@@ -605,6 +607,8 @@ Return ONLY valid JSON.`,
return 'neo4j_delete'
case 'execute':
return 'neo4j_execute'
case 'introspect':
return 'neo4j_introspect'
default:
throw new Error(`Invalid Neo4j operation: ${params.operation}`)
}

@@ -23,6 +23,7 @@ export const PostgreSQLBlock: BlockConfig<PostgresResponse> = {
{ label: 'Update Data', id: 'update' },
{ label: 'Delete Data', id: 'delete' },
{ label: 'Execute Raw SQL', id: 'execute' },
{ label: 'Introspect Schema', id: 'introspect' },
],
value: () => 'query',
},
@@ -285,6 +286,14 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
condition: { field: 'operation', value: 'delete' },
required: true,
},
{
id: 'schema',
title: 'Schema Name',
type: 'short-input',
placeholder: 'public',
value: () => 'public',
condition: { field: 'operation', value: 'introspect' },
},
],
tools: {
access: [
@@ -293,6 +302,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
'postgresql_update',
'postgresql_delete',
'postgresql_execute',
'postgresql_introspect',
],
config: {
tool: (params) => {
@@ -307,6 +317,8 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
return 'postgresql_delete'
case 'execute':
return 'postgresql_execute'
case 'introspect':
return 'postgresql_introspect'
default:
throw new Error(`Invalid PostgreSQL operation: ${params.operation}`)
}
@@ -343,6 +355,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
if (rest.table) result.table = rest.table
if (rest.query) result.query = rest.query
|
||||
if (rest.where) result.where = rest.where
|
||||
if (rest.schema) result.schema = rest.schema
|
||||
if (parsedData !== undefined) result.data = parsedData
|
||||
|
||||
return result
|
||||
@@ -361,6 +374,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
|
||||
query: { type: 'string', description: 'SQL query to execute' },
|
||||
data: { type: 'json', description: 'Data for insert/update operations' },
|
||||
where: { type: 'string', description: 'WHERE clause for update/delete' },
|
||||
schema: { type: 'string', description: 'Schema name for introspection' },
|
||||
},
|
||||
outputs: {
|
||||
message: {
|
||||
@@ -375,5 +389,13 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
|
||||
type: 'number',
|
||||
description: 'Number of rows affected by the operation',
|
||||
},
|
||||
tables: {
|
||||
type: 'array',
|
||||
description: 'Array of table schemas with columns, keys, and indexes (introspect operation)',
|
||||
},
|
||||
schemas: {
|
||||
type: 'array',
|
||||
description: 'List of available schemas in the database (introspect operation)',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
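A minimal sketch of a consumer for the new PostgreSQL introspect outputs above. The diff only declares `tables` and `schemas` as arrays; the per-table fields used here (`name`, `columns`, `indexes`) are assumptions for illustration, not confirmed by the source.

```ts
// Hypothetical shape of one entry in the `tables` output (assumed fields).
interface IntrospectedTable {
  name: string
  columns: Array<{ name: string; type: string }> // assumed
  indexes: string[] // assumed
}

// Summarize an introspect result for display or logging.
function summarizeIntrospection(output: { tables: IntrospectedTable[]; schemas: string[] }): string {
  return `${output.schemas.length} schema(s), ${output.tables.length} table(s)`
}
```
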
@@ -1,8 +1,8 @@
import { RDSIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { RdsResponse } from '@/tools/rds/types'
import type { RdsIntrospectResponse, RdsResponse } from '@/tools/rds/types'

export const RDSBlock: BlockConfig<RdsResponse> = {
export const RDSBlock: BlockConfig<RdsResponse | RdsIntrospectResponse> = {
type: 'rds',
name: 'Amazon RDS',
description: 'Connect to Amazon RDS via Data API',
@@ -23,6 +23,7 @@ export const RDSBlock: BlockConfig<RdsResponse> = {
{ label: 'Update Data', id: 'update' },
{ label: 'Delete Data', id: 'delete' },
{ label: 'Execute Raw SQL', id: 'execute' },
{ label: 'Introspect Schema', id: 'introspect' },
],
value: () => 'query',
},
@@ -340,9 +341,36 @@ Return ONLY the JSON object.`,
generationType: 'json-object',
},
},
{
id: 'schema',
title: 'Schema Name',
type: 'short-input',
placeholder: 'public (PostgreSQL) or database name (MySQL)',
condition: { field: 'operation', value: 'introspect' },
required: false,
},
{
id: 'engine',
title: 'Database Engine',
type: 'dropdown',
options: [
{ label: 'Auto-detect', id: '' },
{ label: 'Aurora PostgreSQL', id: 'aurora-postgresql' },
{ label: 'Aurora MySQL', id: 'aurora-mysql' },
],
condition: { field: 'operation', value: 'introspect' },
value: () => '',
},
],
tools: {
access: ['rds_query', 'rds_insert', 'rds_update', 'rds_delete', 'rds_execute'],
access: [
'rds_query',
'rds_insert',
'rds_update',
'rds_delete',
'rds_execute',
'rds_introspect',
],
config: {
tool: (params) => {
switch (params.operation) {
@@ -356,12 +384,14 @@ Return ONLY the JSON object.`,
return 'rds_delete'
case 'execute':
return 'rds_execute'
case 'introspect':
return 'rds_introspect'
default:
throw new Error(`Invalid RDS operation: ${params.operation}`)
}
},
params: (params) => {
const { operation, data, conditions, ...rest } = params
const { operation, data, conditions, schema, engine, ...rest } = params

// Parse JSON fields
const parseJson = (value: unknown, fieldName: string) => {
@@ -399,6 +429,8 @@ Return ONLY the JSON object.`,
if (rest.query) result.query = rest.query
if (parsedConditions !== undefined) result.conditions = parsedConditions
if (parsedData !== undefined) result.data = parsedData
if (schema) result.schema = schema
if (engine) result.engine = engine

return result
},
@@ -416,6 +448,11 @@ Return ONLY the JSON object.`,
query: { type: 'string', description: 'SQL query to execute' },
data: { type: 'json', description: 'Data for insert/update operations' },
conditions: { type: 'json', description: 'Conditions for update/delete (e.g., {"id": 1})' },
schema: { type: 'string', description: 'Schema to introspect (for introspect operation)' },
engine: {
type: 'string',
description: 'Database engine (aurora-postgresql or aurora-mysql, auto-detected if not set)',
},
},
outputs: {
message: {
@@ -430,5 +467,18 @@ Return ONLY the JSON object.`,
type: 'number',
description: 'Number of rows affected by the operation',
},
engine: {
type: 'string',
description: 'Detected database engine type (for introspect operation)',
},
tables: {
type: 'array',
description:
'Array of table schemas with columns, keys, and indexes (for introspect operation)',
},
schemas: {
type: 'array',
description: 'List of available schemas in the database (for introspect operation)',
},
},
}

@@ -34,6 +34,7 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
{ label: 'Full-Text Search', id: 'text_search' },
{ label: 'Vector Search', id: 'vector_search' },
{ label: 'Call RPC Function', id: 'rpc' },
{ label: 'Introspect Schema', id: 'introspect' },
// Storage - File Operations
{ label: 'Storage: Upload File', id: 'storage_upload' },
{ label: 'Storage: Download File', id: 'storage_download' },
@@ -490,6 +491,14 @@ Return ONLY the order by expression - no explanations, no extra text.`,
placeholder: '{\n "param1": "value1",\n "param2": "value2"\n}',
condition: { field: 'operation', value: 'rpc' },
},
// Introspect operation fields
{
id: 'schema',
title: 'Schema',
type: 'short-input',
placeholder: 'public (leave empty for all user schemas)',
condition: { field: 'operation', value: 'introspect' },
},
// Text Search operation fields
{
id: 'column',
@@ -876,6 +885,7 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
'supabase_text_search',
'supabase_vector_search',
'supabase_rpc',
'supabase_introspect',
'supabase_storage_upload',
'supabase_storage_download',
'supabase_storage_list',
@@ -911,6 +921,8 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
return 'supabase_vector_search'
case 'rpc':
return 'supabase_rpc'
case 'introspect':
return 'supabase_introspect'
case 'storage_upload':
return 'supabase_storage_upload'
case 'storage_download':
@@ -1085,7 +1097,6 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
operation: { type: 'string', description: 'Operation to perform' },
projectId: { type: 'string', description: 'Supabase project identifier' },
table: { type: 'string', description: 'Database table name' },
schema: { type: 'string', description: 'Database schema (default: public)' },
select: { type: 'string', description: 'Columns to return (comma-separated, defaults to *)' },
apiKey: { type: 'string', description: 'Service role secret key' },
// Data for insert/update operations
@@ -1113,6 +1124,8 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
language: { type: 'string', description: 'Language for text search' },
// Count operation inputs
countType: { type: 'string', description: 'Count type: exact, planned, or estimated' },
// Introspect operation inputs
schema: { type: 'string', description: 'Database schema to introspect (e.g., public)' },
// Storage operation inputs
bucket: { type: 'string', description: 'Storage bucket name' },
path: { type: 'string', description: 'File or folder path in storage' },
@@ -1158,5 +1171,13 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
type: 'string',
description: 'Temporary signed URL for storage file',
},
tables: {
type: 'json',
description: 'Array of table schemas for introspect operation',
},
schemas: {
type: 'json',
description: 'Array of schema names found in the database',
},
},
}

apps/sim/components/emcn/icons/animate/download.module.css (new file, 22 lines)
@@ -0,0 +1,22 @@
/**
* Download icon animation
* Subtle continuous animation for import/download states
* Arrow gently pulses down to suggest downloading motion
*/

@keyframes arrow-pulse {
0%,
100% {
transform: translateY(0);
opacity: 1;
}
50% {
transform: translateY(1.5px);
opacity: 0.7;
}
}

.animated-download-svg {
animation: arrow-pulse 1.5s ease-in-out infinite;
transform-origin: center center;
}
apps/sim/components/emcn/icons/download.tsx (new file, 42 lines)
@@ -0,0 +1,42 @@
import type { SVGProps } from 'react'
import styles from '@/components/emcn/icons/animate/download.module.css'

export interface DownloadProps extends SVGProps<SVGSVGElement> {
/**
* Enable animation on the download icon
* @default false
*/
animate?: boolean
}

/**
* Download icon component with optional CSS-based animation
* Based on lucide arrow-down icon structure.
* When animate is false, this is a lightweight static icon with no animation overhead.
* When animate is true, CSS module animations are applied for a subtle pulsing effect.
* @param props - SVG properties including className, animate, etc.
*/
export function Download({ animate = false, className, ...props }: DownloadProps) {
const svgClassName = animate
? `${styles['animated-download-svg']} ${className || ''}`.trim()
: className

return (
<svg
xmlns='http://www.w3.org/2000/svg'
width='24'
height='24'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth='2'
strokeLinecap='round'
strokeLinejoin='round'
className={svgClassName}
{...props}
>
<path d='M12 5v14' />
<path d='m19 12-7 7-7-7' />
</svg>
)
}
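For reference, a usage sketch of the component defined above; the barrel import path matches the `export { Download }` line added below, and the button wrapper is illustrative only.

```tsx
import { Download } from '@/components/emcn/icons'

// Static icon by default; enable the CSS pulse only while an import/download is in flight.
export function ExportButton({ busy }: { busy: boolean }) {
  return (
    <button type='button' disabled={busy}>
      <Download animate={busy} />
      Export
    </button>
  )
}
```
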
@@ -5,6 +5,7 @@ export { ChevronDown } from './chevron-down'
export { Connections } from './connections'
export { Copy } from './copy'
export { DocumentAttachment } from './document-attachment'
export { Download } from './download'
export { Duplicate } from './duplicate'
export { Eye } from './eye'
export { FolderCode } from './folder-code'

@@ -99,6 +99,7 @@ export interface SendMessageRequest {
workflowId?: string
executionId?: string
}>
commands?: string[]
}

/**

@@ -10,6 +10,7 @@ import {
GetBlockConfigInput,
GetBlockConfigResult,
} from '@/lib/copilot/tools/shared/schemas'
import { getBlock } from '@/blocks/registry'

interface GetBlockConfigArgs {
blockType: string
@@ -39,7 +40,9 @@ export class GetBlockConfigClientTool extends BaseClientTool {
},
getDynamicText: (params, state) => {
if (params?.blockType && typeof params.blockType === 'string') {
const blockName = params.blockType.replace(/_/g, ' ')
// Look up the block config to get the human-readable name
const blockConfig = getBlock(params.blockType)
const blockName = (blockConfig?.name ?? params.blockType.replace(/_/g, ' ')).toLowerCase()
const opSuffix = params.operation ? ` (${params.operation})` : ''

switch (state) {

@@ -10,6 +10,7 @@ import {
GetBlockOptionsInput,
GetBlockOptionsResult,
} from '@/lib/copilot/tools/shared/schemas'
import { getBlock } from '@/blocks/registry'

interface GetBlockOptionsArgs {
blockId: string
@@ -37,7 +38,9 @@ export class GetBlockOptionsClientTool extends BaseClientTool {
},
getDynamicText: (params, state) => {
if (params?.blockId && typeof params.blockId === 'string') {
const blockName = params.blockId.replace(/_/g, ' ')
// Look up the block config to get the human-readable name
const blockConfig = getBlock(params.blockId)
const blockName = (blockConfig?.name ?? params.blockId.replace(/_/g, ' ')).toLowerCase()

switch (state) {
case ClientToolCallState.success:

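The two changes above swap the raw `replace(/_/g, ' ')` fallback for a registry lookup. A worked example using the RDS block from this same changeset (type `rds`, display name `Amazon RDS`); the lookup result is an assumption insofar as it depends on the registry containing that block:

```ts
import { getBlock } from '@/blocks/registry'

const blockConfig = getBlock('rds')
const blockName = (blockConfig?.name ?? 'rds'.replace(/_/g, ' ')).toLowerCase()
// Before this change the label text used 'rds'; with the lookup it becomes 'amazon rds'.
```
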
@@ -18,6 +18,7 @@ import './other/make-api-request'
import './other/plan'
import './other/research'
import './other/sleep'
import './other/superagent'
import './other/test'
import './other/tour'
import './other/workflow'

apps/sim/lib/copilot/tools/client/other/crawl-website.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { Globe, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
BaseClientTool,
type BaseClientToolMetadata,
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class CrawlWebsiteClientTool extends BaseClientTool {
static readonly id = 'crawl_website'

constructor(toolCallId: string) {
super(toolCallId, CrawlWebsiteClientTool.id, CrawlWebsiteClientTool.metadata)
}

static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Crawling website', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Crawling website', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Crawling website', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Crawled website', icon: Globe },
[ClientToolCallState.error]: { text: 'Failed to crawl website', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted crawling website', icon: MinusCircle },
[ClientToolCallState.rejected]: { text: 'Skipped crawling website', icon: MinusCircle },
},
interrupt: undefined,
getDynamicText: (params, state) => {
if (params?.url && typeof params.url === 'string') {
const url = params.url
const truncated = url.length > 50 ? `${url.slice(0, 50)}...` : url

switch (state) {
case ClientToolCallState.success:
return `Crawled ${truncated}`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Crawling ${truncated}`
case ClientToolCallState.error:
return `Failed to crawl ${truncated}`
case ClientToolCallState.aborted:
return `Aborted crawling ${truncated}`
case ClientToolCallState.rejected:
return `Skipped crawling ${truncated}`
}
}
return undefined
},
}

async execute(): Promise<void> {
return
}
}
apps/sim/lib/copilot/tools/client/other/get-page-contents.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
import { FileText, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
BaseClientTool,
type BaseClientToolMetadata,
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class GetPageContentsClientTool extends BaseClientTool {
static readonly id = 'get_page_contents'

constructor(toolCallId: string) {
super(toolCallId, GetPageContentsClientTool.id, GetPageContentsClientTool.metadata)
}

static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Getting page contents', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Getting page contents', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Getting page contents', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Retrieved page contents', icon: FileText },
[ClientToolCallState.error]: { text: 'Failed to get page contents', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted getting page contents', icon: MinusCircle },
[ClientToolCallState.rejected]: { text: 'Skipped getting page contents', icon: MinusCircle },
},
interrupt: undefined,
getDynamicText: (params, state) => {
if (params?.urls && Array.isArray(params.urls) && params.urls.length > 0) {
const firstUrl = String(params.urls[0])
const truncated = firstUrl.length > 40 ? `${firstUrl.slice(0, 40)}...` : firstUrl
const count = params.urls.length

switch (state) {
case ClientToolCallState.success:
return count > 1 ? `Retrieved ${count} pages` : `Retrieved ${truncated}`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return count > 1 ? `Getting ${count} pages` : `Getting ${truncated}`
case ClientToolCallState.error:
return count > 1 ? `Failed to get ${count} pages` : `Failed to get ${truncated}`
case ClientToolCallState.aborted:
return count > 1 ? `Aborted getting ${count} pages` : `Aborted getting ${truncated}`
case ClientToolCallState.rejected:
return count > 1 ? `Skipped getting ${count} pages` : `Skipped getting ${truncated}`
}
}
return undefined
},
}

async execute(): Promise<void> {
return
}
}
apps/sim/lib/copilot/tools/client/other/scrape-page.ts (new file, 53 lines)
@@ -0,0 +1,53 @@
import { Globe, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
BaseClientTool,
type BaseClientToolMetadata,
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class ScrapePageClientTool extends BaseClientTool {
static readonly id = 'scrape_page'

constructor(toolCallId: string) {
super(toolCallId, ScrapePageClientTool.id, ScrapePageClientTool.metadata)
}

static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Scraping page', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Scraping page', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Scraping page', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Scraped page', icon: Globe },
[ClientToolCallState.error]: { text: 'Failed to scrape page', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted scraping page', icon: MinusCircle },
[ClientToolCallState.rejected]: { text: 'Skipped scraping page', icon: MinusCircle },
},
interrupt: undefined,
getDynamicText: (params, state) => {
if (params?.url && typeof params.url === 'string') {
const url = params.url
const truncated = url.length > 50 ? `${url.slice(0, 50)}...` : url

switch (state) {
case ClientToolCallState.success:
return `Scraped ${truncated}`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Scraping ${truncated}`
case ClientToolCallState.error:
return `Failed to scrape ${truncated}`
case ClientToolCallState.aborted:
return `Aborted scraping ${truncated}`
case ClientToolCallState.rejected:
return `Skipped scraping ${truncated}`
}
}
return undefined
},
}

async execute(): Promise<void> {
return
}
}
@@ -0,0 +1,50 @@
import { BookOpen, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
BaseClientTool,
type BaseClientToolMetadata,
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class SearchLibraryDocsClientTool extends BaseClientTool {
static readonly id = 'search_library_docs'

constructor(toolCallId: string) {
super(toolCallId, SearchLibraryDocsClientTool.id, SearchLibraryDocsClientTool.metadata)
}

static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Reading docs', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Reading docs', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Reading docs', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Read docs', icon: BookOpen },
[ClientToolCallState.error]: { text: 'Failed to read docs', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Aborted reading docs', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Skipped reading docs', icon: MinusCircle },
},
getDynamicText: (params, state) => {
const libraryName = params?.library_name
if (libraryName && typeof libraryName === 'string') {
switch (state) {
case ClientToolCallState.success:
return `Read ${libraryName} docs`
case ClientToolCallState.executing:
case ClientToolCallState.generating:
case ClientToolCallState.pending:
return `Reading ${libraryName} docs`
case ClientToolCallState.error:
return `Failed to read ${libraryName} docs`
case ClientToolCallState.aborted:
return `Aborted reading ${libraryName} docs`
case ClientToolCallState.rejected:
return `Skipped reading ${libraryName} docs`
}
}
return undefined
},
}

async execute(): Promise<void> {
return
}
}
@@ -1,19 +1,9 @@
import { createLogger } from '@sim/logger'
import { Globe, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
BaseClientTool,
type BaseClientToolMetadata,
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

interface SearchOnlineArgs {
query: string
num?: number
type?: string
gl?: string
hl?: string
}

export class SearchOnlineClientTool extends BaseClientTool {
static readonly id = 'search_online'
@@ -32,6 +22,7 @@ export class SearchOnlineClientTool extends BaseClientTool {
[ClientToolCallState.rejected]: { text: 'Skipped online search', icon: MinusCircle },
[ClientToolCallState.aborted]: { text: 'Aborted online search', icon: XCircle },
},
interrupt: undefined,
getDynamicText: (params, state) => {
if (params?.query && typeof params.query === 'string') {
const query = params.query
@@ -56,28 +47,7 @@ export class SearchOnlineClientTool extends BaseClientTool {
},
}

async execute(args?: SearchOnlineArgs): Promise<void> {
const logger = createLogger('SearchOnlineClientTool')
try {
this.setState(ClientToolCallState.executing)
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ toolName: 'search_online', payload: args || {} }),
})
if (!res.ok) {
const txt = await res.text().catch(() => '')
throw new Error(txt || `Server error (${res.status})`)
}
const json = await res.json()
const parsed = ExecuteResponseSuccessSchema.parse(json)
this.setState(ClientToolCallState.success)
await this.markToolComplete(200, 'Online search complete', parsed.result)
this.setState(ClientToolCallState.success)
} catch (e: any) {
logger.error('execute failed', { message: e?.message })
this.setState(ClientToolCallState.error)
await this.markToolComplete(500, e?.message || 'Search failed')
}
async execute(): Promise<void> {
return
}
}

apps/sim/lib/copilot/tools/client/other/superagent.ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import { Loader2, Sparkles, XCircle } from 'lucide-react'
import {
BaseClientTool,
type BaseClientToolMetadata,
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface SuperagentArgs {
instruction: string
}

/**
* Superagent tool that spawns a powerful subagent for complex tasks.
* This tool auto-executes and the actual work is done by the superagent.
* The subagent's output is streamed as nested content under this tool call.
*/
export class SuperagentClientTool extends BaseClientTool {
static readonly id = 'superagent'

constructor(toolCallId: string) {
super(toolCallId, SuperagentClientTool.id, SuperagentClientTool.metadata)
}

static readonly metadata: BaseClientToolMetadata = {
displayNames: {
[ClientToolCallState.generating]: { text: 'Superagent working', icon: Loader2 },
[ClientToolCallState.pending]: { text: 'Superagent working', icon: Loader2 },
[ClientToolCallState.executing]: { text: 'Superagent working', icon: Loader2 },
[ClientToolCallState.success]: { text: 'Superagent completed', icon: Sparkles },
[ClientToolCallState.error]: { text: 'Superagent failed', icon: XCircle },
[ClientToolCallState.rejected]: { text: 'Superagent skipped', icon: XCircle },
[ClientToolCallState.aborted]: { text: 'Superagent aborted', icon: XCircle },
},
uiConfig: {
subagent: {
streamingLabel: 'Superagent working',
completedLabel: 'Superagent completed',
shouldCollapse: true,
outputArtifacts: [],
},
},
}

/**
* Execute the superagent tool.
* This just marks the tool as executing - the actual work is done server-side
* by the superagent, and its output is streamed as subagent events.
*/
async execute(_args?: SuperagentArgs): Promise<void> {
this.setState(ClientToolCallState.executing)
}
}

// Register UI config at module load
registerToolUIConfig(SuperagentClientTool.id, SuperagentClientTool.metadata.uiConfig!)
@@ -1,7 +1,7 @@
/**
* Environment utility functions for consistent environment detection across the application
*/
import { env, isFalsy, isTruthy } from './env'
import { env, getEnv, isFalsy, isTruthy } from './env'

/**
* Is the application running in production mode
@@ -21,7 +21,9 @@ export const isTest = env.NODE_ENV === 'test'
/**
* Is this the hosted version of the application
*/
export const isHosted = true
export const isHosted =
getEnv('NEXT_PUBLIC_APP_URL') === 'https://www.sim.ai' ||
getEnv('NEXT_PUBLIC_APP_URL') === 'https://www.staging.sim.ai'

/**
* Is billing enforcement enabled

@@ -27,11 +27,13 @@ import {
import { NavigateUIClientTool } from '@/lib/copilot/tools/client/navigation/navigate-ui'
import { AuthClientTool } from '@/lib/copilot/tools/client/other/auth'
import { CheckoffTodoClientTool } from '@/lib/copilot/tools/client/other/checkoff-todo'
import { CrawlWebsiteClientTool } from '@/lib/copilot/tools/client/other/crawl-website'
import { CustomToolClientTool } from '@/lib/copilot/tools/client/other/custom-tool'
import { DebugClientTool } from '@/lib/copilot/tools/client/other/debug'
import { DeployClientTool } from '@/lib/copilot/tools/client/other/deploy'
import { EditClientTool } from '@/lib/copilot/tools/client/other/edit'
import { EvaluateClientTool } from '@/lib/copilot/tools/client/other/evaluate'
import { GetPageContentsClientTool } from '@/lib/copilot/tools/client/other/get-page-contents'
import { InfoClientTool } from '@/lib/copilot/tools/client/other/info'
import { KnowledgeClientTool } from '@/lib/copilot/tools/client/other/knowledge'
import { MakeApiRequestClientTool } from '@/lib/copilot/tools/client/other/make-api-request'
@@ -40,8 +42,10 @@ import { OAuthRequestAccessClientTool } from '@/lib/copilot/tools/client/other/o
import { PlanClientTool } from '@/lib/copilot/tools/client/other/plan'
import { RememberDebugClientTool } from '@/lib/copilot/tools/client/other/remember-debug'
import { ResearchClientTool } from '@/lib/copilot/tools/client/other/research'
import { ScrapePageClientTool } from '@/lib/copilot/tools/client/other/scrape-page'
import { SearchDocumentationClientTool } from '@/lib/copilot/tools/client/other/search-documentation'
import { SearchErrorsClientTool } from '@/lib/copilot/tools/client/other/search-errors'
import { SearchLibraryDocsClientTool } from '@/lib/copilot/tools/client/other/search-library-docs'
import { SearchOnlineClientTool } from '@/lib/copilot/tools/client/other/search-online'
import { SearchPatternsClientTool } from '@/lib/copilot/tools/client/other/search-patterns'
import { SleepClientTool } from '@/lib/copilot/tools/client/other/sleep'
@@ -116,8 +120,12 @@ const CLIENT_TOOL_INSTANTIATORS: Record<string, (id: string) => any> = {
get_trigger_blocks: (id) => new GetTriggerBlocksClientTool(id),
search_online: (id) => new SearchOnlineClientTool(id),
search_documentation: (id) => new SearchDocumentationClientTool(id),
search_library_docs: (id) => new SearchLibraryDocsClientTool(id),
search_patterns: (id) => new SearchPatternsClientTool(id),
search_errors: (id) => new SearchErrorsClientTool(id),
scrape_page: (id) => new ScrapePageClientTool(id),
get_page_contents: (id) => new GetPageContentsClientTool(id),
crawl_website: (id) => new CrawlWebsiteClientTool(id),
remember_debug: (id) => new RememberDebugClientTool(id),
set_environment_variables: (id) => new SetEnvironmentVariablesClientTool(id),
get_credentials: (id) => new GetCredentialsClientTool(id),
@@ -174,8 +182,12 @@ export const CLASS_TOOL_METADATA: Record<string, BaseClientToolMetadata | undefi
get_trigger_blocks: (GetTriggerBlocksClientTool as any)?.metadata,
search_online: (SearchOnlineClientTool as any)?.metadata,
search_documentation: (SearchDocumentationClientTool as any)?.metadata,
search_library_docs: (SearchLibraryDocsClientTool as any)?.metadata,
search_patterns: (SearchPatternsClientTool as any)?.metadata,
search_errors: (SearchErrorsClientTool as any)?.metadata,
scrape_page: (ScrapePageClientTool as any)?.metadata,
get_page_contents: (GetPageContentsClientTool as any)?.metadata,
crawl_website: (CrawlWebsiteClientTool as any)?.metadata,
remember_debug: (RememberDebugClientTool as any)?.metadata,
set_environment_variables: (SetEnvironmentVariablesClientTool as any)?.metadata,
get_credentials: (GetCredentialsClientTool as any)?.metadata,
@@ -1851,7 +1863,7 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {

updateToolCallWithSubAgentData(context, get, set, parentToolCallId)

// Execute client tools (same logic as main tool_call handler)
// Execute client tools in parallel (non-blocking) - same pattern as main tool_call handler
try {
const def = getTool(name)
if (def) {
@@ -1860,29 +1872,33 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
? !!def.hasInterrupt(args || {})
: !!def.hasInterrupt
if (!hasInterrupt) {
// Auto-execute tools without interrupts
// Auto-execute tools without interrupts - non-blocking
const ctx = createExecutionContext({ toolCallId: id, toolName: name })
try {
await def.execute(ctx, args || {})
} catch (execErr: any) {
logger.error('[SubAgent] Tool execution failed', { id, name, error: execErr?.message })
}
}
} else {
// Fallback to class-based tools
const instance = getClientTool(id)
if (instance) {
const hasInterruptDisplays = !!instance.getInterruptDisplays?.()
if (!hasInterruptDisplays) {
try {
await instance.execute(args || {})
} catch (execErr: any) {
logger.error('[SubAgent] Class tool execution failed', {
Promise.resolve()
.then(() => def.execute(ctx, args || {}))
.catch((execErr: any) => {
logger.error('[SubAgent] Tool execution failed', {
id,
name,
error: execErr?.message,
})
}
})
}
} else {
// Fallback to class-based tools - non-blocking
const instance = getClientTool(id)
if (instance) {
const hasInterruptDisplays = !!instance.getInterruptDisplays?.()
if (!hasInterruptDisplays) {
Promise.resolve()
.then(() => instance.execute(args || {}))
.catch((execErr: any) => {
logger.error('[SubAgent] Class tool execution failed', {
id,
name,
error: execErr?.message,
})
})
}
}
}
@@ -2433,9 +2449,10 @@ export const useCopilotStore = create<CopilotStore>()(

// If already sending a message, queue this one instead
if (isSendingMessage) {
get().addToQueue(message, { fileAttachments, contexts })
get().addToQueue(message, { fileAttachments, contexts, messageId })
logger.info('[Copilot] Message queued (already sending)', {
queueLength: get().messageQueue.length + 1,
originalMessageId: messageId,
})
return
}
@@ -2511,6 +2528,13 @@ export const useCopilotStore = create<CopilotStore>()(
// Call copilot API
const apiMode: 'ask' | 'agent' | 'plan' =
mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent'

// Extract slash commands from contexts (lowercase) and filter them out from contexts
const commands = contexts
?.filter((c) => c.kind === 'slash_command' && 'command' in c)
.map((c) => (c as any).command.toLowerCase()) as string[] | undefined
const filteredContexts = contexts?.filter((c) => c.kind !== 'slash_command')

const result = await sendStreamingMessage({
message: messageToSend,
userMessageId: userMessage.id,
@@ -2522,7 +2546,8 @@ export const useCopilotStore = create<CopilotStore>()(
createNewChat: !currentChat,
stream,
fileAttachments,
contexts,
contexts: filteredContexts,
commands: commands?.length ? commands : undefined,
abortSignal: abortController.signal,
})

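A worked example of the slash-command extraction above, using the `slash_command` variant added to `ChatContext` later in this diff; the sample values are illustrative:

```ts
const contexts: ChatContext[] = [
  { kind: 'docs', label: 'Docs' },
  { kind: 'slash_command', command: 'Plan', label: '/plan' },
]
// commands         -> ['plan'] (lowercased)
// filteredContexts -> [{ kind: 'docs', label: 'Docs' }]
```
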
@@ -3161,8 +3186,12 @@ export const useCopilotStore = create<CopilotStore>()(
// Process next message in queue if any
const nextInQueue = get().messageQueue[0]
if (nextInQueue) {
// Use originalMessageId if available (from edit/resend), otherwise use queue entry id
const messageIdToUse = nextInQueue.originalMessageId || nextInQueue.id
logger.info('[Queue] Processing next queued message', {
id: nextInQueue.id,
originalMessageId: nextInQueue.originalMessageId,
messageIdToUse,
queueLength: get().messageQueue.length,
})
// Remove from queue and send
@@ -3173,7 +3202,7 @@ export const useCopilotStore = create<CopilotStore>()(
stream: true,
fileAttachments: nextInQueue.fileAttachments,
contexts: nextInQueue.contexts,
messageId: nextInQueue.id,
messageId: messageIdToUse,
})
}, 100)
}
@@ -3615,10 +3644,12 @@ export const useCopilotStore = create<CopilotStore>()(
fileAttachments: options?.fileAttachments,
contexts: options?.contexts,
queuedAt: Date.now(),
originalMessageId: options?.messageId,
}
set({ messageQueue: [...get().messageQueue, queuedMessage] })
logger.info('[Queue] Message added to queue', {
id: queuedMessage.id,
originalMessageId: options?.messageId,
queueLength: get().messageQueue.length,
})
},
@@ -3659,12 +3690,15 @@ export const useCopilotStore = create<CopilotStore>()(
await new Promise((resolve) => setTimeout(resolve, 50))
}

// Use originalMessageId if available (from edit/resend), otherwise use queue entry id
const messageIdToUse = message.originalMessageId || message.id

// Send the message
await get().sendMessage(message.content, {
stream: true,
fileAttachments: message.fileAttachments,
contexts: message.contexts,
messageId: message.id,
messageId: messageIdToUse,
})
},

@@ -70,6 +70,8 @@ export interface QueuedMessage {
fileAttachments?: MessageFileAttachment[]
contexts?: ChatContext[]
queuedAt: number
/** Original messageId to use when processing (for edit/resend flows) */
originalMessageId?: string
}

// Contexts attached to a user message
@@ -83,6 +85,7 @@ export type ChatContext =
| { kind: 'knowledge'; knowledgeId?: string; label: string }
| { kind: 'templates'; templateId?: string; label: string }
| { kind: 'docs'; label: string }
| { kind: 'slash_command'; command: string; label: string }

import type { CopilotChat as ApiCopilotChat } from '@/lib/copilot/api'

@@ -249,6 +252,8 @@ export interface CopilotActions {
options?: {
fileAttachments?: MessageFileAttachment[]
contexts?: ChatContext[]
/** Original messageId to preserve (for edit/resend flows) */
messageId?: string
}
) => void
removeFromQueue: (id: string) => void

@@ -97,11 +97,18 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
return acc
}, {})

set((state) => ({
workflows: mapped,
error: null,
hydration:
state.hydration.phase === 'state-loading'
set((state) => {
// Preserve hydration if workflow is loading or already ready and still exists
const shouldPreserveHydration =
state.hydration.phase === 'state-loading' ||
(state.hydration.phase === 'ready' &&
state.hydration.workflowId &&
mapped[state.hydration.workflowId])

return {
workflows: mapped,
error: null,
hydration: shouldPreserveHydration
? state.hydration
: {
phase: 'metadata-ready',
@@ -110,7 +117,8 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
requestId: null,
error: null,
},
}))
}
})
},

failMetadataLoad: (workspaceId: string | null, errorMessage: string) => {

@@ -1,5 +1,6 @@
import { deleteTool } from './delete'
import { getTool } from './get'
import { introspectTool } from './introspect'
import { putTool } from './put'
import { queryTool } from './query'
import { scanTool } from './scan'
@@ -7,6 +8,7 @@ import { updateTool } from './update'

export const dynamodbDeleteTool = deleteTool
export const dynamodbGetTool = getTool
export const dynamodbIntrospectTool = introspectTool
export const dynamodbPutTool = putTool
export const dynamodbQueryTool = queryTool
export const dynamodbScanTool = scanTool

apps/sim/tools/dynamodb/introspect.ts (new file, 78 lines)
@@ -0,0 +1,78 @@
import type { DynamoDBIntrospectParams, DynamoDBIntrospectResponse } from '@/tools/dynamodb/types'
import type { ToolConfig } from '@/tools/types'

export const introspectTool: ToolConfig<DynamoDBIntrospectParams, DynamoDBIntrospectResponse> = {
id: 'dynamodb_introspect',
name: 'DynamoDB Introspect',
description:
'Introspect DynamoDB to list tables or get detailed schema information for a specific table',
version: '1.0',

params: {
region: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'AWS region (e.g., us-east-1)',
},
accessKeyId: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'AWS access key ID',
},
secretAccessKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'AWS secret access key',
},
tableName: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Optional table name to get detailed schema. If not provided, lists all tables.',
},
},

request: {
url: '/api/tools/dynamodb/introspect',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
region: params.region,
accessKeyId: params.accessKeyId,
secretAccessKey: params.secretAccessKey,
...(params.tableName && { tableName: params.tableName }),
}),
},

transformResponse: async (response: Response) => {
const data = await response.json()

if (!response.ok) {
throw new Error(data.error || 'DynamoDB introspection failed')
}

return {
success: true,
output: {
message: data.message || 'Introspection completed successfully',
tables: data.tables || [],
tableDetails: data.tableDetails,
},
error: undefined,
}
},

outputs: {
message: { type: 'string', description: 'Operation status message' },
tables: { type: 'array', description: 'List of table names in the region' },
tableDetails: {
type: 'object',
description: 'Detailed schema information for a specific table',
},
},
}
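A sketch of the request body the `body` builder above produces; the credential values are placeholders, and `tableName` is simply omitted to list all tables:

```ts
// POST /api/tools/dynamodb/introspect
const body = {
  region: 'us-east-1',
  accessKeyId: 'AKIA...', // placeholder
  secretAccessKey: '...', // placeholder
  tableName: 'users', // omit to list every table in the region instead
}
```
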
@@ -68,3 +68,45 @@ export interface DynamoDBScanResponse extends DynamoDBBaseResponse {}
export interface DynamoDBUpdateResponse extends DynamoDBBaseResponse {}
export interface DynamoDBDeleteResponse extends DynamoDBBaseResponse {}
export interface DynamoDBResponse extends DynamoDBBaseResponse {}

export interface DynamoDBIntrospectParams extends DynamoDBConnectionConfig {
tableName?: string
}

export interface DynamoDBKeySchema {
attributeName: string
keyType: 'HASH' | 'RANGE'
}

export interface DynamoDBAttributeDefinition {
attributeName: string
attributeType: 'S' | 'N' | 'B'
}

export interface DynamoDBGSI {
indexName: string
keySchema: DynamoDBKeySchema[]
projectionType: string
indexStatus: string
}

export interface DynamoDBTableSchema {
tableName: string
tableStatus: string
keySchema: DynamoDBKeySchema[]
attributeDefinitions: DynamoDBAttributeDefinition[]
globalSecondaryIndexes: DynamoDBGSI[]
localSecondaryIndexes: DynamoDBGSI[]
itemCount: number
tableSizeBytes: number
billingMode: string
}

export interface DynamoDBIntrospectResponse extends ToolResponse {
output: {
message: string
tables: string[]
tableDetails?: DynamoDBTableSchema
}
error?: string
}

@@ -9,6 +9,7 @@ import { deleteIndexTool } from '@/tools/elasticsearch/delete_index'
import { getDocumentTool } from '@/tools/elasticsearch/get_document'
import { getIndexTool } from '@/tools/elasticsearch/get_index'
import { indexDocumentTool } from '@/tools/elasticsearch/index_document'
import { listIndicesTool } from '@/tools/elasticsearch/list_indices'
import { searchTool } from '@/tools/elasticsearch/search'
import { updateDocumentTool } from '@/tools/elasticsearch/update_document'

@@ -23,5 +24,6 @@ export const elasticsearchCountTool = countTool
export const elasticsearchCreateIndexTool = createIndexTool
export const elasticsearchDeleteIndexTool = deleteIndexTool
export const elasticsearchGetIndexTool = getIndexTool
export const elasticsearchListIndicesTool = listIndicesTool
export const elasticsearchClusterHealthTool = clusterHealthTool
export const elasticsearchClusterStatsTool = clusterStatsTool

apps/sim/tools/elasticsearch/list_indices.ts (new file, 171 lines)
@@ -0,0 +1,171 @@
import type {
ElasticsearchListIndicesParams,
ElasticsearchListIndicesResponse,
} from '@/tools/elasticsearch/types'
import type { ToolConfig } from '@/tools/types'

/**
* Builds the base URL for Elasticsearch connections.
* Supports both self-hosted and Elastic Cloud deployments.
*/
function buildBaseUrl(params: ElasticsearchListIndicesParams): string {
if (params.deploymentType === 'cloud' && params.cloudId) {
const parts = params.cloudId.split(':')
if (parts.length >= 2) {
try {
const decoded = Buffer.from(parts[1], 'base64').toString('utf-8')
const [esHost] = decoded.split('$')
if (esHost) {
return `https://${parts[0]}.${esHost}`
}
} catch {
// Fallback
}
}
throw new Error('Invalid Cloud ID format')
}

if (!params.host) {
throw new Error('Host is required for self-hosted deployments')
}

return params.host.replace(/\/$/, '')
}

/**
* Builds authentication headers for Elasticsearch requests.
* Supports API key and basic authentication methods.
*/
function buildAuthHeaders(params: ElasticsearchListIndicesParams): Record<string, string> {
const headers: Record<string, string> = {
'Content-Type': 'application/json',
}

if (params.authMethod === 'api_key' && params.apiKey) {
headers.Authorization = `ApiKey ${params.apiKey}`
} else if (params.authMethod === 'basic_auth' && params.username && params.password) {
const credentials = Buffer.from(`${params.username}:${params.password}`).toString('base64')
headers.Authorization = `Basic ${credentials}`
} else {
throw new Error('Invalid authentication configuration')
}

return headers
}

export const listIndicesTool: ToolConfig<
ElasticsearchListIndicesParams,
ElasticsearchListIndicesResponse
> = {
id: 'elasticsearch_list_indices',
name: 'Elasticsearch List Indices',
description:
'List all indices in the Elasticsearch cluster with their health, status, and statistics.',
version: '1.0.0',

params: {
deploymentType: {
type: 'string',
required: true,
description: 'Deployment type: self_hosted or cloud',
},
host: {
type: 'string',
required: false,
description: 'Elasticsearch host URL (for self-hosted)',
},
cloudId: {
type: 'string',
required: false,
description: 'Elastic Cloud ID (for cloud deployments)',
},
authMethod: {
type: 'string',
required: true,
description: 'Authentication method: api_key or basic_auth',
},
apiKey: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Elasticsearch API key',
},
username: {
type: 'string',
required: false,
description: 'Username for basic auth',
},
password: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Password for basic auth',
},
},

request: {
url: (params) => {
const baseUrl = buildBaseUrl(params)
return `${baseUrl}/_cat/indices?format=json`
},
method: 'GET',
headers: (params) => buildAuthHeaders(params),
},

transformResponse: async (response: Response) => {
if (!response.ok) {
const errorText = await response.text()
let errorMessage = `Elasticsearch error: ${response.status}`
try {
const errorJson = JSON.parse(errorText)
errorMessage = errorJson.error?.reason || errorJson.error?.type || errorMessage
} catch {
errorMessage = errorText || errorMessage
}
return {
success: false,
output: {
message: errorMessage,
indices: [],
},
error: errorMessage,
}
}

const data = await response.json()

const indices = data
.filter((item: Record<string, unknown>) => {
const indexName = item.index as string
return !indexName.startsWith('.')
})
.map((item: Record<string, unknown>) => ({
index: item.index as string,
health: item.health as string,
status: item.status as string,
docsCount: Number.parseInt(item['docs.count'] as string, 10) || 0,
storeSize: (item['store.size'] as string) || '0b',
primaryShards: Number.parseInt(item.pri as string, 10) || 0,
replicaShards: Number.parseInt(item.rep as string, 10) || 0,
}))

return {
success: true,
output: {
message: `Found ${indices.length} indices`,
indices,
},
}
},

outputs: {
message: {
type: 'string',
description: 'Summary message about the indices',
},
indices: {
type: 'json',
description: 'Array of index information objects',
},
},
}
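Tracing `buildBaseUrl` and the request config above for a cloud deployment. The decoded host and deployment name are placeholders; only the code path shown above is being traced:

```ts
// cloudId = 'my-deployment:' + base64('us-east-1.aws.example.com$abc123')
// parts[0]              -> 'my-deployment'
// decoded.split('$')[0] -> 'us-east-1.aws.example.com'
// baseUrl               -> 'https://my-deployment.us-east-1.aws.example.com'
// Request: GET `${baseUrl}/_cat/indices?format=json` with an `ApiKey ...` or `Basic ...` Authorization header.
```
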
@@ -110,6 +110,18 @@ export interface ElasticsearchClusterHealthParams extends ElasticsearchBaseParam

export interface ElasticsearchClusterStatsParams extends ElasticsearchBaseParams {}

export interface ElasticsearchListIndicesParams extends ElasticsearchBaseParams {}

export interface ElasticsearchIndexInfo {
index: string
health: string
status: string
docsCount: number
storeSize: string
primaryShards: number
replicaShards: number
}

// Response types
export interface ElasticsearchDocumentResponse extends ToolResponse {
output: {
@@ -262,6 +274,14 @@ export interface ElasticsearchIndexStatsResponse extends ToolResponse {
}
}

export interface ElasticsearchListIndicesResponse extends ToolResponse {
output: {
message: string
indices: ElasticsearchIndexInfo[]
}
error?: string
}

// Union type for all Elasticsearch responses
export type ElasticsearchResponse =
| ElasticsearchDocumentResponse
@@ -276,3 +296,4 @@ export type ElasticsearchResponse =
| ElasticsearchClusterStatsResponse
| ElasticsearchRefreshResponse
| ElasticsearchIndexStatsResponse
| ElasticsearchListIndicesResponse

@@ -1,12 +1,14 @@
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { insertTool } from './insert'
import { introspectTool } from './introspect'
import { queryTool } from './query'
import { updateTool } from './update'

export const mongodbDeleteTool = deleteTool
export const mongodbExecuteTool = executeTool
export const mongodbInsertTool = insertTool
export const mongodbIntrospectTool = introspectTool
export const mongodbQueryTool = queryTool
export const mongodbUpdateTool = updateTool
apps/sim/tools/mongodb/introspect.ts (new file, 98 lines)
@@ -0,0 +1,98 @@
import type { MongoDBIntrospectParams, MongoDBIntrospectResponse } from '@/tools/mongodb/types'
import type { ToolConfig } from '@/tools/types'

export const introspectTool: ToolConfig<MongoDBIntrospectParams, MongoDBIntrospectResponse> = {
  id: 'mongodb_introspect',
  name: 'MongoDB Introspect',
  description: 'Introspect MongoDB database to list databases, collections, and indexes',
  version: '1.0',

  params: {
    host: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'MongoDB server hostname or IP address',
    },
    port: {
      type: 'number',
      required: true,
      visibility: 'user-only',
      description: 'MongoDB server port (default: 27017)',
    },
    database: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Database name to introspect (optional - if not provided, lists all databases)',
    },
    username: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'MongoDB username',
    },
    password: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'MongoDB password',
    },
    authSource: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Authentication database',
    },
    ssl: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'SSL connection mode (disabled, required, preferred)',
    },
  },

  request: {
    url: '/api/tools/mongodb/introspect',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      host: params.host,
      port: Number(params.port),
      database: params.database,
      username: params.username,
      password: params.password,
      authSource: params.authSource,
      ssl: params.ssl || 'preferred',
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    if (!response.ok) {
      throw new Error(data.error || 'MongoDB introspect failed')
    }

    return {
      success: true,
      output: {
        message: data.message || 'Introspection completed successfully',
        databases: data.databases || [],
        collections: data.collections || [],
      },
      error: undefined,
    }
  },

  outputs: {
    message: { type: 'string', description: 'Operation status message' },
    databases: { type: 'array', description: 'Array of database names' },
    collections: {
      type: 'array',
      description: 'Array of collection info with name, type, document count, and indexes',
    },
  },
}
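A sketch of what this config would POST for a hypothetical connection (placeholder credentials), assuming the body is JSON-serialized, which drops undefined optional fields; since `ssl` is unset, the `params.ssl || 'preferred'` fallback applies:

// Hypothetical params; only the shape comes from the config above.
const params: MongoDBIntrospectParams = {
  host: 'mongo.internal.example',
  port: 27017,
  username: 'reader',
  password: 'secret',
}
// POST /api/tools/mongodb/introspect
// { "host": "mongo.internal.example", "port": 27017, "username": "reader",
//   "password": "secret", "ssl": "preferred" }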
@@ -41,6 +41,28 @@ export interface MongoDBExecuteParams extends MongoDBConnectionConfig {
  pipeline: string
}

export interface MongoDBIntrospectParams {
  host: string
  port: number
  database?: string
  username?: string
  password?: string
  authSource?: string
  ssl?: 'disabled' | 'required' | 'preferred'
}

export interface MongoDBCollectionInfo {
  name: string
  type: string
  documentCount: number
  indexes: Array<{
    name: string
    key: Record<string, number>
    unique: boolean
    sparse?: boolean
  }>
}

export interface MongoDBBaseResponse extends ToolResponse {
  output: {
    message: string
@@ -61,3 +83,12 @@ export interface MongoDBUpdateResponse extends MongoDBBaseResponse {}
export interface MongoDBDeleteResponse extends MongoDBBaseResponse {}
export interface MongoDBExecuteResponse extends MongoDBBaseResponse {}
export interface MongoDBResponse extends MongoDBBaseResponse {}

export interface MongoDBIntrospectResponse extends ToolResponse {
  output: {
    message: string
    databases: string[]
    collections: MongoDBCollectionInfo[]
  }
  error?: string
}
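As a usage sketch against these types, a hypothetical helper (not part of the diff) that flags collections with no unique index, ignoring MongoDB's implicitly unique default `_id_` index:

// Hypothetical helper built only on MongoDBCollectionInfo above.
function collectionsWithoutUniqueIndex(collections: MongoDBCollectionInfo[]): string[] {
  return collections
    .filter((c) => !c.indexes.some((ix) => ix.unique && ix.name !== '_id_'))
    .map((c) => c.name)
}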
@@ -1,12 +1,14 @@
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { insertTool } from './insert'
import { introspectTool } from './introspect'
import { queryTool } from './query'
import { updateTool } from './update'

export const mysqlDeleteTool = deleteTool
export const mysqlExecuteTool = executeTool
export const mysqlInsertTool = insertTool
export const mysqlIntrospectTool = introspectTool
export const mysqlQueryTool = queryTool
export const mysqlUpdateTool = updateTool
apps/sim/tools/mysql/introspect.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import type { MySQLIntrospectParams, MySQLIntrospectResponse } from '@/tools/mysql/types'
import type { ToolConfig } from '@/tools/types'

export const introspectTool: ToolConfig<MySQLIntrospectParams, MySQLIntrospectResponse> = {
  id: 'mysql_introspect',
  name: 'MySQL Introspect',
  description:
    'Introspect MySQL database schema to retrieve table structures, columns, and relationships',
  version: '1.0',

  params: {
    host: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'MySQL server hostname or IP address',
    },
    port: {
      type: 'number',
      required: true,
      visibility: 'user-only',
      description: 'MySQL server port (default: 3306)',
    },
    database: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Database name to connect to',
    },
    username: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Database username',
    },
    password: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Database password',
    },
    ssl: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'SSL connection mode (disabled, required, preferred)',
    },
  },

  request: {
    url: '/api/tools/mysql/introspect',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      host: params.host,
      port: Number(params.port),
      database: params.database,
      username: params.username,
      password: params.password,
      ssl: params.ssl || 'required',
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    if (!response.ok) {
      throw new Error(data.error || 'MySQL introspection failed')
    }

    return {
      success: true,
      output: {
        message: data.message || 'Schema introspection completed successfully',
        tables: data.tables || [],
        databases: data.databases || [],
      },
      error: undefined,
    }
  },

  outputs: {
    message: { type: 'string', description: 'Operation status message' },
    tables: {
      type: 'array',
      description: 'Array of table schemas with columns, keys, and indexes',
    },
    databases: { type: 'array', description: 'List of available databases on the server' },
  },
}
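Unlike the Elasticsearch list-indices tool earlier in this diff, which returns `success: false` on errors, the database introspect tools signal failure by throwing. A sketch of that path, assuming `transformResponse` can be invoked directly off the config as defined above (`Response` is a global in Node 18+ and browsers):

// Synthetic 403 reply to exercise the throw in transformResponse.
const failed = new Response(JSON.stringify({ error: 'Access denied for user' }), { status: 403 })
try {
  await introspectTool.transformResponse(failed)
} catch (e) {
  console.error((e as Error).message) // "Access denied for user"
}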
@@ -48,3 +48,30 @@ export interface MySQLUpdateResponse extends MySQLBaseResponse {}
export interface MySQLDeleteResponse extends MySQLBaseResponse {}
export interface MySQLExecuteResponse extends MySQLBaseResponse {}
export interface MySQLResponse extends MySQLBaseResponse {}

export interface MySQLIntrospectParams extends MySQLConnectionConfig {}

export interface MySQLTableColumn {
  name: string
  type: string
  nullable: boolean
  default: string | null
  isPrimaryKey: boolean
  isForeignKey: boolean
  autoIncrement: boolean
  references?: { table: string; column: string }
}

export interface MySQLTableSchema {
  name: string
  database: string
  columns: MySQLTableColumn[]
  primaryKey: string[]
  foreignKeys: Array<{ column: string; referencesTable: string; referencesColumn: string }>
  indexes: Array<{ name: string; columns: string[]; unique: boolean }>
}

export interface MySQLIntrospectResponse extends ToolResponse {
  output: { message: string; tables: MySQLTableSchema[]; databases: string[] }
  error?: string
}
@@ -1,6 +1,7 @@
import { createTool } from './create'
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { introspectTool } from './introspect'
import { mergeTool } from './merge'
import { queryTool } from './query'
import { updateTool } from './update'
@@ -8,6 +9,7 @@ import { updateTool } from './update'
export const neo4jCreateTool = createTool
export const neo4jDeleteTool = deleteTool
export const neo4jExecuteTool = executeTool
export const neo4jIntrospectTool = introspectTool
export const neo4jMergeTool = mergeTool
export const neo4jQueryTool = queryTool
export const neo4jUpdateTool = updateTool
apps/sim/tools/neo4j/introspect.ts (new file, 103 lines)
@@ -0,0 +1,103 @@
import type { Neo4jIntrospectParams, Neo4jIntrospectResponse } from '@/tools/neo4j/types'
import type { ToolConfig } from '@/tools/types'

export const introspectTool: ToolConfig<Neo4jIntrospectParams, Neo4jIntrospectResponse> = {
  id: 'neo4j_introspect',
  name: 'Neo4j Introspect',
  description:
    'Introspect a Neo4j database to discover its schema including node labels, relationship types, properties, constraints, and indexes.',
  version: '1.0',

  params: {
    host: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Neo4j server hostname or IP address',
    },
    port: {
      type: 'number',
      required: true,
      visibility: 'user-only',
      description: 'Neo4j server port (default: 7687 for Bolt protocol)',
    },
    database: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Database name to connect to',
    },
    username: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Neo4j username',
    },
    password: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Neo4j password',
    },
    encryption: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Connection encryption mode (enabled, disabled)',
    },
  },

  request: {
    url: '/api/tools/neo4j/introspect',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      host: params.host,
      port: Number(params.port),
      database: params.database,
      username: params.username,
      password: params.password,
      encryption: params.encryption || 'disabled',
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    if (!response.ok) {
      throw new Error(data.error || 'Neo4j introspection failed')
    }

    return {
      success: true,
      output: {
        message: data.message || 'Introspection completed successfully',
        labels: data.labels || [],
        relationshipTypes: data.relationshipTypes || [],
        nodeSchemas: data.nodeSchemas || [],
        relationshipSchemas: data.relationshipSchemas || [],
        constraints: data.constraints || [],
        indexes: data.indexes || [],
      },
      error: undefined,
    }
  },

  outputs: {
    message: { type: 'string', description: 'Operation status message' },
    labels: { type: 'array', description: 'Array of node labels in the database' },
    relationshipTypes: {
      type: 'array',
      description: 'Array of relationship types in the database',
    },
    nodeSchemas: { type: 'array', description: 'Array of node schemas with their properties' },
    relationshipSchemas: {
      type: 'array',
      description: 'Array of relationship schemas with their properties',
    },
    constraints: { type: 'array', description: 'Array of database constraints' },
    indexes: { type: 'array', description: 'Array of database indexes' },
  },
}
@@ -73,3 +73,28 @@ export interface Neo4jUpdateResponse extends Neo4jBaseResponse {}
export interface Neo4jDeleteResponse extends Neo4jBaseResponse {}
export interface Neo4jExecuteResponse extends Neo4jBaseResponse {}
export interface Neo4jResponse extends Neo4jBaseResponse {}

export interface Neo4jIntrospectParams extends Neo4jConnectionConfig {}

export interface Neo4jNodeSchema {
  label: string
  properties: Array<{ name: string; types: string[] }>
}

export interface Neo4jRelationshipSchema {
  type: string
  properties: Array<{ name: string; types: string[] }>
}

export interface Neo4jIntrospectResponse extends ToolResponse {
  output: {
    message: string
    labels: string[]
    relationshipTypes: string[]
    nodeSchemas: Neo4jNodeSchema[]
    relationshipSchemas: Neo4jRelationshipSchema[]
    constraints: Array<{ name: string; type: string; entityType: string; properties: string[] }>
    indexes: Array<{ name: string; type: string; entityType: string; properties: string[] }>
  }
  error?: string
}
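As a consumer sketch for the introspect output typed above (a hypothetical helper, not part of the diff), rendering a compact schema summary:

// Format node and relationship schemas as one line each.
function summarizeGraphSchema(res: Neo4jIntrospectResponse): string[] {
  const nodes = res.output.nodeSchemas.map(
    (n) => `(:${n.label}) { ${n.properties.map((p) => p.name).join(', ')} }`
  )
  const rels = res.output.relationshipSchemas.map(
    (r) => `[:${r.type}] { ${r.properties.map((p) => p.name).join(', ')} }`
  )
  return [...nodes, ...rels]
}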
@@ -163,7 +163,7 @@ export function getToolParametersConfig(
      id: 'workflowId',
      type: 'string',
      required: true,
-     visibility: 'user-or-llm',
+     visibility: 'user-only',
      description: 'The ID of the workflow to execute',
      uiComponent: {
        type: 'workflow-selector',
@@ -1,11 +1,13 @@
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { insertTool } from './insert'
import { introspectTool } from './introspect'
import { queryTool } from './query'
import { updateTool } from './update'

export const postgresDeleteTool = deleteTool
export const postgresExecuteTool = executeTool
export const postgresInsertTool = insertTool
export const postgresIntrospectTool = introspectTool
export const postgresQueryTool = queryTool
export const postgresUpdateTool = updateTool
apps/sim/tools/postgresql/introspect.ts (new file, 99 lines)
@@ -0,0 +1,99 @@
import type { PostgresIntrospectParams, PostgresIntrospectResponse } from '@/tools/postgresql/types'
import type { ToolConfig } from '@/tools/types'

export const introspectTool: ToolConfig<PostgresIntrospectParams, PostgresIntrospectResponse> = {
  id: 'postgresql_introspect',
  name: 'PostgreSQL Introspect',
  description:
    'Introspect PostgreSQL database schema to retrieve table structures, columns, and relationships',
  version: '1.0',

  params: {
    host: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'PostgreSQL server hostname or IP address',
    },
    port: {
      type: 'number',
      required: true,
      visibility: 'user-only',
      description: 'PostgreSQL server port (default: 5432)',
    },
    database: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Database name to connect to',
    },
    username: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Database username',
    },
    password: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Database password',
    },
    ssl: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'SSL connection mode (disabled, required, preferred)',
    },
    schema: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Schema to introspect (default: public)',
    },
  },

  request: {
    url: '/api/tools/postgresql/introspect',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      host: params.host,
      port: Number(params.port),
      database: params.database,
      username: params.username,
      password: params.password,
      ssl: params.ssl || 'required',
      schema: params.schema || 'public',
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    if (!response.ok) {
      throw new Error(data.error || 'PostgreSQL introspection failed')
    }

    return {
      success: true,
      output: {
        message: data.message || 'Schema introspection completed successfully',
        tables: data.tables || [],
        schemas: data.schemas || [],
      },
      error: undefined,
    }
  },

  outputs: {
    message: { type: 'string', description: 'Operation status message' },
    tables: {
      type: 'array',
      description: 'Array of table schemas with columns, keys, and indexes',
    },
    schemas: { type: 'array', description: 'List of available schemas in the database' },
  },
}
@@ -33,6 +33,10 @@ export interface PostgresExecuteParams extends PostgresConnectionConfig {
  query: string
}

export interface PostgresIntrospectParams extends PostgresConnectionConfig {
  schema?: string
}

export interface PostgresBaseResponse extends ToolResponse {
  output: {
    message: string
@@ -47,4 +51,44 @@ export interface PostgresInsertResponse extends PostgresBaseResponse {}
export interface PostgresUpdateResponse extends PostgresBaseResponse {}
export interface PostgresDeleteResponse extends PostgresBaseResponse {}
export interface PostgresExecuteResponse extends PostgresBaseResponse {}

export interface TableColumn {
  name: string
  type: string
  nullable: boolean
  default: string | null
  isPrimaryKey: boolean
  isForeignKey: boolean
  references?: {
    table: string
    column: string
  }
}

export interface TableSchema {
  name: string
  schema: string
  columns: TableColumn[]
  primaryKey: string[]
  foreignKeys: Array<{
    column: string
    referencesTable: string
    referencesColumn: string
  }>
  indexes: Array<{
    name: string
    columns: string[]
    unique: boolean
  }>
}

export interface PostgresIntrospectResponse extends ToolResponse {
  output: {
    message: string
    tables: TableSchema[]
    schemas: string[]
  }
  error?: string
}

export interface PostgresResponse extends PostgresBaseResponse {}
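A usage sketch over the introspection types above (a hypothetical helper): flattening the foreign keys in a `TableSchema[]` into readable edges such as `public.orders.user_id -> users.id`:

// Hypothetical helper; relies only on TableSchema defined above.
function foreignKeyEdges(tables: TableSchema[]): string[] {
  return tables.flatMap((t) =>
    t.foreignKeys.map(
      (fk) => `${t.schema}.${t.name}.${fk.column} -> ${fk.referencesTable}.${fk.referencesColumn}`
    )
  )
}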
@@ -1,11 +1,13 @@
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { insertTool } from './insert'
import { introspectTool } from './introspect'
import { queryTool } from './query'
import { updateTool } from './update'

export const rdsDeleteTool = deleteTool
export const rdsExecuteTool = executeTool
export const rdsInsertTool = insertTool
export const rdsIntrospectTool = introspectTool
export const rdsQueryTool = queryTool
export const rdsUpdateTool = updateTool
apps/sim/tools/rds/introspect.ts (new file, 109 lines)
@@ -0,0 +1,109 @@
import type { RdsIntrospectParams, RdsIntrospectResponse } from '@/tools/rds/types'
import type { ToolConfig } from '@/tools/types'

export const introspectTool: ToolConfig<RdsIntrospectParams, RdsIntrospectResponse> = {
  id: 'rds_introspect',
  name: 'RDS Introspect',
  description:
    'Introspect Amazon RDS Aurora database schema to retrieve table structures, columns, and relationships',
  version: '1.0',

  params: {
    region: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'AWS region (e.g., us-east-1)',
    },
    accessKeyId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'AWS access key ID',
    },
    secretAccessKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'AWS secret access key',
    },
    resourceArn: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'ARN of the Aurora DB cluster',
    },
    secretArn: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'ARN of the Secrets Manager secret containing DB credentials',
    },
    database: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Database name (optional)',
    },
    schema: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Schema to introspect (default: public for PostgreSQL, database name for MySQL)',
    },
    engine: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Database engine (aurora-postgresql or aurora-mysql). Auto-detected if not provided.',
    },
  },

  request: {
    url: '/api/tools/rds/introspect',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      resourceArn: params.resourceArn,
      secretArn: params.secretArn,
      ...(params.database && { database: params.database }),
      ...(params.schema && { schema: params.schema }),
      ...(params.engine && { engine: params.engine }),
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    if (!response.ok) {
      throw new Error(data.error || 'RDS introspection failed')
    }

    return {
      success: true,
      output: {
        message: data.message || 'Schema introspection completed successfully',
        engine: data.engine || 'unknown',
        tables: data.tables || [],
        schemas: data.schemas || [],
      },
      error: undefined,
    }
  },

  outputs: {
    message: { type: 'string', description: 'Operation status message' },
    engine: { type: 'string', description: 'Detected database engine type' },
    tables: {
      type: 'array',
      description: 'Array of table schemas with columns, keys, and indexes',
    },
    schemas: { type: 'array', description: 'List of available schemas in the database' },
  },
}
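Note the conditional spreads in the request body above: an optional field is omitted from the payload entirely when unset, because spreading a falsy value into an object literal adds no properties. A sketch with placeholder credentials and ARNs, typed with the `RdsIntrospectParams` interface from the types diff below:

// Placeholder params; database and engine are left unset.
const params: RdsIntrospectParams = {
  region: 'us-east-1',
  accessKeyId: 'AKIAEXAMPLE',
  secretAccessKey: 'example-secret',
  resourceArn: 'arn:aws:rds:us-east-1:123456789012:cluster:example',
  secretArn: 'arn:aws:secretsmanager:us-east-1:123456789012:secret:example',
  schema: 'public',
}
// `...(params.database && { database: params.database })` spreads nothing here,
// so the POSTed body carries only region, credentials, ARNs, and schema:
// no `database` or `engine` keys appear, not even with undefined values.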
@@ -33,6 +33,11 @@ export interface RdsExecuteParams extends RdsConnectionConfig {
  query: string
}

export interface RdsIntrospectParams extends RdsConnectionConfig {
  schema?: string
  engine?: 'aurora-postgresql' | 'aurora-mysql'
}

export interface RdsBaseResponse extends ToolResponse {
  output: {
    message: string
@@ -48,3 +53,43 @@ export interface RdsUpdateResponse extends RdsBaseResponse {}
export interface RdsDeleteResponse extends RdsBaseResponse {}
export interface RdsExecuteResponse extends RdsBaseResponse {}
export interface RdsResponse extends RdsBaseResponse {}

export interface RdsTableColumn {
  name: string
  type: string
  nullable: boolean
  default: string | null
  isPrimaryKey: boolean
  isForeignKey: boolean
  references?: {
    table: string
    column: string
  }
}

export interface RdsTableSchema {
  name: string
  schema: string
  columns: RdsTableColumn[]
  primaryKey: string[]
  foreignKeys: Array<{
    column: string
    referencesTable: string
    referencesColumn: string
  }>
  indexes: Array<{
    name: string
    columns: string[]
    unique: boolean
  }>
}

export interface RdsIntrospectResponse extends ToolResponse {
  output: {
    message: string
    engine: string
    tables: RdsTableSchema[]
    schemas: string[]
  }
  error?: string
}
@@ -158,6 +158,7 @@ import { duckduckgoSearchTool } from '@/tools/duckduckgo'
import {
  dynamodbDeleteTool,
  dynamodbGetTool,
  dynamodbIntrospectTool,
  dynamodbPutTool,
  dynamodbQueryTool,
  dynamodbScanTool,
@@ -174,6 +175,7 @@ import {
  elasticsearchGetDocumentTool,
  elasticsearchGetIndexTool,
  elasticsearchIndexDocumentTool,
  elasticsearchListIndicesTool,
  elasticsearchSearchTool,
  elasticsearchUpdateDocumentTool,
} from '@/tools/elasticsearch'
@@ -761,6 +763,7 @@ import {
  mongodbDeleteTool,
  mongodbExecuteTool,
  mongodbInsertTool,
  mongodbIntrospectTool,
  mongodbQueryTool,
  mongodbUpdateTool,
} from '@/tools/mongodb'
@@ -768,6 +771,7 @@ import {
  mysqlDeleteTool,
  mysqlExecuteTool,
  mysqlInsertTool,
  mysqlIntrospectTool,
  mysqlQueryTool,
  mysqlUpdateTool,
} from '@/tools/mysql'
@@ -775,6 +779,7 @@ import {
  neo4jCreateTool,
  neo4jDeleteTool,
  neo4jExecuteTool,
  neo4jIntrospectTool,
  neo4jMergeTool,
  neo4jQueryTool,
  neo4jUpdateTool,
@@ -859,6 +864,7 @@ import {
  postgresDeleteTool,
  postgresExecuteTool,
  postgresInsertTool,
  postgresIntrospectTool,
  postgresQueryTool,
  postgresUpdateTool,
} from '@/tools/postgresql'
@@ -912,6 +918,7 @@ import {
  rdsDeleteTool,
  rdsExecuteTool,
  rdsInsertTool,
  rdsIntrospectTool,
  rdsQueryTool,
  rdsUpdateTool,
} from '@/tools/rds'
@@ -1230,6 +1237,7 @@ import {
  supabaseDeleteTool,
  supabaseGetRowTool,
  supabaseInsertTool,
  supabaseIntrospectTool,
  supabaseQueryTool,
  supabaseRpcTool,
  supabaseStorageCopyTool,
@@ -1627,6 +1635,7 @@ export const tools: Record<string, ToolConfig> = {
  supabase_text_search: supabaseTextSearchTool,
  supabase_vector_search: supabaseVectorSearchTool,
  supabase_rpc: supabaseRpcTool,
  supabase_introspect: supabaseIntrospectTool,
  supabase_storage_upload: supabaseStorageUploadTool,
  supabase_storage_download: supabaseStorageDownloadTool,
  supabase_storage_list: supabaseStorageListTool,
@@ -1715,17 +1724,20 @@ export const tools: Record<string, ToolConfig> = {
  postgresql_update: postgresUpdateTool,
  postgresql_delete: postgresDeleteTool,
  postgresql_execute: postgresExecuteTool,
  postgresql_introspect: postgresIntrospectTool,
  rds_query: rdsQueryTool,
  rds_insert: rdsInsertTool,
  rds_update: rdsUpdateTool,
  rds_delete: rdsDeleteTool,
  rds_execute: rdsExecuteTool,
  rds_introspect: rdsIntrospectTool,
  dynamodb_get: dynamodbGetTool,
  dynamodb_put: dynamodbPutTool,
  dynamodb_query: dynamodbQueryTool,
  dynamodb_scan: dynamodbScanTool,
  dynamodb_update: dynamodbUpdateTool,
  dynamodb_delete: dynamodbDeleteTool,
  dynamodb_introspect: dynamodbIntrospectTool,
  dropbox_upload: dropboxUploadTool,
  dropbox_download: dropboxDownloadTool,
  dropbox_list_folder: dropboxListFolderTool,
@@ -1742,17 +1754,20 @@ export const tools: Record<string, ToolConfig> = {
  mongodb_update: mongodbUpdateTool,
  mongodb_delete: mongodbDeleteTool,
  mongodb_execute: mongodbExecuteTool,
  mongodb_introspect: mongodbIntrospectTool,
  mysql_query: mysqlQueryTool,
  mysql_insert: mysqlInsertTool,
  mysql_update: mysqlUpdateTool,
  mysql_delete: mysqlDeleteTool,
  mysql_execute: mysqlExecuteTool,
  mysql_introspect: mysqlIntrospectTool,
  neo4j_query: neo4jQueryTool,
  neo4j_create: neo4jCreateTool,
  neo4j_merge: neo4jMergeTool,
  neo4j_update: neo4jUpdateTool,
  neo4j_delete: neo4jDeleteTool,
  neo4j_execute: neo4jExecuteTool,
  neo4j_introspect: neo4jIntrospectTool,
  github_pr: githubPrTool,
  github_comment: githubCommentTool,
  github_issue_comment: githubIssueCommentTool,
@@ -1844,6 +1859,7 @@ export const tools: Record<string, ToolConfig> = {
  elasticsearch_create_index: elasticsearchCreateIndexTool,
  elasticsearch_delete_index: elasticsearchDeleteIndexTool,
  elasticsearch_get_index: elasticsearchGetIndexTool,
  elasticsearch_list_indices: elasticsearchListIndicesTool,
  elasticsearch_cluster_health: elasticsearchClusterHealthTool,
  elasticsearch_cluster_stats: elasticsearchClusterStatsTool,
  exa_search: exaSearchTool,
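With these registry entries in place, each introspect tool resolves by its string id like any other tool. A quick sketch against the `tools: Record<string, ToolConfig>` map shown above:

// Resolving one of the newly registered tools by id.
const tool = tools['mysql_introspect']
console.log(tool.name)        // "MySQL Introspect"
console.log(tool.request.url) // "/api/tools/mysql/introspect"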
Some files were not shown because too many files have changed in this diff.