Compare commits

...

2 Commits

Author SHA1 Message Date
Waleed
684ad5aeec feat(tool): added introspection tools for all db integrations (#2780)
* feat(tool): added introspection tools for all db integrations

* added sanitization for sql schema

* ack pr comments, with clarifying comments

* moved util
2026-01-12 13:30:55 -08:00
Waleed
a3dff1027f feat(tools): added workflow tools to agent tools dropdown for discoverability, enforce perms on client for redeploying via the agent (#2778)
* feat(tools): added workflow tools to agent tools dropdown for discoverability, enforce perms on client for redeploying via the agent

* added perms enforcement to workflow block header as well
2026-01-12 11:54:01 -08:00
56 changed files with 3563 additions and 29 deletions

View File

@@ -37,7 +37,7 @@ This integration empowers Sim agents to automate data management tasks within yo
## Usage Instructions
Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, and Delete operations on DynamoDB tables.
Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, Delete, and Introspect operations on DynamoDB tables.
@@ -185,6 +185,27 @@ Delete an item from a DynamoDB table
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
### `dynamodb_introspect`
Introspect DynamoDB to list tables or get detailed schema information for a specific table
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `region` | string | Yes | AWS region (e.g., us-east-1) |
| `accessKeyId` | string | Yes | AWS access key ID |
| `secretAccessKey` | string | Yes | AWS secret access key |
| `tableName` | string | No | Optional table name to get detailed schema. If not provided, lists all tables. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | List of table names in the region |
| `tableDetails` | object | Detailed schema information for a specific table |
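As a quick illustration, a hypothetical input for this tool could look like the following (all values are placeholders; omitting `tableName` lists every table in the region):

```ts
const introspectParams = {
  region: 'us-east-1',
  accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? '',
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? '',
  tableName: 'orders', // omit to list all tables instead of describing one
}
```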
## Notes

View File

@@ -362,6 +362,29 @@ Get comprehensive statistics about the Elasticsearch cluster.
| `nodes` | object | Node statistics including count and versions |
| `indices` | object | Index statistics including document count and store size |
### `elasticsearch_list_indices`
List all indices in the Elasticsearch cluster with their health, status, and statistics.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `deploymentType` | string | Yes | Deployment type: self_hosted or cloud |
| `host` | string | No | Elasticsearch host URL (for self-hosted) |
| `cloudId` | string | No | Elastic Cloud ID (for cloud deployments) |
| `authMethod` | string | Yes | Authentication method: api_key or basic_auth |
| `apiKey` | string | No | Elasticsearch API key |
| `username` | string | No | Username for basic auth |
| `password` | string | No | Password for basic auth |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Summary message about the indices |
| `indices` | json | Array of index information objects |
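The optional fields pair with the `deploymentType` and `authMethod` choices; a hypothetical example of each combination (placeholder values):

```ts
const selfHostedParams = {
  deploymentType: 'self_hosted',
  host: 'https://elasticsearch.internal:9200', // required when self-hosted
  authMethod: 'basic_auth',
  username: 'elastic',
  password: process.env.ES_PASSWORD ?? '',
}

const cloudParams = {
  deploymentType: 'cloud',
  cloudId: process.env.ELASTIC_CLOUD_ID ?? '', // required for Elastic Cloud
  authMethod: 'api_key',
  apiKey: process.env.ES_API_KEY ?? '',
}
```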
## Notes

View File

@@ -96,13 +96,13 @@ Download a file from Google Drive with complete metadata (exports Google Workspa
| `fileId` | string | Yes | The ID of the file to download |
| `mimeType` | string | No | The MIME type to export Google Workspace files to (optional) |
| `fileName` | string | No | Optional filename override |
| `includeRevisions` | boolean | No | Whether to include revision history in the metadata (default: true) |
| `includeRevisions` | boolean | No | Whether to include revision history in the metadata (default: true; returns the first 100 revisions) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `file` | object | Downloaded file stored in execution files |
| `file` | object | Downloaded file data |
### `google_drive_list`

View File

@@ -172,6 +172,30 @@ Execute MongoDB aggregation pipeline
| `documents` | array | Array of documents returned from aggregation |
| `documentCount` | number | Number of documents returned |
### `mongodb_introspect`
Introspect MongoDB to list databases, or the collections and indexes of a specific database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MongoDB server hostname or IP address |
| `port` | number | Yes | MongoDB server port (default: 27017) |
| `database` | string | No | Database name to introspect (optional; if not provided, lists all databases) |
| `username` | string | No | MongoDB username |
| `password` | string | No | MongoDB password |
| `authSource` | string | No | Authentication database |
| `ssl` | string | No | SSL connection mode (disabled, required, preferred) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `databases` | array | Array of database names |
| `collections` | array | Array of collection info with name, type, document count, and indexes |
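Two hypothetical inputs, mirroring the optional `database` behavior described above (placeholder values):

```ts
// List all database names on the server.
const listAllDatabases = { host: 'db.internal', port: 27017, ssl: 'preferred' }

// List collections, document counts, and indexes for one database.
const inspectOneDatabase = {
  host: 'db.internal',
  port: 27017,
  database: 'shop',
  username: 'reader',
  password: process.env.MONGO_PASSWORD ?? '',
  authSource: 'admin',
  ssl: 'required',
}
```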
## Notes

View File

@@ -157,6 +157,29 @@ Execute raw SQL query on MySQL database
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |
### `mysql_introspect`
Introspect MySQL database schema to retrieve table structures, columns, and relationships
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port (default: 3306) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode (disabled, required, preferred) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `databases` | array | List of available databases on the server |
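A hypothetical input with every required field filled in (placeholder values):

```ts
const mysqlIntrospectParams = {
  host: 'mysql.internal',
  port: 3306,
  database: 'app',
  username: 'reporting',
  password: process.env.MYSQL_PASSWORD ?? '',
  ssl: 'preferred', // one of: disabled, required, preferred
}
```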
## Notes

View File

@@ -168,6 +168,33 @@ Execute arbitrary Cypher queries on Neo4j graph database for complex operations
| `recordCount` | number | Number of records returned |
| `summary` | json | Execution summary with timing and counters |
### `neo4j_introspect`
Introspect a Neo4j database to discover its schema including node labels, relationship types, properties, constraints, and indexes.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | Neo4j server hostname or IP address |
| `port` | number | Yes | Neo4j server port (default: 7687 for Bolt protocol) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Neo4j username |
| `password` | string | Yes | Neo4j password |
| `encryption` | string | No | Connection encryption mode (enabled, disabled) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `labels` | array | Array of node labels in the database |
| `relationshipTypes` | array | Array of relationship types in the database |
| `nodeSchemas` | array | Array of node schemas with their properties |
| `relationshipSchemas` | array | Array of relationship schemas with their properties |
| `constraints` | array | Array of database constraints |
| `indexes` | array | Array of database indexes |
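A hypothetical input, plus the response shape a caller can expect based on the output table above (placeholder values):

```ts
const neo4jIntrospectParams = {
  host: 'graph.internal',
  port: 7687, // default Bolt protocol port
  database: 'neo4j',
  username: 'neo4j',
  password: process.env.NEO4J_PASSWORD ?? '',
  encryption: 'enabled',
}

// Abridged response shape:
// {
//   message: string,
//   labels: string[],
//   relationshipTypes: string[],
//   nodeSchemas: [{ label, properties: [{ name, types }] }],
//   relationshipSchemas: [{ type, properties: [{ name, types }] }],
//   constraints: [{ name, type, entityType, properties }],
//   indexes: [{ name, type, entityType, properties }],
// }
```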
## Notes

View File

@@ -157,6 +157,30 @@ Execute raw SQL query on PostgreSQL database
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |
### `postgresql_introspect`
Introspect PostgreSQL database schema to retrieve table structures, columns, and relationships
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port (default: 5432) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode (disabled, required, preferred) |
| `schema` | string | No | Schema to introspect (default: public) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `schemas` | array | List of available schemas in the database |
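A hypothetical input; `schema` can be omitted to fall back to `public` (placeholder values):

```ts
const pgIntrospectParams = {
  host: 'pg.internal',
  port: 5432,
  database: 'app',
  username: 'reporting',
  password: process.env.PG_PASSWORD ?? '',
  ssl: 'required',
  schema: 'analytics', // omit to introspect the default 'public' schema
}
```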
## Notes

View File

@@ -165,6 +165,32 @@ Execute raw SQL on Amazon RDS using the Data API
| `rows` | array | Array of rows returned or affected |
| `rowCount` | number | Number of rows affected |
### `rds_introspect`
Introspect Amazon RDS Aurora database schema to retrieve table structures, columns, and relationships
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `region` | string | Yes | AWS region (e.g., us-east-1) |
| `accessKeyId` | string | Yes | AWS access key ID |
| `secretAccessKey` | string | Yes | AWS secret access key |
| `resourceArn` | string | Yes | ARN of the Aurora DB cluster |
| `secretArn` | string | Yes | ARN of the Secrets Manager secret containing DB credentials |
| `database` | string | No | Database name (optional) |
| `schema` | string | No | Schema to introspect (default: public for PostgreSQL, database name for MySQL) |
| `engine` | string | No | Database engine (aurora-postgresql or aurora-mysql). Auto-detected if not provided. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `engine` | string | Detected database engine type |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
| `schemas` | array | List of available schemas in the database |
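A hypothetical input using the standard AWS placeholder account ID; leaving `engine` unset lets the tool detect it via `SELECT VERSION()`:

```ts
const rdsIntrospectParams = {
  region: 'us-east-1',
  accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? '',
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? '',
  resourceArn: 'arn:aws:rds:us-east-1:123456789012:cluster:app-cluster',
  secretArn: 'arn:aws:secretsmanager:us-east-1:123456789012:secret:app-db-abc123',
  database: 'app',
  // engine omitted: auto-detected as aurora-postgresql or aurora-mysql
}
```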
## Notes

View File

@@ -261,6 +261,25 @@ Call a PostgreSQL function in Supabase
| `message` | string | Operation status message |
| `results` | json | Result returned from the function |
### `supabase_introspect`
Introspect Supabase database schema to get table structures, columns, and relationships
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `projectId` | string | Yes | Your Supabase project ID (e.g., jdrkgepadsdopsntdlom) |
| `schema` | string | No | Database schema to introspect (defaults to all user schemas, commonly "public") |
| `apiKey` | string | Yes | Your Supabase service role secret key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `tables` | array | Array of table schemas with columns, keys, and indexes |
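A hypothetical input, reusing the project ID format from the table above (the key is a placeholder):

```ts
const supabaseIntrospectParams = {
  projectId: 'jdrkgepadsdopsntdlom',
  schema: 'public', // omit to cover all user schemas
  apiKey: process.env.SUPABASE_SERVICE_ROLE_KEY ?? '',
}
```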
### `supabase_storage_upload`
Upload a file to a Supabase storage bucket

View File

@@ -0,0 +1,73 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createRawDynamoDBClient, describeTable, listTables } from '@/app/api/tools/dynamodb/utils'
const logger = createLogger('DynamoDBIntrospectAPI')
const IntrospectSchema = z.object({
region: z.string().min(1, 'AWS region is required'),
accessKeyId: z.string().min(1, 'AWS access key ID is required'),
secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
tableName: z.string().optional(),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = IntrospectSchema.parse(body)
logger.info(`[${requestId}] Introspecting DynamoDB in region ${params.region}`)
const client = createRawDynamoDBClient({
region: params.region,
accessKeyId: params.accessKeyId,
secretAccessKey: params.secretAccessKey,
})
try {
const { tables } = await listTables(client)
if (params.tableName) {
logger.info(`[${requestId}] Describing table: ${params.tableName}`)
const { tableDetails } = await describeTable(client, params.tableName)
logger.info(`[${requestId}] Table description completed for '${params.tableName}'`)
return NextResponse.json({
message: `Table '${params.tableName}' described successfully.`,
tables,
tableDetails,
})
}
logger.info(`[${requestId}] Listed ${tables.length} tables`)
return NextResponse.json({
message: `Found ${tables.length} table(s) in region '${params.region}'.`,
tables,
})
} finally {
client.destroy()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] DynamoDB introspection failed:`, error)
return NextResponse.json(
{ error: `DynamoDB introspection failed: ${errorMessage}` },
{ status: 500 }
)
}
}

View File

@@ -1,4 +1,4 @@
import { DynamoDBClient } from '@aws-sdk/client-dynamodb'
import { DescribeTableCommand, DynamoDBClient, ListTablesCommand } from '@aws-sdk/client-dynamodb'
import {
DeleteCommand,
DynamoDBDocumentClient,
@@ -8,7 +8,7 @@ import {
ScanCommand,
UpdateCommand,
} from '@aws-sdk/lib-dynamodb'
import type { DynamoDBConnectionConfig } from '@/tools/dynamodb/types'
import type { DynamoDBConnectionConfig, DynamoDBTableSchema } from '@/tools/dynamodb/types'
export function createDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBDocumentClient {
const client = new DynamoDBClient({
@@ -172,3 +172,99 @@ export async function deleteItem(
await client.send(command)
return { success: true }
}
/**
* Creates a raw DynamoDB client for operations that don't require DocumentClient
*/
export function createRawDynamoDBClient(config: DynamoDBConnectionConfig): DynamoDBClient {
return new DynamoDBClient({
region: config.region,
credentials: {
accessKeyId: config.accessKeyId,
secretAccessKey: config.secretAccessKey,
},
})
}
/**
* Lists all DynamoDB tables in the configured region
*/
export async function listTables(client: DynamoDBClient): Promise<{ tables: string[] }> {
const tables: string[] = []
let exclusiveStartTableName: string | undefined
do {
const command = new ListTablesCommand({
ExclusiveStartTableName: exclusiveStartTableName,
})
const response = await client.send(command)
if (response.TableNames) {
tables.push(...response.TableNames)
}
exclusiveStartTableName = response.LastEvaluatedTableName
} while (exclusiveStartTableName)
return { tables }
}
/**
* Describes a specific DynamoDB table and returns its schema information
*/
export async function describeTable(
client: DynamoDBClient,
tableName: string
): Promise<{ tableDetails: DynamoDBTableSchema }> {
const command = new DescribeTableCommand({
TableName: tableName,
})
const response = await client.send(command)
const table = response.Table
if (!table) {
throw new Error(`Table '${tableName}' not found`)
}
const tableDetails: DynamoDBTableSchema = {
tableName: table.TableName || tableName,
tableStatus: table.TableStatus || 'UNKNOWN',
keySchema:
table.KeySchema?.map((key) => ({
attributeName: key.AttributeName || '',
keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
})) || [],
attributeDefinitions:
table.AttributeDefinitions?.map((attr) => ({
attributeName: attr.AttributeName || '',
attributeType: (attr.AttributeType as 'S' | 'N' | 'B') || 'S',
})) || [],
globalSecondaryIndexes:
table.GlobalSecondaryIndexes?.map((gsi) => ({
indexName: gsi.IndexName || '',
keySchema:
gsi.KeySchema?.map((key) => ({
attributeName: key.AttributeName || '',
keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
})) || [],
projectionType: gsi.Projection?.ProjectionType || 'ALL',
indexStatus: gsi.IndexStatus || 'UNKNOWN',
})) || [],
localSecondaryIndexes:
table.LocalSecondaryIndexes?.map((lsi) => ({
indexName: lsi.IndexName || '',
keySchema:
lsi.KeySchema?.map((key) => ({
attributeName: key.AttributeName || '',
keyType: (key.KeyType as 'HASH' | 'RANGE') || 'HASH',
})) || [],
projectionType: lsi.Projection?.ProjectionType || 'ALL',
indexStatus: 'ACTIVE',
})) || [],
itemCount: Number(table.ItemCount) || 0,
tableSizeBytes: Number(table.TableSizeBytes) || 0,
billingMode: table.BillingModeSummary?.BillingMode || 'PROVISIONED',
}
return { tableDetails }
}
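A minimal usage sketch of the helpers above, assuming the same import path and config shape the route handler uses (credentials are placeholders):

```ts
import { createRawDynamoDBClient, describeTable, listTables } from '@/app/api/tools/dynamodb/utils'

async function printFirstTableSchema(): Promise<void> {
  const client = createRawDynamoDBClient({
    region: 'us-east-1',
    accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? '',
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? '',
  })
  try {
    const { tables } = await listTables(client) // pages via LastEvaluatedTableName
    if (tables.length > 0) {
      const { tableDetails } = await describeTable(client, tables[0])
      console.log(tableDetails.keySchema, tableDetails.billingMode)
    }
  } finally {
    client.destroy() // same cleanup the route handler performs
  }
}
```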

View File

@@ -0,0 +1,73 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createMongoDBConnection, executeIntrospect } from '../utils'
const logger = createLogger('MongoDBIntrospectAPI')
const IntrospectSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().optional(),
username: z.string().optional(),
password: z.string().optional(),
authSource: z.string().optional(),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
let client = null
try {
const body = await request.json()
const params = IntrospectSchema.parse(body)
logger.info(
`[${requestId}] Introspecting MongoDB at ${params.host}:${params.port}${params.database ? `/${params.database}` : ''}`
)
client = await createMongoDBConnection({
host: params.host,
port: params.port,
database: params.database || 'admin',
username: params.username,
password: params.password,
authSource: params.authSource,
ssl: params.ssl,
})
const result = await executeIntrospect(client, params.database)
logger.info(
`[${requestId}] Introspection completed: ${result.databases.length} databases, ${result.collections.length} collections`
)
return NextResponse.json({
message: result.message,
databases: result.databases,
collections: result.collections,
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] MongoDB introspect failed:`, error)
return NextResponse.json(
{ error: `MongoDB introspect failed: ${errorMessage}` },
{ status: 500 }
)
} finally {
if (client) {
await client.close()
}
}
}

View File

@@ -1,5 +1,5 @@
import { MongoClient } from 'mongodb'
import type { MongoDBConnectionConfig } from '@/tools/mongodb/types'
import type { MongoDBCollectionInfo, MongoDBConnectionConfig } from '@/tools/mongodb/types'
export async function createMongoDBConnection(config: MongoDBConnectionConfig) {
const credentials =
@@ -129,3 +129,59 @@ export function sanitizeCollectionName(name: string): string {
}
return name
}
/**
* Introspect MongoDB to get databases, collections, and indexes
*/
export async function executeIntrospect(
client: MongoClient,
database?: string
): Promise<{
message: string
databases: string[]
collections: MongoDBCollectionInfo[]
}> {
const databases: string[] = []
const collections: MongoDBCollectionInfo[] = []
if (database) {
databases.push(database)
const db = client.db(database)
const collectionList = await db.listCollections().toArray()
for (const collInfo of collectionList) {
const coll = db.collection(collInfo.name)
const indexes = await coll.indexes()
const documentCount = await coll.estimatedDocumentCount()
collections.push({
name: collInfo.name,
type: collInfo.type || 'collection',
documentCount,
indexes: indexes.map((idx) => ({
name: idx.name || '',
key: idx.key as Record<string, number>,
unique: idx.unique || false,
sparse: idx.sparse,
})),
})
}
} else {
const admin = client.db().admin()
const dbList = await admin.listDatabases()
for (const dbInfo of dbList.databases) {
databases.push(dbInfo.name)
}
}
const message = database
? `Found ${collections.length} collections in database '${database}'`
: `Found ${databases.length} databases`
return {
message,
databases,
collections,
}
}
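A minimal usage sketch, assuming an import path analogous to the other integrations (connection values are placeholders):

```ts
import { createMongoDBConnection, executeIntrospect } from '@/app/api/tools/mongodb/utils'

async function inspectMongo(): Promise<void> {
  // The route handler falls back to the 'admin' database when none is given.
  const client = await createMongoDBConnection({
    host: 'localhost',
    port: 27017,
    database: 'admin',
    ssl: 'preferred',
  })
  try {
    const all = await executeIntrospect(client) // database names only
    const shop = await executeIntrospect(client, 'shop') // collections + indexes
    console.log(all.databases, shop.collections)
  } finally {
    await client.close()
  }
}
```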

View File

@@ -0,0 +1,70 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createMySQLConnection, executeIntrospect } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLIntrospectAPI')
const IntrospectSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = IntrospectSchema.parse(body)
logger.info(
`[${requestId}] Introspecting MySQL schema on ${params.host}:${params.port}/${params.database}`
)
const connection = await createMySQLConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const result = await executeIntrospect(connection, params.database)
logger.info(
`[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
)
return NextResponse.json({
message: `Schema introspection completed. Found ${result.tables.length} table(s) in database '${params.database}'.`,
tables: result.tables,
databases: result.databases,
})
} finally {
await connection.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] MySQL introspection failed:`, error)
return NextResponse.json(
{ error: `MySQL introspection failed: ${errorMessage}` },
{ status: 500 }
)
}
}

View File

@@ -166,3 +166,146 @@ function sanitizeSingleIdentifier(identifier: string): string {
return `\`${cleaned}\``
}
export interface MySQLIntrospectionResult {
tables: Array<{
name: string
database: string
columns: Array<{
name: string
type: string
nullable: boolean
default: string | null
isPrimaryKey: boolean
isForeignKey: boolean
autoIncrement: boolean
references?: {
table: string
column: string
}
}>
primaryKey: string[]
foreignKeys: Array<{
column: string
referencesTable: string
referencesColumn: string
}>
indexes: Array<{
name: string
columns: string[]
unique: boolean
}>
}>
databases: string[]
}
export async function executeIntrospect(
connection: mysql.Connection,
databaseName: string
): Promise<MySQLIntrospectionResult> {
const [databasesRows] = await connection.execute<mysql.RowDataPacket[]>(
`SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA
WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
ORDER BY SCHEMA_NAME`
)
const databases = databasesRows.map((row) => row.SCHEMA_NAME)
const [tablesRows] = await connection.execute<mysql.RowDataPacket[]>(
`SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES
WHERE TABLE_SCHEMA = ? AND TABLE_TYPE = 'BASE TABLE'
ORDER BY TABLE_NAME`,
[databaseName]
)
const tables = []
for (const tableRow of tablesRows) {
const tableName = tableRow.TABLE_NAME
const [columnsRows] = await connection.execute<mysql.RowDataPacket[]>(
`SELECT COLUMN_NAME, DATA_TYPE, COLUMN_TYPE, IS_NULLABLE, COLUMN_DEFAULT, EXTRA
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?
ORDER BY ORDINAL_POSITION`,
[databaseName, tableName]
)
const [pkRows] = await connection.execute<mysql.RowDataPacket[]>(
`SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND CONSTRAINT_NAME = 'PRIMARY'
ORDER BY ORDINAL_POSITION`,
[databaseName, tableName]
)
const primaryKeyColumns = pkRows.map((row) => row.COLUMN_NAME)
const [fkRows] = await connection.execute<mysql.RowDataPacket[]>(
`SELECT kcu.COLUMN_NAME, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu
WHERE kcu.TABLE_SCHEMA = ? AND kcu.TABLE_NAME = ? AND kcu.REFERENCED_TABLE_NAME IS NOT NULL`,
[databaseName, tableName]
)
const foreignKeys = fkRows.map((row) => ({
column: row.COLUMN_NAME,
referencesTable: row.REFERENCED_TABLE_NAME,
referencesColumn: row.REFERENCED_COLUMN_NAME,
}))
const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))
const [indexRows] = await connection.execute<mysql.RowDataPacket[]>(
`SELECT INDEX_NAME, COLUMN_NAME, SEQ_IN_INDEX, NON_UNIQUE
FROM INFORMATION_SCHEMA.STATISTICS
WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND INDEX_NAME != 'PRIMARY'
ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
[databaseName, tableName]
)
const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
for (const row of indexRows) {
const indexName = row.INDEX_NAME
if (!indexMap.has(indexName)) {
indexMap.set(indexName, {
name: indexName,
columns: [],
unique: row.NON_UNIQUE === 0,
})
}
indexMap.get(indexName)!.columns.push(row.COLUMN_NAME)
}
const indexes = Array.from(indexMap.values())
const columns = columnsRows.map((col) => {
const columnName = col.COLUMN_NAME
const fk = foreignKeys.find((f) => f.column === columnName)
const isAutoIncrement = col.EXTRA?.toLowerCase().includes('auto_increment') || false
return {
name: columnName,
type: col.COLUMN_TYPE || col.DATA_TYPE,
nullable: col.IS_NULLABLE === 'YES',
default: col.COLUMN_DEFAULT,
isPrimaryKey: primaryKeyColumns.includes(columnName),
isForeignKey: fkColumnSet.has(columnName),
autoIncrement: isAutoIncrement,
...(fk && {
references: {
table: fk.referencesTable,
column: fk.referencesColumn,
},
}),
}
})
tables.push({
name: tableName,
database: databaseName,
columns,
primaryKey: primaryKeyColumns,
foreignKeys,
indexes,
})
}
return { tables, databases }
}

View File

@@ -0,0 +1,199 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createNeo4jDriver } from '@/app/api/tools/neo4j/utils'
import type { Neo4jNodeSchema, Neo4jRelationshipSchema } from '@/tools/neo4j/types'
const logger = createLogger('Neo4jIntrospectAPI')
const IntrospectSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
encryption: z.enum(['enabled', 'disabled']).default('disabled'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
let driver = null
let session = null
try {
const body = await request.json()
const params = IntrospectSchema.parse(body)
logger.info(
`[${requestId}] Introspecting Neo4j database at ${params.host}:${params.port}/${params.database}`
)
driver = await createNeo4jDriver({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
encryption: params.encryption,
})
session = driver.session({ database: params.database })
const labelsResult = await session.run(
'CALL db.labels() YIELD label RETURN label ORDER BY label'
)
const labels: string[] = labelsResult.records.map((record) => record.get('label') as string)
const relationshipTypesResult = await session.run(
'CALL db.relationshipTypes() YIELD relationshipType RETURN relationshipType ORDER BY relationshipType'
)
const relationshipTypes: string[] = relationshipTypesResult.records.map(
(record) => record.get('relationshipType') as string
)
const nodeSchemas: Neo4jNodeSchema[] = []
try {
const nodePropertiesResult = await session.run(
'CALL db.schema.nodeTypeProperties() YIELD nodeLabels, propertyName, propertyTypes RETURN nodeLabels, propertyName, propertyTypes'
)
const nodePropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()
for (const record of nodePropertiesResult.records) {
const nodeLabels = record.get('nodeLabels') as string[]
const propertyName = record.get('propertyName') as string
const propertyTypes = record.get('propertyTypes') as string[]
const labelKey = nodeLabels.join(':')
if (!nodePropertiesMap.has(labelKey)) {
nodePropertiesMap.set(labelKey, [])
}
nodePropertiesMap.get(labelKey)!.push({ name: propertyName, types: propertyTypes })
}
for (const [labelKey, properties] of nodePropertiesMap) {
nodeSchemas.push({
label: labelKey,
properties,
})
}
} catch (nodePropsError) {
logger.warn(
`[${requestId}] Could not fetch node properties (may not be supported in this Neo4j version): ${nodePropsError}`
)
}
const relationshipSchemas: Neo4jRelationshipSchema[] = []
try {
const relPropertiesResult = await session.run(
'CALL db.schema.relTypeProperties() YIELD relationshipType, propertyName, propertyTypes RETURN relationshipType, propertyName, propertyTypes'
)
const relPropertiesMap = new Map<string, Array<{ name: string; types: string[] }>>()
for (const record of relPropertiesResult.records) {
const relType = record.get('relationshipType') as string
const propertyName = record.get('propertyName') as string | null
const propertyTypes = record.get('propertyTypes') as string[]
if (!relPropertiesMap.has(relType)) {
relPropertiesMap.set(relType, [])
}
if (propertyName) {
relPropertiesMap.get(relType)!.push({ name: propertyName, types: propertyTypes })
}
}
for (const [relType, properties] of relPropertiesMap) {
relationshipSchemas.push({
type: relType,
properties,
})
}
} catch (relPropsError) {
logger.warn(
`[${requestId}] Could not fetch relationship properties (may not be supported in this Neo4j version): ${relPropsError}`
)
}
const constraints: Array<{
name: string
type: string
entityType: string
properties: string[]
}> = []
try {
const constraintsResult = await session.run('SHOW CONSTRAINTS')
for (const record of constraintsResult.records) {
const name = record.get('name') as string
const type = record.get('type') as string
const entityType = record.get('entityType') as string
const properties = (record.get('properties') as string[]) || []
constraints.push({ name, type, entityType, properties })
}
} catch (constraintsError) {
logger.warn(
`[${requestId}] Could not fetch constraints (may not be supported in this Neo4j version): ${constraintsError}`
)
}
const indexes: Array<{ name: string; type: string; entityType: string; properties: string[] }> =
[]
try {
const indexesResult = await session.run('SHOW INDEXES')
for (const record of indexesResult.records) {
const name = record.get('name') as string
const type = record.get('type') as string
const entityType = record.get('entityType') as string
const properties = (record.get('properties') as string[]) || []
indexes.push({ name, type, entityType, properties })
}
} catch (indexesError) {
logger.warn(
`[${requestId}] Could not fetch indexes (may not be supported in this Neo4j version): ${indexesError}`
)
}
logger.info(
`[${requestId}] Introspection completed: ${labels.length} labels, ${relationshipTypes.length} relationship types, ${constraints.length} constraints, ${indexes.length} indexes`
)
return NextResponse.json({
message: `Database introspection completed: found ${labels.length} labels, ${relationshipTypes.length} relationship types, ${nodeSchemas.length} node schemas, ${relationshipSchemas.length} relationship schemas, ${constraints.length} constraints, ${indexes.length} indexes`,
labels,
relationshipTypes,
nodeSchemas,
relationshipSchemas,
constraints,
indexes,
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] Neo4j introspection failed:`, error)
return NextResponse.json(
{ error: `Neo4j introspection failed: ${errorMessage}` },
{ status: 500 }
)
} finally {
if (session) {
await session.close()
}
if (driver) {
await driver.close()
}
}
}

View File

@@ -0,0 +1,71 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createPostgresConnection, executeIntrospect } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLIntrospectAPI')
const IntrospectSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
schema: z.string().default('public'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = IntrospectSchema.parse(body)
logger.info(
`[${requestId}] Introspecting PostgreSQL schema on ${params.host}:${params.port}/${params.database}`
)
const sql = createPostgresConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const result = await executeIntrospect(sql, params.schema)
logger.info(
`[${requestId}] Introspection completed successfully, found ${result.tables.length} tables`
)
return NextResponse.json({
message: `Schema introspection completed. Found ${result.tables.length} table(s) in schema '${params.schema}'.`,
tables: result.tables,
schemas: result.schemas,
})
} finally {
await sql.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] PostgreSQL introspection failed:`, error)
return NextResponse.json(
{ error: `PostgreSQL introspection failed: ${errorMessage}` },
{ status: 500 }
)
}
}

View File

@@ -187,3 +187,184 @@ export async function executeDelete(
rowCount,
}
}
export interface IntrospectionResult {
tables: Array<{
name: string
schema: string
columns: Array<{
name: string
type: string
nullable: boolean
default: string | null
isPrimaryKey: boolean
isForeignKey: boolean
references?: {
table: string
column: string
}
}>
primaryKey: string[]
foreignKeys: Array<{
column: string
referencesTable: string
referencesColumn: string
}>
indexes: Array<{
name: string
columns: string[]
unique: boolean
}>
}>
schemas: string[]
}
export async function executeIntrospect(
sql: any,
schemaName = 'public'
): Promise<IntrospectionResult> {
const schemasResult = await sql`
SELECT schema_name
FROM information_schema.schemata
WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
ORDER BY schema_name
`
const schemas = schemasResult.map((row: { schema_name: string }) => row.schema_name)
const tablesResult = await sql`
SELECT table_name, table_schema
FROM information_schema.tables
WHERE table_schema = ${schemaName}
AND table_type = 'BASE TABLE'
ORDER BY table_name
`
const tables = []
for (const tableRow of tablesResult) {
const tableName = tableRow.table_name
const tableSchema = tableRow.table_schema
const columnsResult = await sql`
SELECT
c.column_name,
c.data_type,
c.is_nullable,
c.column_default,
c.udt_name
FROM information_schema.columns c
WHERE c.table_schema = ${tableSchema}
AND c.table_name = ${tableName}
ORDER BY c.ordinal_position
`
const pkResult = await sql`
SELECT kcu.column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
WHERE tc.constraint_type = 'PRIMARY KEY'
AND tc.table_schema = ${tableSchema}
AND tc.table_name = ${tableName}
`
const primaryKeyColumns = pkResult.map((row: { column_name: string }) => row.column_name)
const fkResult = await sql`
SELECT
kcu.column_name,
ccu.table_name AS foreign_table_name,
ccu.column_name AS foreign_column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
JOIN information_schema.constraint_column_usage ccu
ON ccu.constraint_name = tc.constraint_name
AND ccu.table_schema = tc.table_schema
WHERE tc.constraint_type = 'FOREIGN KEY'
AND tc.table_schema = ${tableSchema}
AND tc.table_name = ${tableName}
`
const foreignKeys = fkResult.map(
(row: { column_name: string; foreign_table_name: string; foreign_column_name: string }) => ({
column: row.column_name,
referencesTable: row.foreign_table_name,
referencesColumn: row.foreign_column_name,
})
)
const fkColumnSet = new Set(foreignKeys.map((fk: { column: string }) => fk.column))
const indexesResult = await sql`
SELECT
i.relname AS index_name,
a.attname AS column_name,
ix.indisunique AS is_unique
FROM pg_class t
JOIN pg_index ix ON t.oid = ix.indrelid
JOIN pg_class i ON i.oid = ix.indexrelid
JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
JOIN pg_namespace n ON n.oid = t.relnamespace
WHERE t.relkind = 'r'
AND n.nspname = ${tableSchema}
AND t.relname = ${tableName}
AND NOT ix.indisprimary
ORDER BY i.relname, a.attnum
`
const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
for (const row of indexesResult) {
const indexName = row.index_name
if (!indexMap.has(indexName)) {
indexMap.set(indexName, {
name: indexName,
columns: [],
unique: row.is_unique,
})
}
indexMap.get(indexName)!.columns.push(row.column_name)
}
const indexes = Array.from(indexMap.values())
const columns = columnsResult.map(
(col: {
column_name: string
data_type: string
is_nullable: string
column_default: string | null
udt_name: string
}) => {
const columnName = col.column_name
const fk = foreignKeys.find((f: { column: string }) => f.column === columnName)
return {
name: columnName,
type: col.data_type === 'USER-DEFINED' ? col.udt_name : col.data_type,
nullable: col.is_nullable === 'YES',
default: col.column_default,
isPrimaryKey: primaryKeyColumns.includes(columnName),
isForeignKey: fkColumnSet.has(columnName),
...(fk && {
references: {
table: fk.referencesTable,
column: fk.referencesColumn,
},
}),
}
}
)
tables.push({
name: tableName,
schema: tableSchema,
columns,
primaryKey: primaryKeyColumns,
foreignKeys,
indexes,
})
}
return { tables, schemas }
}

View File

@@ -0,0 +1,80 @@
import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createRdsClient, executeIntrospect, type RdsEngine } from '@/app/api/tools/rds/utils'
const logger = createLogger('RDSIntrospectAPI')
const IntrospectSchema = z.object({
region: z.string().min(1, 'AWS region is required'),
accessKeyId: z.string().min(1, 'AWS access key ID is required'),
secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
resourceArn: z.string().min(1, 'Resource ARN is required'),
secretArn: z.string().min(1, 'Secret ARN is required'),
database: z.string().optional(),
schema: z.string().optional(),
engine: z.enum(['aurora-postgresql', 'aurora-mysql']).optional(),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = IntrospectSchema.parse(body)
logger.info(
`[${requestId}] Introspecting RDS Aurora database${params.database ? ` (${params.database})` : ''}`
)
const client = createRdsClient({
region: params.region,
accessKeyId: params.accessKeyId,
secretAccessKey: params.secretAccessKey,
resourceArn: params.resourceArn,
secretArn: params.secretArn,
database: params.database,
})
try {
const result = await executeIntrospect(
client,
params.resourceArn,
params.secretArn,
params.database,
params.schema,
params.engine as RdsEngine | undefined
)
logger.info(
`[${requestId}] Introspection completed successfully. Engine: ${result.engine}, found ${result.tables.length} tables`
)
return NextResponse.json({
message: `Schema introspection completed. Engine: ${result.engine}. Found ${result.tables.length} table(s).`,
engine: result.engine,
tables: result.tables,
schemas: result.schemas,
})
} finally {
client.destroy()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] RDS introspection failed:`, error)
return NextResponse.json(
{ error: `RDS introspection failed: ${errorMessage}` },
{ status: 500 }
)
}
}

View File

@@ -241,3 +241,487 @@ export async function executeDelete(
return executeStatement(client, resourceArn, secretArn, database, sql, parameters)
}
export type RdsEngine = 'aurora-postgresql' | 'aurora-mysql'
export interface RdsIntrospectionResult {
engine: RdsEngine
tables: Array<{
name: string
schema: string
columns: Array<{
name: string
type: string
nullable: boolean
default: string | null
isPrimaryKey: boolean
isForeignKey: boolean
references?: {
table: string
column: string
}
}>
primaryKey: string[]
foreignKeys: Array<{
column: string
referencesTable: string
referencesColumn: string
}>
indexes: Array<{
name: string
columns: string[]
unique: boolean
}>
}>
schemas: string[]
}
/**
* Detects the database engine by querying SELECT VERSION()
*/
export async function detectEngine(
client: RDSDataClient,
resourceArn: string,
secretArn: string,
database: string | undefined
): Promise<RdsEngine> {
const result = await executeStatement(
client,
resourceArn,
secretArn,
database,
'SELECT VERSION()'
)
if (result.rows.length > 0) {
const versionRow = result.rows[0] as Record<string, unknown>
const versionValue = Object.values(versionRow)[0]
const versionString = String(versionValue).toLowerCase()
if (versionString.includes('postgresql') || versionString.includes('postgres')) {
return 'aurora-postgresql'
}
if (versionString.includes('mysql') || versionString.includes('mariadb')) {
return 'aurora-mysql'
}
}
throw new Error('Unable to detect database engine. Please specify the engine parameter.')
}
/**
* Introspects PostgreSQL schema using INFORMATION_SCHEMA
*/
async function introspectPostgresql(
client: RDSDataClient,
resourceArn: string,
secretArn: string,
database: string | undefined,
schemaName: string
): Promise<RdsIntrospectionResult> {
const schemasResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT schema_name FROM information_schema.schemata
WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
ORDER BY schema_name`
)
const schemas = schemasResult.rows.map((row) => (row as { schema_name: string }).schema_name)
const tablesResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT table_name, table_schema
FROM information_schema.tables
WHERE table_schema = :schemaName
AND table_type = 'BASE TABLE'
ORDER BY table_name`,
[{ name: 'schemaName', value: { stringValue: schemaName } }]
)
const tables = []
for (const tableRow of tablesResult.rows) {
const row = tableRow as { table_name: string; table_schema: string }
const tableName = row.table_name
const tableSchema = row.table_schema
const columnsResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT
c.column_name,
c.data_type,
c.is_nullable,
c.column_default,
c.udt_name
FROM information_schema.columns c
WHERE c.table_schema = :tableSchema
AND c.table_name = :tableName
ORDER BY c.ordinal_position`,
[
{ name: 'tableSchema', value: { stringValue: tableSchema } },
{ name: 'tableName', value: { stringValue: tableName } },
]
)
const pkResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT kcu.column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
WHERE tc.constraint_type = 'PRIMARY KEY'
AND tc.table_schema = :tableSchema
AND tc.table_name = :tableName`,
[
{ name: 'tableSchema', value: { stringValue: tableSchema } },
{ name: 'tableName', value: { stringValue: tableName } },
]
)
const primaryKeyColumns = pkResult.rows.map((r) => (r as { column_name: string }).column_name)
const fkResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT
kcu.column_name,
ccu.table_name AS foreign_table_name,
ccu.column_name AS foreign_column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
JOIN information_schema.constraint_column_usage ccu
ON ccu.constraint_name = tc.constraint_name
AND ccu.table_schema = tc.table_schema
WHERE tc.constraint_type = 'FOREIGN KEY'
AND tc.table_schema = :tableSchema
AND tc.table_name = :tableName`,
[
{ name: 'tableSchema', value: { stringValue: tableSchema } },
{ name: 'tableName', value: { stringValue: tableName } },
]
)
const foreignKeys = fkResult.rows.map((r) => {
const fkRow = r as {
column_name: string
foreign_table_name: string
foreign_column_name: string
}
return {
column: fkRow.column_name,
referencesTable: fkRow.foreign_table_name,
referencesColumn: fkRow.foreign_column_name,
}
})
const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))
const indexesResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT
i.relname AS index_name,
a.attname AS column_name,
ix.indisunique AS is_unique
FROM pg_class t
JOIN pg_index ix ON t.oid = ix.indrelid
JOIN pg_class i ON i.oid = ix.indexrelid
JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = ANY(ix.indkey)
JOIN pg_namespace n ON n.oid = t.relnamespace
WHERE t.relkind = 'r'
AND n.nspname = :tableSchema
AND t.relname = :tableName
AND NOT ix.indisprimary
ORDER BY i.relname, a.attnum`,
[
{ name: 'tableSchema', value: { stringValue: tableSchema } },
{ name: 'tableName', value: { stringValue: tableName } },
]
)
const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
for (const idxRow of indexesResult.rows) {
const idx = idxRow as { index_name: string; column_name: string; is_unique: boolean }
const indexName = idx.index_name
if (!indexMap.has(indexName)) {
indexMap.set(indexName, {
name: indexName,
columns: [],
unique: idx.is_unique,
})
}
indexMap.get(indexName)!.columns.push(idx.column_name)
}
const indexes = Array.from(indexMap.values())
const columns = columnsResult.rows.map((colRow) => {
const col = colRow as {
column_name: string
data_type: string
is_nullable: string
column_default: string | null
udt_name: string
}
const columnName = col.column_name
const fk = foreignKeys.find((f) => f.column === columnName)
return {
name: columnName,
type: col.data_type === 'USER-DEFINED' ? col.udt_name : col.data_type,
nullable: col.is_nullable === 'YES',
default: col.column_default,
isPrimaryKey: primaryKeyColumns.includes(columnName),
isForeignKey: fkColumnSet.has(columnName),
...(fk && {
references: {
table: fk.referencesTable,
column: fk.referencesColumn,
},
}),
}
})
tables.push({
name: tableName,
schema: tableSchema,
columns,
primaryKey: primaryKeyColumns,
foreignKeys,
indexes,
})
}
return { engine: 'aurora-postgresql', tables, schemas }
}
/**
* Introspects MySQL schema using INFORMATION_SCHEMA
*/
async function introspectMysql(
client: RDSDataClient,
resourceArn: string,
secretArn: string,
database: string | undefined,
schemaName: string
): Promise<RdsIntrospectionResult> {
const schemasResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT SCHEMA_NAME as schema_name FROM information_schema.SCHEMATA
WHERE SCHEMA_NAME NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
ORDER BY SCHEMA_NAME`
)
const schemas = schemasResult.rows.map((row) => (row as { schema_name: string }).schema_name)
const tablesResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT TABLE_NAME as table_name, TABLE_SCHEMA as table_schema
FROM information_schema.TABLES
WHERE TABLE_SCHEMA = :schemaName
AND TABLE_TYPE = 'BASE TABLE'
ORDER BY TABLE_NAME`,
[{ name: 'schemaName', value: { stringValue: schemaName } }]
)
const tables = []
for (const tableRow of tablesResult.rows) {
const row = tableRow as { table_name: string; table_schema: string }
const tableName = row.table_name
const tableSchema = row.table_schema
const columnsResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT
COLUMN_NAME as column_name,
DATA_TYPE as data_type,
IS_NULLABLE as is_nullable,
COLUMN_DEFAULT as column_default,
COLUMN_TYPE as column_type,
COLUMN_KEY as column_key
FROM information_schema.COLUMNS
WHERE TABLE_SCHEMA = :tableSchema
AND TABLE_NAME = :tableName
ORDER BY ORDINAL_POSITION`,
[
{ name: 'tableSchema', value: { stringValue: tableSchema } },
{ name: 'tableName', value: { stringValue: tableName } },
]
)
const pkResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT COLUMN_NAME as column_name
FROM information_schema.KEY_COLUMN_USAGE
WHERE TABLE_SCHEMA = :tableSchema
AND TABLE_NAME = :tableName
AND CONSTRAINT_NAME = 'PRIMARY'
ORDER BY ORDINAL_POSITION`,
[
{ name: 'tableSchema', value: { stringValue: tableSchema } },
{ name: 'tableName', value: { stringValue: tableName } },
]
)
const primaryKeyColumns = pkResult.rows.map((r) => (r as { column_name: string }).column_name)
const fkResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT
kcu.COLUMN_NAME as column_name,
kcu.REFERENCED_TABLE_NAME as foreign_table_name,
kcu.REFERENCED_COLUMN_NAME as foreign_column_name
FROM information_schema.KEY_COLUMN_USAGE kcu
WHERE kcu.TABLE_SCHEMA = :tableSchema
AND kcu.TABLE_NAME = :tableName
AND kcu.REFERENCED_TABLE_NAME IS NOT NULL`,
[
{ name: 'tableSchema', value: { stringValue: tableSchema } },
{ name: 'tableName', value: { stringValue: tableName } },
]
)
const foreignKeys = fkResult.rows.map((r) => {
const fkRow = r as {
column_name: string
foreign_table_name: string
foreign_column_name: string
}
return {
column: fkRow.column_name,
referencesTable: fkRow.foreign_table_name,
referencesColumn: fkRow.foreign_column_name,
}
})
const fkColumnSet = new Set(foreignKeys.map((fk) => fk.column))
const indexesResult = await executeStatement(
client,
resourceArn,
secretArn,
database,
`SELECT
INDEX_NAME as index_name,
COLUMN_NAME as column_name,
NON_UNIQUE as non_unique
FROM information_schema.STATISTICS
WHERE TABLE_SCHEMA = :tableSchema
AND TABLE_NAME = :tableName
AND INDEX_NAME != 'PRIMARY'
ORDER BY INDEX_NAME, SEQ_IN_INDEX`,
[
{ name: 'tableSchema', value: { stringValue: tableSchema } },
{ name: 'tableName', value: { stringValue: tableName } },
]
)
const indexMap = new Map<string, { name: string; columns: string[]; unique: boolean }>()
for (const idxRow of indexesResult.rows) {
const idx = idxRow as { index_name: string; column_name: string; non_unique: number }
const indexName = idx.index_name
if (!indexMap.has(indexName)) {
indexMap.set(indexName, {
name: indexName,
columns: [],
unique: idx.non_unique === 0,
})
}
indexMap.get(indexName)!.columns.push(idx.column_name)
}
const indexes = Array.from(indexMap.values())
const columns = columnsResult.rows.map((colRow) => {
const col = colRow as {
column_name: string
data_type: string
is_nullable: string
column_default: string | null
column_type: string
column_key: string
}
const columnName = col.column_name
const fk = foreignKeys.find((f) => f.column === columnName)
return {
name: columnName,
type: col.column_type || col.data_type,
nullable: col.is_nullable === 'YES',
default: col.column_default,
isPrimaryKey: col.column_key === 'PRI',
isForeignKey: fkColumnSet.has(columnName),
...(fk && {
references: {
table: fk.referencesTable,
column: fk.referencesColumn,
},
}),
}
})
tables.push({
name: tableName,
schema: tableSchema,
columns,
primaryKey: primaryKeyColumns,
foreignKeys,
indexes,
})
}
return { engine: 'aurora-mysql', tables, schemas }
}
/**
* Introspects RDS Aurora database schema with auto-detection of engine type
*/
export async function executeIntrospect(
client: RDSDataClient,
resourceArn: string,
secretArn: string,
database: string | undefined,
schemaName?: string,
engine?: RdsEngine
): Promise<RdsIntrospectionResult> {
const detectedEngine = engine || (await detectEngine(client, resourceArn, secretArn, database))
if (detectedEngine === 'aurora-postgresql') {
const schema = schemaName || 'public'
return introspectPostgresql(client, resourceArn, secretArn, database, schema)
}
const schema = schemaName || database || ''
if (!schema) {
throw new Error('Schema or database name is required for MySQL introspection')
}
return introspectMysql(client, resourceArn, secretArn, database, schema)
}
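A minimal usage sketch of the auto-detection path, assuming the same client config the route handler builds (ARNs and credentials are placeholders):

```ts
import { createRdsClient, executeIntrospect } from '@/app/api/tools/rds/utils'

async function inspectAurora(): Promise<void> {
  const resourceArn = 'arn:aws:rds:us-east-1:123456789012:cluster:app-cluster'
  const secretArn = 'arn:aws:secretsmanager:us-east-1:123456789012:secret:app-db-abc123'
  const client = createRdsClient({
    region: 'us-east-1',
    accessKeyId: process.env.AWS_ACCESS_KEY_ID ?? '',
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY ?? '',
    resourceArn,
    secretArn,
    database: 'app',
  })
  try {
    // No engine argument: detectEngine() probes SELECT VERSION() first.
    const result = await executeIntrospect(client, resourceArn, secretArn, 'app')
    console.log(result.engine, result.tables.length, result.schemas)
  } finally {
    client.destroy()
  }
}
```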

View File

@@ -16,7 +16,7 @@ import {
Switch,
Tooltip,
} from '@/components/emcn'
import { McpIcon } from '@/components/icons'
import { McpIcon, WorkflowIcon } from '@/components/icons'
import { cn } from '@/lib/core/utils/cn'
import {
getIssueBadgeLabel,
@@ -30,6 +30,7 @@ import {
type OAuthProvider,
type OAuthService,
} from '@/lib/oauth'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import {
CheckboxList,
Code,
@@ -769,9 +770,10 @@ function WorkflowToolDeployBadge({
}) {
const { isDeployed, needsRedeploy, isLoading, refetch } = useChildDeployment(workflowId)
const [isDeploying, setIsDeploying] = useState(false)
const userPermissions = useUserPermissionsContext()
const deployWorkflow = useCallback(async () => {
if (isDeploying || !workflowId) return
if (isDeploying || !workflowId || !userPermissions.canAdmin) return
try {
setIsDeploying(true)
@@ -796,7 +798,7 @@ function WorkflowToolDeployBadge({
} finally {
setIsDeploying(false)
}
}, [isDeploying, workflowId, refetch, onDeploySuccess])
}, [isDeploying, workflowId, refetch, onDeploySuccess, userPermissions.canAdmin])
if (isLoading || (isDeployed && !needsRedeploy)) {
return null
@@ -811,13 +813,13 @@ function WorkflowToolDeployBadge({
<Tooltip.Trigger asChild>
<Badge
variant={!isDeployed ? 'red' : 'amber'}
className='cursor-pointer'
className={userPermissions.canAdmin ? 'cursor-pointer' : 'cursor-not-allowed'}
size='sm'
dot
onClick={(e: React.MouseEvent) => {
e.stopPropagation()
e.preventDefault()
if (!isDeploying) {
if (!isDeploying && userPermissions.canAdmin) {
deployWorkflow()
}
}}
@@ -826,7 +828,13 @@ function WorkflowToolDeployBadge({
</Badge>
</Tooltip.Trigger>
<Tooltip.Content>
<span className='text-sm'>{!isDeployed ? 'Click to deploy' : 'Click to redeploy'}</span>
<span className='text-sm'>
{!userPermissions.canAdmin
? 'Admin permission required to deploy'
: !isDeployed
? 'Click to deploy'
: 'Click to redeploy'}
</span>
</Tooltip.Content>
</Tooltip.Root>
)
@@ -933,6 +941,13 @@ export function ToolInput({
const forceRefreshMcpTools = useForceRefreshMcpTools()
const openSettingsModal = useSettingsModalStore((state) => state.openModal)
const mcpDataLoading = mcpLoading || mcpServersLoading
// Fetch workflows for the Workflows section in the dropdown
const { data: workflowsList = [] } = useWorkflows(workspaceId, { syncRegistry: false })
const availableWorkflows = useMemo(
() => workflowsList.filter((w) => w.id !== workflowId),
[workflowsList, workflowId]
)
const hasRefreshedRef = useRef(false)
const hasMcpTools = selectedTools.some((tool) => tool.type === 'mcp')
@@ -1735,6 +1750,36 @@ export function ToolInput({
})
}
// Workflows section - shows available workflows that can be executed as tools
if (availableWorkflows.length > 0) {
groups.push({
section: 'Workflows',
items: availableWorkflows.map((workflow) => ({
label: workflow.name,
value: `workflow-${workflow.id}`,
iconElement: createToolIcon('#6366F1', WorkflowIcon),
onSelect: () => {
const newTool: StoredTool = {
type: 'workflow',
title: 'Workflow',
toolId: 'workflow_executor',
params: {
workflowId: workflow.id,
},
isExpanded: true,
usageControl: 'auto',
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
disabled: isPreview || disabled,
})),
})
}
return groups
}, [
customTools,
@@ -1749,6 +1794,7 @@ export function ToolInput({
handleSelectTool,
permissionConfig.disableCustomTools,
permissionConfig.disableMcpTools,
availableWorkflows,
])
const toolRequiresOAuth = (toolId: string): boolean => {

View File

@@ -1021,11 +1021,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({
<Tooltip.Trigger asChild>
<Badge
variant={!childIsDeployed ? 'red' : 'amber'}
className='cursor-pointer'
className={userPermissions.canAdmin ? 'cursor-pointer' : 'cursor-not-allowed'}
dot
onClick={(e) => {
e.stopPropagation()
if (childWorkflowId && !isDeploying) {
if (childWorkflowId && !isDeploying && userPermissions.canAdmin) {
deployWorkflow(childWorkflowId)
}
}}
@@ -1035,7 +1035,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({
</Tooltip.Trigger>
<Tooltip.Content>
<span className='text-sm'>
{!childIsDeployed ? 'Click to deploy' : 'Click to redeploy'}
{!userPermissions.canAdmin
? 'Admin permission required to deploy'
: !childIsDeployed
? 'Click to deploy'
: 'Click to redeploy'}
</span>
</Tooltip.Content>
</Tooltip.Root>

View File

@@ -1,13 +1,13 @@
import { DynamoDBIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { DynamoDBResponse } from '@/tools/dynamodb/types'
import type { DynamoDBIntrospectResponse, DynamoDBResponse } from '@/tools/dynamodb/types'
export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
export const DynamoDBBlock: BlockConfig<DynamoDBResponse | DynamoDBIntrospectResponse> = {
type: 'dynamodb',
name: 'Amazon DynamoDB',
description: 'Connect to Amazon DynamoDB',
longDescription:
'Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, and Delete operations on DynamoDB tables.',
'Integrate Amazon DynamoDB into workflows. Supports Get, Put, Query, Scan, Update, Delete, and Introspect operations on DynamoDB tables.',
docsLink: 'https://docs.sim.ai/tools/dynamodb',
category: 'tools',
bgColor: 'linear-gradient(45deg, #2E27AD 0%, #527FFF 100%)',
@@ -24,6 +24,7 @@ export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
{ label: 'Scan', id: 'scan' },
{ label: 'Update Item', id: 'update' },
{ label: 'Delete Item', id: 'delete' },
{ label: 'Introspect', id: 'introspect' },
],
value: () => 'get',
},
@@ -56,6 +57,19 @@ export const DynamoDBBlock: BlockConfig<DynamoDBResponse> = {
type: 'short-input',
placeholder: 'my-table',
required: true,
condition: {
field: 'operation',
value: 'introspect',
not: true,
},
},
{
id: 'tableName',
title: 'Table Name (Optional)',
type: 'short-input',
placeholder: 'Leave empty to list all tables',
required: false,
condition: { field: 'operation', value: 'introspect' },
},
// Key field for get, update, delete operations
{
@@ -420,6 +434,7 @@ Return ONLY the expression - no explanations.`,
'dynamodb_scan',
'dynamodb_update',
'dynamodb_delete',
'dynamodb_introspect',
],
config: {
tool: (params) => {
@@ -436,6 +451,8 @@ Return ONLY the expression - no explanations.`,
return 'dynamodb_update'
case 'delete':
return 'dynamodb_delete'
case 'introspect':
return 'dynamodb_introspect'
default:
throw new Error(`Invalid DynamoDB operation: ${params.operation}`)
}
@@ -552,5 +569,13 @@ Return ONLY the expression - no explanations.`,
type: 'number',
description: 'Number of items returned',
},
tables: {
type: 'array',
description: 'List of table names from introspect operation',
},
tableDetails: {
type: 'json',
description: 'Detailed schema information for a specific table from introspect operation',
},
},
}
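
Selecting the new dropdown option routes through the same resolver as the existing operations; a quick illustration of the mapping above (params trimmed to the one field the resolver reads):

```ts
DynamoDBBlock.tools.config.tool({ operation: 'introspect' }) // 'dynamodb_introspect'
DynamoDBBlock.tools.config.tool({ operation: 'drop' })       // throws: Invalid DynamoDB operation: drop
```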

View File

@@ -33,6 +33,7 @@ export const ElasticsearchBlock: BlockConfig<ElasticsearchResponse> = {
{ label: 'Create Index', id: 'elasticsearch_create_index' },
{ label: 'Delete Index', id: 'elasticsearch_delete_index' },
{ label: 'Get Index Info', id: 'elasticsearch_get_index' },
{ label: 'List Indices', id: 'elasticsearch_list_indices' },
// Cluster Operations
{ label: 'Cluster Health', id: 'elasticsearch_cluster_health' },
{ label: 'Cluster Stats', id: 'elasticsearch_cluster_stats' },
@@ -452,6 +453,7 @@ Return ONLY valid JSON - no explanations, no markdown code blocks.`,
'elasticsearch_get_index',
'elasticsearch_cluster_health',
'elasticsearch_cluster_stats',
'elasticsearch_list_indices',
],
config: {
tool: (params) => {

View File

@@ -1,8 +1,8 @@
import { MongoDBIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { MongoDBResponse } from '@/tools/mongodb/types'
import type { MongoDBIntrospectResponse, MongoDBResponse } from '@/tools/mongodb/types'
export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
export const MongoDBBlock: BlockConfig<MongoDBResponse | MongoDBIntrospectResponse> = {
type: 'mongodb',
name: 'MongoDB',
description: 'Connect to MongoDB database',
@@ -23,6 +23,7 @@ export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
{ label: 'Update Documents', id: 'update' },
{ label: 'Delete Documents', id: 'delete' },
{ label: 'Aggregate Pipeline', id: 'execute' },
{ label: 'Introspect Database', id: 'introspect' },
],
value: () => 'query',
},
@@ -86,6 +87,7 @@ export const MongoDBBlock: BlockConfig<MongoDBResponse> = {
type: 'short-input',
placeholder: 'users',
required: true,
condition: { field: 'operation', value: 'introspect', not: true },
},
{
id: 'query',
@@ -803,6 +805,7 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
'mongodb_update',
'mongodb_delete',
'mongodb_execute',
'mongodb_introspect',
],
config: {
tool: (params) => {
@@ -817,6 +820,8 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
return 'mongodb_delete'
case 'execute':
return 'mongodb_execute'
case 'introspect':
return 'mongodb_introspect'
default:
throw new Error(`Invalid MongoDB operation: ${params.operation}`)
}
@@ -936,5 +941,14 @@ Return ONLY the MongoDB query filter as valid JSON - no explanations, no markdow
type: 'number',
description: 'Number of documents matched (update operations)',
},
databases: {
type: 'array',
description: 'Array of database names (introspect operation)',
},
collections: {
type: 'array',
description:
'Array of collection info with name, type, document count, and indexes (introspect operation)',
},
},
}

View File

@@ -23,6 +23,7 @@ export const MySQLBlock: BlockConfig<MySQLResponse> = {
{ label: 'Update Data', id: 'update' },
{ label: 'Delete Data', id: 'delete' },
{ label: 'Execute Raw SQL', id: 'execute' },
{ label: 'Introspect Schema', id: 'introspect' },
],
value: () => 'query',
},
@@ -285,7 +286,14 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
},
],
tools: {
access: ['mysql_query', 'mysql_insert', 'mysql_update', 'mysql_delete', 'mysql_execute'],
access: [
'mysql_query',
'mysql_insert',
'mysql_update',
'mysql_delete',
'mysql_execute',
'mysql_introspect',
],
config: {
tool: (params) => {
switch (params.operation) {
@@ -299,6 +307,8 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
return 'mysql_delete'
case 'execute':
return 'mysql_execute'
case 'introspect':
return 'mysql_introspect'
default:
throw new Error(`Invalid MySQL operation: ${params.operation}`)
}

View File

@@ -1,8 +1,8 @@
import { Neo4jIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { Neo4jResponse } from '@/tools/neo4j/types'
import type { Neo4jIntrospectResponse, Neo4jResponse } from '@/tools/neo4j/types'
export const Neo4jBlock: BlockConfig<Neo4jResponse> = {
export const Neo4jBlock: BlockConfig<Neo4jResponse | Neo4jIntrospectResponse> = {
type: 'neo4j',
name: 'Neo4j',
description: 'Connect to Neo4j graph database',
@@ -24,6 +24,7 @@ export const Neo4jBlock: BlockConfig<Neo4jResponse> = {
{ label: 'Update Properties (SET)', id: 'update' },
{ label: 'Delete Nodes/Relationships', id: 'delete' },
{ label: 'Execute Cypher', id: 'execute' },
{ label: 'Introspect Schema', id: 'introspect' },
],
value: () => 'query',
},
@@ -589,6 +590,7 @@ Return ONLY valid JSON.`,
'neo4j_update',
'neo4j_delete',
'neo4j_execute',
'neo4j_introspect',
],
config: {
tool: (params) => {
@@ -605,6 +607,8 @@ Return ONLY valid JSON.`,
return 'neo4j_delete'
case 'execute':
return 'neo4j_execute'
case 'introspect':
return 'neo4j_introspect'
default:
throw new Error(`Invalid Neo4j operation: ${params.operation}`)
}

View File

@@ -23,6 +23,7 @@ export const PostgreSQLBlock: BlockConfig<PostgresResponse> = {
{ label: 'Update Data', id: 'update' },
{ label: 'Delete Data', id: 'delete' },
{ label: 'Execute Raw SQL', id: 'execute' },
{ label: 'Introspect Schema', id: 'introspect' },
],
value: () => 'query',
},
@@ -285,6 +286,14 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
condition: { field: 'operation', value: 'delete' },
required: true,
},
{
id: 'schema',
title: 'Schema Name',
type: 'short-input',
placeholder: 'public',
value: () => 'public',
condition: { field: 'operation', value: 'introspect' },
},
],
tools: {
access: [
@@ -293,6 +302,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
'postgresql_update',
'postgresql_delete',
'postgresql_execute',
'postgresql_introspect',
],
config: {
tool: (params) => {
@@ -307,6 +317,8 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
return 'postgresql_delete'
case 'execute':
return 'postgresql_execute'
case 'introspect':
return 'postgresql_introspect'
default:
throw new Error(`Invalid PostgreSQL operation: ${params.operation}`)
}
@@ -343,6 +355,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
if (rest.table) result.table = rest.table
if (rest.query) result.query = rest.query
if (rest.where) result.where = rest.where
if (rest.schema) result.schema = rest.schema
if (parsedData !== undefined) result.data = parsedData
return result
@@ -361,6 +374,7 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
query: { type: 'string', description: 'SQL query to execute' },
data: { type: 'json', description: 'Data for insert/update operations' },
where: { type: 'string', description: 'WHERE clause for update/delete' },
schema: { type: 'string', description: 'Schema name for introspection' },
},
outputs: {
message: {
@@ -375,5 +389,13 @@ Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
type: 'number',
description: 'Number of rows affected by the operation',
},
tables: {
type: 'array',
description: 'Array of table schemas with columns, keys, and indexes (introspect operation)',
},
schemas: {
type: 'array',
description: 'List of available schemas in the database (introspect operation)',
},
},
}

View File

@@ -1,8 +1,8 @@
import { RDSIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { RdsResponse } from '@/tools/rds/types'
import type { RdsIntrospectResponse, RdsResponse } from '@/tools/rds/types'
export const RDSBlock: BlockConfig<RdsResponse> = {
export const RDSBlock: BlockConfig<RdsResponse | RdsIntrospectResponse> = {
type: 'rds',
name: 'Amazon RDS',
description: 'Connect to Amazon RDS via Data API',
@@ -23,6 +23,7 @@ export const RDSBlock: BlockConfig<RdsResponse> = {
{ label: 'Update Data', id: 'update' },
{ label: 'Delete Data', id: 'delete' },
{ label: 'Execute Raw SQL', id: 'execute' },
{ label: 'Introspect Schema', id: 'introspect' },
],
value: () => 'query',
},
@@ -340,9 +341,36 @@ Return ONLY the JSON object.`,
generationType: 'json-object',
},
},
{
id: 'schema',
title: 'Schema Name',
type: 'short-input',
placeholder: 'public (PostgreSQL) or database name (MySQL)',
condition: { field: 'operation', value: 'introspect' },
required: false,
},
{
id: 'engine',
title: 'Database Engine',
type: 'dropdown',
options: [
{ label: 'Auto-detect', id: '' },
{ label: 'Aurora PostgreSQL', id: 'aurora-postgresql' },
{ label: 'Aurora MySQL', id: 'aurora-mysql' },
],
condition: { field: 'operation', value: 'introspect' },
value: () => '',
},
],
tools: {
access: ['rds_query', 'rds_insert', 'rds_update', 'rds_delete', 'rds_execute'],
access: [
'rds_query',
'rds_insert',
'rds_update',
'rds_delete',
'rds_execute',
'rds_introspect',
],
config: {
tool: (params) => {
switch (params.operation) {
@@ -356,12 +384,14 @@ Return ONLY the JSON object.`,
return 'rds_delete'
case 'execute':
return 'rds_execute'
case 'introspect':
return 'rds_introspect'
default:
throw new Error(`Invalid RDS operation: ${params.operation}`)
}
},
params: (params) => {
const { operation, data, conditions, ...rest } = params
const { operation, data, conditions, schema, engine, ...rest } = params
// Parse JSON fields
const parseJson = (value: unknown, fieldName: string) => {
@@ -399,6 +429,8 @@ Return ONLY the JSON object.`,
if (rest.query) result.query = rest.query
if (parsedConditions !== undefined) result.conditions = parsedConditions
if (parsedData !== undefined) result.data = parsedData
if (schema) result.schema = schema
if (engine) result.engine = engine
return result
},
@@ -416,6 +448,11 @@ Return ONLY the JSON object.`,
query: { type: 'string', description: 'SQL query to execute' },
data: { type: 'json', description: 'Data for insert/update operations' },
conditions: { type: 'json', description: 'Conditions for update/delete (e.g., {"id": 1})' },
schema: { type: 'string', description: 'Schema to introspect (for introspect operation)' },
engine: {
type: 'string',
description: 'Database engine (aurora-postgresql or aurora-mysql, auto-detected if not set)',
},
},
outputs: {
message: {
@@ -430,5 +467,18 @@ Return ONLY the JSON object.`,
type: 'number',
description: 'Number of rows affected by the operation',
},
engine: {
type: 'string',
description: 'Detected database engine type (for introspect operation)',
},
tables: {
type: 'array',
description:
'Array of table schemas with columns, keys, and indexes (for introspect operation)',
},
schemas: {
type: 'array',
description: 'List of available schemas in the database (for introspect operation)',
},
},
}

View File

@@ -34,6 +34,7 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
{ label: 'Full-Text Search', id: 'text_search' },
{ label: 'Vector Search', id: 'vector_search' },
{ label: 'Call RPC Function', id: 'rpc' },
{ label: 'Introspect Schema', id: 'introspect' },
// Storage - File Operations
{ label: 'Storage: Upload File', id: 'storage_upload' },
{ label: 'Storage: Download File', id: 'storage_download' },
@@ -490,6 +491,14 @@ Return ONLY the order by expression - no explanations, no extra text.`,
placeholder: '{\n "param1": "value1",\n "param2": "value2"\n}',
condition: { field: 'operation', value: 'rpc' },
},
// Introspect operation fields
{
id: 'schema',
title: 'Schema',
type: 'short-input',
placeholder: 'public (leave empty for all user schemas)',
condition: { field: 'operation', value: 'introspect' },
},
// Text Search operation fields
{
id: 'column',
@@ -876,6 +885,7 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
'supabase_text_search',
'supabase_vector_search',
'supabase_rpc',
'supabase_introspect',
'supabase_storage_upload',
'supabase_storage_download',
'supabase_storage_list',
@@ -911,6 +921,8 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
return 'supabase_vector_search'
case 'rpc':
return 'supabase_rpc'
case 'introspect':
return 'supabase_introspect'
case 'storage_upload':
return 'supabase_storage_upload'
case 'storage_download':
@@ -1085,7 +1097,6 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
operation: { type: 'string', description: 'Operation to perform' },
projectId: { type: 'string', description: 'Supabase project identifier' },
table: { type: 'string', description: 'Database table name' },
schema: { type: 'string', description: 'Database schema (default: public)' },
select: { type: 'string', description: 'Columns to return (comma-separated, defaults to *)' },
apiKey: { type: 'string', description: 'Service role secret key' },
// Data for insert/update operations
@@ -1113,6 +1124,8 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
language: { type: 'string', description: 'Language for text search' },
// Count operation inputs
countType: { type: 'string', description: 'Count type: exact, planned, or estimated' },
// Introspect operation inputs
schema: { type: 'string', description: 'Database schema to introspect (e.g., public)' },
// Storage operation inputs
bucket: { type: 'string', description: 'Storage bucket name' },
path: { type: 'string', description: 'File or folder path in storage' },
@@ -1158,5 +1171,13 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
type: 'string',
description: 'Temporary signed URL for storage file',
},
tables: {
type: 'json',
description: 'Array of table schemas for introspect operation',
},
schemas: {
type: 'json',
description: 'Array of schema names found in the database',
},
},
}

View File

@@ -1,5 +1,6 @@
import { deleteTool } from './delete'
import { getTool } from './get'
import { introspectTool } from './introspect'
import { putTool } from './put'
import { queryTool } from './query'
import { scanTool } from './scan'
@@ -7,6 +8,7 @@ import { updateTool } from './update'
export const dynamodbDeleteTool = deleteTool
export const dynamodbGetTool = getTool
export const dynamodbIntrospectTool = introspectTool
export const dynamodbPutTool = putTool
export const dynamodbQueryTool = queryTool
export const dynamodbScanTool = scanTool

View File

@@ -0,0 +1,78 @@
import type { DynamoDBIntrospectParams, DynamoDBIntrospectResponse } from '@/tools/dynamodb/types'
import type { ToolConfig } from '@/tools/types'
export const introspectTool: ToolConfig<DynamoDBIntrospectParams, DynamoDBIntrospectResponse> = {
id: 'dynamodb_introspect',
name: 'DynamoDB Introspect',
description:
'Introspect DynamoDB to list tables or get detailed schema information for a specific table',
version: '1.0',
params: {
region: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'AWS region (e.g., us-east-1)',
},
accessKeyId: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'AWS access key ID',
},
secretAccessKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'AWS secret access key',
},
tableName: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Optional table name to get detailed schema. If not provided, lists all tables.',
},
},
request: {
url: '/api/tools/dynamodb/introspect',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
region: params.region,
accessKeyId: params.accessKeyId,
secretAccessKey: params.secretAccessKey,
...(params.tableName && { tableName: params.tableName }),
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'DynamoDB introspection failed')
}
return {
success: true,
output: {
message: data.message || 'Introspection completed successfully',
tables: data.tables || [],
tableDetails: data.tableDetails,
},
error: undefined,
}
},
outputs: {
message: { type: 'string', description: 'Operation status message' },
tables: { type: 'array', description: 'List of table names in the region' },
tableDetails: {
type: 'object',
description: 'Detailed schema information for a specific table',
},
},
}
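
Because `tableName` is spread in conditionally, the list-tables and describe-table modes share one endpoint and differ only in the request body. A sketch of the two calls (credentials are placeholders):

```ts
// List all tables in the region; tableName is simply omitted from the body.
introspectTool.request.body({
  region: 'us-east-1',
  accessKeyId: 'AKIA…',   // placeholder
  secretAccessKey: '…',   // placeholder
})
// → { region: 'us-east-1', accessKeyId: 'AKIA…', secretAccessKey: '…' }

// Describe a single table; the API then also returns tableDetails.
introspectTool.request.body({
  region: 'us-east-1',
  accessKeyId: 'AKIA…',
  secretAccessKey: '…',
  tableName: 'orders',
})
```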

View File

@@ -68,3 +68,45 @@ export interface DynamoDBScanResponse extends DynamoDBBaseResponse {}
export interface DynamoDBUpdateResponse extends DynamoDBBaseResponse {}
export interface DynamoDBDeleteResponse extends DynamoDBBaseResponse {}
export interface DynamoDBResponse extends DynamoDBBaseResponse {}
export interface DynamoDBIntrospectParams extends DynamoDBConnectionConfig {
tableName?: string
}
export interface DynamoDBKeySchema {
attributeName: string
keyType: 'HASH' | 'RANGE'
}
export interface DynamoDBAttributeDefinition {
attributeName: string
attributeType: 'S' | 'N' | 'B'
}
export interface DynamoDBGSI {
indexName: string
keySchema: DynamoDBKeySchema[]
projectionType: string
indexStatus: string
}
export interface DynamoDBTableSchema {
tableName: string
tableStatus: string
keySchema: DynamoDBKeySchema[]
attributeDefinitions: DynamoDBAttributeDefinition[]
globalSecondaryIndexes: DynamoDBGSI[]
localSecondaryIndexes: DynamoDBGSI[]
itemCount: number
tableSizeBytes: number
billingMode: string
}
export interface DynamoDBIntrospectResponse extends ToolResponse {
output: {
message: string
tables: string[]
tableDetails?: DynamoDBTableSchema
}
error?: string
}
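
An illustrative `DynamoDBTableSchema` value for a table with one global secondary index, showing how the pieces fit together (all values made up):

```ts
const example: DynamoDBTableSchema = {
  tableName: 'orders',
  tableStatus: 'ACTIVE',
  keySchema: [
    { attributeName: 'pk', keyType: 'HASH' },
    { attributeName: 'sk', keyType: 'RANGE' },
  ],
  attributeDefinitions: [
    { attributeName: 'pk', attributeType: 'S' },
    { attributeName: 'sk', attributeType: 'S' },
    { attributeName: 'status', attributeType: 'S' }, // keys used by the GSI must be defined here too
  ],
  globalSecondaryIndexes: [
    {
      indexName: 'byStatus',
      keySchema: [{ attributeName: 'status', keyType: 'HASH' }],
      projectionType: 'ALL',
      indexStatus: 'ACTIVE',
    },
  ],
  localSecondaryIndexes: [],
  itemCount: 1200,
  tableSizeBytes: 524288,
  billingMode: 'PAY_PER_REQUEST',
}
```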

View File

@@ -9,6 +9,7 @@ import { deleteIndexTool } from '@/tools/elasticsearch/delete_index'
import { getDocumentTool } from '@/tools/elasticsearch/get_document'
import { getIndexTool } from '@/tools/elasticsearch/get_index'
import { indexDocumentTool } from '@/tools/elasticsearch/index_document'
import { listIndicesTool } from '@/tools/elasticsearch/list_indices'
import { searchTool } from '@/tools/elasticsearch/search'
import { updateDocumentTool } from '@/tools/elasticsearch/update_document'
@@ -23,5 +24,6 @@ export const elasticsearchCountTool = countTool
export const elasticsearchCreateIndexTool = createIndexTool
export const elasticsearchDeleteIndexTool = deleteIndexTool
export const elasticsearchGetIndexTool = getIndexTool
export const elasticsearchListIndicesTool = listIndicesTool
export const elasticsearchClusterHealthTool = clusterHealthTool
export const elasticsearchClusterStatsTool = clusterStatsTool

View File

@@ -0,0 +1,171 @@
import type {
ElasticsearchListIndicesParams,
ElasticsearchListIndicesResponse,
} from '@/tools/elasticsearch/types'
import type { ToolConfig } from '@/tools/types'
/**
* Builds the base URL for Elasticsearch connections.
* Supports both self-hosted and Elastic Cloud deployments.
*/
function buildBaseUrl(params: ElasticsearchListIndicesParams): string {
if (params.deploymentType === 'cloud' && params.cloudId) {
// Cloud ID layout: "<deployment-name>:<base64(host$es-uuid$kibana-uuid)>";
// the Elasticsearch endpoint is "https://<es-uuid>.<host>".
const parts = params.cloudId.split(':')
if (parts.length >= 2) {
try {
const decoded = Buffer.from(parts[1], 'base64').toString('utf-8')
const [esHost, esUuid] = decoded.split('$')
if (esHost && esUuid) {
return `https://${esUuid}.${esHost}`
}
} catch {
// Malformed base64; fall through to the error below
}
}
throw new Error('Invalid Cloud ID format')
}
if (!params.host) {
throw new Error('Host is required for self-hosted deployments')
}
return params.host.replace(/\/$/, '')
}
/**
* Builds authentication headers for Elasticsearch requests.
* Supports API key and basic authentication methods.
*/
function buildAuthHeaders(params: ElasticsearchListIndicesParams): Record<string, string> {
const headers: Record<string, string> = {
'Content-Type': 'application/json',
}
if (params.authMethod === 'api_key' && params.apiKey) {
headers.Authorization = `ApiKey ${params.apiKey}`
} else if (params.authMethod === 'basic_auth' && params.username && params.password) {
const credentials = Buffer.from(`${params.username}:${params.password}`).toString('base64')
headers.Authorization = `Basic ${credentials}`
} else {
throw new Error('Invalid authentication configuration')
}
return headers
}
export const listIndicesTool: ToolConfig<
ElasticsearchListIndicesParams,
ElasticsearchListIndicesResponse
> = {
id: 'elasticsearch_list_indices',
name: 'Elasticsearch List Indices',
description:
'List all indices in the Elasticsearch cluster with their health, status, and statistics.',
version: '1.0.0',
params: {
deploymentType: {
type: 'string',
required: true,
description: 'Deployment type: self_hosted or cloud',
},
host: {
type: 'string',
required: false,
description: 'Elasticsearch host URL (for self-hosted)',
},
cloudId: {
type: 'string',
required: false,
description: 'Elastic Cloud ID (for cloud deployments)',
},
authMethod: {
type: 'string',
required: true,
description: 'Authentication method: api_key or basic_auth',
},
apiKey: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Elasticsearch API key',
},
username: {
type: 'string',
required: false,
description: 'Username for basic auth',
},
password: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Password for basic auth',
},
},
request: {
url: (params) => {
const baseUrl = buildBaseUrl(params)
return `${baseUrl}/_cat/indices?format=json`
},
method: 'GET',
headers: (params) => buildAuthHeaders(params),
},
transformResponse: async (response: Response) => {
if (!response.ok) {
const errorText = await response.text()
let errorMessage = `Elasticsearch error: ${response.status}`
try {
const errorJson = JSON.parse(errorText)
errorMessage = errorJson.error?.reason || errorJson.error?.type || errorMessage
} catch {
errorMessage = errorText || errorMessage
}
return {
success: false,
output: {
message: errorMessage,
indices: [],
},
error: errorMessage,
}
}
const data = await response.json()
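// Hide system indices (names beginning with '.') from the listing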
const indices = data
.filter((item: Record<string, unknown>) => {
const indexName = item.index as string
return !indexName.startsWith('.')
})
.map((item: Record<string, unknown>) => ({
index: item.index as string,
health: item.health as string,
status: item.status as string,
docsCount: Number.parseInt(item['docs.count'] as string, 10) || 0,
storeSize: (item['store.size'] as string) || '0b',
primaryShards: Number.parseInt(item.pri as string, 10) || 0,
replicaShards: Number.parseInt(item.rep as string, 10) || 0,
}))
return {
success: true,
output: {
message: `Found ${indices.length} indices`,
indices,
},
}
},
outputs: {
message: {
type: 'string',
description: 'Summary message about the indices',
},
indices: {
type: 'json',
description: 'Array of index information objects',
},
},
}
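
A quick check of the Cloud ID handling, assuming the standard layout `host$es-uuid$kibana-uuid` behind the base64 segment (all values made up):

```ts
const host = 'eu-west-1.aws.found.io'
const cloudId = `my-deploy:${Buffer.from(`${host}$abc123$def456`).toString('base64')}`

buildBaseUrl({
  deploymentType: 'cloud',
  cloudId,
  authMethod: 'api_key',
  apiKey: 'example-key', // placeholder
} as ElasticsearchListIndicesParams)
// → 'https://abc123.eu-west-1.aws.found.io'
```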

View File

@@ -110,6 +110,18 @@ export interface ElasticsearchClusterHealthParams extends ElasticsearchBaseParam
export interface ElasticsearchClusterStatsParams extends ElasticsearchBaseParams {}
export interface ElasticsearchListIndicesParams extends ElasticsearchBaseParams {}
export interface ElasticsearchIndexInfo {
index: string
health: string
status: string
docsCount: number
storeSize: string
primaryShards: number
replicaShards: number
}
// Response types
export interface ElasticsearchDocumentResponse extends ToolResponse {
output: {
@@ -262,6 +274,14 @@ export interface ElasticsearchIndexStatsResponse extends ToolResponse {
}
}
export interface ElasticsearchListIndicesResponse extends ToolResponse {
output: {
message: string
indices: ElasticsearchIndexInfo[]
}
error?: string
}
// Union type for all Elasticsearch responses
export type ElasticsearchResponse =
| ElasticsearchDocumentResponse
@@ -276,3 +296,4 @@ export type ElasticsearchResponse =
| ElasticsearchClusterStatsResponse
| ElasticsearchRefreshResponse
| ElasticsearchIndexStatsResponse
| ElasticsearchListIndicesResponse

View File

@@ -1,12 +1,14 @@
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { insertTool } from './insert'
import { introspectTool } from './introspect'
import { queryTool } from './query'
import { updateTool } from './update'
export const mongodbDeleteTool = deleteTool
export const mongodbExecuteTool = executeTool
export const mongodbInsertTool = insertTool
export const mongodbIntrospectTool = introspectTool
export const mongodbQueryTool = queryTool
export const mongodbUpdateTool = updateTool

View File

@@ -0,0 +1,98 @@
import type { MongoDBIntrospectParams, MongoDBIntrospectResponse } from '@/tools/mongodb/types'
import type { ToolConfig } from '@/tools/types'
export const introspectTool: ToolConfig<MongoDBIntrospectParams, MongoDBIntrospectResponse> = {
id: 'mongodb_introspect',
name: 'MongoDB Introspect',
description: 'Introspect MongoDB database to list databases, collections, and indexes',
version: '1.0',
params: {
host: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'MongoDB server hostname or IP address',
},
port: {
type: 'number',
required: true,
visibility: 'user-only',
description: 'MongoDB server port (default: 27017)',
},
database: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Database name to introspect (optional - if not provided, lists all databases)',
},
username: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'MongoDB username',
},
password: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'MongoDB password',
},
authSource: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Authentication database',
},
ssl: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'SSL connection mode (disabled, required, preferred)',
},
},
request: {
url: '/api/tools/mongodb/introspect',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
host: params.host,
port: Number(params.port),
database: params.database,
username: params.username,
password: params.password,
authSource: params.authSource,
ssl: params.ssl || 'preferred',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'MongoDB introspect failed')
}
return {
success: true,
output: {
message: data.message || 'Introspection completed successfully',
databases: data.databases || [],
collections: data.collections || [],
},
error: undefined,
}
},
outputs: {
message: { type: 'string', description: 'Operation status message' },
databases: { type: 'array', description: 'Array of database names' },
collections: {
type: 'array',
description: 'Array of collection info with name, type, document count, and indexes',
},
},
}

View File

@@ -41,6 +41,28 @@ export interface MongoDBExecuteParams extends MongoDBConnectionConfig {
pipeline: string
}
export interface MongoDBIntrospectParams {
host: string
port: number
database?: string
username?: string
password?: string
authSource?: string
ssl?: 'disabled' | 'required' | 'preferred'
}
export interface MongoDBCollectionInfo {
name: string
type: string
documentCount: number
indexes: Array<{
name: string
key: Record<string, number>
unique: boolean
sparse?: boolean
}>
}
export interface MongoDBBaseResponse extends ToolResponse {
output: {
message: string
@@ -61,3 +83,12 @@ export interface MongoDBUpdateResponse extends MongoDBBaseResponse {}
export interface MongoDBDeleteResponse extends MongoDBBaseResponse {}
export interface MongoDBExecuteResponse extends MongoDBBaseResponse {}
export interface MongoDBResponse extends MongoDBBaseResponse {}
export interface MongoDBIntrospectResponse extends ToolResponse {
output: {
message: string
databases: string[]
collections: MongoDBCollectionInfo[]
}
error?: string
}
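
An illustrative `MongoDBCollectionInfo` value, as the introspect output would report a `users` collection with a unique email index (values made up):

```ts
const usersCollection: MongoDBCollectionInfo = {
  name: 'users',
  type: 'collection',
  documentCount: 4821,
  indexes: [
    { name: '_id_', key: { _id: 1 }, unique: false },
    { name: 'email_1', key: { email: 1 }, unique: true },
  ],
}
```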

View File

@@ -1,12 +1,14 @@
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { insertTool } from './insert'
import { introspectTool } from './introspect'
import { queryTool } from './query'
import { updateTool } from './update'
export const mysqlDeleteTool = deleteTool
export const mysqlExecuteTool = executeTool
export const mysqlInsertTool = insertTool
export const mysqlIntrospectTool = introspectTool
export const mysqlQueryTool = queryTool
export const mysqlUpdateTool = updateTool

View File

@@ -0,0 +1,92 @@
import type { MySQLIntrospectParams, MySQLIntrospectResponse } from '@/tools/mysql/types'
import type { ToolConfig } from '@/tools/types'
export const introspectTool: ToolConfig<MySQLIntrospectParams, MySQLIntrospectResponse> = {
id: 'mysql_introspect',
name: 'MySQL Introspect',
description:
'Introspect MySQL database schema to retrieve table structures, columns, and relationships',
version: '1.0',
params: {
host: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'MySQL server hostname or IP address',
},
port: {
type: 'number',
required: true,
visibility: 'user-only',
description: 'MySQL server port (default: 3306)',
},
database: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database name to connect to',
},
username: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database username',
},
password: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database password',
},
ssl: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'SSL connection mode (disabled, required, preferred)',
},
},
request: {
url: '/api/tools/mysql/introspect',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
host: params.host,
port: Number(params.port),
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl || 'required',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'MySQL introspection failed')
}
return {
success: true,
output: {
message: data.message || 'Schema introspection completed successfully',
tables: data.tables || [],
databases: data.databases || [],
},
error: undefined,
}
},
outputs: {
message: { type: 'string', description: 'Operation status message' },
tables: {
type: 'array',
description: 'Array of table schemas with columns, keys, and indexes',
},
databases: { type: 'array', description: 'List of available databases on the server' },
},
}

View File

@@ -48,3 +48,30 @@ export interface MySQLUpdateResponse extends MySQLBaseResponse {}
export interface MySQLDeleteResponse extends MySQLBaseResponse {}
export interface MySQLExecuteResponse extends MySQLBaseResponse {}
export interface MySQLResponse extends MySQLBaseResponse {}
export interface MySQLIntrospectParams extends MySQLConnectionConfig {}
export interface MySQLTableColumn {
name: string
type: string
nullable: boolean
default: string | null
isPrimaryKey: boolean
isForeignKey: boolean
autoIncrement: boolean
references?: { table: string; column: string }
}
export interface MySQLTableSchema {
name: string
database: string
columns: MySQLTableColumn[]
primaryKey: string[]
foreignKeys: Array<{ column: string; referencesTable: string; referencesColumn: string }>
indexes: Array<{ name: string; columns: string[]; unique: boolean }>
}
export interface MySQLIntrospectResponse extends ToolResponse {
output: { message: string; tables: MySQLTableSchema[]; databases: string[] }
error?: string
}

View File

@@ -1,6 +1,7 @@
import { createTool } from './create'
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { introspectTool } from './introspect'
import { mergeTool } from './merge'
import { queryTool } from './query'
import { updateTool } from './update'
@@ -8,6 +9,7 @@ import { updateTool } from './update'
export const neo4jCreateTool = createTool
export const neo4jDeleteTool = deleteTool
export const neo4jExecuteTool = executeTool
export const neo4jIntrospectTool = introspectTool
export const neo4jMergeTool = mergeTool
export const neo4jQueryTool = queryTool
export const neo4jUpdateTool = updateTool

View File

@@ -0,0 +1,103 @@
import type { Neo4jIntrospectParams, Neo4jIntrospectResponse } from '@/tools/neo4j/types'
import type { ToolConfig } from '@/tools/types'
export const introspectTool: ToolConfig<Neo4jIntrospectParams, Neo4jIntrospectResponse> = {
id: 'neo4j_introspect',
name: 'Neo4j Introspect',
description:
'Introspect a Neo4j database to discover its schema including node labels, relationship types, properties, constraints, and indexes.',
version: '1.0',
params: {
host: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Neo4j server hostname or IP address',
},
port: {
type: 'number',
required: true,
visibility: 'user-only',
description: 'Neo4j server port (default: 7687 for Bolt protocol)',
},
database: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database name to connect to',
},
username: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Neo4j username',
},
password: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Neo4j password',
},
encryption: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Connection encryption mode (enabled, disabled)',
},
},
request: {
url: '/api/tools/neo4j/introspect',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
host: params.host,
port: Number(params.port),
database: params.database,
username: params.username,
password: params.password,
encryption: params.encryption || 'disabled',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'Neo4j introspection failed')
}
return {
success: true,
output: {
message: data.message || 'Introspection completed successfully',
labels: data.labels || [],
relationshipTypes: data.relationshipTypes || [],
nodeSchemas: data.nodeSchemas || [],
relationshipSchemas: data.relationshipSchemas || [],
constraints: data.constraints || [],
indexes: data.indexes || [],
},
error: undefined,
}
},
outputs: {
message: { type: 'string', description: 'Operation status message' },
labels: { type: 'array', description: 'Array of node labels in the database' },
relationshipTypes: {
type: 'array',
description: 'Array of relationship types in the database',
},
nodeSchemas: { type: 'array', description: 'Array of node schemas with their properties' },
relationshipSchemas: {
type: 'array',
description: 'Array of relationship schemas with their properties',
},
constraints: { type: 'array', description: 'Array of database constraints' },
indexes: { type: 'array', description: 'Array of database indexes' },
},
}

View File

@@ -73,3 +73,28 @@ export interface Neo4jUpdateResponse extends Neo4jBaseResponse {}
export interface Neo4jDeleteResponse extends Neo4jBaseResponse {}
export interface Neo4jExecuteResponse extends Neo4jBaseResponse {}
export interface Neo4jResponse extends Neo4jBaseResponse {}
export interface Neo4jIntrospectParams extends Neo4jConnectionConfig {}
export interface Neo4jNodeSchema {
label: string
properties: Array<{ name: string; types: string[] }>
}
export interface Neo4jRelationshipSchema {
type: string
properties: Array<{ name: string; types: string[] }>
}
export interface Neo4jIntrospectResponse extends ToolResponse {
output: {
message: string
labels: string[]
relationshipTypes: string[]
nodeSchemas: Neo4jNodeSchema[]
relationshipSchemas: Neo4jRelationshipSchema[]
constraints: Array<{ name: string; type: string; entityType: string; properties: string[] }>
indexes: Array<{ name: string; type: string; entityType: string; properties: string[] }>
}
error?: string
}
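
An illustrative `Neo4jNodeSchema` value; the `types` array carries every type a property has been written with, so a property stored inconsistently surfaces with more than one entry (values made up):

```ts
const person: Neo4jNodeSchema = {
  label: 'Person',
  properties: [
    { name: 'name', types: ['String'] },
    { name: 'age', types: ['Long', 'Double'] }, // written as both integer and float at some point
  ],
}
```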

View File

@@ -163,7 +163,7 @@ export function getToolParametersConfig(
id: 'workflowId',
type: 'string',
required: true,
visibility: 'user-or-llm',
visibility: 'user-only',
description: 'The ID of the workflow to execute',
uiComponent: {
type: 'workflow-selector',

View File

@@ -1,11 +1,13 @@
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { insertTool } from './insert'
import { introspectTool } from './introspect'
import { queryTool } from './query'
import { updateTool } from './update'
export const postgresDeleteTool = deleteTool
export const postgresExecuteTool = executeTool
export const postgresInsertTool = insertTool
export const postgresIntrospectTool = introspectTool
export const postgresQueryTool = queryTool
export const postgresUpdateTool = updateTool

View File

@@ -0,0 +1,99 @@
import type { PostgresIntrospectParams, PostgresIntrospectResponse } from '@/tools/postgresql/types'
import type { ToolConfig } from '@/tools/types'
export const introspectTool: ToolConfig<PostgresIntrospectParams, PostgresIntrospectResponse> = {
id: 'postgresql_introspect',
name: 'PostgreSQL Introspect',
description:
'Introspect PostgreSQL database schema to retrieve table structures, columns, and relationships',
version: '1.0',
params: {
host: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'PostgreSQL server hostname or IP address',
},
port: {
type: 'number',
required: true,
visibility: 'user-only',
description: 'PostgreSQL server port (default: 5432)',
},
database: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database name to connect to',
},
username: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database username',
},
password: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Database password',
},
ssl: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'SSL connection mode (disabled, required, preferred)',
},
schema: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Schema to introspect (default: public)',
},
},
request: {
url: '/api/tools/postgresql/introspect',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
host: params.host,
port: Number(params.port),
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl || 'required',
schema: params.schema || 'public',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'PostgreSQL introspection failed')
}
return {
success: true,
output: {
message: data.message || 'Schema introspection completed successfully',
tables: data.tables || [],
schemas: data.schemas || [],
},
error: undefined,
}
},
outputs: {
message: { type: 'string', description: 'Operation status message' },
tables: {
type: 'array',
description: 'Array of table schemas with columns, keys, and indexes',
},
schemas: { type: 'array', description: 'List of available schemas in the database' },
},
}

View File

@@ -33,6 +33,10 @@ export interface PostgresExecuteParams extends PostgresConnectionConfig {
query: string
}
export interface PostgresIntrospectParams extends PostgresConnectionConfig {
schema?: string
}
export interface PostgresBaseResponse extends ToolResponse {
output: {
message: string
@@ -47,4 +51,44 @@ export interface PostgresInsertResponse extends PostgresBaseResponse {}
export interface PostgresUpdateResponse extends PostgresBaseResponse {}
export interface PostgresDeleteResponse extends PostgresBaseResponse {}
export interface PostgresExecuteResponse extends PostgresBaseResponse {}
export interface TableColumn {
name: string
type: string
nullable: boolean
default: string | null
isPrimaryKey: boolean
isForeignKey: boolean
references?: {
table: string
column: string
}
}
export interface TableSchema {
name: string
schema: string
columns: TableColumn[]
primaryKey: string[]
foreignKeys: Array<{
column: string
referencesTable: string
referencesColumn: string
}>
indexes: Array<{
name: string
columns: string[]
unique: boolean
}>
}
export interface PostgresIntrospectResponse extends ToolResponse {
output: {
message: string
tables: TableSchema[]
schemas: string[]
}
error?: string
}
export interface PostgresResponse extends PostgresBaseResponse {}
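
An illustrative `TableSchema` value for a table with a serial primary key and one foreign key, showing how `columns`, `primaryKey`, `foreignKeys`, and `indexes` relate (values made up):

```ts
const orders: TableSchema = {
  name: 'orders',
  schema: 'public',
  columns: [
    {
      name: 'id', type: 'integer', nullable: false,
      default: "nextval('orders_id_seq'::regclass)",
      isPrimaryKey: true, isForeignKey: false,
    },
    {
      name: 'user_id', type: 'integer', nullable: false, default: null,
      isPrimaryKey: false, isForeignKey: true,
      references: { table: 'users', column: 'id' },
    },
  ],
  primaryKey: ['id'],
  foreignKeys: [{ column: 'user_id', referencesTable: 'users', referencesColumn: 'id' }],
  indexes: [{ name: 'orders_pkey', columns: ['id'], unique: true }],
}
```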

View File

@@ -1,11 +1,13 @@
import { deleteTool } from './delete'
import { executeTool } from './execute'
import { insertTool } from './insert'
import { introspectTool } from './introspect'
import { queryTool } from './query'
import { updateTool } from './update'
export const rdsDeleteTool = deleteTool
export const rdsExecuteTool = executeTool
export const rdsInsertTool = insertTool
export const rdsIntrospectTool = introspectTool
export const rdsQueryTool = queryTool
export const rdsUpdateTool = updateTool

View File

@@ -0,0 +1,109 @@
import type { RdsIntrospectParams, RdsIntrospectResponse } from '@/tools/rds/types'
import type { ToolConfig } from '@/tools/types'
export const introspectTool: ToolConfig<RdsIntrospectParams, RdsIntrospectResponse> = {
id: 'rds_introspect',
name: 'RDS Introspect',
description:
'Introspect Amazon RDS Aurora database schema to retrieve table structures, columns, and relationships',
version: '1.0',
params: {
region: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'AWS region (e.g., us-east-1)',
},
accessKeyId: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'AWS access key ID',
},
secretAccessKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'AWS secret access key',
},
resourceArn: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'ARN of the Aurora DB cluster',
},
secretArn: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'ARN of the Secrets Manager secret containing DB credentials',
},
database: {
type: 'string',
required: false,
visibility: 'user-only',
description: 'Database name (optional)',
},
schema: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Schema to introspect (default: public for PostgreSQL, database name for MySQL)',
},
engine: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Database engine (aurora-postgresql or aurora-mysql). Auto-detected if not provided.',
},
},
request: {
url: '/api/tools/rds/introspect',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
region: params.region,
accessKeyId: params.accessKeyId,
secretAccessKey: params.secretAccessKey,
resourceArn: params.resourceArn,
secretArn: params.secretArn,
...(params.database && { database: params.database }),
...(params.schema && { schema: params.schema }),
...(params.engine && { engine: params.engine }),
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'RDS introspection failed')
}
return {
success: true,
output: {
message: data.message || 'Schema introspection completed successfully',
engine: data.engine || 'unknown',
tables: data.tables || [],
schemas: data.schemas || [],
},
error: undefined,
}
},
outputs: {
message: { type: 'string', description: 'Operation status message' },
engine: { type: 'string', description: 'Detected database engine type' },
tables: {
type: 'array',
description: 'Array of table schemas with columns, keys, and indexes',
},
schemas: { type: 'array', description: 'List of available schemas in the database' },
},
}
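
As with the other introspect tools, the optional fields are spread in only when present; a sketch of a request body with the engine pinned (credentials and ARNs are placeholders):

```ts
introspectTool.request.body({
  region: 'us-east-1',
  accessKeyId: 'AKIA…',   // placeholder
  secretAccessKey: '…',   // placeholder
  resourceArn: 'arn:aws:rds:us-east-1:123456789012:cluster:demo',
  secretArn: 'arn:aws:secretsmanager:us-east-1:123456789012:secret:demo',
  engine: 'aurora-postgresql',
})
// → body includes engine but omits database and schema, so the API applies
//   the documented default ('public' for PostgreSQL).
```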

View File

@@ -33,6 +33,11 @@ export interface RdsExecuteParams extends RdsConnectionConfig {
query: string
}
export interface RdsIntrospectParams extends RdsConnectionConfig {
schema?: string
engine?: 'aurora-postgresql' | 'aurora-mysql'
}
export interface RdsBaseResponse extends ToolResponse {
output: {
message: string
@@ -48,3 +53,43 @@ export interface RdsUpdateResponse extends RdsBaseResponse {}
export interface RdsDeleteResponse extends RdsBaseResponse {}
export interface RdsExecuteResponse extends RdsBaseResponse {}
export interface RdsResponse extends RdsBaseResponse {}
export interface RdsTableColumn {
name: string
type: string
nullable: boolean
default: string | null
isPrimaryKey: boolean
isForeignKey: boolean
references?: {
table: string
column: string
}
}
export interface RdsTableSchema {
name: string
schema: string
columns: RdsTableColumn[]
primaryKey: string[]
foreignKeys: Array<{
column: string
referencesTable: string
referencesColumn: string
}>
indexes: Array<{
name: string
columns: string[]
unique: boolean
}>
}
export interface RdsIntrospectResponse extends ToolResponse {
output: {
message: string
engine: string
tables: RdsTableSchema[]
schemas: string[]
}
error?: string
}

View File

@@ -158,6 +158,7 @@ import { duckduckgoSearchTool } from '@/tools/duckduckgo'
import {
dynamodbDeleteTool,
dynamodbGetTool,
dynamodbIntrospectTool,
dynamodbPutTool,
dynamodbQueryTool,
dynamodbScanTool,
@@ -174,6 +175,7 @@ import {
elasticsearchGetDocumentTool,
elasticsearchGetIndexTool,
elasticsearchIndexDocumentTool,
elasticsearchListIndicesTool,
elasticsearchSearchTool,
elasticsearchUpdateDocumentTool,
} from '@/tools/elasticsearch'
@@ -761,6 +763,7 @@ import {
mongodbDeleteTool,
mongodbExecuteTool,
mongodbInsertTool,
mongodbIntrospectTool,
mongodbQueryTool,
mongodbUpdateTool,
} from '@/tools/mongodb'
@@ -768,6 +771,7 @@ import {
mysqlDeleteTool,
mysqlExecuteTool,
mysqlInsertTool,
mysqlIntrospectTool,
mysqlQueryTool,
mysqlUpdateTool,
} from '@/tools/mysql'
@@ -775,6 +779,7 @@ import {
neo4jCreateTool,
neo4jDeleteTool,
neo4jExecuteTool,
neo4jIntrospectTool,
neo4jMergeTool,
neo4jQueryTool,
neo4jUpdateTool,
@@ -859,6 +864,7 @@ import {
postgresDeleteTool,
postgresExecuteTool,
postgresInsertTool,
postgresIntrospectTool,
postgresQueryTool,
postgresUpdateTool,
} from '@/tools/postgresql'
@@ -912,6 +918,7 @@ import {
rdsDeleteTool,
rdsExecuteTool,
rdsInsertTool,
rdsIntrospectTool,
rdsQueryTool,
rdsUpdateTool,
} from '@/tools/rds'
@@ -1230,6 +1237,7 @@ import {
supabaseDeleteTool,
supabaseGetRowTool,
supabaseInsertTool,
supabaseIntrospectTool,
supabaseQueryTool,
supabaseRpcTool,
supabaseStorageCopyTool,
@@ -1627,6 +1635,7 @@ export const tools: Record<string, ToolConfig> = {
supabase_text_search: supabaseTextSearchTool,
supabase_vector_search: supabaseVectorSearchTool,
supabase_rpc: supabaseRpcTool,
supabase_introspect: supabaseIntrospectTool,
supabase_storage_upload: supabaseStorageUploadTool,
supabase_storage_download: supabaseStorageDownloadTool,
supabase_storage_list: supabaseStorageListTool,
@@ -1715,17 +1724,20 @@ export const tools: Record<string, ToolConfig> = {
postgresql_update: postgresUpdateTool,
postgresql_delete: postgresDeleteTool,
postgresql_execute: postgresExecuteTool,
postgresql_introspect: postgresIntrospectTool,
rds_query: rdsQueryTool,
rds_insert: rdsInsertTool,
rds_update: rdsUpdateTool,
rds_delete: rdsDeleteTool,
rds_execute: rdsExecuteTool,
rds_introspect: rdsIntrospectTool,
dynamodb_get: dynamodbGetTool,
dynamodb_put: dynamodbPutTool,
dynamodb_query: dynamodbQueryTool,
dynamodb_scan: dynamodbScanTool,
dynamodb_update: dynamodbUpdateTool,
dynamodb_delete: dynamodbDeleteTool,
dynamodb_introspect: dynamodbIntrospectTool,
dropbox_upload: dropboxUploadTool,
dropbox_download: dropboxDownloadTool,
dropbox_list_folder: dropboxListFolderTool,
@@ -1742,17 +1754,20 @@ export const tools: Record<string, ToolConfig> = {
mongodb_update: mongodbUpdateTool,
mongodb_delete: mongodbDeleteTool,
mongodb_execute: mongodbExecuteTool,
mongodb_introspect: mongodbIntrospectTool,
mysql_query: mysqlQueryTool,
mysql_insert: mysqlInsertTool,
mysql_update: mysqlUpdateTool,
mysql_delete: mysqlDeleteTool,
mysql_execute: mysqlExecuteTool,
mysql_introspect: mysqlIntrospectTool,
neo4j_query: neo4jQueryTool,
neo4j_create: neo4jCreateTool,
neo4j_merge: neo4jMergeTool,
neo4j_update: neo4jUpdateTool,
neo4j_delete: neo4jDeleteTool,
neo4j_execute: neo4jExecuteTool,
neo4j_introspect: neo4jIntrospectTool,
github_pr: githubPrTool,
github_comment: githubCommentTool,
github_issue_comment: githubIssueCommentTool,
@@ -1844,6 +1859,7 @@ export const tools: Record<string, ToolConfig> = {
elasticsearch_create_index: elasticsearchCreateIndexTool,
elasticsearch_delete_index: elasticsearchDeleteIndexTool,
elasticsearch_get_index: elasticsearchGetIndexTool,
elasticsearch_list_indices: elasticsearchListIndicesTool,
elasticsearch_cluster_health: elasticsearchClusterHealthTool,
elasticsearch_cluster_stats: elasticsearchClusterStatsTool,
exa_search: exaSearchTool,

View File

@@ -2,6 +2,7 @@ import { countTool } from '@/tools/supabase/count'
import { deleteTool } from '@/tools/supabase/delete'
import { getRowTool } from '@/tools/supabase/get_row'
import { insertTool } from '@/tools/supabase/insert'
import { introspectTool } from '@/tools/supabase/introspect'
import { queryTool } from '@/tools/supabase/query'
import { rpcTool } from '@/tools/supabase/rpc'
import { storageCopyTool } from '@/tools/supabase/storage_copy'
@@ -28,6 +29,7 @@ export const supabaseDeleteTool = deleteTool
export const supabaseUpsertTool = upsertTool
export const supabaseVectorSearchTool = vectorSearchTool
export const supabaseRpcTool = rpcTool
export const supabaseIntrospectTool = introspectTool
export const supabaseTextSearchTool = textSearchTool
export const supabaseCountTool = countTool
export const supabaseStorageUploadTool = storageUploadTool

View File

@@ -0,0 +1,560 @@
import { createLogger } from '@sim/logger'
import type {
SupabaseColumnSchema,
SupabaseIntrospectParams,
SupabaseIntrospectResponse,
SupabaseTableSchema,
} from '@/tools/supabase/types'
import type { ToolConfig } from '@/tools/types'
const logger = createLogger('SupabaseIntrospect')
/**
* SQL query to introspect database schema
* This query retrieves all tables, columns, primary keys, foreign keys, and indexes
*/
const INTROSPECTION_SQL = `
WITH table_info AS (
SELECT
t.table_schema,
t.table_name
FROM information_schema.tables t
WHERE t.table_type = 'BASE TABLE'
AND t.table_schema NOT IN ('pg_catalog', 'information_schema', 'auth', 'storage', 'realtime', 'supabase_functions', 'supabase_migrations', 'extensions', 'graphql', 'graphql_public', 'pgsodium', 'pgsodium_masks', 'vault', 'pgbouncer', '_timescaledb_internal', '_timescaledb_config', '_timescaledb_catalog', '_timescaledb_cache')
),
columns_info AS (
SELECT
c.table_schema,
c.table_name,
c.column_name,
c.data_type,
c.is_nullable,
c.column_default,
c.ordinal_position
FROM information_schema.columns c
INNER JOIN table_info t ON c.table_schema = t.table_schema AND c.table_name = t.table_name
),
pk_info AS (
SELECT
tc.table_schema,
tc.table_name,
kcu.column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
WHERE tc.constraint_type = 'PRIMARY KEY'
),
fk_info AS (
SELECT
tc.table_schema,
tc.table_name,
kcu.column_name,
ccu.table_name AS foreign_table_name,
ccu.column_name AS foreign_column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
JOIN information_schema.constraint_column_usage ccu
ON ccu.constraint_name = tc.constraint_name
WHERE tc.constraint_type = 'FOREIGN KEY'
),
index_info AS (
SELECT
schemaname AS table_schema,
tablename AS table_name,
indexname AS index_name,
CASE WHEN indexdef LIKE '%UNIQUE%' THEN true ELSE false END AS is_unique,
indexdef
FROM pg_indexes
WHERE schemaname NOT IN ('pg_catalog', 'information_schema', 'auth', 'storage', 'realtime', 'supabase_functions', 'supabase_migrations', 'extensions', 'graphql', 'graphql_public', 'pgsodium', 'pgsodium_masks', 'vault', 'pgbouncer', '_timescaledb_internal', '_timescaledb_config', '_timescaledb_catalog', '_timescaledb_cache')
)
SELECT json_build_object(
'tables', (
SELECT json_agg(
json_build_object(
'schema', t.table_schema,
'name', t.table_name,
'columns', (
SELECT json_agg(
json_build_object(
'name', c.column_name,
'type', c.data_type,
'nullable', c.is_nullable = 'YES',
'default', c.column_default,
'isPrimaryKey', EXISTS (
SELECT 1 FROM pk_info pk
WHERE pk.table_schema = c.table_schema
AND pk.table_name = c.table_name
AND pk.column_name = c.column_name
),
'isForeignKey', EXISTS (
SELECT 1 FROM fk_info fk
WHERE fk.table_schema = c.table_schema
AND fk.table_name = c.table_name
AND fk.column_name = c.column_name
),
'references', (
SELECT json_build_object('table', fk.foreign_table_name, 'column', fk.foreign_column_name)
FROM fk_info fk
WHERE fk.table_schema = c.table_schema
AND fk.table_name = c.table_name
AND fk.column_name = c.column_name
LIMIT 1
)
)
ORDER BY c.ordinal_position
)
FROM columns_info c
WHERE c.table_schema = t.table_schema AND c.table_name = t.table_name
),
'primaryKey', (
SELECT COALESCE(json_agg(pk.column_name), '[]'::json)
FROM pk_info pk
WHERE pk.table_schema = t.table_schema AND pk.table_name = t.table_name
),
'foreignKeys', (
SELECT COALESCE(json_agg(
json_build_object(
'column', fk.column_name,
'referencesTable', fk.foreign_table_name,
'referencesColumn', fk.foreign_column_name
)
), '[]'::json)
FROM fk_info fk
WHERE fk.table_schema = t.table_schema AND fk.table_name = t.table_name
),
'indexes', (
SELECT COALESCE(json_agg(
json_build_object(
'name', idx.index_name,
'unique', idx.is_unique,
'definition', idx.indexdef
)
), '[]'::json)
FROM index_info idx
WHERE idx.table_schema = t.table_schema AND idx.table_name = t.table_name
)
)
)
FROM table_info t
),
'schemas', (
SELECT COALESCE(json_agg(DISTINCT table_schema), '[]'::json)
FROM table_info
)
) AS result;
`
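// The statement returns a single row whose "result" column is JSON shaped like
// (illustrative): { "tables": [{ "schema": "public", "name": "users",
// "columns": [...], "primaryKey": ["id"], "foreignKeys": [...], "indexes": [...] }],
// "schemas": ["public"] }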
/**
* Validates and escapes a value for use inside single-quoted SQL strings.
* Rejects empty values and values longer than 63 characters (PostgreSQL's
* identifier length limit), then doubles any embedded single quotes,
* e.g. escapeSqlString("o'reilly") returns "o''reilly".
*/
function escapeSqlString(value: string): string {
if (!value || value.length > 63) {
throw new Error(`Invalid value: ${value}`)
}
return value.replace(/'/g, "''")
}
/**
* SQL query filtered by specific schema
*/
const getSchemaFilteredSQL = (schema: string) => {
const safeSchema = escapeSqlString(schema)
return `
WITH table_info AS (
SELECT
t.table_schema,
t.table_name
FROM information_schema.tables t
WHERE t.table_type = 'BASE TABLE'
AND t.table_schema = '${safeSchema}'
),
columns_info AS (
SELECT
c.table_schema,
c.table_name,
c.column_name,
c.data_type,
c.is_nullable,
c.column_default,
c.ordinal_position
FROM information_schema.columns c
INNER JOIN table_info t ON c.table_schema = t.table_schema AND c.table_name = t.table_name
),
pk_info AS (
SELECT
tc.table_schema,
tc.table_name,
kcu.column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
WHERE tc.constraint_type = 'PRIMARY KEY'
AND tc.table_schema = '${safeSchema}'
),
fk_info AS (
SELECT
tc.table_schema,
tc.table_name,
kcu.column_name,
ccu.table_name AS foreign_table_name,
ccu.column_name AS foreign_column_name
FROM information_schema.table_constraints tc
JOIN information_schema.key_column_usage kcu
ON tc.constraint_name = kcu.constraint_name
AND tc.table_schema = kcu.table_schema
JOIN information_schema.constraint_column_usage ccu
ON ccu.constraint_name = tc.constraint_name
WHERE tc.constraint_type = 'FOREIGN KEY'
AND tc.table_schema = '${safeSchema}'
),
index_info AS (
SELECT
schemaname AS table_schema,
tablename AS table_name,
indexname AS index_name,
CASE WHEN indexdef LIKE '%UNIQUE%' THEN true ELSE false END AS is_unique,
indexdef
FROM pg_indexes
WHERE schemaname = '${safeSchema}'
)
SELECT json_build_object(
'tables', (
SELECT json_agg(
json_build_object(
'schema', t.table_schema,
'name', t.table_name,
'columns', (
SELECT json_agg(
json_build_object(
'name', c.column_name,
'type', c.data_type,
'nullable', c.is_nullable = 'YES',
'default', c.column_default,
'isPrimaryKey', EXISTS (
SELECT 1 FROM pk_info pk
WHERE pk.table_schema = c.table_schema
AND pk.table_name = c.table_name
AND pk.column_name = c.column_name
),
'isForeignKey', EXISTS (
SELECT 1 FROM fk_info fk
WHERE fk.table_schema = c.table_schema
AND fk.table_name = c.table_name
AND fk.column_name = c.column_name
),
'references', (
SELECT json_build_object('table', fk.foreign_table_name, 'column', fk.foreign_column_name)
FROM fk_info fk
WHERE fk.table_schema = c.table_schema
AND fk.table_name = c.table_name
AND fk.column_name = c.column_name
LIMIT 1
)
)
ORDER BY c.ordinal_position
)
FROM columns_info c
WHERE c.table_schema = t.table_schema AND c.table_name = t.table_name
),
'primaryKey', (
SELECT COALESCE(json_agg(pk.column_name), '[]'::json)
FROM pk_info pk
WHERE pk.table_schema = t.table_schema AND pk.table_name = t.table_name
),
'foreignKeys', (
SELECT COALESCE(json_agg(
json_build_object(
'column', fk.column_name,
'referencesTable', fk.foreign_table_name,
'referencesColumn', fk.foreign_column_name
)
), '[]'::json)
FROM fk_info fk
WHERE fk.table_schema = t.table_schema AND fk.table_name = t.table_name
),
'indexes', (
SELECT COALESCE(json_agg(
json_build_object(
'name', idx.index_name,
'unique', idx.is_unique,
'definition', idx.indexdef
)
), '[]'::json)
FROM index_info idx
WHERE idx.table_schema = t.table_schema AND idx.table_name = t.table_name
)
)
)
FROM table_info t
),
'schemas', (
SELECT COALESCE(json_agg(DISTINCT table_schema), '[]'::json)
FROM table_info
)
) AS result;
`
}
/**
 * Tool for introspecting a Supabase database schema
 * Uses raw SQL execution via PostgREST to retrieve table structures, falling
 * back to the PostgREST OpenAPI document when SQL execution is unavailable
 */
export const introspectTool: ToolConfig<SupabaseIntrospectParams, SupabaseIntrospectResponse> = {
id: 'supabase_introspect',
name: 'Supabase Introspect',
description:
'Introspect Supabase database schema to get table structures, columns, and relationships',
version: '1.0',
params: {
projectId: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Supabase project ID (e.g., jdrkgepadsdopsntdlom)',
},
schema: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description:
'Database schema to introspect (defaults to all user schemas, commonly "public")',
},
apiKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Supabase service role secret key',
},
},
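// Default HTTP request shape for the tool framework; directExecution below
// implements the full introspection flow, including the OpenAPI fallback.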
request: {
url: (params) => {
return `https://${params.projectId}.supabase.co/rest/v1/rpc/`
},
method: 'POST',
headers: (params) => ({
apikey: params.apiKey,
Authorization: `Bearer ${params.apiKey}`,
'Content-Type': 'application/json',
}),
body: () => ({}),
},
directExecution: async (
params: SupabaseIntrospectParams
): Promise<SupabaseIntrospectResponse> => {
const { apiKey, projectId, schema } = params
try {
const sqlQuery = schema ? getSchemaFilteredSQL(schema) : INTROSPECTION_SQL
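// Post the introspection SQL to the project's RPC endpoint; if the project
// does not expose such an endpoint, the OpenAPI fallback below takes over.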
const response = await fetch(`https://${projectId}.supabase.co/rest/v1/rpc/`, {
method: 'POST',
headers: {
apikey: apiKey,
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
Prefer: 'return=representation',
},
body: JSON.stringify({
query: sqlQuery,
}),
})
if (!response.ok) {
const errorText = await response.text()
logger.warn('Direct RPC call failed, attempting alternative approach', {
status: response.status,
})
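// Fall back to PostgREST's OpenAPI document, which describes every
// exposed table, and derive a best-effort schema from it.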
const pgResponse = await fetch(`https://${projectId}.supabase.co/rest/v1/?select=*`, {
method: 'GET',
headers: {
apikey: apiKey,
Authorization: `Bearer ${apiKey}`,
Accept: 'application/openapi+json',
},
})
if (!pgResponse.ok) {
throw new Error(`Failed to introspect database: ${errorText}`)
}
const openApiSpec = await pgResponse.json()
const tables = parseOpenApiSpec(openApiSpec, schema)
return {
success: true,
output: {
message: `Successfully introspected ${tables.length} table(s) from database schema`,
tables,
schemas: [...new Set(tables.map((t) => t.schema))],
},
}
}
const data = await response.json()
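// PostgREST returns RPC results as a single-row array; unwrap the
// json_build_object payload regardless of the envelope shape.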
const result = Array.isArray(data) && data.length > 0 ? data[0].result : data.result || data
const tables: SupabaseTableSchema[] = (result.tables || []).map((table: any) => ({
name: table.name,
schema: table.schema,
columns: (table.columns || []).map((col: any) => ({
name: col.name,
type: col.type,
nullable: col.nullable,
default: col.default,
isPrimaryKey: col.isPrimaryKey,
isForeignKey: col.isForeignKey,
references: col.references,
})),
primaryKey: table.primaryKey || [],
foreignKeys: table.foreignKeys || [],
indexes: (table.indexes || []).map((idx: any) => ({
name: idx.name,
columns: parseIndexColumns(idx.definition || ''),
unique: idx.unique,
})),
}))
return {
success: true,
output: {
message: `Successfully introspected ${tables.length} table(s) from database`,
tables,
schemas: result.schemas || [],
},
}
} catch (error) {
logger.error('Supabase introspection failed', { error })
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
return {
success: false,
output: {
message: 'Failed to introspect database schema',
tables: [],
schemas: [],
},
error: errorMessage,
}
}
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
message: 'Schema introspection completed',
tables: data.tables || [],
schemas: data.schemas || [],
},
}
},
outputs: {
message: { type: 'string', description: 'Operation status message' },
tables: {
type: 'array',
description: 'Array of table schemas with columns, keys, and indexes',
items: {
type: 'object',
properties: {
name: { type: 'string', description: 'Table name' },
schema: { type: 'string', description: 'Database schema name' },
columns: { type: 'array', description: 'Array of column definitions' },
primaryKey: { type: 'array', description: 'Array of primary key column names' },
foreignKeys: { type: 'array', description: 'Array of foreign key relationships' },
indexes: { type: 'array', description: 'Array of index definitions' },
},
},
},
schemas: { type: 'array', description: 'List of schemas found in the database' },
},
}
/**
 * Parse an index definition to extract column names. Best-effort: expression
 * indexes with nested parentheses are only partially parsed
 */
function parseIndexColumns(indexDef: string): string[] {
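// Grab the first parenthesized group, e.g. "(email)" from
// CREATE UNIQUE INDEX users_email_key ON public.users USING btree (email)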
const match = indexDef.match(/\(([^)]+)\)/)
if (match) {
return match[1].split(',').map((col) => col.trim().replace(/"/g, ''))
}
return []
}
/**
* Parse OpenAPI spec to extract table schema information
* This is a fallback method when direct SQL execution is not available
*/
function parseOpenApiSpec(spec: any, filterSchema?: string): SupabaseTableSchema[] {
const tables: SupabaseTableSchema[] = []
const definitions = spec.definitions || spec.components?.schemas || {}
for (const [tableName, tableDef] of Object.entries(definitions)) {
if (tableName.startsWith('_') || tableName === 'Error') continue
const definition = tableDef as any
const properties = definition.properties || {}
const required = definition.required || []
const columns: SupabaseColumnSchema[] = []
const primaryKey: string[] = []
const foreignKeys: Array<{
column: string
referencesTable: string
referencesColumn: string
}> = []
for (const [colName, colDef] of Object.entries(properties)) {
const col = colDef as any
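// Heuristics: PostgREST column descriptions note primary/foreign keys in
// prose; a bare "id" column is assumed to be a primary key as a fallback.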
const isPK = /primary key/i.test(col.description ?? '') || colName === 'id'
const fkMatch = col.description?.match(/references\s+(\w+)\.(\w+)/i)
const column: SupabaseColumnSchema = {
name: colName,
type: col.format || col.type || 'unknown',
nullable: !required.includes(colName),
default: col.default || null,
isPrimaryKey: isPK,
isForeignKey: !!fkMatch,
}
if (fkMatch) {
column.references = { table: fkMatch[1], column: fkMatch[2] }
foreignKeys.push({
column: colName,
referencesTable: fkMatch[1],
referencesColumn: fkMatch[2],
})
}
if (isPK) {
primaryKey.push(colName)
}
columns.push(column)
}
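// The OpenAPI document does not carry schema names, so every table is
// attributed to the requested schema (or 'public' by default).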
tables.push({
name: tableName,
schema: filterSchema || 'public',
columns,
primaryKey,
foreignKeys,
indexes: [],
})
}
return tables
}
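// Minimal usage sketch (illustrative only; the env variable name below is an
// assumption, not part of this file):
//
//   const result = await introspectTool.directExecution!({
//     projectId: 'jdrkgepadsdopsntdlom',
//     apiKey: process.env.SUPABASE_SERVICE_ROLE_KEY!,
//     schema: 'public',
//   })
//   if (result.success) {
//     for (const table of result.output.tables) {
//       console.log(`${table.schema}.${table.name}: ${table.columns.length} column(s)`)
//     }
//   }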

View File

@@ -274,3 +274,49 @@ export interface SupabaseStorageCreateSignedUrlResponse extends ToolResponse {
}
error?: string
}
/**
* Parameters for introspecting a Supabase database schema
*/
export interface SupabaseIntrospectParams {
apiKey: string
projectId: string
schema?: string
}
/**
* Column information for a database table
*/
export interface SupabaseColumnSchema {
name: string
type: string
nullable: boolean
default: string | null
isPrimaryKey: boolean
isForeignKey: boolean
references?: { table: string; column: string }
}
/**
* Table schema information including columns, keys, and indexes
*/
export interface SupabaseTableSchema {
name: string
schema: string
columns: SupabaseColumnSchema[]
primaryKey: string[]
foreignKeys: Array<{ column: string; referencesTable: string; referencesColumn: string }>
indexes: Array<{ name: string; columns: string[]; unique: boolean }>
}
/**
* Response from the introspect operation
*/
export interface SupabaseIntrospectResponse extends ToolResponse {
output: {
message: string
tables: SupabaseTableSchema[]
schemas: string[]
}
error?: string
}