mirror of
https://github.com/simstudioai/sim.git
synced 2026-01-11 16:08:04 -05:00
Compare commits
2 Commits
feat/aws-l...v0.2.9
| Author | SHA1 | Date |
|---|---|---|
|  | c2f786e40b |  |
|  | f3bc1fc250 |  |
@@ -194,6 +194,7 @@ export async function GET(
        description: deployment.description,
        customizations: deployment.customizations,
        authType: deployment.authType,
        outputConfigs: deployment.outputConfigs,
      }),
      request
    )
@@ -219,6 +220,7 @@ export async function GET(
        description: deployment.description,
        customizations: deployment.customizations,
        authType: deployment.authType,
        outputConfigs: deployment.outputConfigs,
      }),
      request
    )

@@ -263,17 +263,26 @@ export async function executeWorkflowForChat(
  let outputBlockIds: string[] = []

  // Extract output configs from the new schema format
  let selectedOutputIds: string[] = []
  if (deployment.outputConfigs && Array.isArray(deployment.outputConfigs)) {
    // Extract block IDs and paths from the new outputConfigs array format
    // Extract output IDs in the format expected by the streaming processor
    logger.debug(
      `[${requestId}] Found ${deployment.outputConfigs.length} output configs in deployment`
    )
    deployment.outputConfigs.forEach((config) => {

    selectedOutputIds = deployment.outputConfigs.map((config) => {
      const outputId = config.path
        ? `${config.blockId}_${config.path}`
        : `${config.blockId}.content`

      logger.debug(
        `[${requestId}] Processing output config: blockId=${config.blockId}, path=${config.path || 'none'}`
        `[${requestId}] Processing output config: blockId=${config.blockId}, path=${config.path || 'content'} -> outputId=${outputId}`
      )

      return outputId
    })

    // Also extract block IDs for legacy compatibility
    outputBlockIds = deployment.outputConfigs.map((config) => config.blockId)
  } else {
    // Use customizations as fallback
@@ -291,7 +300,9 @@ export async function executeWorkflowForChat(
    outputBlockIds = customizations.outputBlockIds
  }

  logger.debug(`[${requestId}] Using ${outputBlockIds.length} output blocks for extraction`)
  logger.debug(
    `[${requestId}] Using ${outputBlockIds.length} output blocks and ${selectedOutputIds.length} selected output IDs for extraction`
  )

  // Find the workflow (deployedState is NOT deprecated - needed for chat execution)
  const workflowResult = await db
@@ -457,7 +468,7 @@ export async function executeWorkflowForChat(
      workflowVariables,
      contextExtensions: {
        stream: true,
        selectedOutputIds: outputBlockIds,
        selectedOutputIds: selectedOutputIds.length > 0 ? selectedOutputIds : outputBlockIds,
        edges: edges.map((e: any) => ({
          source: e.source,
          target: e.target,

@@ -1,480 +0,0 @@
|
||||
import {
|
||||
ApiGatewayV2Client,
|
||||
CreateApiCommand,
|
||||
CreateIntegrationCommand,
|
||||
CreateRouteCommand,
|
||||
CreateStageCommand,
|
||||
GetApisCommand,
|
||||
GetIntegrationsCommand,
|
||||
GetRoutesCommand,
|
||||
GetStagesCommand,
|
||||
} from '@aws-sdk/client-apigatewayv2'
|
||||
import { AddPermissionCommand, GetFunctionCommand, LambdaClient } from '@aws-sdk/client-lambda'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('AWSLambdaDeployEndpointAPI')
|
||||
|
||||
// Validation schema for the request body
|
||||
const DeployEndpointRequestSchema = z.object({
|
||||
accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
|
||||
secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
|
||||
region: z.string().min(1, 'AWS Region is required'),
|
||||
functionName: z.string().min(1, 'Function name is required'),
|
||||
endpointName: z.string().min(1, 'Endpoint name is required'),
|
||||
role: z.string().min(1, 'Role ARN is required'),
|
||||
})
|
||||
|
||||
type DeployEndpointRequest = z.infer<typeof DeployEndpointRequestSchema>
|
||||
|
||||
interface DeployEndpointResponse {
|
||||
functionArn: string
|
||||
functionName: string
|
||||
endpointName: string
|
||||
endpointUrl: string
|
||||
region: string
|
||||
status: string
|
||||
lastModified: string
|
||||
apiGatewayId: string
|
||||
stageName: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a Lambda function exists
|
||||
*/
|
||||
async function checkFunctionExists(
|
||||
lambdaClient: LambdaClient,
|
||||
functionName: string
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
await lambdaClient.send(new GetFunctionCommand({ FunctionName: functionName }))
|
||||
return true
|
||||
} catch (error: any) {
|
||||
if (error.name === 'ResourceNotFoundException') {
|
||||
return false
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get Lambda function details
|
||||
*/
|
||||
async function getFunctionDetails(lambdaClient: LambdaClient, functionName: string): Promise<any> {
|
||||
return await lambdaClient.send(new GetFunctionCommand({ FunctionName: functionName }))
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if API Gateway HTTP API already exists
|
||||
*/
|
||||
async function checkApiExists(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiName: string
|
||||
): Promise<string | null> {
|
||||
try {
|
||||
const apis = await apiGatewayClient.send(new GetApisCommand({}))
|
||||
const existingApi = apis.Items?.find((api: any) => api.Name === apiName)
|
||||
return existingApi?.ApiId || null
|
||||
} catch (error) {
|
||||
logger.error('Error checking for existing API', { error })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a route already exists for the API Gateway
|
||||
*/
|
||||
async function checkRouteExists(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
routeKey: string
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const routes = await apiGatewayClient.send(new GetRoutesCommand({ ApiId: apiId }))
|
||||
return routes.Items?.some((route: any) => route.RouteKey === routeKey) || false
|
||||
} catch (error) {
|
||||
logger.error('Error checking for existing route', { error })
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an integration already exists for the API Gateway
|
||||
*/
|
||||
async function checkIntegrationExists(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
functionArn: string
|
||||
): Promise<string | null> {
|
||||
try {
|
||||
const integrations = await apiGatewayClient.send(new GetIntegrationsCommand({ ApiId: apiId }))
|
||||
const existingIntegration = integrations.Items?.find(
|
||||
(integration) => integration.IntegrationUri === functionArn
|
||||
)
|
||||
return existingIntegration?.IntegrationId || null
|
||||
} catch (error) {
|
||||
logger.error('Error checking for existing integration', { error })
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new API Gateway HTTP API
|
||||
*/
|
||||
async function createApiGateway(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiName: string
|
||||
): Promise<string> {
|
||||
const createApiResponse = await apiGatewayClient.send(
|
||||
new CreateApiCommand({
|
||||
Name: apiName,
|
||||
ProtocolType: 'HTTP',
|
||||
Description: `HTTP API for Lambda function ${apiName}`,
|
||||
})
|
||||
)
|
||||
|
||||
if (!createApiResponse.ApiId) {
|
||||
throw new Error('Failed to create API Gateway - no ID returned')
|
||||
}
|
||||
|
||||
return createApiResponse.ApiId
|
||||
}
|
||||
|
||||
/**
|
||||
* Create API Gateway integration with Lambda
|
||||
*/
|
||||
async function createApiIntegration(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
functionArn: string
|
||||
): Promise<string> {
|
||||
const integration = await apiGatewayClient.send(
|
||||
new CreateIntegrationCommand({
|
||||
ApiId: apiId,
|
||||
IntegrationType: 'AWS_PROXY',
|
||||
IntegrationUri: functionArn,
|
||||
IntegrationMethod: 'POST',
|
||||
PayloadFormatVersion: '2.0',
|
||||
})
|
||||
)
|
||||
|
||||
if (!integration.IntegrationId) {
|
||||
throw new Error('Failed to create integration - no ID returned')
|
||||
}
|
||||
|
||||
return integration.IntegrationId
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a route for the API Gateway
|
||||
*/
|
||||
async function createApiRoute(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
integrationId: string
|
||||
): Promise<void> {
|
||||
await apiGatewayClient.send(
|
||||
new CreateRouteCommand({
|
||||
ApiId: apiId,
|
||||
RouteKey: 'ANY /',
|
||||
Target: `integrations/${integrationId}`,
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Add Lambda permission for API Gateway
|
||||
*/
|
||||
async function addLambdaPermission(
|
||||
lambdaClient: LambdaClient,
|
||||
functionName: string,
|
||||
apiId: string,
|
||||
region: string,
|
||||
accountId: string
|
||||
): Promise<void> {
|
||||
try {
|
||||
await lambdaClient.send(
|
||||
new AddPermissionCommand({
|
||||
FunctionName: functionName,
|
||||
StatementId: `api-gateway-${apiId}`,
|
||||
Action: 'lambda:InvokeFunction',
|
||||
Principal: 'apigateway.amazonaws.com',
|
||||
SourceArn: `arn:aws:execute-api:${region}:${accountId}:${apiId}/*/*`,
|
||||
})
|
||||
)
|
||||
} catch (error: any) {
|
||||
// If permission already exists, that's fine
|
||||
if (error.name !== 'ResourceConflictException') {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a stage exists for the API Gateway
|
||||
*/
|
||||
async function checkStageExists(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
stageName: string
|
||||
): Promise<boolean> {
|
||||
try {
|
||||
const stages = await apiGatewayClient.send(
|
||||
new GetStagesCommand({
|
||||
ApiId: apiId,
|
||||
})
|
||||
)
|
||||
return stages.Items?.some((stage: any) => stage.StageName === stageName) || false
|
||||
} catch (error) {
|
||||
logger.error('Error checking for existing stage', { error })
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a stage for the API Gateway
|
||||
*/
|
||||
async function createApiStage(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string
|
||||
): Promise<string> {
|
||||
const stageName = 'prod'
|
||||
|
||||
// Check if stage already exists
|
||||
const stageExists = await checkStageExists(apiGatewayClient, apiId, stageName)
|
||||
|
||||
if (stageExists) {
|
||||
logger.info(`Stage ${stageName} already exists for API ${apiId}`)
|
||||
return stageName
|
||||
}
|
||||
|
||||
logger.info(`Creating new stage ${stageName} for API ${apiId}`)
|
||||
const stage = await apiGatewayClient.send(
|
||||
new CreateStageCommand({
|
||||
ApiId: apiId,
|
||||
StageName: stageName,
|
||||
AutoDeploy: true,
|
||||
})
|
||||
)
|
||||
|
||||
return stage.StageName || stageName
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure API is deployed by waiting for deployment to complete
|
||||
*/
|
||||
async function ensureApiDeployed(
|
||||
apiGatewayClient: ApiGatewayV2Client,
|
||||
apiId: string,
|
||||
stageName: string
|
||||
): Promise<void> {
|
||||
// In API Gateway v2, AutoDeploy: true should handle deployment automatically
|
||||
// But we can add a small delay to ensure the deployment completes
|
||||
await new Promise((resolve) => setTimeout(resolve, 2000))
|
||||
|
||||
logger.info(`API Gateway deployment completed for API ${apiId}, stage ${stageName}`)
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
logger.info(`[${requestId}] Processing AWS Lambda deploy endpoint request`)
|
||||
|
||||
// Parse and validate request body
|
||||
let body: any
|
||||
try {
|
||||
body = await request.json()
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse request body`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
})
|
||||
return createErrorResponse('Invalid JSON in request body', 400, 'INVALID_JSON')
|
||||
}
|
||||
|
||||
// Log the raw request body for debugging
|
||||
logger.info(`[${requestId}] Raw request body received`, {
|
||||
body: JSON.stringify(body, null, 2),
|
||||
})
|
||||
|
||||
const validationResult = DeployEndpointRequestSchema.safeParse(body)
|
||||
if (!validationResult.success) {
|
||||
logger.warn(`[${requestId}] Invalid request body`, { errors: validationResult.error.errors })
|
||||
return createErrorResponse('Invalid request parameters', 400, 'VALIDATION_ERROR')
|
||||
}
|
||||
|
||||
const params = validationResult.data
|
||||
|
||||
// Log the deployment payload (excluding sensitive credentials)
|
||||
logger.info(`[${requestId}] AWS Lambda deploy endpoint payload received`, {
|
||||
functionName: params.functionName,
|
||||
endpointName: params.endpointName,
|
||||
region: params.region,
|
||||
accessKeyId: params.accessKeyId ? `${params.accessKeyId.substring(0, 4)}...` : undefined,
|
||||
hasSecretAccessKey: !!params.secretAccessKey,
|
||||
hasRole: !!params.role,
|
||||
role: params.role ? `${params.role.substring(0, 20)}...` : undefined,
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Deploying Lambda function as endpoint: ${params.functionName}`)
|
||||
|
||||
// Create Lambda client
|
||||
const lambdaClient = new LambdaClient({
|
||||
region: params.region,
|
||||
credentials: {
|
||||
accessKeyId: params.accessKeyId,
|
||||
secretAccessKey: params.secretAccessKey,
|
||||
},
|
||||
})
|
||||
|
||||
// Create API Gateway v2 client
|
||||
const apiGatewayClient = new ApiGatewayV2Client({
|
||||
region: params.region,
|
||||
credentials: {
|
||||
accessKeyId: params.accessKeyId,
|
||||
secretAccessKey: params.secretAccessKey,
|
||||
},
|
||||
})
|
||||
|
||||
// Check if Lambda function exists
|
||||
const functionExists = await checkFunctionExists(lambdaClient, params.functionName)
|
||||
if (!functionExists) {
|
||||
logger.error(`[${requestId}] Lambda function ${params.functionName} does not exist`)
|
||||
return createErrorResponse(
|
||||
`Lambda function ${params.functionName} does not exist. Please deploy the function first.`,
|
||||
404,
|
||||
'FUNCTION_NOT_FOUND'
|
||||
)
|
||||
}
|
||||
|
||||
// Get function details
|
||||
const functionDetails = await getFunctionDetails(lambdaClient, params.functionName)
|
||||
const functionArn = functionDetails.Configuration?.FunctionArn
|
||||
|
||||
if (!functionArn) {
|
||||
logger.error(`[${requestId}] Failed to get function ARN for ${params.functionName}`)
|
||||
return createErrorResponse('Failed to get function ARN', 500, 'FUNCTION_ARN_ERROR')
|
||||
}
|
||||
|
||||
// Extract account ID from function ARN
|
||||
const accountId = functionArn.split(':')[4]
|
||||
if (!accountId) {
|
||||
logger.error(`[${requestId}] Failed to extract account ID from function ARN: ${functionArn}`)
|
||||
return createErrorResponse(
|
||||
'Failed to extract account ID from function ARN',
|
||||
500,
|
||||
'ACCOUNT_ID_ERROR'
|
||||
)
|
||||
}
|
||||
|
||||
// Check if API Gateway already exists
|
||||
let apiId = await checkApiExists(apiGatewayClient, params.endpointName)
|
||||
|
||||
if (!apiId) {
|
||||
logger.info(`[${requestId}] Creating new API Gateway HTTP API: ${params.endpointName}`)
|
||||
apiId = await createApiGateway(apiGatewayClient, params.endpointName)
|
||||
} else {
|
||||
logger.info(
|
||||
`[${requestId}] Using existing API Gateway HTTP API: ${params.endpointName} (${apiId})`
|
||||
)
|
||||
}
|
||||
|
||||
// Check if integration already exists before creating a new one
|
||||
let integrationId = await checkIntegrationExists(apiGatewayClient, apiId, functionArn)
|
||||
|
||||
if (integrationId) {
|
||||
logger.info(
|
||||
`[${requestId}] Integration for function ${params.functionName} already exists for API ${apiId}, using existing integration`
|
||||
)
|
||||
} else {
|
||||
logger.info(`[${requestId}] Creating API Gateway integration`)
|
||||
integrationId = await createApiIntegration(apiGatewayClient, apiId, functionArn)
|
||||
}
|
||||
|
||||
// Check if route already exists before creating a new one
|
||||
const routeKey = 'ANY /'
|
||||
const routeExists = await checkRouteExists(apiGatewayClient, apiId, routeKey)
|
||||
|
||||
if (routeExists) {
|
||||
logger.info(
|
||||
`[${requestId}] Route ${routeKey} already exists for API ${apiId}, skipping route creation`
|
||||
)
|
||||
} else {
|
||||
logger.info(`[${requestId}] Creating API Gateway route`)
|
||||
await createApiRoute(apiGatewayClient, apiId, integrationId)
|
||||
}
|
||||
|
||||
// Add Lambda permission for API Gateway
|
||||
logger.info(`[${requestId}] Adding Lambda permission for API Gateway`)
|
||||
await addLambdaPermission(lambdaClient, params.functionName, apiId, params.region, accountId)
|
||||
|
||||
// Create stage for the API Gateway
|
||||
logger.info(`[${requestId}] Creating API Gateway stage`)
|
||||
const stageName = await createApiStage(apiGatewayClient, apiId)
|
||||
|
||||
if (!stageName) {
|
||||
logger.error(`[${requestId}] Failed to create or get stage for API ${apiId}`)
|
||||
return createErrorResponse('Failed to create API Gateway stage', 500, 'STAGE_CREATION_ERROR')
|
||||
}
|
||||
|
||||
// Ensure API is deployed
|
||||
logger.info(`[${requestId}] Ensuring API Gateway deployment is complete`)
|
||||
await ensureApiDeployed(apiGatewayClient, apiId, stageName)
|
||||
|
||||
// Construct the endpoint URL
|
||||
const endpointUrl = `https://${apiId}.execute-api.${params.region}.amazonaws.com/${stageName}/`
|
||||
|
||||
const response: DeployEndpointResponse = {
|
||||
functionArn,
|
||||
functionName: params.functionName,
|
||||
endpointName: params.endpointName,
|
||||
endpointUrl,
|
||||
region: params.region,
|
||||
status: 'ACTIVE',
|
||||
lastModified: new Date().toISOString(),
|
||||
apiGatewayId: apiId,
|
||||
stageName,
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Lambda function endpoint deployment completed successfully`, {
|
||||
functionName: params.functionName,
|
||||
endpointName: params.endpointName,
|
||||
endpointUrl,
|
||||
apiGatewayId: apiId,
|
||||
})
|
||||
|
||||
return createSuccessResponse({
|
||||
success: true,
|
||||
output: response,
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error deploying Lambda function endpoint`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
// Handle specific AWS errors
|
||||
let errorMessage = 'Failed to deploy Lambda function endpoint'
|
||||
let statusCode = 500
|
||||
|
||||
if (error.name === 'AccessDeniedException') {
|
||||
errorMessage = 'Access denied. Please check your AWS credentials and permissions.'
|
||||
statusCode = 403
|
||||
} else if (error.name === 'InvalidParameterValueException') {
|
||||
errorMessage = `Invalid parameter: ${error.message}`
|
||||
statusCode = 400
|
||||
} else if (error.name === 'ResourceConflictException') {
|
||||
errorMessage = 'Resource conflict. The API may be in use or being updated.'
|
||||
statusCode = 409
|
||||
} else if (error.name === 'ServiceException') {
|
||||
errorMessage = 'AWS service error. Please try again later.'
|
||||
statusCode = 503
|
||||
} else if (error instanceof Error) {
|
||||
errorMessage = error.message
|
||||
}
|
||||
|
||||
return createErrorResponse(errorMessage, statusCode, 'DEPLOYMENT_ERROR')
|
||||
}
|
||||
}
|
||||
@@ -1,442 +0,0 @@
|
||||
import { promises as fs } from 'fs'
|
||||
import { tmpdir } from 'os'
|
||||
import { join } from 'path'
|
||||
import { GetFunctionCommand, LambdaClient } from '@aws-sdk/client-lambda'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
const logger = createLogger('AWSLambdaDeployAPI')
|
||||
|
||||
// Validation schema for the request body
|
||||
const DeployRequestSchema = z.object({
|
||||
accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
|
||||
secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
|
||||
region: z.string().min(1, 'AWS Region is required'),
|
||||
functionName: z.string().min(1, 'Function name is required'),
|
||||
handler: z.string().optional(),
|
||||
runtime: z.string().min(1, 'Runtime is required'),
|
||||
code: z
|
||||
.record(z.string())
|
||||
.refine((val) => Object.keys(val).length > 0, 'At least one code file is required'),
|
||||
|
||||
timeout: z.coerce.number().min(1).max(900).optional().default(3),
|
||||
memorySize: z.coerce.number().min(128).max(10240).optional().default(128),
|
||||
environmentVariables: z.record(z.string()).default({}),
|
||||
tags: z.record(z.string()).default({}),
|
||||
role: z.string().min(1, 'Role ARN is required'),
|
||||
})
|
||||
|
||||
type DeployRequest = z.infer<typeof DeployRequestSchema>
|
||||
|
||||
interface LambdaFunctionDetails {
|
||||
functionArn: string
|
||||
functionName: string
|
||||
runtime: string
|
||||
region: string
|
||||
status: string
|
||||
lastModified: string
|
||||
codeSize: number
|
||||
description: string
|
||||
timeout: number
|
||||
memorySize: number
|
||||
environment: Record<string, string>
|
||||
tags: Record<string, string>
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the appropriate file extension for the given runtime
|
||||
*/
|
||||
function getFileExtension(runtime: string): string {
|
||||
if (runtime.startsWith('nodejs')) return 'js'
|
||||
if (runtime.startsWith('python')) return 'py'
|
||||
if (runtime.startsWith('java')) return 'java'
|
||||
if (runtime.startsWith('dotnet')) return 'cs'
|
||||
if (runtime.startsWith('go')) return 'go'
|
||||
if (runtime.startsWith('ruby')) return 'rb'
|
||||
return 'js' // default
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize function name for SAM/CloudFormation resource naming
|
||||
* SAM resource names must be alphanumeric only (letters and numbers)
|
||||
*/
|
||||
function sanitizeResourceName(functionName: string): string {
|
||||
return (
|
||||
functionName
|
||||
.replace(/[^a-zA-Z0-9]/g, '') // Remove all non-alphanumeric characters
|
||||
.replace(/^(\d)/, 'Func$1') // Ensure it starts with a letter if it starts with a number
|
||||
.substring(0, 64) || // Ensure reasonable length limit
|
||||
'LambdaFunction'
|
||||
) // Fallback if name becomes empty
|
||||
}
|
||||
|
||||
/**
|
||||
* Create SAM template for the Lambda function
|
||||
*/
|
||||
function createSamTemplate(params: DeployRequest): string {
|
||||
// Sanitize the function name for CloudFormation resource naming
|
||||
const resourceName = sanitizeResourceName(params.functionName)
|
||||
|
||||
const template = {
|
||||
AWSTemplateFormatVersion: '2010-09-09',
|
||||
Transform: 'AWS::Serverless-2016-10-31',
|
||||
Resources: {
|
||||
[resourceName]: {
|
||||
Type: 'AWS::Serverless::Function',
|
||||
Properties: {
|
||||
FunctionName: params.functionName, // Use original function name for actual Lambda function
|
||||
CodeUri: './src',
|
||||
Handler: params.handler,
|
||||
Runtime: params.runtime,
|
||||
Role: params.role,
|
||||
Timeout: params.timeout,
|
||||
MemorySize: params.memorySize,
|
||||
Environment: {
|
||||
Variables: params.environmentVariables,
|
||||
},
|
||||
Tags: params.tags,
|
||||
},
|
||||
},
|
||||
},
|
||||
Outputs: {
|
||||
FunctionArn: {
|
||||
Value: { 'Fn::GetAtt': [resourceName, 'Arn'] },
|
||||
Export: { Name: `${params.functionName}-Arn` },
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return JSON.stringify(template, null, 2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a shell command and return the result
|
||||
*/
|
||||
async function execCommand(
|
||||
command: string,
|
||||
cwd: string,
|
||||
env?: Record<string, string>
|
||||
): Promise<{ stdout: string; stderr: string }> {
|
||||
const { exec } = await import('child_process')
|
||||
const { promisify } = await import('util')
|
||||
const execAsync = promisify(exec)
|
||||
|
||||
return await execAsync(command, {
|
||||
cwd,
|
||||
env: env ? { ...process.env, ...env } : process.env,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Deploy Lambda function using SAM CLI
|
||||
*/
|
||||
async function deployWithSam(
|
||||
params: DeployRequest,
|
||||
requestId: string
|
||||
): Promise<LambdaFunctionDetails> {
|
||||
const tempDir = join(tmpdir(), `lambda-deploy-${requestId}`)
|
||||
const srcDir = join(tempDir, 'src')
|
||||
|
||||
try {
|
||||
// Create temporary directory structure
|
||||
await fs.mkdir(tempDir, { recursive: true })
|
||||
await fs.mkdir(srcDir, { recursive: true })
|
||||
|
||||
logger.info(`[${requestId}] Created temporary directory: ${tempDir}`)
|
||||
|
||||
// Write SAM template
|
||||
const samTemplate = createSamTemplate(params)
|
||||
await fs.writeFile(join(tempDir, 'template.yaml'), samTemplate)
|
||||
|
||||
logger.info(`[${requestId}] Created SAM template`)
|
||||
|
||||
// Write source code files
|
||||
for (const [filePath, codeContent] of Object.entries(params.code)) {
|
||||
const fullPath = join(srcDir, filePath)
|
||||
const fileDir = join(fullPath, '..')
|
||||
|
||||
// Ensure directory exists
|
||||
await fs.mkdir(fileDir, { recursive: true })
|
||||
await fs.writeFile(fullPath, codeContent)
|
||||
|
||||
logger.info(`[${requestId}] Created source file: ${filePath}`)
|
||||
}
|
||||
|
||||
// Set AWS credentials in environment
|
||||
const env = {
|
||||
AWS_ACCESS_KEY_ID: params.accessKeyId,
|
||||
AWS_SECRET_ACCESS_KEY: params.secretAccessKey,
|
||||
AWS_DEFAULT_REGION: params.region,
|
||||
}
|
||||
|
||||
// Build the SAM application
|
||||
logger.info(`[${requestId}] Building SAM application...`)
|
||||
const buildCommand = 'sam build --no-cached'
|
||||
const buildResult = await execCommand(buildCommand, tempDir, env)
|
||||
|
||||
logger.info(`[${requestId}] SAM build output:`, {
|
||||
stdout: buildResult.stdout,
|
||||
stderr: buildResult.stderr,
|
||||
})
|
||||
|
||||
if (buildResult.stderr && !buildResult.stderr.includes('Successfully built')) {
|
||||
logger.warn(`[${requestId}] SAM build warnings:`, { stderr: buildResult.stderr })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] SAM build completed`)
|
||||
|
||||
// Deploy the SAM application
|
||||
logger.info(`[${requestId}] Deploying SAM application...`)
|
||||
const stackName = `${sanitizeResourceName(params.functionName)}Stack`
|
||||
const deployCommand = [
|
||||
'sam deploy',
|
||||
'--no-confirm-changeset',
|
||||
'--no-fail-on-empty-changeset',
|
||||
`--stack-name ${stackName}`,
|
||||
`--region ${params.region}`,
|
||||
'--resolve-s3',
|
||||
'--capabilities CAPABILITY_IAM',
|
||||
'--no-progressbar',
|
||||
].join(' ')
|
||||
|
||||
const deployResult = await execCommand(deployCommand, tempDir, env)
|
||||
|
||||
logger.info(`[${requestId}] SAM deploy output:`, {
|
||||
stdout: deployResult.stdout,
|
||||
stderr: deployResult.stderr,
|
||||
})
|
||||
|
||||
if (
|
||||
deployResult.stderr &&
|
||||
!deployResult.stderr.includes('Successfully created/updated stack')
|
||||
) {
|
||||
logger.warn(`[${requestId}] SAM deploy warnings:`, { stderr: deployResult.stderr })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] SAM deploy completed`)
|
||||
|
||||
// Get function details using AWS SDK
|
||||
const lambdaClient = new LambdaClient({
|
||||
region: params.region,
|
||||
credentials: {
|
||||
accessKeyId: params.accessKeyId,
|
||||
secretAccessKey: params.secretAccessKey,
|
||||
},
|
||||
})
|
||||
|
||||
const functionDetails = await getFunctionDetails(
|
||||
lambdaClient,
|
||||
params.functionName,
|
||||
params.region
|
||||
)
|
||||
|
||||
return functionDetails
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error during SAM deployment`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
throw error
|
||||
} finally {
|
||||
// Clean up temporary directory
|
||||
try {
|
||||
await fs.rm(tempDir, { recursive: true, force: true })
|
||||
logger.info(`[${requestId}] Cleaned up temporary directory: ${tempDir}`)
|
||||
} catch (cleanupError) {
|
||||
logger.warn(`[${requestId}] Failed to clean up temporary directory`, {
|
||||
error: cleanupError instanceof Error ? cleanupError.message : String(cleanupError),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get detailed information about a Lambda function
|
||||
*/
|
||||
async function getFunctionDetails(
|
||||
lambdaClient: LambdaClient,
|
||||
functionName: string,
|
||||
region: string
|
||||
): Promise<LambdaFunctionDetails> {
|
||||
const functionDetails = await lambdaClient.send(
|
||||
new GetFunctionCommand({ FunctionName: functionName })
|
||||
)
|
||||
|
||||
return {
|
||||
functionArn: functionDetails.Configuration?.FunctionArn || '',
|
||||
functionName: functionDetails.Configuration?.FunctionName || '',
|
||||
runtime: functionDetails.Configuration?.Runtime || '',
|
||||
region,
|
||||
status: functionDetails.Configuration?.State || '',
|
||||
lastModified: functionDetails.Configuration?.LastModified || '',
|
||||
codeSize: functionDetails.Configuration?.CodeSize || 0,
|
||||
description: functionDetails.Configuration?.Description || '',
|
||||
timeout: functionDetails.Configuration?.Timeout || 0,
|
||||
memorySize: functionDetails.Configuration?.MemorySize || 0,
|
||||
environment: functionDetails.Configuration?.Environment?.Variables || {},
|
||||
tags: functionDetails.Tags || {},
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
logger.info(`[${requestId}] Processing AWS Lambda deployment request`)
|
||||
|
||||
// Parse and validate request body
|
||||
let body: any
|
||||
try {
|
||||
body = await request.json()
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse request body`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
})
|
||||
return createErrorResponse('Invalid JSON in request body', 400, 'INVALID_JSON')
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Request body received:`, {
|
||||
body,
|
||||
codeType: typeof body.code,
|
||||
codeValue: body.code,
|
||||
})
|
||||
|
||||
// Parse the code field if it's a JSON string
|
||||
if (typeof body.code === 'string') {
|
||||
try {
|
||||
body.code = JSON.parse(body.code)
|
||||
logger.info(`[${requestId}] Parsed code field:`, { parsedCode: body.code })
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse code field as JSON`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
codeString: body.code,
|
||||
})
|
||||
return createErrorResponse('Invalid JSON in code field', 400, 'INVALID_CODE_JSON')
|
||||
}
|
||||
}
|
||||
|
||||
// Runtime field should be a string, no JSON parsing needed
|
||||
if (typeof body.runtime !== 'string') {
|
||||
logger.error(`[${requestId}] Runtime field must be a string`, {
|
||||
runtimeType: typeof body.runtime,
|
||||
runtimeValue: body.runtime,
|
||||
})
|
||||
return createErrorResponse('Runtime field must be a string', 400, 'INVALID_RUNTIME_TYPE')
|
||||
}
|
||||
|
||||
// Parse the timeout field if it's a JSON string
|
||||
if (typeof body.timeout === 'string') {
|
||||
try {
|
||||
body.timeout = JSON.parse(body.timeout)
|
||||
logger.info(`[${requestId}] Parsed timeout field:`, { parsedTimeout: body.timeout })
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse timeout field as JSON`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
timeoutString: body.timeout,
|
||||
})
|
||||
return createErrorResponse('Invalid JSON in timeout field', 400, 'INVALID_TIMEOUT_JSON')
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the memorySize field if it's a JSON string
|
||||
if (typeof body.memorySize === 'string') {
|
||||
try {
|
||||
body.memorySize = JSON.parse(body.memorySize)
|
||||
logger.info(`[${requestId}] Parsed memorySize field:`, {
|
||||
parsedMemorySize: body.memorySize,
|
||||
})
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse memorySize field as JSON`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
memorySizeString: body.memorySize,
|
||||
})
|
||||
return createErrorResponse(
|
||||
'Invalid JSON in memorySize field',
|
||||
400,
|
||||
'INVALID_MEMORYSIZE_JSON'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const validationResult = DeployRequestSchema.safeParse(body)
|
||||
if (!validationResult.success) {
|
||||
logger.warn(`[${requestId}] Invalid request body`, {
|
||||
errors: validationResult.error.errors,
|
||||
codeField: body.code,
|
||||
codeType: typeof body.code,
|
||||
hasCode: 'code' in body,
|
||||
bodyKeys: Object.keys(body),
|
||||
})
|
||||
return createErrorResponse('Invalid request parameters', 400, 'VALIDATION_ERROR')
|
||||
}
|
||||
|
||||
const params = validationResult.data
|
||||
|
||||
// Log the deployment payload (excluding sensitive credentials)
|
||||
logger.info(`[${requestId}] AWS Lambda deployment payload received`, {
|
||||
functionName: params.functionName,
|
||||
region: params.region,
|
||||
runtime: params.runtime,
|
||||
handler: params.handler,
|
||||
timeout: params.timeout,
|
||||
memorySize: params.memorySize,
|
||||
accessKeyId: params.accessKeyId ? `${params.accessKeyId.substring(0, 4)}...` : undefined,
|
||||
hasSecretAccessKey: !!params.secretAccessKey,
|
||||
hasRole: !!params.role,
|
||||
role: params.role ? `${params.role.substring(0, 20)}...` : undefined,
|
||||
codeFiles: Object.keys(params.code),
|
||||
codeFilesCount: Object.keys(params.code).length,
|
||||
environmentVariables: params.environmentVariables,
|
||||
environmentVariablesCount: Object.keys(params.environmentVariables || {}).length,
|
||||
tags: params.tags,
|
||||
tagsCount: Object.keys(params.tags || {}).length,
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Deploying Lambda function with SAM: ${params.functionName}`)
|
||||
|
||||
// Deploy using SAM CLI
|
||||
const functionDetails = await deployWithSam(params, requestId)
|
||||
|
||||
logger.info(`[${requestId}] Lambda function deployment completed successfully`, {
|
||||
functionName: params.functionName,
|
||||
functionArn: functionDetails.functionArn,
|
||||
})
|
||||
|
||||
return createSuccessResponse({
|
||||
success: true,
|
||||
output: functionDetails,
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error deploying Lambda function`, {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
// Handle specific AWS errors
|
||||
let errorMessage = 'Failed to deploy Lambda function'
|
||||
let statusCode = 500
|
||||
|
||||
if (error.message?.includes('sam: command not found')) {
|
||||
errorMessage = 'SAM CLI is not installed or not available in PATH'
|
||||
statusCode = 500
|
||||
} else if (error.name === 'AccessDeniedException') {
|
||||
errorMessage = 'Access denied. Please check your AWS credentials and permissions.'
|
||||
statusCode = 403
|
||||
} else if (error.name === 'InvalidParameterValueException') {
|
||||
errorMessage = `Invalid parameter: ${error.message}`
|
||||
statusCode = 400
|
||||
} else if (error.name === 'ResourceConflictException') {
|
||||
errorMessage = 'Resource conflict. The function may be in use or being updated.'
|
||||
statusCode = 409
|
||||
} else if (error.name === 'ServiceException') {
|
||||
errorMessage = 'AWS Lambda service error. Please try again later.'
|
||||
statusCode = 503
|
||||
} else if (error instanceof Error) {
|
||||
errorMessage = error.message
|
||||
}
|
||||
|
||||
return createErrorResponse(errorMessage, statusCode, 'DEPLOYMENT_ERROR')
|
||||
}
|
||||
}
|
||||
@@ -1,322 +0,0 @@
|
||||
import {
|
||||
GetFunctionCommand,
|
||||
GetFunctionConfigurationCommand,
|
||||
LambdaClient,
|
||||
} from '@aws-sdk/client-lambda'
|
||||
import JSZip from 'jszip'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('AWSLambdaFetchAPI')
|
||||
|
||||
// Validation schema for the request body
|
||||
const FetchRequestSchema = z.object({
|
||||
accessKeyId: z.string().min(1, 'AWS Access Key ID is required'),
|
||||
secretAccessKey: z.string().min(1, 'AWS Secret Access Key is required'),
|
||||
region: z.string().min(1, 'AWS Region is required'),
|
||||
functionName: z.string().min(1, 'Function name is required'),
|
||||
role: z.string().min(1, 'IAM Role ARN is required'),
|
||||
})
|
||||
|
||||
type FetchRequest = z.infer<typeof FetchRequestSchema>
|
||||
|
||||
interface LambdaFunctionDetails {
|
||||
functionArn: string
|
||||
functionName: string
|
||||
runtime: string
|
||||
region: string
|
||||
status: string
|
||||
lastModified: string
|
||||
codeSize: number
|
||||
description: string
|
||||
timeout: number
|
||||
memorySize: number
|
||||
environment: Record<string, string>
|
||||
tags: Record<string, string>
|
||||
codeFiles: Record<string, string>
|
||||
handler: string
|
||||
role: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract code from Lambda function ZIP file
|
||||
*/
|
||||
async function extractCodeFromZip(
|
||||
zipBuffer: Buffer,
|
||||
runtime: string
|
||||
): Promise<{ mainCode: string; allFiles: Record<string, string> }> {
|
||||
try {
|
||||
const zip = await JSZip.loadAsync(zipBuffer)
|
||||
const allFiles = Object.keys(zip.files)
|
||||
logger.info('Files in ZIP:', allFiles)
|
||||
|
||||
// Extract all text files
|
||||
const allFilesContent: Record<string, string> = {}
|
||||
let mainCode = ''
|
||||
|
||||
// Determine the main file based on runtime
|
||||
let mainFile = 'index.js' // default
|
||||
if (runtime.startsWith('python')) {
|
||||
mainFile = 'index.py'
|
||||
} else if (runtime.startsWith('java')) {
|
||||
mainFile = 'index.java'
|
||||
} else if (runtime.startsWith('dotnet')) {
|
||||
mainFile = 'index.cs'
|
||||
} else if (runtime.startsWith('go')) {
|
||||
mainFile = 'index.go'
|
||||
} else if (runtime.startsWith('ruby')) {
|
||||
mainFile = 'index.rb'
|
||||
}
|
||||
|
||||
logger.info('Looking for main file:', mainFile)
|
||||
|
||||
// Extract all non-directory files
|
||||
for (const fileName of allFiles) {
|
||||
if (!fileName.endsWith('/')) {
|
||||
try {
|
||||
const fileContent = await zip.file(fileName)?.async('string')
|
||||
if (fileContent !== undefined) {
|
||||
allFilesContent[fileName] = fileContent
|
||||
|
||||
// Set main code if this is the main file
|
||||
if (fileName === mainFile) {
|
||||
mainCode = fileContent
|
||||
logger.info('Found main file content, length:', mainCode.length)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to extract file ${fileName}:`, error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If main file not found, try to find any code file
|
||||
if (!mainCode) {
|
||||
const codeFiles = Object.keys(allFilesContent).filter(
|
||||
(file) =>
|
||||
file.endsWith('.js') ||
|
||||
file.endsWith('.py') ||
|
||||
file.endsWith('.java') ||
|
||||
file.endsWith('.cs') ||
|
||||
file.endsWith('.go') ||
|
||||
file.endsWith('.rb')
|
||||
)
|
||||
|
||||
logger.info('Found code files:', codeFiles)
|
||||
|
||||
if (codeFiles.length > 0) {
|
||||
const firstCodeFile = codeFiles[0]
|
||||
mainCode = allFilesContent[firstCodeFile]
|
||||
logger.info('Using first code file as main, length:', mainCode.length)
|
||||
}
|
||||
}
|
||||
|
||||
// If still no main code, use the first file
|
||||
if (!mainCode && Object.keys(allFilesContent).length > 0) {
|
||||
const firstFile = Object.keys(allFilesContent)[0]
|
||||
mainCode = allFilesContent[firstFile]
|
||||
logger.info('Using first file as main, length:', mainCode.length)
|
||||
}
|
||||
|
||||
logger.info(`Extracted ${Object.keys(allFilesContent).length} files`)
|
||||
return { mainCode, allFiles: allFilesContent }
|
||||
} catch (error) {
|
||||
logger.error('Failed to extract code from ZIP', { error })
|
||||
return { mainCode: '', allFiles: {} }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get detailed information about a Lambda function including code
|
||||
*/
|
||||
async function getFunctionDetailsWithCode(
|
||||
lambdaClient: LambdaClient,
|
||||
functionName: string,
|
||||
region: string,
|
||||
accessKeyId: string,
|
||||
secretAccessKey: string
|
||||
): Promise<LambdaFunctionDetails> {
|
||||
// Get function configuration
|
||||
const functionConfig = await lambdaClient.send(
|
||||
new GetFunctionConfigurationCommand({ FunctionName: functionName })
|
||||
)
|
||||
|
||||
// Get function code
|
||||
const functionCode = await lambdaClient.send(
|
||||
new GetFunctionCommand({ FunctionName: functionName })
|
||||
)
|
||||
|
||||
let codeFiles: Record<string, string> = {}
|
||||
if (functionCode.Code?.Location) {
|
||||
try {
|
||||
logger.info('Downloading code from:', functionCode.Code.Location)
|
||||
|
||||
const response = await fetch(functionCode.Code.Location)
|
||||
logger.info('Fetch response status:', response.status)
|
||||
|
||||
if (response.ok) {
|
||||
const zipBuffer = Buffer.from(await response.arrayBuffer())
|
||||
logger.info('ZIP buffer size:', zipBuffer.length)
|
||||
const extractedCode = await extractCodeFromZip(zipBuffer, functionConfig.Runtime || '')
|
||||
codeFiles = extractedCode.allFiles
|
||||
logger.info('Extracted files count:', Object.keys(codeFiles).length)
|
||||
} else {
|
||||
logger.warn('Fetch failed with status:', response.status)
|
||||
const errorText = await response.text()
|
||||
logger.warn('Error response:', errorText)
|
||||
}
|
||||
} catch (fetchError) {
|
||||
logger.error('Failed to download function code using fetch', { fetchError })
|
||||
}
|
||||
} else {
|
||||
logger.info('No code location found in function response')
|
||||
}
|
||||
|
||||
return {
|
||||
functionArn: functionConfig.FunctionArn || '',
|
||||
functionName: functionConfig.FunctionName || '',
|
||||
runtime: functionConfig.Runtime || '',
|
||||
region,
|
||||
status: functionConfig.State || '',
|
||||
lastModified: functionConfig.LastModified || '',
|
||||
codeSize: functionConfig.CodeSize || 0,
|
||||
description: functionConfig.Description || '',
|
||||
timeout: functionConfig.Timeout || 0,
|
||||
memorySize: functionConfig.MemorySize || 0,
|
||||
environment: functionConfig.Environment?.Variables || {},
|
||||
tags: {}, // Tags need to be fetched separately if needed
|
||||
codeFiles,
|
||||
handler: functionConfig.Handler || '',
|
||||
role: functionConfig.Role || '',
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
try {
|
||||
logger.info(`[${requestId}] Processing AWS Lambda fetch request`)
|
||||
|
||||
// Parse and validate request body
|
||||
let body: any
|
||||
try {
|
||||
body = await request.json()
|
||||
} catch (parseError) {
|
||||
logger.error(`[${requestId}] Failed to parse request body`, {
|
||||
error: parseError instanceof Error ? parseError.message : String(parseError),
|
||||
})
|
||||
return createErrorResponse('Invalid JSON in request body', 400, 'INVALID_JSON')
|
||||
}
|
||||
|
||||
const validationResult = FetchRequestSchema.safeParse(body)
|
||||
if (!validationResult.success) {
|
||||
logger.warn(`[${requestId}] Invalid request body`, { errors: validationResult.error.errors })
|
||||
return createErrorResponse('Invalid request parameters', 400, 'VALIDATION_ERROR')
|
||||
}
|
||||
|
||||
const params = validationResult.data
|
||||
|
||||
// Log the payload (excluding sensitive credentials)
|
||||
logger.info(`[${requestId}] AWS Lambda fetch payload received`, {
|
||||
functionName: params.functionName,
|
||||
region: params.region,
|
||||
accessKeyId: params.accessKeyId ? `${params.accessKeyId.substring(0, 4)}...` : undefined,
|
||||
hasSecretAccessKey: !!params.secretAccessKey,
|
||||
hasFunctionName: !!params.functionName,
|
||||
hasRole: !!params.role,
|
||||
role: params.role ? `${params.role.substring(0, 20)}...` : undefined,
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Fetching Lambda function: ${params.functionName}`)
|
||||
|
||||
// Create Lambda client
|
||||
const lambdaClient = new LambdaClient({
|
||||
region: params.region,
|
||||
credentials: {
|
||||
accessKeyId: params.accessKeyId,
|
||||
secretAccessKey: params.secretAccessKey,
|
||||
},
|
||||
})
|
||||
|
||||
// Fetch function details and code
|
||||
try {
|
||||
const functionDetails = await getFunctionDetailsWithCode(
|
||||
lambdaClient,
|
||||
params.functionName,
|
||||
params.region,
|
||||
params.accessKeyId,
|
||||
params.secretAccessKey
|
||||
)
|
||||
|
||||
logger.info(`[${requestId}] Successfully fetched Lambda function: ${params.functionName}`, {
|
||||
functionName: functionDetails.functionName,
|
||||
filesCount: Object.keys(functionDetails.codeFiles).length,
|
||||
hasFiles: Object.keys(functionDetails.codeFiles).length > 0,
|
||||
})
|
||||
|
||||
return createSuccessResponse({
|
||||
success: true,
|
||||
output: functionDetails,
|
||||
})
|
||||
} catch (fetchError: any) {
|
||||
// Handle ResourceNotFoundException gracefully - return empty function details
|
||||
if (fetchError.name === 'ResourceNotFoundException') {
|
||||
logger.info(
|
||||
`[${requestId}] Lambda function '${params.functionName}' not found, returning empty response`
|
||||
)
|
||||
|
||||
const emptyFunctionDetails: LambdaFunctionDetails = {
|
||||
functionArn: '',
|
||||
functionName: params.functionName,
|
||||
runtime: '',
|
||||
region: params.region,
|
||||
status: '',
|
||||
lastModified: '',
|
||||
codeSize: 0,
|
||||
description: '',
|
||||
timeout: 0,
|
||||
memorySize: 0,
|
||||
environment: {},
|
||||
tags: {},
|
||||
codeFiles: {},
|
||||
handler: '',
|
||||
role: '',
|
||||
}
|
||||
|
||||
return createSuccessResponse({
|
||||
success: true,
|
||||
output: emptyFunctionDetails,
|
||||
})
|
||||
}
|
||||
|
||||
// Re-throw other errors to be handled by the outer catch block
|
||||
throw fetchError
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Failed to fetch Lambda function`, {
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
})
|
||||
|
||||
// Handle specific AWS errors
|
||||
// Note: ResourceNotFoundException is now handled gracefully in the inner try-catch
|
||||
|
||||
if (error.name === 'AccessDeniedException') {
|
||||
return createErrorResponse(
|
||||
'Access denied. Please check your AWS credentials and permissions.',
|
||||
403,
|
||||
'ACCESS_DENIED'
|
||||
)
|
||||
}
|
||||
|
||||
if (error.name === 'InvalidParameterValueException') {
|
||||
return createErrorResponse('Invalid parameter value provided', 400, 'INVALID_PARAMETER')
|
||||
}
|
||||
|
||||
return createErrorResponse('Failed to fetch Lambda function', 500, 'FETCH_ERROR')
|
||||
}
|
||||
}
|
||||
@@ -1,91 +0,0 @@
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'

const logger = createLogger('AWSLambdaGetPromptsAPI')

// Constants for getPrompts operation
const system_prompt = `You are an expert in writing aws lambda functions. The user will provide an input which may contain the the existing lambda code, or they may not. If the initial code is provided, make the changes to the initial code to reflect what the user wants. If no code is provided, your job is to write the lambda function, choosing a runtime and handler.

Your output should be a valid JSON object, with the following structure:

[
  "runtime": runtime string,
  "handler": handler,
  "timeout": timeout,
  "memory": memory,
  "files":
    {
      "file_path_1": "code string for first file",
      "file_path_2": "code string for second file"
    }
]`

const schema = {
  name: 'aws_lambda_function',
  description: 'Defines the structure for an AWS Lambda function configuration.',
  strict: true,
  schema: {
    type: 'object',
    properties: {
      runtime: {
        type: 'string',
        description: 'The runtime environment for the Lambda function.',
      },
      handler: {
        type: 'string',
        description: 'The function handler that Lambda calls to start execution.',
      },
      memory: {
        type: 'integer',
        description: 'The amount of memory allocated to the Lambda function in MB (128-10240).',
        minimum: 128,
        maximum: 10240,
      },
      timeout: {
        type: 'integer',
        description: 'The maximum execution time for the Lambda function in seconds (1-900).',
        minimum: 1,
        maximum: 900,
      },
      files: {
        type: 'object',
        description: 'A mapping of file paths to their respective code strings.',
        additionalProperties: {
          type: 'string',
          description: 'The code string for a specific file.',
        },
      },
    },
    additionalProperties: false,
    required: ['runtime', 'handler', 'files', 'memory', 'timeout'],
  },
}

export async function POST(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)

  try {
    logger.info(`[${requestId}] Processing AWS Lambda get prompts request`)

    // No validation needed since this endpoint doesn't require any parameters
    // Just return the hardcoded system prompt and schema

    logger.info(`[${requestId}] Returning system prompt and schema`)

    return createSuccessResponse({
      success: true,
      output: {
        systemPrompt: system_prompt,
        schema: schema,
      },
    })
  } catch (error: any) {
    logger.error(`[${requestId}] Error in get prompts operation`, {
      error: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined,
    })

    return createErrorResponse('Failed to get prompts and schema', 500, 'GET_PROMPTS_ERROR')
  }
}

@@ -33,6 +33,7 @@ interface ChatConfig {
    headerText?: string
  }
  authType?: 'public' | 'password' | 'email'
  outputConfigs?: Array<{ blockId: string; path?: string }>
}

interface AudioStreamingOptions {
@@ -373,8 +374,16 @@ export default function ChatClient({ subdomain }: { subdomain: string }) {
          const json = JSON.parse(line.substring(6))
          const { blockId, chunk: contentChunk, event: eventType } = json

          if (eventType === 'final') {
          if (eventType === 'final' && json.data) {
            setIsLoading(false)

            // Process final execution result for field extraction
            const result = json.data
            const nonStreamingLogs =
              result.logs?.filter((log: any) => !messageIdMap.has(log.blockId)) || []

            // Chat field extraction will be handled by the backend using deployment outputConfigs

            return
          }

@@ -1,53 +1,57 @@
'use client'

import { useEffect, useState } from 'react'
import { AlertTriangle, RefreshCw } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'

interface ConnectionStatusProps {
  isConnected: boolean
}

export function ConnectionStatus({ isConnected }: ConnectionStatusProps) {
  const [showOfflineNotice, setShowOfflineNotice] = useState(false)
  const userPermissions = useUserPermissionsContext()

  useEffect(() => {
    let timeoutId: NodeJS.Timeout
    const handleRefresh = () => {
      window.location.reload()
    }

    if (!isConnected) {
      // Show offline notice after 6 seconds of being disconnected
      timeoutId = setTimeout(() => {
        setShowOfflineNotice(true)
      }, 6000) // 6 seconds
    } else {
      // Hide notice immediately when reconnected
      setShowOfflineNotice(false)
    }

    return () => {
      if (timeoutId) {
        clearTimeout(timeoutId)
      }
    }
  }, [isConnected])

  // Don't render anything if connected or if we haven't been disconnected long enough
  if (!showOfflineNotice) {
  // Don't render anything if not in offline mode
  if (!userPermissions.isOfflineMode) {
    return null
  }

  return (
    <div className='flex items-center gap-1.5'>
      <div className='flex items-center gap-1.5 text-red-600'>
    <div className='flex items-center gap-2 rounded-md border border-red-200 bg-red-50 px-3 py-2'>
      <div className='flex items-center gap-2 text-red-700'>
        <div className='relative flex items-center justify-center'>
          <div className='absolute h-3 w-3 animate-ping rounded-full bg-red-500/20' />
          <div className='relative h-2 w-2 rounded-full bg-red-500' />
          {!isConnected && (
            <div className='absolute h-4 w-4 animate-ping rounded-full bg-red-500/20' />
          )}
          <AlertTriangle className='relative h-4 w-4' />
        </div>
        <div className='flex flex-col'>
          <span className='font-medium text-xs leading-tight'>Connection lost</span>
          <span className='text-xs leading-tight opacity-90'>
            Changes not saved - please refresh
          <span className='font-medium text-xs leading-tight'>
            {isConnected ? 'Reconnected' : 'Connection lost - please refresh'}
          </span>
          <span className='text-red-600 text-xs leading-tight'>
            {isConnected ? 'Refresh to continue editing' : 'Read-only mode active'}
          </span>
        </div>
      </div>
      <Tooltip>
        <TooltipTrigger asChild>
          <Button
            onClick={handleRefresh}
            variant='ghost'
            size='sm'
            className='h-7 w-7 p-0 text-red-700 hover:bg-red-100 hover:text-red-800'
          >
            <RefreshCw className='h-4 w-4' />
          </Button>
        </TooltipTrigger>
        <TooltipContent className='z-[9999]'>Refresh page to continue editing</TooltipContent>
      </Tooltip>
    </div>
  )
}

@@ -44,16 +44,6 @@ export function UserAvatarStack({
    }
  }, [users, maxVisible])

  // Show connection status component regardless of user count
  // This will handle the offline notice when disconnected for 15 seconds
  const connectionStatusElement = <ConnectionStatus isConnected={isConnected} />

  // Only show presence when there are multiple users (>1)
  // But always show connection status
  if (users.length <= 1) {
    return connectionStatusElement
  }

  // Determine spacing based on size
  const spacingClass = {
    sm: '-space-x-1',
@@ -62,46 +52,55 @@ export function UserAvatarStack({
  }[size]

  return (
    <div className={`flex items-center ${spacingClass} ${className}`}>
      {/* Connection status - always present */}
      {connectionStatusElement}
    <div className={`flex items-center gap-3 ${className}`}>
      {/* Connection status - always check, shows when offline */}
      <ConnectionStatus isConnected={isConnected} />

      {/* Render visible user avatars */}
      {visibleUsers.map((user, index) => (
        <UserAvatar
          key={user.connectionId}
          connectionId={user.connectionId}
          name={user.name}
          color={user.color}
          size={size}
          index={index}
          tooltipContent={
            user.name ? (
              <div className='text-center'>
                <div className='font-medium'>{user.name}</div>
                {user.info && <div className='mt-1 text-muted-foreground text-xs'>{user.info}</div>}
              </div>
            ) : null
          }
        />
      ))}
      {/* Only show avatar stack when there are multiple users (>1) */}
      {users.length > 1 && (
        <div className={`flex items-center ${spacingClass}`}>
          {/* Render visible user avatars */}
          {visibleUsers.map((user, index) => (
            <UserAvatar
              key={user.connectionId}
              connectionId={user.connectionId}
              name={user.name}
              color={user.color}
              size={size}
              index={index}
              tooltipContent={
                user.name ? (
                  <div className='text-center'>
                    <div className='font-medium'>{user.name}</div>
                    {user.info && (
                      <div className='mt-1 text-muted-foreground text-xs'>{user.info}</div>
                    )}
                  </div>
                ) : null
              }
            />
          ))}

          {/* Render overflow indicator if there are more users */}
          {overflowCount > 0 && (
            <UserAvatar
              connectionId='overflow-indicator' // Use a unique string identifier
              name={`+${overflowCount}`}
              size={size}
              index={visibleUsers.length}
              tooltipContent={
                <div className='text-center'>
                  <div className='font-medium'>
                    {overflowCount} more user{overflowCount > 1 ? 's' : ''}
                  </div>
                  <div className='mt-1 text-muted-foreground text-xs'>{users.length} total online</div>
                </div>
              }
            />
          {/* Render overflow indicator if there are more users */}
          {overflowCount > 0 && (
            <UserAvatar
              connectionId='overflow-indicator' // Use a unique string identifier
              name={`+${overflowCount}`}
              size={size}
              index={visibleUsers.length}
              tooltipContent={
                <div className='text-center'>
                  <div className='font-medium'>
                    {overflowCount} more user{overflowCount > 1 ? 's' : ''}
                  </div>
                  <div className='mt-1 text-muted-foreground text-xs'>
                    {users.length} total online
                  </div>
                </div>
              }
            />
          )}
        </div>
      )}
    </div>
  )

@@ -670,7 +670,11 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
</h2>
|
||||
</TooltipTrigger>
|
||||
{!canEdit && (
|
||||
<TooltipContent>Edit permissions required to rename workflows</TooltipContent>
|
||||
<TooltipContent>
|
||||
{userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Edit permissions required to rename workflows'}
|
||||
</TooltipContent>
|
||||
)}
|
||||
</Tooltip>
|
||||
)}
|
||||
@@ -934,7 +938,11 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
)}
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{canEdit ? 'Duplicate Workflow' : 'Admin permission required to duplicate workflows'}
|
||||
{canEdit
|
||||
? 'Duplicate Workflow'
|
||||
: userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Admin permission required to duplicate workflows'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)
|
||||
@@ -975,7 +983,9 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
|
||||
</TooltipTrigger>
|
||||
<TooltipContent command='Shift+L'>
|
||||
{!userPermissions.canEdit
|
||||
? 'Admin permission required to use auto-layout'
|
||||
? userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Admin permission required to use auto-layout'
|
||||
: 'Auto Layout'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
@@ -5,6 +5,12 @@ import { ArrowUp } from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { ScrollArea } from '@/components/ui/scroll-area'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import {
|
||||
extractBlockIdFromOutputId,
|
||||
extractPathFromOutputId,
|
||||
parseOutputContentSafely,
|
||||
} from '@/lib/response-format'
|
||||
import type { BlockLog, ExecutionResult } from '@/executor/types'
|
||||
import { useExecutionStore } from '@/stores/execution/store'
|
||||
import { useChatStore } from '@/stores/panel/chat/store'
|
||||
@@ -14,6 +20,8 @@ import { useWorkflowExecution } from '../../../../hooks/use-workflow-execution'
|
||||
import { ChatMessage } from './components/chat-message/chat-message'
|
||||
import { OutputSelect } from './components/output-select/output-select'
|
||||
|
||||
const logger = createLogger('ChatPanel')
|
||||
|
||||
interface ChatProps {
|
||||
panelWidth: number
|
||||
chatMessage: string
|
||||
@@ -60,8 +68,8 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
const selected = selectedWorkflowOutputs[activeWorkflowId]
|
||||
|
||||
if (!selected || selected.length === 0) {
|
||||
const defaultSelection = outputEntries.length > 0 ? [outputEntries[0].id] : []
|
||||
return defaultSelection
|
||||
// Return empty array when nothing is explicitly selected
|
||||
return []
|
||||
}
|
||||
|
||||
// Ensure we have no duplicates in the selection
|
||||
@@ -74,7 +82,7 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
}
|
||||
|
||||
return selected
|
||||
}, [selectedWorkflowOutputs, activeWorkflowId, outputEntries, setSelectedWorkflowOutput])
|
||||
}, [selectedWorkflowOutputs, activeWorkflowId, setSelectedWorkflowOutput])
|
||||
|
||||
// Auto-scroll to bottom when new messages are added
|
||||
useEffect(() => {
|
||||
@@ -141,25 +149,22 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
|
||||
if (nonStreamingLogs.length > 0) {
|
||||
const outputsToRender = selectedOutputs.filter((outputId) => {
|
||||
// Extract block ID correctly - handle both formats:
|
||||
// - "blockId" (direct block ID)
|
||||
// - "blockId_response.result" (block ID with path)
|
||||
const blockIdForOutput = outputId.includes('_')
|
||||
? outputId.split('_')[0]
|
||||
: outputId.split('.')[0]
|
||||
const blockIdForOutput = extractBlockIdFromOutputId(outputId)
|
||||
return nonStreamingLogs.some((log) => log.blockId === blockIdForOutput)
|
||||
})
|
||||
|
||||
for (const outputId of outputsToRender) {
|
||||
const blockIdForOutput = outputId.includes('_')
|
||||
? outputId.split('_')[0]
|
||||
: outputId.split('.')[0]
|
||||
const path = outputId.substring(blockIdForOutput.length + 1)
|
||||
const blockIdForOutput = extractBlockIdFromOutputId(outputId)
|
||||
const path = extractPathFromOutputId(outputId, blockIdForOutput)
|
||||
const log = nonStreamingLogs.find((l) => l.blockId === blockIdForOutput)
|
||||
|
||||
if (log) {
|
||||
let outputValue: any = log.output
|
||||
|
||||
if (path) {
|
||||
// Parse JSON content safely
|
||||
outputValue = parseOutputContentSafely(outputValue)
|
||||
|
||||
const pathParts = path.split('.')
|
||||
for (const part of pathParts) {
|
||||
if (
|
||||
@@ -211,42 +216,41 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Error parsing stream data:', e)
|
||||
logger.error('Error parsing stream data:', e)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
processStream().catch((e) => console.error('Error processing stream:', e))
|
||||
processStream().catch((e) => logger.error('Error processing stream:', e))
|
||||
} else if (result && 'success' in result && result.success && 'logs' in result) {
|
||||
const finalOutputs: any[] = []
|
||||
|
||||
if (selectedOutputs && selectedOutputs.length > 0) {
|
||||
if (selectedOutputs?.length > 0) {
|
||||
for (const outputId of selectedOutputs) {
|
||||
// Find the log that corresponds to the start of the outputId
|
||||
const log = result.logs?.find(
|
||||
(l: BlockLog) => l.blockId === outputId || outputId.startsWith(`${l.blockId}_`)
|
||||
)
|
||||
const blockIdForOutput = extractBlockIdFromOutputId(outputId)
|
||||
const path = extractPathFromOutputId(outputId, blockIdForOutput)
|
||||
const log = result.logs?.find((l: BlockLog) => l.blockId === blockIdForOutput)
|
||||
|
||||
if (log) {
|
||||
let output = log.output
|
||||
// Check if there is a path to traverse
|
||||
if (outputId.length > log.blockId.length) {
|
||||
const path = outputId.substring(log.blockId.length + 1)
|
||||
if (path) {
|
||||
const pathParts = path.split('.')
|
||||
let current = output
|
||||
for (const part of pathParts) {
|
||||
if (current && typeof current === 'object' && part in current) {
|
||||
current = current[part]
|
||||
} else {
|
||||
current = undefined
|
||||
break
|
||||
}
|
||||
|
||||
if (path) {
|
||||
// Parse JSON content safely
|
||||
output = parseOutputContentSafely(output)
|
||||
|
||||
const pathParts = path.split('.')
|
||||
let current = output
|
||||
for (const part of pathParts) {
|
||||
if (current && typeof current === 'object' && part in current) {
|
||||
current = current[part]
|
||||
} else {
|
||||
current = undefined
|
||||
break
|
||||
}
|
||||
output = current
|
||||
}
|
||||
output = current
|
||||
}
|
||||
if (output !== undefined) {
|
||||
finalOutputs.push(output)
|
||||
@@ -255,10 +259,8 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
}
}

// If no specific outputs could be resolved, fall back to the final workflow output
if (finalOutputs.length === 0 && result.output) {
finalOutputs.push(result.output)
}
// Only show outputs if something was explicitly selected
// If no outputs are selected, don't show anything

// Add a new message for each resolved output
finalOutputs.forEach((output) => {
@@ -266,19 +268,8 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
if (typeof output === 'string') {
content = output
} else if (output && typeof output === 'object') {
// Handle cases where output is { response: ... }
const outputObj = output as Record<string, any>
const response = outputObj.response
if (response) {
if (typeof response.content === 'string') {
content = response.content
} else {
// Pretty print for better readability
content = `\`\`\`json\n${JSON.stringify(response, null, 2)}\n\`\`\``
}
} else {
content = `\`\`\`json\n${JSON.stringify(output, null, 2)}\n\`\`\``
}
// For structured responses, pretty print the JSON
content = `\`\`\`json\n${JSON.stringify(output, null, 2)}\n\`\`\``
}

if (content) {

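The output selection above hinges on how an output ID encodes a block and an optional path. A minimal sketch of what the imported helpers from '@/lib/response-format' are assumed to do, reconstructed from the inline logic this hunk removes (the real implementations may differ):

// An output ID is either 'blockId' or 'blockId_some.nested.path'.
function extractBlockIdFromOutputId(outputId: string): string {
  return outputId.includes('_') ? outputId.split('_')[0] : outputId.split('.')[0]
}

function extractPathFromOutputId(outputId: string, blockId: string): string {
  return outputId.length > blockId.length ? outputId.substring(blockId.length + 1) : ''
}

// Walk a dotted path into a block's output object, as the panel does above.
function resolveOutput(output: unknown, path: string): unknown {
  if (!path) return output
  let current: any = output
  for (const part of path.split('.')) {
    if (current && typeof current === 'object' && part in current) {
      current = current[part]
    } else {
      return undefined
    }
  }
  return current
}

const outputId = 'agent1_response.result' // hypothetical ID
const blockId = extractBlockIdFromOutputId(outputId) // 'agent1'
const path = extractPathFromOutputId(outputId, blockId) // 'response.result'
console.log(resolveOutput({ response: { result: 42 } }, path)) // 42
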
@@ -1,8 +1,10 @@
import { useEffect, useMemo, useRef, useState } from 'react'
import { Check, ChevronDown } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { extractFieldsFromSchema, parseResponseFormatSafely } from '@/lib/response-format'
import { cn } from '@/lib/utils'
import { getBlock } from '@/blocks'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

interface OutputSelectProps {
@@ -48,8 +50,31 @@ export function OutputSelect({
? block.name.replace(/\s+/g, '').toLowerCase()
: `block-${block.id}`

// Check for custom response format first
const responseFormatValue = useSubBlockStore.getState().getValue(block.id, 'responseFormat')
const responseFormat = parseResponseFormatSafely(responseFormatValue, block.id)

let outputsToProcess: Record<string, any> = {}

if (responseFormat) {
// Use custom schema properties if response format is specified
const schemaFields = extractFieldsFromSchema(responseFormat)
if (schemaFields.length > 0) {
// Convert schema fields to output structure
schemaFields.forEach((field) => {
outputsToProcess[field.name] = { type: field.type }
})
} else {
// Fallback to default outputs if schema extraction failed
outputsToProcess = block.outputs || {}
}
} else {
// Use default block outputs
outputsToProcess = block.outputs || {}
}

// Add response outputs
if (block.outputs && typeof block.outputs === 'object') {
if (Object.keys(outputsToProcess).length > 0) {
const addOutput = (path: string, outputObj: any, prefix = '') => {
const fullPath = prefix ? `${prefix}.${path}` : path

@@ -100,7 +125,7 @@
}

// Process all output properties directly (flattened structure)
Object.entries(block.outputs).forEach(([key, value]) => {
Object.entries(outputsToProcess).forEach(([key, value]) => {
addOutput(key, value)
})
}

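To make the fallback chain above concrete, here is an illustrative standalone sketch: selectable outputs come from the custom response format when one is set, otherwise from the block's default outputs. extractFieldsFromSchema is assumed to return { name, type } pairs, as the hunk uses it.

interface SchemaField {
  name: string
  type: string
}

function buildOutputsToProcess(
  schemaFields: SchemaField[],
  defaultOutputs: Record<string, any>
): Record<string, any> {
  if (schemaFields.length === 0) return defaultOutputs
  const outputs: Record<string, any> = {}
  for (const field of schemaFields) {
    outputs[field.name] = { type: field.type }
  }
  return outputs
}

// A schema with one string property yields one selectable output entry.
console.log(buildOutputsToProcess([{ name: 'example_property', type: 'string' }], {}))
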
@@ -125,35 +125,33 @@ export function ConsoleEntry({ entry, consoleWidth }: ConsoleEntryProps) {
|
||||
<div className='flex items-start gap-2'>
|
||||
<Terminal className='mt-1 h-4 w-4 text-muted-foreground' />
|
||||
<div className='overflow-wrap-anywhere relative flex-1 whitespace-normal break-normal font-mono text-sm'>
|
||||
{typeof entry.output === 'object' &&
|
||||
entry.output !== null &&
|
||||
hasNestedStructure(entry.output) && (
|
||||
<div className='absolute top-0 right-0 z-10'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
className='h-6 px-2 text-muted-foreground hover:text-foreground'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
setExpandAllJson(!expandAllJson)
|
||||
}}
|
||||
>
|
||||
<span className='flex items-center'>
|
||||
{expandAllJson ? (
|
||||
<>
|
||||
<ChevronUp className='mr-1 h-3 w-3' />
|
||||
<span className='text-xs'>Collapse</span>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<ChevronDown className='mr-1 h-3 w-3' />
|
||||
<span className='text-xs'>Expand</span>
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
{entry.output != null && (
|
||||
<div className='absolute top-0 right-0 z-10'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
className='h-6 px-2 text-muted-foreground hover:text-foreground'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
setExpandAllJson(!expandAllJson)
|
||||
}}
|
||||
>
|
||||
<span className='flex items-center'>
|
||||
{expandAllJson ? (
|
||||
<>
|
||||
<ChevronUp className='mr-1 h-3 w-3' />
|
||||
<span className='text-xs'>Collapse</span>
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<ChevronDown className='mr-1 h-3 w-3' />
|
||||
<span className='text-xs'>Expand</span>
|
||||
</>
|
||||
)}
|
||||
</span>
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
<JSONView data={entry.output} initiallyExpanded={expandAllJson} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useCallback } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
|
||||
export type ToolbarBlockProps = {
|
||||
@@ -9,6 +10,8 @@ export type ToolbarBlockProps = {
|
||||
}
|
||||
|
||||
export function ToolbarBlock({ config, disabled = false }: ToolbarBlockProps) {
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const handleDragStart = (e: React.DragEvent) => {
|
||||
if (disabled) {
|
||||
e.preventDefault()
|
||||
@@ -66,7 +69,11 @@ export function ToolbarBlock({ config, disabled = false }: ToolbarBlockProps) {
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>{blockContent}</TooltipTrigger>
|
||||
<TooltipContent>Edit permissions required to add blocks</TooltipContent>
|
||||
<TooltipContent>
|
||||
{userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Edit permissions required to add blocks'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useCallback } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
|
||||
import { LoopTool } from '../../../loop-node/loop-config'
|
||||
|
||||
type LoopToolbarItemProps = {
|
||||
@@ -9,6 +10,8 @@ type LoopToolbarItemProps = {
|
||||
|
||||
// Custom component for the Loop Tool
|
||||
export default function LoopToolbarItem({ disabled = false }: LoopToolbarItemProps) {
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const handleDragStart = (e: React.DragEvent) => {
|
||||
if (disabled) {
|
||||
e.preventDefault()
|
||||
@@ -74,7 +77,11 @@ export default function LoopToolbarItem({ disabled = false }: LoopToolbarItemPro
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>{blockContent}</TooltipTrigger>
|
||||
<TooltipContent>Edit permissions required to add blocks</TooltipContent>
|
||||
<TooltipContent>
|
||||
{userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Edit permissions required to add blocks'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { useCallback } from 'react'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
|
||||
import { ParallelTool } from '../../../parallel-node/parallel-config'
|
||||
|
||||
type ParallelToolbarItemProps = {
|
||||
@@ -9,6 +10,7 @@ type ParallelToolbarItemProps = {
|
||||
|
||||
// Custom component for the Parallel Tool
|
||||
export default function ParallelToolbarItem({ disabled = false }: ParallelToolbarItemProps) {
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const handleDragStart = (e: React.DragEvent) => {
|
||||
if (disabled) {
|
||||
e.preventDefault()
|
||||
@@ -75,7 +77,11 @@ export default function ParallelToolbarItem({ disabled = false }: ParallelToolba
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>{blockContent}</TooltipTrigger>
|
||||
<TooltipContent>Edit permissions required to add blocks</TooltipContent>
|
||||
<TooltipContent>
|
||||
{userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Edit permissions required to add blocks'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Copy, Trash2 } from 'lu
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
@@ -22,9 +23,17 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
|
||||
const horizontalHandles = useWorkflowStore(
|
||||
(state) => state.blocks[blockId]?.horizontalHandles ?? false
|
||||
)
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const isStarterBlock = blockType === 'starter'
|
||||
|
||||
const getTooltipMessage = (defaultMessage: string) => {
|
||||
if (disabled) {
|
||||
return userPermissions.isOfflineMode ? 'Connection lost - please refresh' : 'Read-only mode'
|
||||
}
|
||||
return defaultMessage
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
@@ -68,7 +77,7 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='right'>
|
||||
{disabled ? 'Read-only mode' : isEnabled ? 'Disable Block' : 'Enable Block'}
|
||||
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
@@ -89,9 +98,7 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
|
||||
<Copy className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='right'>
|
||||
{disabled ? 'Read-only mode' : 'Duplicate Block'}
|
||||
</TooltipContent>
|
||||
<TooltipContent side='right'>{getTooltipMessage('Duplicate Block')}</TooltipContent>
|
||||
</Tooltip>
|
||||
)}
|
||||
|
||||
@@ -116,7 +123,7 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='right'>
|
||||
{disabled ? 'Read-only mode' : horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports'}
|
||||
{getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
|
||||
@@ -140,9 +147,7 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
|
||||
<Trash2 className='h-4 w-4' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='right'>
|
||||
{disabled ? 'Read-only mode' : 'Delete Block'}
|
||||
</TooltipContent>
|
||||
<TooltipContent side='right'>{getTooltipMessage('Delete Block')}</TooltipContent>
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { RepeatIcon, SplitIcon } from 'lucide-react'
|
||||
import { Card } from '@/components/ui/card'
|
||||
import { cn } from '@/lib/utils'
|
||||
import {
|
||||
@@ -77,8 +78,20 @@ export function ConnectionBlocks({
|
||||
// Get block configuration for icon and color
|
||||
const blockConfig = getBlock(connection.type)
|
||||
const displayName = connection.name // Use the actual block name instead of transforming it
|
||||
const Icon = blockConfig?.icon
|
||||
const bgColor = blockConfig?.bgColor || '#6B7280' // Fallback to gray
|
||||
|
||||
// Handle special blocks that aren't in the registry (loop and parallel)
|
||||
let Icon = blockConfig?.icon
|
||||
let bgColor = blockConfig?.bgColor || '#6B7280' // Fallback to gray
|
||||
|
||||
if (!blockConfig) {
|
||||
if (connection.type === 'loop') {
|
||||
Icon = RepeatIcon as typeof Icon
|
||||
bgColor = '#2FB3FF' // Blue color for loop blocks
|
||||
} else if (connection.type === 'parallel') {
|
||||
Icon = SplitIcon as typeof Icon
|
||||
bgColor = '#FEE12B' // Yellow color for parallel blocks
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Card
|
||||
|
||||
@@ -73,8 +73,6 @@ export function Code({
|
||||
}
|
||||
}, [generationType])
|
||||
|
||||
// State management
|
||||
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
|
||||
const [code, setCode] = useState<string>('')
|
||||
const [_lineCount, setLineCount] = useState(1)
|
||||
const [showTags, setShowTags] = useState(false)
|
||||
@@ -98,34 +96,13 @@ export function Code({
|
||||
const toggleCollapsed = () => {
|
||||
setCollapsedValue(blockId, collapsedStateKey, !isCollapsed)
|
||||
}
|
||||
// Use preview value when in preview mode, otherwise use store value or prop value
|
||||
const value = isPreview ? previewValue : propValue !== undefined ? propValue : storeValue
|
||||
|
||||
// Create refs to hold the handlers
|
||||
const handleStreamStartRef = useRef<() => void>(() => {})
|
||||
const handleGeneratedContentRef = useRef<(generatedCode: string) => void>(() => {})
|
||||
const handleStreamChunkRef = useRef<(chunk: string) => void>(() => {})
|
||||
|
||||
// AI Code Generation Hook
|
||||
const handleStreamStart = () => {
|
||||
setCode('')
|
||||
// Optionally clear the store value too, though handleStreamChunk will update it
|
||||
// setStoreValue('')
|
||||
}
|
||||
|
||||
const handleGeneratedContent = (generatedCode: string) => {
|
||||
setCode(generatedCode)
|
||||
if (!isPreview && !disabled) {
|
||||
setStoreValue(generatedCode)
|
||||
}
|
||||
}
|
||||
|
||||
// Handle streaming chunks directly into the editor
|
||||
const handleStreamChunk = (chunk: string) => {
|
||||
setCode((currentCode) => {
|
||||
const newCode = currentCode + chunk
|
||||
if (!isPreview && !disabled) {
|
||||
setStoreValue(newCode)
|
||||
}
|
||||
return newCode
|
||||
})
|
||||
}
|
||||
|
||||
const {
|
||||
isLoading: isAiLoading,
|
||||
isStreaming: isAiStreaming,
|
||||
@@ -140,11 +117,48 @@ export function Code({
|
||||
} = useCodeGeneration({
|
||||
generationType: generationType,
|
||||
initialContext: code,
|
||||
onGeneratedContent: handleGeneratedContent,
|
||||
onStreamChunk: handleStreamChunk,
|
||||
onStreamStart: handleStreamStart,
|
||||
onGeneratedContent: (content: string) => handleGeneratedContentRef.current?.(content),
|
||||
onStreamChunk: (chunk: string) => handleStreamChunkRef.current?.(chunk),
|
||||
onStreamStart: () => handleStreamStartRef.current?.(),
|
||||
})
|
||||
|
||||
// State management - useSubBlockValue with explicit streaming control
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId, false, {
debounceMs: 150,
isStreaming: isAiStreaming, // Use AI streaming state directly
onStreamingEnd: () => {
logger.debug('AI streaming ended, value persisted', { blockId, subBlockId })
},
})

// Use preview value when in preview mode, otherwise use store value or prop value
const value = isPreview ? previewValue : propValue !== undefined ? propValue : storeValue

// Define the handlers now that we have access to setStoreValue
handleStreamStartRef.current = () => {
setCode('')
// Streaming state is now controlled by isAiStreaming
}

handleGeneratedContentRef.current = (generatedCode: string) => {
setCode(generatedCode)
if (!isPreview && !disabled) {
setStoreValue(generatedCode)
// Final value will be persisted when isAiStreaming becomes false
}
}

handleStreamChunkRef.current = (chunk: string) => {
setCode((currentCode) => {
const newCode = currentCode + chunk
if (!isPreview && !disabled) {
// Update the value - it won't be persisted until streaming ends
setStoreValue(newCode)
}
return newCode
})
}

// Effects
useEffect(() => {
const valueString = value?.toString() ?? ''

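The hunk above routes the useCodeGeneration callbacks through refs so the handlers can be defined after useSubBlockValue has produced setStoreValue, even though the generation hook is called first. A generic sketch of that late-binding pattern (the helper name is hypothetical; the repo wires the refs by hand as shown above):

import { useCallback, useRef } from 'react'

export function useLateBoundCallback<Args extends unknown[]>() {
  // Holds the latest handler; starts as a no-op until the component assigns one.
  const ref = useRef<(...args: Args) => void>(() => {})
  // Wrapper with a stable identity that forwards to whatever handler is currently in the ref.
  const call = useCallback((...args: Args) => ref.current(...args), [])
  return { ref, call }
}

// Usage: pass `call` into a hook's options up front, then assign `ref.current`
// once the values it needs (e.g. a store setter) exist.
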
@@ -50,7 +50,11 @@ export function ResponseFormat({
|
||||
isPreview = false,
|
||||
previewValue,
|
||||
}: ResponseFormatProps) {
|
||||
const [storeValue, setStoreValue] = useSubBlockValue<JSONProperty[]>(blockId, subBlockId)
|
||||
// useSubBlockValue now includes debouncing by default
|
||||
const [storeValue, setStoreValue] = useSubBlockValue<JSONProperty[]>(blockId, subBlockId, false, {
|
||||
debounceMs: 200, // Slightly longer debounce for complex structures
|
||||
})
|
||||
|
||||
const [showPreview, setShowPreview] = useState(false)
|
||||
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
|
||||
@@ -1,11 +1,15 @@
import { useCallback, useEffect, useRef } from 'react'
import { isEqual } from 'lodash'
import { createLogger } from '@/lib/logs/console-logger'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { getProviderFromModel } from '@/providers/utils'
import { useGeneralStore } from '@/stores/settings/general/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('SubBlockValue')

// Helper function to dispatch collaborative subblock updates
const dispatchSubblockUpdate = (blockId: string, subBlockId: string, value: any) => {
const event = new CustomEvent('update-subblock-value', {
@@ -154,20 +158,31 @@ function storeApiKeyValue(
}
}

interface UseSubBlockValueOptions {
debounceMs?: number
isStreaming?: boolean // Explicit streaming state
onStreamingEnd?: () => void
}

/**
* Custom hook to get and set values for a sub-block in a workflow.
* Handles complex object values properly by using deep equality comparison.
* Includes automatic debouncing and explicit streaming mode for AI generation.
*
* @param blockId The ID of the block containing the sub-block
* @param subBlockId The ID of the sub-block
* @param triggerWorkflowUpdate Whether to trigger a workflow update when the value changes
* @returns A tuple containing the current value and a setter function
* @param options Configuration for debouncing and streaming behavior
* @returns A tuple containing the current value and setter function
*/
export function useSubBlockValue<T = any>(
blockId: string,
subBlockId: string,
triggerWorkflowUpdate = false
triggerWorkflowUpdate = false,
options?: UseSubBlockValueOptions
): readonly [T | null, (value: T) => void] {
const { debounceMs = 150, isStreaming = false, onStreamingEnd } = options || {}

const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()

const blockType = useWorkflowStore(
|
||||
@@ -187,6 +202,12 @@ export function useSubBlockValue<T = any>(
|
||||
// Previous model reference for detecting model changes
|
||||
const prevModelRef = useRef<string | null>(null)
|
||||
|
||||
// Debouncing refs
|
||||
const debounceTimerRef = useRef<NodeJS.Timeout | null>(null)
|
||||
const lastEmittedValueRef = useRef<T | null>(null)
|
||||
const streamingValueRef = useRef<T | null>(null)
|
||||
const wasStreamingRef = useRef<boolean>(false)
|
||||
|
||||
// Get value from subblock store - always call this hook unconditionally
|
||||
const storeValue = useSubBlockStore(
|
||||
useCallback((state) => state.getValue(blockId, subBlockId), [blockId, subBlockId])
|
||||
@@ -211,6 +232,36 @@ export function useSubBlockValue<T = any>(
|
||||
// Compute the modelValue based on block type
|
||||
const modelValue = isProviderBasedBlock ? (modelSubBlockValue as string) : null
|
||||
|
||||
// Cleanup timer on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (debounceTimerRef.current) {
|
||||
clearTimeout(debounceTimerRef.current)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
// Emit the value to socket/DB
|
||||
const emitValue = useCallback(
|
||||
(value: T) => {
|
||||
collaborativeSetSubblockValue(blockId, subBlockId, value)
|
||||
lastEmittedValueRef.current = value
|
||||
},
|
||||
[blockId, subBlockId, collaborativeSetSubblockValue]
|
||||
)
|
||||
|
||||
// Handle streaming mode changes
|
||||
useEffect(() => {
|
||||
// If we just exited streaming mode, emit the final value
|
||||
if (wasStreamingRef.current && !isStreaming && streamingValueRef.current !== null) {
|
||||
logger.debug('Streaming ended, persisting final value', { blockId, subBlockId })
|
||||
emitValue(streamingValueRef.current)
|
||||
streamingValueRef.current = null
|
||||
onStreamingEnd?.()
|
||||
}
|
||||
wasStreamingRef.current = isStreaming
|
||||
}, [isStreaming, blockId, subBlockId, emitValue, onStreamingEnd])
|
||||
|
||||
// Hook to set a value in the subblock store
|
||||
const setValue = useCallback(
|
||||
(newValue: T) => {
|
||||
@@ -218,6 +269,22 @@ export function useSubBlockValue<T = any>(
|
||||
if (!isEqual(valueRef.current, newValue)) {
|
||||
valueRef.current = newValue
|
||||
|
||||
// Always update local store immediately for UI responsiveness
|
||||
useSubBlockStore.setState((state) => ({
|
||||
workflowValues: {
|
||||
...state.workflowValues,
|
||||
[useWorkflowRegistry.getState().activeWorkflowId || '']: {
|
||||
...state.workflowValues[useWorkflowRegistry.getState().activeWorkflowId || ''],
|
||||
[blockId]: {
|
||||
...state.workflowValues[useWorkflowRegistry.getState().activeWorkflowId || '']?.[
|
||||
blockId
|
||||
],
|
||||
[subBlockId]: newValue,
|
||||
},
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
// Ensure we're passing the actual value, not a reference that might change
|
||||
const valueCopy =
|
||||
newValue === null
|
||||
@@ -231,8 +298,27 @@ export function useSubBlockValue<T = any>(
|
||||
storeApiKeyValue(blockId, blockType, modelValue, newValue, storeValue)
|
||||
}
|
||||
|
||||
// Use collaborative function which handles both local store update and socket emission
|
||||
collaborativeSetSubblockValue(blockId, subBlockId, valueCopy)
|
||||
// Clear any existing debounce timer
|
||||
if (debounceTimerRef.current) {
|
||||
clearTimeout(debounceTimerRef.current)
|
||||
debounceTimerRef.current = null
|
||||
}
|
||||
|
||||
// If streaming, just store the value without emitting
|
||||
if (isStreaming) {
|
||||
streamingValueRef.current = valueCopy
|
||||
} else {
|
||||
// Detect large changes for extended debounce
|
||||
const isLargeChange = detectLargeChange(lastEmittedValueRef.current, valueCopy)
|
||||
const effectiveDebounceMs = isLargeChange ? debounceMs * 2 : debounceMs
|
||||
|
||||
// Debounce the socket emission
|
||||
debounceTimerRef.current = setTimeout(() => {
|
||||
if (valueRef.current !== null && valueRef.current !== lastEmittedValueRef.current) {
|
||||
emitValue(valueCopy)
|
||||
}
|
||||
}, effectiveDebounceMs)
|
||||
}
|
||||
|
||||
if (triggerWorkflowUpdate) {
|
||||
useWorkflowStore.getState().triggerUpdate()
|
||||
@@ -247,7 +333,9 @@ export function useSubBlockValue<T = any>(
|
||||
storeValue,
|
||||
triggerWorkflowUpdate,
|
||||
modelValue,
|
||||
collaborativeSetSubblockValue,
|
||||
isStreaming,
|
||||
debounceMs,
|
||||
emitValue,
|
||||
]
|
||||
)
|
||||
|
||||
@@ -320,5 +408,29 @@ export function useSubBlockValue<T = any>(
}
}, [storeValue, initialValue])

// Return appropriate tuple based on whether options were provided
return [storeValue !== undefined ? storeValue : initialValue, setValue] as const
}

// Helper function to detect large changes
function detectLargeChange(oldValue: any, newValue: any): boolean {
// Handle null/undefined
if (oldValue == null && newValue == null) return false
if (oldValue == null || newValue == null) return true

// For strings, check if it's a large paste or deletion
if (typeof oldValue === 'string' && typeof newValue === 'string') {
const sizeDiff = Math.abs(newValue.length - oldValue.length)
// Consider it a large change if more than 50 characters changed at once
return sizeDiff > 50
}

// For arrays, check length difference
if (Array.isArray(oldValue) && Array.isArray(newValue)) {
const sizeDiff = Math.abs(newValue.length - oldValue.length)
return sizeDiff > 5
}

// For other types, always treat as small change
return false
}

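A minimal usage sketch of the extended hook (the component and import path are hypothetical; the option names and call shape match the hunks above): local edits hit the store immediately, socket emission is debounced, and while isStreaming is true nothing is persisted until streaming ends.

import { useSubBlockValue } from './use-sub-block-value' // adjust to the hook's actual location

function StreamingEditor({
  blockId,
  subBlockId,
  isAiStreaming,
}: {
  blockId: string
  subBlockId: string
  isAiStreaming: boolean
}) {
  const [value, setValue] = useSubBlockValue<string>(blockId, subBlockId, false, {
    debounceMs: 150,
    isStreaming: isAiStreaming,
    onStreamingEnd: () => console.log('final streamed value persisted'),
  })
  return <textarea value={value ?? ''} onChange={(e) => setValue(e.target.value)} />
}
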
@@ -654,7 +654,9 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='top'>
|
||||
{!userPermissions.canEdit
|
||||
? 'Read-only mode'
|
||||
? userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Read-only mode'
|
||||
: blockAdvancedMode
|
||||
? 'Switch to Basic Mode'
|
||||
: 'Switch to Advanced Mode'}
|
||||
@@ -750,7 +752,9 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='top'>
|
||||
{!userPermissions.canEdit
|
||||
? 'Read-only mode'
|
||||
? userPermissions.isOfflineMode
|
||||
? 'Connection lost - please refresh'
|
||||
: 'Read-only mode'
|
||||
: isWide
|
||||
? 'Narrow Block'
|
||||
: 'Expand Block'}
|
||||
|
||||
@@ -104,10 +104,8 @@ export function useBlockConnections(blockId: string) {
|
||||
// Get the response format from the subblock store
|
||||
const responseFormatValue = useSubBlockStore.getState().getValue(sourceId, 'responseFormat')
|
||||
|
||||
let responseFormat
|
||||
|
||||
// Safely parse response format with proper error handling
|
||||
responseFormat = parseResponseFormatSafely(responseFormatValue, sourceId)
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, sourceId)
|
||||
|
||||
// Get the default output type from the block's outputs
|
||||
const defaultOutputs: Field[] = Object.entries(sourceBlock.outputs || {}).map(([key]) => ({
|
||||
@@ -140,10 +138,8 @@ export function useBlockConnections(blockId: string) {
|
||||
.getState()
|
||||
.getValue(edge.source, 'responseFormat')
|
||||
|
||||
let responseFormat
|
||||
|
||||
// Safely parse response format with proper error handling
|
||||
responseFormat = parseResponseFormatSafely(responseFormatValue, edge.source)
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, edge.source)
|
||||
|
||||
// Get the default output type from the block's outputs
|
||||
const defaultOutputs: Field[] = Object.entries(sourceBlock.outputs || {}).map(([key]) => ({
|
||||
|
||||
@@ -217,10 +217,13 @@ export function useWorkflowExecution() {
|
||||
result.logs.forEach((log: BlockLog) => {
|
||||
if (streamedContent.has(log.blockId)) {
|
||||
const content = streamedContent.get(log.blockId) || ''
|
||||
if (log.output) {
|
||||
log.output.content = content
|
||||
}
|
||||
useConsoleStore.getState().updateConsole(log.blockId, content)
|
||||
// For console display, show the actual structured block output instead of formatted streaming content
|
||||
// This ensures console logs match the block state structure
|
||||
// Use replaceOutput to completely replace the output instead of merging
|
||||
useConsoleStore.getState().updateConsole(log.blockId, {
|
||||
replaceOutput: log.output,
|
||||
success: true,
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
'use client'
|
||||
|
||||
import React, { createContext, useContext, useMemo } from 'react'
|
||||
import type React from 'react'
|
||||
import { createContext, useContext, useEffect, useMemo, useState } from 'react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { useUserPermissions, type WorkspaceUserPermissions } from '@/hooks/use-user-permissions'
|
||||
@@ -8,6 +9,7 @@ import {
|
||||
useWorkspacePermissions,
|
||||
type WorkspacePermissions,
|
||||
} from '@/hooks/use-workspace-permissions'
|
||||
import { usePresence } from '../../[workflowId]/hooks/use-presence'
|
||||
|
||||
const logger = createLogger('WorkspacePermissionsProvider')
|
||||
|
||||
@@ -18,88 +20,140 @@ interface WorkspacePermissionsContextType {
|
||||
permissionsError: string | null
|
||||
updatePermissions: (newPermissions: WorkspacePermissions) => void
|
||||
|
||||
// Computed user permissions
|
||||
userPermissions: WorkspaceUserPermissions
|
||||
// Computed user permissions (connection-aware)
|
||||
userPermissions: WorkspaceUserPermissions & { isOfflineMode?: boolean }
|
||||
|
||||
// Connection state management
|
||||
setOfflineMode: (isOffline: boolean) => void
|
||||
}
|
||||
|
||||
const WorkspacePermissionsContext = createContext<WorkspacePermissionsContextType | null>(null)
|
||||
const WorkspacePermissionsContext = createContext<WorkspacePermissionsContextType>({
|
||||
workspacePermissions: null,
|
||||
permissionsLoading: false,
|
||||
permissionsError: null,
|
||||
updatePermissions: () => {},
|
||||
userPermissions: {
|
||||
canRead: false,
|
||||
canEdit: false,
|
||||
canAdmin: false,
|
||||
userPermissions: 'read',
|
||||
isLoading: false,
|
||||
error: null,
|
||||
},
|
||||
setOfflineMode: () => {},
|
||||
})
|
||||
|
||||
interface WorkspacePermissionsProviderProps {
|
||||
children: React.ReactNode
|
||||
}
|
||||
|
||||
const WorkspacePermissionsProvider = React.memo<WorkspacePermissionsProviderProps>(
|
||||
({ children }) => {
|
||||
const params = useParams()
|
||||
const workspaceId = params.workspaceId as string
|
||||
/**
|
||||
* Provider that manages workspace permissions and user access
|
||||
* Also provides connection-aware permissions that enforce read-only mode when offline
|
||||
*/
|
||||
export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsProviderProps) {
|
||||
const params = useParams()
|
||||
const workspaceId = params?.workspaceId as string
|
||||
|
||||
if (!workspaceId) {
|
||||
logger.warn('Workspace ID is undefined from params:', params)
|
||||
// Manage offline mode state locally
|
||||
const [isOfflineMode, setIsOfflineMode] = useState(false)
|
||||
const [hasBeenConnected, setHasBeenConnected] = useState(false)
|
||||
|
||||
// Fetch workspace permissions and loading state
|
||||
const {
|
||||
permissions: workspacePermissions,
|
||||
loading: permissionsLoading,
|
||||
error: permissionsError,
|
||||
updatePermissions,
|
||||
} = useWorkspacePermissions(workspaceId)
|
||||
|
||||
// Get base user permissions from workspace permissions
|
||||
const baseUserPermissions = useUserPermissions(
|
||||
workspacePermissions,
|
||||
permissionsLoading,
|
||||
permissionsError
|
||||
)
|
||||
|
||||
// Get connection status and update offline mode accordingly
|
||||
const { isConnected } = usePresence()
|
||||
|
||||
useEffect(() => {
|
||||
if (isConnected) {
|
||||
// Mark that we've been connected at least once
|
||||
setHasBeenConnected(true)
|
||||
// On initial connection, allow going online
|
||||
if (!hasBeenConnected) {
|
||||
setIsOfflineMode(false)
|
||||
}
|
||||
// If we were previously connected and this is a reconnection, stay offline (user must refresh)
|
||||
} else if (hasBeenConnected) {
|
||||
// Only enter offline mode if we were previously connected and now disconnected
|
||||
setIsOfflineMode(true)
|
||||
}
|
||||
// If not connected and never been connected, stay in initial state (not offline mode)
|
||||
}, [isConnected, hasBeenConnected])
|
||||
|
||||
// Create connection-aware permissions that override user permissions when offline
|
||||
const userPermissions = useMemo((): WorkspaceUserPermissions & { isOfflineMode?: boolean } => {
|
||||
if (isOfflineMode) {
|
||||
// In offline mode, force read-only permissions regardless of actual user permissions
|
||||
return {
|
||||
...baseUserPermissions,
|
||||
canEdit: false,
|
||||
canAdmin: false,
|
||||
// Keep canRead true so users can still view content
|
||||
canRead: baseUserPermissions.canRead,
|
||||
isOfflineMode: true,
|
||||
}
|
||||
}
|
||||
|
||||
const {
|
||||
permissions: workspacePermissions,
|
||||
loading: permissionsLoading,
|
||||
error: permissionsError,
|
||||
updatePermissions,
|
||||
} = useWorkspacePermissions(workspaceId)
|
||||
// When online, use normal permissions
|
||||
return {
|
||||
...baseUserPermissions,
|
||||
isOfflineMode: false,
|
||||
}
|
||||
}, [baseUserPermissions, isOfflineMode])
|
||||
|
||||
const userPermissions = useUserPermissions(
|
||||
const contextValue = useMemo(
|
||||
() => ({
|
||||
workspacePermissions,
|
||||
permissionsLoading,
|
||||
permissionsError
|
||||
)
|
||||
permissionsError,
|
||||
updatePermissions,
|
||||
userPermissions,
|
||||
setOfflineMode: setIsOfflineMode,
|
||||
}),
|
||||
[workspacePermissions, permissionsLoading, permissionsError, updatePermissions, userPermissions]
|
||||
)
|
||||
|
||||
const contextValue = useMemo(
|
||||
() => ({
|
||||
workspacePermissions,
|
||||
permissionsLoading,
|
||||
permissionsError,
|
||||
updatePermissions,
|
||||
userPermissions,
|
||||
}),
|
||||
[
|
||||
workspacePermissions,
|
||||
permissionsLoading,
|
||||
permissionsError,
|
||||
updatePermissions,
|
||||
userPermissions,
|
||||
]
|
||||
)
|
||||
|
||||
return (
|
||||
<WorkspacePermissionsContext.Provider value={contextValue}>
|
||||
{children}
|
||||
</WorkspacePermissionsContext.Provider>
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
WorkspacePermissionsProvider.displayName = 'WorkspacePermissionsProvider'
|
||||
|
||||
export { WorkspacePermissionsProvider }
|
||||
return (
<WorkspacePermissionsContext.Provider value={contextValue}>
{children}
</WorkspacePermissionsContext.Provider>
)
}

/**
* Hook to access workspace permissions context
* This replaces individual useWorkspacePermissions calls to avoid duplicate API requests
* Hook to access workspace permissions and data from context
* This provides both raw workspace permissions and computed user permissions
*/
export function useWorkspacePermissionsContext(): WorkspacePermissionsContextType {
const context = useContext(WorkspacePermissionsContext)

if (!context) {
throw new Error(
'useWorkspacePermissionsContext must be used within a WorkspacePermissionsProvider'
)
}

return context
}

/**
* Hook to access user permissions from context
* This replaces individual useUserPermissions calls
* This replaces individual useUserPermissions calls and includes connection-aware permissions
*/
export function useUserPermissionsContext(): WorkspaceUserPermissions {
export function useUserPermissionsContext(): WorkspaceUserPermissions & {
isOfflineMode?: boolean
} {
const { userPermissions } = useWorkspacePermissionsContext()
return userPermissions
}

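An illustrative consumer of the connection-aware context (the component name is hypothetical; the import path and the isOfflineMode check mirror the toolbar and control-bar hunks above): when the socket drops, the provider forces canEdit to false and sets isOfflineMode, so the UI can tell "no permission" apart from "connection lost".

import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/w/components/providers/workspace-permissions-provider'

function EditGuardHint() {
  const userPermissions = useUserPermissionsContext()
  if (userPermissions.canEdit) return null
  return (
    <span>
      {userPermissions.isOfflineMode
        ? 'Connection lost - please refresh'
        : 'Edit permissions required'}
    </span>
  )
}
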
@@ -7,6 +7,7 @@ import { useParams, usePathname, useRouter } from 'next/navigation'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { useSession } from '@/lib/auth-client'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import {
|
||||
getKeyboardShortcutText,
|
||||
@@ -27,7 +28,7 @@ import { WorkspaceHeader } from './components/workspace-header/workspace-header'
|
||||
|
||||
const logger = createLogger('Sidebar')
|
||||
|
||||
const IS_DEV = process.env.NODE_ENV === 'development'
|
||||
const IS_DEV = env.NODE_ENV === 'development'
|
||||
|
||||
export function Sidebar() {
|
||||
useGlobalShortcuts()
|
||||
|
||||
@@ -1,316 +0,0 @@
|
||||
import { S3Icon } from '@/components/icons'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
import type { BlockConfig } from '../types'
|
||||
|
||||
// Define the expected response type for AWS Lambda operations
|
||||
interface AWSLambdaResponse extends ToolResponse {
|
||||
output: {
|
||||
functionArn: string
|
||||
functionName: string
|
||||
endpointName?: string
|
||||
endpointUrl?: string
|
||||
runtime: string
|
||||
region: string
|
||||
status: string
|
||||
lastModified: string
|
||||
codeSize: number
|
||||
description: string
|
||||
timeout: number
|
||||
memorySize: number
|
||||
environment: Record<string, string>
|
||||
tags: Record<string, string>
|
||||
codeFiles: Record<string, string>
|
||||
handler: string
|
||||
apiGatewayId?: string
|
||||
stageName?: string
|
||||
}
|
||||
}
|
||||
|
||||
export const AWSLambdaBlock: BlockConfig<AWSLambdaResponse> = {
|
||||
type: 'aws_lambda',
|
||||
name: 'AWS Lambda',
|
||||
description: 'Deploy and manage AWS Lambda functions',
|
||||
longDescription:
|
||||
'Create, update, and manage AWS Lambda functions with automatic deployment. Configure runtime environments, memory allocation, timeout settings, and environment variables for serverless function execution. Use fetch to retrieve existing function details and code files to understand the current state, then deploy with any desired changes to the function configuration and code.',
|
||||
docsLink: 'https://docs.simstudio.ai/tools/aws-lambda',
|
||||
category: 'tools',
|
||||
bgColor: '#FF9900',
|
||||
icon: S3Icon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
{ label: 'Fetch', id: 'fetch' },
|
||||
{ label: 'Create/Update', id: 'create/update' },
|
||||
{ label: 'Deploy Endpoint', id: 'deploy_endpoint' },
|
||||
{ label: 'Get Prompts', id: 'getPrompts' },
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 'accessKeyId',
|
||||
title: 'AWS Access Key ID',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'Enter AWS Access Key ID',
|
||||
password: true,
|
||||
description: 'AWS Access Key ID for authentication. Required for all operations.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['fetch', 'create/update', 'deploy_endpoint'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'secretAccessKey',
|
||||
title: 'AWS Secret Access Key',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'Enter AWS Secret Access Key',
|
||||
password: true,
|
||||
description: 'AWS Secret Access Key for authentication. Required for all operations.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['fetch', 'create/update', 'deploy_endpoint'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'role',
|
||||
title: 'Role ARN',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'Enter the IAM Role ARN for Lambda execution',
|
||||
password: false,
|
||||
description:
|
||||
'IAM Role ARN that the Lambda function will assume during execution. Must have appropriate permissions.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['fetch', 'create/update', 'deploy_endpoint'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'region',
|
||||
title: 'AWS Region',
|
||||
type: 'dropdown',
|
||||
layout: 'full',
|
||||
options: [
|
||||
'us-east-1',
|
||||
'us-east-2',
|
||||
'us-west-1',
|
||||
'us-west-2',
|
||||
'af-south-1',
|
||||
'ap-east-1',
|
||||
'ap-south-1',
|
||||
'ap-northeast-1',
|
||||
'ap-northeast-2',
|
||||
'ap-northeast-3',
|
||||
'ap-southeast-1',
|
||||
'ap-southeast-2',
|
||||
'ca-central-1',
|
||||
'eu-central-1',
|
||||
'eu-west-1',
|
||||
'eu-west-2',
|
||||
'eu-west-3',
|
||||
'eu-north-1',
|
||||
'eu-south-1',
|
||||
'me-south-1',
|
||||
'sa-east-1',
|
||||
],
|
||||
description: 'AWS region where the Lambda function will be deployed or is located.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['fetch', 'create/update', 'deploy_endpoint'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'functionName',
|
||||
title: 'Function Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'Enter Lambda function name',
|
||||
description:
|
||||
'Name of the Lambda function. For fetch operations, this must be an existing function to understand its current state. For create/update, this will be the name of the new function or the existing function to update with any desired changes.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['fetch', 'create/update', 'deploy_endpoint'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'endpointName',
|
||||
title: 'Endpoint Name',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'Enter API Gateway endpoint name',
|
||||
description:
|
||||
'Name for the API Gateway HTTP API endpoint. This will be used to create the API Gateway and will appear in the endpoint URL.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['deploy_endpoint'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'runtime',
|
||||
title: 'Runtime',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'e.g., nodejs18.x, python3.11, java11',
|
||||
description:
|
||||
'Lambda runtime environment. Common values: nodejs18.x, python3.11, java11, go1.x, dotnet6, ruby2.7',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create/update'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'handler',
|
||||
title: 'Handler',
|
||||
type: 'short-input',
|
||||
layout: 'full',
|
||||
placeholder: 'e.g., index.handler',
|
||||
description:
|
||||
'Function handler that Lambda calls to start execution. Format varies by runtime: index.handler (Node.js), lambda_function.lambda_handler (Python), etc.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create/update'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'timeout',
|
||||
title: 'Timeout (seconds)',
|
||||
type: 'short-input',
|
||||
layout: 'half',
|
||||
placeholder: 'Enter timeout in seconds (1-900)',
|
||||
description: 'Function timeout in seconds. Must be between 1 and 900 seconds (15 minutes).',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create/update'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'memorySize',
|
||||
title: 'Memory (MB)',
|
||||
type: 'short-input',
|
||||
layout: 'half',
|
||||
placeholder: 'Enter memory in MB (128-10240)',
|
||||
description:
|
||||
'Amount of memory allocated to the function in MB. Must be between 128 and 10240 MB.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create/update'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'code',
|
||||
title: 'Function Code',
|
||||
type: 'code',
|
||||
layout: 'full',
|
||||
language: 'json',
|
||||
placeholder: '{\n "index.js": "exports.handler = async (event) => {...};"\n}',
|
||||
description:
|
||||
'Function code files as JSON object. Keys are file paths, values are file contents. For Node.js, typically include index.js with the handler function.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create/update'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'environmentVariables',
|
||||
title: 'Environment Variables',
|
||||
type: 'table',
|
||||
layout: 'full',
|
||||
columns: ['Key', 'Value'],
|
||||
placeholder: 'Add environment variables as key-value pairs',
|
||||
description:
|
||||
'Environment variables that will be available to the Lambda function during execution.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create/update'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'tags',
|
||||
title: 'Tags',
|
||||
type: 'table',
|
||||
layout: 'full',
|
||||
columns: ['Key', 'Value'],
|
||||
placeholder: 'Add tags as key-value pairs',
|
||||
description: 'Tags to associate with the Lambda function for organization and cost tracking.',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['create/update'],
|
||||
},
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
'aws_lambda_deploy',
|
||||
'aws_lambda_deploy_endpoint',
|
||||
'aws_lambda_fetch',
|
||||
'aws_lambda_get_prompts',
|
||||
],
|
||||
config: {
|
||||
tool: (params: Record<string, any>) => {
|
||||
const operation = String(params.operation || '').trim()
|
||||
// Only map user-facing names; pass through tool IDs as-is
|
||||
const operationMap: Record<string, string> = {
|
||||
fetch: 'aws_lambda_fetch',
|
||||
'create/update': 'aws_lambda_deploy',
|
||||
deploy_endpoint: 'aws_lambda_deploy_endpoint',
|
||||
getPrompts: 'aws_lambda_get_prompts',
|
||||
}
|
||||
if (operationMap[operation]) {
|
||||
return operationMap[operation]
|
||||
}
|
||||
// If already a tool ID, return as-is
|
||||
if (
|
||||
operation === 'aws_lambda_fetch' ||
|
||||
operation === 'aws_lambda_deploy' ||
|
||||
operation === 'aws_lambda_deploy_endpoint' ||
|
||||
operation === 'aws_lambda_get_prompts'
|
||||
) {
|
||||
return operation
|
||||
}
|
||||
// Default fallback
|
||||
console.warn(`Unknown operation: "${operation}", defaulting to aws_lambda_fetch`)
|
||||
return 'aws_lambda_fetch'
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
accessKeyId: { type: 'string', required: true },
|
||||
secretAccessKey: { type: 'string', required: true },
|
||||
region: { type: 'string', required: true },
|
||||
role: { type: 'string', required: true },
|
||||
operation: { type: 'string', required: true },
|
||||
functionName: { type: 'string', required: true },
|
||||
endpointName: { type: 'string', required: false },
|
||||
handler: { type: 'string', required: false },
|
||||
runtime: { type: 'string', required: false },
|
||||
code: { type: 'json', required: false },
|
||||
timeout: { type: 'number', required: false },
|
||||
memorySize: { type: 'number', required: false },
|
||||
environmentVariables: { type: 'json', required: false },
|
||||
tags: { type: 'json', required: false },
|
||||
},
|
||||
outputs: {
|
||||
functionArn: 'string',
|
||||
functionName: 'string',
|
||||
endpointName: 'any',
|
||||
endpointUrl: 'any',
|
||||
runtime: 'string',
|
||||
region: 'string',
|
||||
status: 'string',
|
||||
lastModified: 'string',
|
||||
codeSize: 'number',
|
||||
description: 'string',
|
||||
timeout: 'number',
|
||||
memorySize: 'number',
|
||||
environment: 'json',
|
||||
tags: 'json',
|
||||
codeFiles: 'json',
|
||||
handler: 'string',
|
||||
apiGatewayId: 'any',
|
||||
stageName: 'any',
|
||||
},
|
||||
}
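For reference, a minimal sketch of how the operation mapping above resolves a tool ID (hypothetical usage; it assumes the block config is exported as AWSLambdaBlock, as the registry import below suggests):

import { AWSLambdaBlock } from './blocks/aws_lambda'

// 'create/update' is a user-facing operation name, so it is mapped to a tool ID.
const toolId = AWSLambdaBlock.tools.config.tool({ operation: 'create/update' })
// => 'aws_lambda_deploy'
// Tool IDs such as 'aws_lambda_fetch' pass through unchanged, and any unknown
// value falls back to 'aws_lambda_fetch' after a console warning.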
@@ -6,7 +6,6 @@
 import { AgentBlock } from './blocks/agent'
 import { AirtableBlock } from './blocks/airtable'
 import { ApiBlock } from './blocks/api'
-import { AWSLambdaBlock } from './blocks/aws_lambda'
 // import { AutoblocksBlock } from './blocks/autoblocks'
 import { BrowserUseBlock } from './blocks/browser_use'
 import { ClayBlock } from './blocks/clay'

@@ -73,7 +72,6 @@ export const registry: Record<string, BlockConfig> = {
   agent: AgentBlock,
   airtable: AirtableBlock,
   api: ApiBlock,
-  aws_lambda: AWSLambdaBlock,
   // autoblocks: AutoblocksBlock,
   browser_use: BrowserUseBlock,
   clay: ClayBlock,

@@ -1,7 +1,8 @@
 import { describe, expect, test, vi } from 'vitest'
+import { extractFieldsFromSchema, parseResponseFormatSafely } from '@/lib/response-format'
 import type { BlockState } from '@/stores/workflows/workflow/types'
 import { generateLoopBlocks } from '@/stores/workflows/workflow/utils'
-import { checkTagTrigger, extractFieldsFromSchema } from './tag-dropdown'
+import { checkTagTrigger } from './tag-dropdown'

 vi.mock('@/stores/workflows/workflow/store', () => ({
   useWorkflowStore: vi.fn(() => ({
@@ -24,6 +25,15 @@ vi.mock('@/stores/panel/variables/store', () => ({
   })),
 }))

+vi.mock('@/stores/workflows/subblock/store', () => ({
+  useSubBlockStore: vi.fn(() => ({
+    getValue: vi.fn(() => null),
+    getState: vi.fn(() => ({
+      getValue: vi.fn(() => null),
+    })),
+  })),
+}))

describe('TagDropdown Loop Suggestions', () => {
|
||||
test('should generate correct loop suggestions for forEach loops', () => {
|
||||
const blocks: Record<string, BlockState> = {
|
||||
@@ -603,3 +613,180 @@ describe('TagDropdown Tag Selection Logic', () => {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('TagDropdown Response Format Support', () => {
|
||||
it.concurrent(
|
||||
'should use custom schema properties when response format is specified',
|
||||
async () => {
|
||||
// Mock the subblock store to return a custom response format
|
||||
const mockGetValue = vi.fn()
|
||||
const mockUseSubBlockStore = vi.mocked(
|
||||
await import('@/stores/workflows/subblock/store')
|
||||
).useSubBlockStore
|
||||
|
||||
// Set up the mock to return the example schema from the user
|
||||
const responseFormatValue = JSON.stringify({
|
||||
name: 'short_schema',
|
||||
description: 'A minimal example schema with a single string property.',
|
||||
strict: true,
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
example_property: {
|
||||
type: 'string',
|
||||
description: 'A simple string property.',
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['example_property'],
|
||||
},
|
||||
})
|
||||
|
||||
mockGetValue.mockImplementation((blockId: string, subBlockId: string) => {
|
||||
if (blockId === 'agent1' && subBlockId === 'responseFormat') {
|
||||
return responseFormatValue
|
||||
}
|
||||
return null
|
||||
})
|
||||
|
||||
mockUseSubBlockStore.mockReturnValue({
|
||||
getValue: mockGetValue,
|
||||
getState: () => ({
|
||||
getValue: mockGetValue,
|
||||
}),
|
||||
} as any)
|
||||
|
||||
// Test the parseResponseFormatSafely function
|
||||
const parsedFormat = parseResponseFormatSafely(responseFormatValue, 'agent1')
|
||||
|
||||
expect(parsedFormat).toEqual({
|
||||
name: 'short_schema',
|
||||
description: 'A minimal example schema with a single string property.',
|
||||
strict: true,
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
example_property: {
|
||||
type: 'string',
|
||||
description: 'A simple string property.',
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['example_property'],
|
||||
},
|
||||
})
|
||||
|
||||
// Test the extractFieldsFromSchema function with the parsed format
|
||||
const fields = extractFieldsFromSchema(parsedFormat)
|
||||
|
||||
expect(fields).toEqual([
|
||||
{
|
||||
name: 'example_property',
|
||||
type: 'string',
|
||||
description: 'A simple string property.',
|
||||
},
|
||||
])
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent(
|
||||
'should fallback to default outputs when response format parsing fails',
|
||||
async () => {
|
||||
// Test with invalid JSON
|
||||
const invalidFormat = parseResponseFormatSafely('invalid json', 'agent1')
|
||||
expect(invalidFormat).toBeNull()
|
||||
|
||||
// Test with null/undefined values
|
||||
expect(parseResponseFormatSafely(null, 'agent1')).toBeNull()
|
||||
expect(parseResponseFormatSafely(undefined, 'agent1')).toBeNull()
|
||||
expect(parseResponseFormatSafely('', 'agent1')).toBeNull()
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should handle response format with nested schema correctly', async () => {
|
||||
const responseFormat = {
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
user: {
|
||||
type: 'object',
|
||||
description: 'User information',
|
||||
properties: {
|
||||
name: { type: 'string', description: 'User name' },
|
||||
age: { type: 'number', description: 'User age' },
|
||||
},
|
||||
},
|
||||
status: { type: 'string', description: 'Response status' },
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const fields = extractFieldsFromSchema(responseFormat)
|
||||
|
||||
expect(fields).toEqual([
|
||||
{ name: 'user', type: 'object', description: 'User information' },
|
||||
{ name: 'status', type: 'string', description: 'Response status' },
|
||||
])
|
||||
})
|
||||
|
||||
it.concurrent('should handle response format without schema wrapper', async () => {
|
||||
const responseFormat = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
result: { type: 'boolean', description: 'Operation result' },
|
||||
message: { type: 'string', description: 'Status message' },
|
||||
},
|
||||
}
|
||||
|
||||
const fields = extractFieldsFromSchema(responseFormat)
|
||||
|
||||
expect(fields).toEqual([
|
||||
{ name: 'result', type: 'boolean', description: 'Operation result' },
|
||||
{ name: 'message', type: 'string', description: 'Status message' },
|
||||
])
|
||||
})
|
||||
|
||||
it.concurrent('should return object as-is when it is already parsed', async () => {
|
||||
const responseFormat = {
|
||||
name: 'test_schema',
|
||||
schema: {
|
||||
properties: {
|
||||
data: { type: 'string' },
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const result = parseResponseFormatSafely(responseFormat, 'agent1')
|
||||
|
||||
expect(result).toEqual(responseFormat)
|
||||
})
|
||||
|
||||
it.concurrent('should simulate block tag generation with custom response format', async () => {
|
||||
// Simulate the tag generation logic that would happen in the component
|
||||
const blockName = 'Agent 1'
|
||||
const normalizedBlockName = blockName.replace(/\s+/g, '').toLowerCase() // 'agent1'
|
||||
|
||||
// Mock response format
|
||||
const responseFormat = {
|
||||
schema: {
|
||||
properties: {
|
||||
example_property: { type: 'string', description: 'A simple string property.' },
|
||||
another_field: { type: 'number', description: 'Another field.' },
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const schemaFields = extractFieldsFromSchema(responseFormat)
|
||||
|
||||
// Generate block tags as they would be in the component
|
||||
const blockTags = schemaFields.map((field) => `${normalizedBlockName}.${field.name}`)
|
||||
|
||||
expect(blockTags).toEqual(['agent1.example_property', 'agent1.another_field'])
|
||||
|
||||
// Verify the fields extracted correctly
|
||||
expect(schemaFields).toEqual([
|
||||
{ name: 'example_property', type: 'string', description: 'A simple string property.' },
|
||||
{ name: 'another_field', type: 'number', description: 'Another field.' },
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
import type React from 'react'
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react'
|
||||
import { BlockPathCalculator } from '@/lib/block-path-calculator'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { extractFieldsFromSchema, parseResponseFormatSafely } from '@/lib/response-format'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { getBlock } from '@/blocks'
|
||||
import { Serializer } from '@/serializer'
|
||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||
import type { Variable } from '@/stores/panel/variables/types'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
const logger = createLogger('TagDropdown')
|
||||
|
||||
// Type definitions for component data structures
|
||||
interface BlockTagGroup {
|
||||
blockName: string
|
||||
blockId: string
|
||||
@@ -21,49 +19,6 @@ interface BlockTagGroup {
|
||||
distance: number
|
||||
}
|
||||
|
||||
interface Field {
|
||||
name: string
|
||||
type: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
// Helper function to extract fields from JSON Schema
|
||||
export function extractFieldsFromSchema(schema: any): Field[] {
|
||||
if (!schema || typeof schema !== 'object') {
|
||||
return []
|
||||
}
|
||||
|
||||
// Handle legacy format with fields array
|
||||
if (Array.isArray(schema.fields)) {
|
||||
return schema.fields
|
||||
}
|
||||
|
||||
// Handle new JSON Schema format
|
||||
const schemaObj = schema.schema || schema
|
||||
if (!schemaObj || !schemaObj.properties || typeof schemaObj.properties !== 'object') {
|
||||
return []
|
||||
}
|
||||
|
||||
// Extract fields from schema properties
|
||||
return Object.entries(schemaObj.properties).map(([name, prop]: [string, any]) => {
|
||||
// Handle array format like ['string', 'array']
|
||||
if (Array.isArray(prop)) {
|
||||
return {
|
||||
name,
|
||||
type: prop.includes('array') ? 'array' : prop[0] || 'string',
|
||||
description: undefined,
|
||||
}
|
||||
}
|
||||
|
||||
// Handle object format like { type: 'string', description: '...' }
|
||||
return {
|
||||
name,
|
||||
type: prop.type || 'string',
|
||||
description: prop.description,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
interface TagDropdownProps {
|
||||
visible: boolean
|
||||
onSelect: (newValue: string) => void
|
||||
@@ -169,18 +124,68 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
}
|
||||
|
||||
const blockConfig = getBlock(sourceBlock.type)
|
||||
|
||||
// Handle special blocks that aren't in the registry (loop and parallel)
|
||||
if (!blockConfig) {
|
||||
if (sourceBlock.type === 'loop' || sourceBlock.type === 'parallel') {
|
||||
// Create a mock config with results output for loop/parallel blocks
|
||||
const mockConfig = {
|
||||
outputs: {
|
||||
results: 'array', // These blocks have a results array output
|
||||
},
|
||||
}
|
||||
const blockName = sourceBlock.name || sourceBlock.type
|
||||
const normalizedBlockName = blockName.replace(/\s+/g, '').toLowerCase()
|
||||
|
||||
// Generate output paths for the mock config
|
||||
const outputPaths = generateOutputPaths(mockConfig.outputs)
|
||||
const blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
|
||||
const blockTagGroups: BlockTagGroup[] = [
|
||||
{
|
||||
blockName,
|
||||
blockId: activeSourceBlockId,
|
||||
blockType: sourceBlock.type,
|
||||
tags: blockTags,
|
||||
distance: 0,
|
||||
},
|
||||
]
|
||||
|
||||
return {
|
||||
tags: blockTags,
|
||||
variableInfoMap: {},
|
||||
blockTagGroups,
|
||||
}
|
||||
}
|
||||
return { tags: [], variableInfoMap: {}, blockTagGroups: [] }
|
||||
}
|
||||
|
||||
const blockName = sourceBlock.name || sourceBlock.type
|
||||
const normalizedBlockName = blockName.replace(/\s+/g, '').toLowerCase()
|
||||
|
||||
// Handle blocks with no outputs (like starter) - show as just <blockname>
|
||||
// Check for custom response format first
|
||||
const responseFormatValue = useSubBlockStore
|
||||
.getState()
|
||||
.getValue(activeSourceBlockId, 'responseFormat')
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, activeSourceBlockId)
|
||||
|
||||
let blockTags: string[]
|
||||
if (Object.keys(blockConfig.outputs).length === 0) {
|
||||
|
||||
if (responseFormat) {
|
||||
// Use custom schema properties if response format is specified
|
||||
const schemaFields = extractFieldsFromSchema(responseFormat)
|
||||
if (schemaFields.length > 0) {
|
||||
blockTags = schemaFields.map((field) => `${normalizedBlockName}.${field.name}`)
|
||||
} else {
|
||||
// Fallback to default if schema extraction failed
|
||||
const outputPaths = generateOutputPaths(blockConfig.outputs)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
} else if (Object.keys(blockConfig.outputs).length === 0) {
|
||||
// Handle blocks with no outputs (like starter) - show as just <blockname>
|
||||
blockTags = [normalizedBlockName]
|
||||
} else {
|
||||
// Use default block outputs
|
||||
const outputPaths = generateOutputPaths(blockConfig.outputs)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
@@ -270,28 +275,65 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
{} as Record<string, { type: string; id: string }>
|
||||
)
|
||||
|
||||
// Generate loop tags if current block is in a loop
|
||||
const loopTags: string[] = []
|
||||
// Generate loop contextual block group if current block is in a loop
|
||||
let loopBlockGroup: BlockTagGroup | null = null
|
||||
const containingLoop = Object.entries(loops).find(([_, loop]) => loop.nodes.includes(blockId))
|
||||
let containingLoopBlockId: string | null = null
|
||||
if (containingLoop) {
|
||||
const [_loopId, loop] = containingLoop
|
||||
const [loopId, loop] = containingLoop
|
||||
containingLoopBlockId = loopId
|
||||
const loopType = loop.loopType || 'for'
|
||||
loopTags.push('loop.index')
|
||||
const contextualTags: string[] = ['index']
|
||||
if (loopType === 'forEach') {
|
||||
loopTags.push('loop.currentItem')
|
||||
loopTags.push('loop.items')
|
||||
contextualTags.push('currentItem')
|
||||
contextualTags.push('items')
|
||||
}
|
||||
|
||||
// Add the containing loop block's results to the contextual tags
|
||||
const containingLoopBlock = blocks[loopId]
|
||||
if (containingLoopBlock) {
|
||||
const loopBlockName = containingLoopBlock.name || containingLoopBlock.type
|
||||
const normalizedLoopBlockName = loopBlockName.replace(/\s+/g, '').toLowerCase()
|
||||
contextualTags.push(`${normalizedLoopBlockName}.results`)
|
||||
|
||||
// Create a block group for the loop contextual tags
|
||||
loopBlockGroup = {
|
||||
blockName: loopBlockName,
|
||||
blockId: loopId,
|
||||
blockType: 'loop',
|
||||
tags: contextualTags,
|
||||
distance: 0, // Contextual tags have highest priority
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Generate parallel tags if current block is in parallel
|
||||
const parallelTags: string[] = []
|
||||
// Generate parallel contextual block group if current block is in parallel
|
||||
let parallelBlockGroup: BlockTagGroup | null = null
|
||||
const containingParallel = Object.entries(parallels || {}).find(([_, parallel]) =>
|
||||
parallel.nodes.includes(blockId)
|
||||
)
|
||||
let containingParallelBlockId: string | null = null
|
||||
if (containingParallel) {
|
||||
parallelTags.push('parallel.index')
|
||||
parallelTags.push('parallel.currentItem')
|
||||
parallelTags.push('parallel.items')
|
||||
const [parallelId] = containingParallel
|
||||
containingParallelBlockId = parallelId
|
||||
const contextualTags: string[] = ['index', 'currentItem', 'items']
|
||||
|
||||
// Add the containing parallel block's results to the contextual tags
|
||||
const containingParallelBlock = blocks[parallelId]
|
||||
if (containingParallelBlock) {
|
||||
const parallelBlockName = containingParallelBlock.name || containingParallelBlock.type
|
||||
const normalizedParallelBlockName = parallelBlockName.replace(/\s+/g, '').toLowerCase()
|
||||
contextualTags.push(`${normalizedParallelBlockName}.results`)
|
||||
|
||||
// Create a block group for the parallel contextual tags
|
||||
parallelBlockGroup = {
|
||||
blockName: parallelBlockName,
|
||||
blockId: parallelId,
|
||||
blockType: 'parallel',
|
||||
tags: contextualTags,
|
||||
distance: 0, // Contextual tags have highest priority
|
||||
}
|
||||
}
|
||||
}
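For illustration, the contextual group assembled here for a containing parallel block named 'Parallel 1' would look roughly like this (ids and names are made up):

const exampleParallelGroup: BlockTagGroup = {
  blockName: 'Parallel 1',
  blockId: 'parallel-1',
  blockType: 'parallel',
  tags: ['index', 'currentItem', 'items', 'parallel1.results'],
  distance: 0, // contextual groups are listed before regular block groups
}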
|
||||
|
||||
// Create block tag groups from accessible blocks
|
||||
@@ -303,16 +345,70 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
if (!accessibleBlock) continue
|
||||
|
||||
const blockConfig = getBlock(accessibleBlock.type)
|
||||
if (!blockConfig) continue
|
||||
|
||||
// Handle special blocks that aren't in the registry (loop and parallel)
|
||||
if (!blockConfig) {
|
||||
// For loop and parallel blocks, create a mock config with results output
|
||||
if (accessibleBlock.type === 'loop' || accessibleBlock.type === 'parallel') {
|
||||
// Skip this block if it's the containing loop/parallel block - we'll handle it with contextual tags
|
||||
if (
|
||||
accessibleBlockId === containingLoopBlockId ||
|
||||
accessibleBlockId === containingParallelBlockId
|
||||
) {
|
||||
continue
|
||||
}
|
||||
|
||||
const mockConfig = {
|
||||
outputs: {
|
||||
results: 'array', // These blocks have a results array output
|
||||
},
|
||||
}
|
||||
const blockName = accessibleBlock.name || accessibleBlock.type
|
||||
const normalizedBlockName = blockName.replace(/\s+/g, '').toLowerCase()
|
||||
|
||||
// Generate output paths for the mock config
|
||||
const outputPaths = generateOutputPaths(mockConfig.outputs)
|
||||
const blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
|
||||
blockTagGroups.push({
|
||||
blockName,
|
||||
blockId: accessibleBlockId,
|
||||
blockType: accessibleBlock.type,
|
||||
tags: blockTags,
|
||||
distance: blockDistances[accessibleBlockId] || 0,
|
||||
})
|
||||
|
||||
allBlockTags.push(...blockTags)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
const blockName = accessibleBlock.name || accessibleBlock.type
|
||||
const normalizedBlockName = blockName.replace(/\s+/g, '').toLowerCase()
|
||||
|
||||
// Handle blocks with no outputs (like starter) - show as just <blockname>
|
||||
// Check for custom response format first
|
||||
const responseFormatValue = useSubBlockStore
|
||||
.getState()
|
||||
.getValue(accessibleBlockId, 'responseFormat')
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, accessibleBlockId)
|
||||
|
||||
let blockTags: string[]
|
||||
if (Object.keys(blockConfig.outputs).length === 0) {
|
||||
|
||||
if (responseFormat) {
|
||||
// Use custom schema properties if response format is specified
|
||||
const schemaFields = extractFieldsFromSchema(responseFormat)
|
||||
if (schemaFields.length > 0) {
|
||||
blockTags = schemaFields.map((field) => `${normalizedBlockName}.${field.name}`)
|
||||
} else {
|
||||
// Fallback to default if schema extraction failed
|
||||
const outputPaths = generateOutputPaths(blockConfig.outputs)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
} else if (Object.keys(blockConfig.outputs).length === 0) {
|
||||
// Handle blocks with no outputs (like starter) - show as just <blockname>
|
||||
blockTags = [normalizedBlockName]
|
||||
} else {
|
||||
// Use default block outputs
|
||||
const outputPaths = generateOutputPaths(blockConfig.outputs)
|
||||
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
|
||||
}
|
||||
@@ -328,13 +424,32 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
allBlockTags.push(...blockTags)
|
||||
}
|
||||
|
||||
// Sort block groups by distance (closest first)
|
||||
// Add contextual block groups at the beginning (they have highest priority)
|
||||
const finalBlockTagGroups: BlockTagGroup[] = []
|
||||
if (loopBlockGroup) {
|
||||
finalBlockTagGroups.push(loopBlockGroup)
|
||||
}
|
||||
if (parallelBlockGroup) {
|
||||
finalBlockTagGroups.push(parallelBlockGroup)
|
||||
}
|
||||
|
||||
// Sort regular block groups by distance (closest first) and add them
|
||||
blockTagGroups.sort((a, b) => a.distance - b.distance)
|
||||
finalBlockTagGroups.push(...blockTagGroups)
|
||||
|
||||
// Collect all tags for the main tags array
|
||||
const contextualTags: string[] = []
|
||||
if (loopBlockGroup) {
|
||||
contextualTags.push(...loopBlockGroup.tags)
|
||||
}
|
||||
if (parallelBlockGroup) {
|
||||
contextualTags.push(...parallelBlockGroup.tags)
|
||||
}
|
||||
|
||||
return {
|
||||
tags: [...variableTags, ...loopTags, ...parallelTags, ...allBlockTags],
|
||||
tags: [...variableTags, ...contextualTags, ...allBlockTags],
|
||||
variableInfoMap,
|
||||
blockTagGroups,
|
||||
blockTagGroups: finalBlockTagGroups,
|
||||
}
|
||||
}, [blocks, edges, loops, parallels, blockId, activeSourceBlockId, workflowVariables])
|
||||
|
||||
@@ -345,18 +460,12 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
}, [tags, searchTerm])
|
||||
|
||||
// Group filtered tags by category
|
||||
const { variableTags, loopTags, parallelTags, filteredBlockTagGroups } = useMemo(() => {
|
||||
const { variableTags, filteredBlockTagGroups } = useMemo(() => {
|
||||
const varTags: string[] = []
|
||||
const loopTags: string[] = []
|
||||
const parTags: string[] = []
|
||||
|
||||
filteredTags.forEach((tag) => {
|
||||
if (tag.startsWith('variable.')) {
|
||||
varTags.push(tag)
|
||||
} else if (tag.startsWith('loop.')) {
|
||||
loopTags.push(tag)
|
||||
} else if (tag.startsWith('parallel.')) {
|
||||
parTags.push(tag)
|
||||
}
|
||||
})
|
||||
|
||||
@@ -370,8 +479,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
|
||||
return {
|
||||
variableTags: varTags,
|
||||
loopTags: loopTags,
|
||||
parallelTags: parTags,
|
||||
filteredBlockTagGroups,
|
||||
}
|
||||
}, [filteredTags, blockTagGroups, searchTerm])
|
||||
@@ -379,8 +486,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
// Create ordered tags for keyboard navigation
|
||||
const orderedTags = useMemo(() => {
|
||||
const allBlockTags = filteredBlockTagGroups.flatMap((group) => group.tags)
|
||||
return [...variableTags, ...loopTags, ...parallelTags, ...allBlockTags]
|
||||
}, [variableTags, loopTags, parallelTags, filteredBlockTagGroups])
|
||||
return [...variableTags, ...allBlockTags]
|
||||
}, [variableTags, filteredBlockTagGroups])
|
||||
|
||||
// Create efficient tag index lookup map
|
||||
const tagIndexMap = useMemo(() => {
|
||||
@@ -393,7 +500,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
|
||||
// Handle tag selection and text replacement
|
||||
const handleTagSelect = useCallback(
|
||||
(tag: string) => {
|
||||
(tag: string, blockGroup?: BlockTagGroup) => {
|
||||
const textBeforeCursor = inputValue.slice(0, cursorPosition)
|
||||
const textAfterCursor = inputValue.slice(cursorPosition)
|
||||
|
||||
@@ -401,8 +508,10 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
const lastOpenBracket = textBeforeCursor.lastIndexOf('<')
|
||||
if (lastOpenBracket === -1) return
|
||||
|
||||
// Process variable tags to maintain compatibility
|
||||
// Process different types of tags
|
||||
let processedTag = tag
|
||||
|
||||
// Handle variable tags
|
||||
if (tag.startsWith('variable.')) {
|
||||
const variableName = tag.substring('variable.'.length)
|
||||
const variableObj = Object.values(variables).find(
|
||||
@@ -413,6 +522,19 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
processedTag = tag
|
||||
}
|
||||
}
|
||||
// Handle contextual loop/parallel tags
|
||||
else if (
|
||||
blockGroup &&
|
||||
(blockGroup.blockType === 'loop' || blockGroup.blockType === 'parallel')
|
||||
) {
|
||||
// Check if this is a contextual tag (without dots) that needs a prefix
|
||||
if (!tag.includes('.') && ['index', 'currentItem', 'items'].includes(tag)) {
|
||||
processedTag = `${blockGroup.blockType}.${tag}`
|
||||
} else {
|
||||
// It's already a properly formatted tag (like blockname.results)
|
||||
processedTag = tag
|
||||
}
|
||||
}
|
||||
|
||||
// Handle existing closing bracket
|
||||
const nextCloseBracket = textAfterCursor.indexOf('>')
|
||||
@@ -465,7 +587,12 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
if (selectedIndex >= 0 && selectedIndex < orderedTags.length) {
|
||||
handleTagSelect(orderedTags[selectedIndex])
|
||||
const selectedTag = orderedTags[selectedIndex]
|
||||
// Find which block group this tag belongs to
|
||||
const belongsToGroup = filteredBlockTagGroups.find((group) =>
|
||||
group.tags.includes(selectedTag)
|
||||
)
|
||||
handleTagSelect(selectedTag, belongsToGroup)
|
||||
}
|
||||
break
|
||||
case 'Escape':
|
||||
@@ -479,7 +606,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
window.addEventListener('keydown', handleKeyboardEvent, true)
|
||||
return () => window.removeEventListener('keydown', handleKeyboardEvent, true)
|
||||
}
|
||||
}, [visible, selectedIndex, orderedTags, handleTagSelect, onClose])
|
||||
}, [visible, selectedIndex, orderedTags, filteredBlockTagGroups, handleTagSelect, onClose])
|
||||
|
||||
// Early return if dropdown should not be visible
|
||||
if (!visible || tags.length === 0 || orderedTags.length === 0) return null
|
||||
@@ -552,152 +679,21 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Loop section */}
|
||||
{loopTags.length > 0 && (
|
||||
<>
|
||||
{variableTags.length > 0 && <div className='my-0' />}
|
||||
<div className='px-2 pt-2.5 pb-0.5 font-medium text-muted-foreground text-xs'>
|
||||
Loop
|
||||
</div>
|
||||
<div className='-mx-1 -px-1'>
|
||||
{loopTags.map((tag: string) => {
|
||||
const tagIndex = tagIndexMap.get(tag) ?? -1
|
||||
const loopProperty = tag.split('.')[1]
|
||||
|
||||
// Choose appropriate icon and description based on loop property
|
||||
let tagIcon = 'L'
|
||||
let tagDescription = ''
|
||||
const bgColor = '#8857E6'
|
||||
|
||||
if (loopProperty === 'currentItem') {
|
||||
tagIcon = 'i'
|
||||
tagDescription = 'Current item'
|
||||
} else if (loopProperty === 'items') {
|
||||
tagIcon = 'I'
|
||||
tagDescription = 'All items'
|
||||
} else if (loopProperty === 'index') {
|
||||
tagIcon = '#'
|
||||
tagDescription = 'Index'
|
||||
}
|
||||
|
||||
return (
|
||||
<button
|
||||
key={tag}
|
||||
className={cn(
|
||||
'flex w-full items-center gap-2 px-3 py-1.5 text-left text-sm',
|
||||
'hover:bg-accent hover:text-accent-foreground',
|
||||
'focus:bg-accent focus:text-accent-foreground focus:outline-none',
|
||||
tagIndex === selectedIndex &&
|
||||
tagIndex >= 0 &&
|
||||
'bg-accent text-accent-foreground'
|
||||
)}
|
||||
onMouseEnter={() => setSelectedIndex(tagIndex >= 0 ? tagIndex : 0)}
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
handleTagSelect(tag)
|
||||
}}
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
handleTagSelect(tag)
|
||||
}}
|
||||
>
|
||||
<div
|
||||
className='flex h-5 w-5 items-center justify-center rounded'
|
||||
style={{ backgroundColor: bgColor }}
|
||||
>
|
||||
<span className='h-3 w-3 font-bold text-white text-xs'>{tagIcon}</span>
|
||||
</div>
|
||||
<span className='flex-1 truncate'>{tag}</span>
|
||||
<span className='ml-auto text-muted-foreground text-xs'>
|
||||
{tagDescription}
|
||||
</span>
|
||||
</button>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Parallel section */}
|
||||
{parallelTags.length > 0 && (
|
||||
<>
|
||||
{loopTags.length > 0 && <div className='my-0' />}
|
||||
<div className='px-2 pt-2.5 pb-0.5 font-medium text-muted-foreground text-xs'>
|
||||
Parallel
|
||||
</div>
|
||||
<div className='-mx-1 -px-1'>
|
||||
{parallelTags.map((tag: string) => {
|
||||
const tagIndex = tagIndexMap.get(tag) ?? -1
|
||||
const parallelProperty = tag.split('.')[1]
|
||||
|
||||
// Choose appropriate icon and description based on parallel property
|
||||
let tagIcon = 'P'
|
||||
let tagDescription = ''
|
||||
const bgColor = '#FF5757'
|
||||
|
||||
if (parallelProperty === 'currentItem') {
|
||||
tagIcon = 'i'
|
||||
tagDescription = 'Current item'
|
||||
} else if (parallelProperty === 'items') {
|
||||
tagIcon = 'I'
|
||||
tagDescription = 'All items'
|
||||
} else if (parallelProperty === 'index') {
|
||||
tagIcon = '#'
|
||||
tagDescription = 'Index'
|
||||
}
|
||||
|
||||
return (
|
||||
<button
|
||||
key={tag}
|
||||
className={cn(
|
||||
'flex w-full items-center gap-2 px-3 py-1.5 text-left text-sm',
|
||||
'hover:bg-accent hover:text-accent-foreground',
|
||||
'focus:bg-accent focus:text-accent-foreground focus:outline-none',
|
||||
tagIndex === selectedIndex &&
|
||||
tagIndex >= 0 &&
|
||||
'bg-accent text-accent-foreground'
|
||||
)}
|
||||
onMouseEnter={() => setSelectedIndex(tagIndex >= 0 ? tagIndex : 0)}
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
handleTagSelect(tag)
|
||||
}}
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
handleTagSelect(tag)
|
||||
}}
|
||||
>
|
||||
<div
|
||||
className='flex h-5 w-5 items-center justify-center rounded'
|
||||
style={{ backgroundColor: bgColor }}
|
||||
>
|
||||
<span className='h-3 w-3 font-bold text-white text-xs'>{tagIcon}</span>
|
||||
</div>
|
||||
<span className='flex-1 truncate'>{tag}</span>
|
||||
<span className='ml-auto text-muted-foreground text-xs'>
|
||||
{tagDescription}
|
||||
</span>
|
||||
</button>
|
||||
)
|
||||
})}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Block sections */}
|
||||
{filteredBlockTagGroups.length > 0 && (
|
||||
<>
|
||||
{(variableTags.length > 0 || loopTags.length > 0 || parallelTags.length > 0) && (
|
||||
<div className='my-0' />
|
||||
)}
|
||||
{variableTags.length > 0 && <div className='my-0' />}
|
||||
{filteredBlockTagGroups.map((group) => {
|
||||
// Get block color from configuration
|
||||
const blockConfig = getBlock(group.blockType)
|
||||
const blockColor = blockConfig?.bgColor || '#2F55FF'
|
||||
let blockColor = blockConfig?.bgColor || '#2F55FF'
|
||||
|
||||
// Handle special colors for loop and parallel blocks
|
||||
if (group.blockType === 'loop') {
|
||||
blockColor = '#8857E6' // Purple color for loop blocks
|
||||
} else if (group.blockType === 'parallel') {
|
||||
blockColor = '#FF5757' // Red color for parallel blocks
|
||||
}
|
||||
|
||||
return (
|
||||
<div key={group.blockId}>
|
||||
@@ -707,11 +703,37 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
<div>
|
||||
{group.tags.map((tag: string) => {
|
||||
const tagIndex = tagIndexMap.get(tag) ?? -1
|
||||
// Extract path after block name (e.g., "field" from "blockname.field")
|
||||
// For root reference blocks, show the block name instead of empty path
|
||||
const tagParts = tag.split('.')
|
||||
const path = tagParts.slice(1).join('.')
|
||||
const displayText = path || group.blockName
|
||||
|
||||
// Handle display text based on tag type
|
||||
let displayText: string
|
||||
let tagDescription = ''
|
||||
let tagIcon = group.blockName.charAt(0).toUpperCase()
|
||||
|
||||
if (
|
||||
(group.blockType === 'loop' || group.blockType === 'parallel') &&
|
||||
!tag.includes('.')
|
||||
) {
|
||||
// Contextual tags like 'index', 'currentItem', 'items'
|
||||
displayText = tag
|
||||
if (tag === 'index') {
|
||||
tagIcon = '#'
|
||||
tagDescription = 'Index'
|
||||
} else if (tag === 'currentItem') {
|
||||
tagIcon = 'i'
|
||||
tagDescription = 'Current item'
|
||||
} else if (tag === 'items') {
|
||||
tagIcon = 'I'
|
||||
tagDescription = 'All items'
|
||||
}
|
||||
} else {
|
||||
// Regular block output tags like 'blockname.field' or 'blockname.results'
|
||||
const tagParts = tag.split('.')
|
||||
const path = tagParts.slice(1).join('.')
|
||||
displayText = path || group.blockName
|
||||
if (path === 'results') {
|
||||
tagDescription = 'Results array'
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<button
|
||||
@@ -728,12 +750,12 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
handleTagSelect(tag)
|
||||
handleTagSelect(tag, group)
|
||||
}}
|
||||
onClick={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
handleTagSelect(tag)
|
||||
handleTagSelect(tag, group)
|
||||
}}
|
||||
>
|
||||
<div
|
||||
@@ -741,12 +763,15 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
||||
style={{ backgroundColor: blockColor }}
|
||||
>
|
||||
<span className='h-3 w-3 font-bold text-white text-xs'>
|
||||
{group.blockName.charAt(0).toUpperCase()}
|
||||
{tagIcon}
|
||||
</span>
|
||||
</div>
|
||||
<span className='max-w-[calc(100%-32px)] truncate'>
|
||||
{displayText}
|
||||
</span>
|
||||
<span className='flex-1 truncate'>{displayText}</span>
|
||||
{tagDescription && (
|
||||
<span className='ml-auto text-muted-foreground text-xs'>
|
||||
{tagDescription}
|
||||
</span>
|
||||
)}
|
||||
</button>
|
||||
)
|
||||
})}
|
||||
|
||||
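A standalone sketch of the tag-prefixing rule the updated handleTagSelect applies above (it mirrors the logic; it is not the component code itself):

function toInsertedTag(tag: string, blockType?: 'loop' | 'parallel'): string {
  // Bare contextual tags selected from a loop/parallel group get prefixed with the block type.
  if (blockType && !tag.includes('.') && ['index', 'currentItem', 'items'].includes(tag)) {
    return `${blockType}.${tag}`
  }
  // Already-qualified tags (e.g. 'agent1.example_property' or 'loop1.results') pass through.
  return tag
}

toInsertedTag('currentItem', 'loop') // 'loop.currentItem'
toInsertedTag('agent1.example_property') // 'agent1.example_property'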
@@ -150,9 +150,9 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
 const socketInstance = io(socketUrl, {
   transports: ['websocket', 'polling'], // Keep polling fallback for reliability
   withCredentials: true,
-  reconnectionAttempts: 5, // Socket.IO handles base reconnection
+  reconnectionAttempts: Number.POSITIVE_INFINITY, // Socket.IO handles base reconnection
   reconnectionDelay: 1000, // Start with 1 second delay
-  reconnectionDelayMax: 5000, // Max 5 second delay
+  reconnectionDelayMax: 30000, // Max 30 second delay
   timeout: 10000, // Back to original timeout
   auth: (cb) => {
     // Generate a fresh token for each connection attempt (including reconnections)

@@ -30,6 +30,7 @@ import type {
   NormalizedBlockOutput,
   StreamingExecution,
 } from './types'
+import { streamingResponseFormatProcessor } from './utils'

 const logger = createLogger('Executor')

@@ -242,7 +243,25 @@ export class Executor {
|
||||
const streamingExec = output as StreamingExecution
|
||||
const [streamForClient, streamForExecutor] = streamingExec.stream.tee()
|
||||
|
||||
const clientStreamingExec = { ...streamingExec, stream: streamForClient }
|
||||
// Apply response format processing to the client stream if needed
|
||||
const blockId = (streamingExec.execution as any).blockId
|
||||
|
||||
// Get response format from initial block states (passed from useWorkflowExecution)
|
||||
// The initialBlockStates contain the subblock values including responseFormat
|
||||
let responseFormat: any
|
||||
if (this.initialBlockStates?.[blockId]) {
|
||||
const blockState = this.initialBlockStates[blockId] as any
|
||||
responseFormat = blockState.responseFormat
|
||||
}
|
||||
|
||||
const processedClientStream = streamingResponseFormatProcessor.processStream(
|
||||
streamForClient,
|
||||
blockId,
|
||||
context.selectedOutputIds || [],
|
||||
responseFormat
|
||||
)
|
||||
|
||||
const clientStreamingExec = { ...streamingExec, stream: processedClientStream }
|
||||
|
||||
try {
|
||||
// Handle client stream with proper error handling
|
||||
@@ -267,7 +286,41 @@ export class Executor {
|
||||
const blockId = (streamingExec.execution as any).blockId
|
||||
const blockState = context.blockStates.get(blockId)
|
||||
if (blockState?.output) {
|
||||
blockState.output.content = fullContent
|
||||
// Check if we have response format - if so, preserve structured response
|
||||
let responseFormat: any
|
||||
if (this.initialBlockStates?.[blockId]) {
|
||||
const initialBlockState = this.initialBlockStates[blockId] as any
|
||||
responseFormat = initialBlockState.responseFormat
|
||||
}
|
||||
|
||||
if (responseFormat && fullContent) {
|
||||
// For structured responses, always try to parse the raw streaming content
|
||||
// The streamForExecutor contains the raw JSON response, not the processed display text
|
||||
try {
|
||||
const parsedContent = JSON.parse(fullContent)
|
||||
// Preserve metadata but spread parsed fields at root level (same as manual execution)
|
||||
const structuredOutput = {
|
||||
...parsedContent,
|
||||
tokens: blockState.output.tokens,
|
||||
toolCalls: blockState.output.toolCalls,
|
||||
providerTiming: blockState.output.providerTiming,
|
||||
cost: blockState.output.cost,
|
||||
}
|
||||
blockState.output = structuredOutput
|
||||
|
||||
// Also update the corresponding block log with the structured output
|
||||
const blockLog = context.blockLogs.find((log) => log.blockId === blockId)
|
||||
if (blockLog) {
|
||||
blockLog.output = structuredOutput
|
||||
}
|
||||
} catch (parseError) {
|
||||
// If parsing fails, fall back to setting content
|
||||
blockState.output.content = fullContent
|
||||
}
|
||||
} else {
|
||||
// No response format, use standard content setting
|
||||
blockState.output.content = fullContent
|
||||
}
|
||||
}
|
||||
} catch (readerError: any) {
|
||||
logger.error('Error reading stream for executor:', readerError)
|
||||
@@ -275,7 +328,40 @@ export class Executor {
|
||||
const blockId = (streamingExec.execution as any).blockId
|
||||
const blockState = context.blockStates.get(blockId)
|
||||
if (blockState?.output && fullContent) {
|
||||
blockState.output.content = fullContent
|
||||
// Check if we have response format for error handling too
|
||||
let responseFormat: any
|
||||
if (this.initialBlockStates?.[blockId]) {
|
||||
const initialBlockState = this.initialBlockStates[blockId] as any
|
||||
responseFormat = initialBlockState.responseFormat
|
||||
}
|
||||
|
||||
if (responseFormat) {
|
||||
// For structured responses, always try to parse the raw streaming content
|
||||
// The streamForExecutor contains the raw JSON response, not the processed display text
|
||||
try {
|
||||
const parsedContent = JSON.parse(fullContent)
|
||||
const structuredOutput = {
|
||||
...parsedContent,
|
||||
tokens: blockState.output.tokens,
|
||||
toolCalls: blockState.output.toolCalls,
|
||||
providerTiming: blockState.output.providerTiming,
|
||||
cost: blockState.output.cost,
|
||||
}
|
||||
blockState.output = structuredOutput
|
||||
|
||||
// Also update the corresponding block log with the structured output
|
||||
const blockLog = context.blockLogs.find((log) => log.blockId === blockId)
|
||||
if (blockLog) {
|
||||
blockLog.output = structuredOutput
|
||||
}
|
||||
} catch (parseError) {
|
||||
// If parsing fails, fall back to setting content
|
||||
blockState.output.content = fullContent
|
||||
}
|
||||
} else {
|
||||
// No response format, use standard content setting
|
||||
blockState.output.content = fullContent
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
try {
|
||||
@@ -1257,6 +1343,7 @@ export class Executor {
|
||||
context.blockLogs.push(blockLog)
|
||||
|
||||
// Skip console logging for infrastructure blocks like loops and parallels
|
||||
// For streaming blocks, we'll add the console entry after stream processing
|
||||
if (block.metadata?.id !== 'loop' && block.metadata?.id !== 'parallel') {
|
||||
addConsole({
|
||||
output: blockLog.output,
|
||||
|
||||
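To make the structured-output handling in the executor changes above concrete, here is roughly what gets stored when a response format is configured (values are illustrative):

// fullContent is the raw JSON accumulated from streamForExecutor.
const fullContent = '{"answer": "Paris", "confidence": 0.9}'
const parsedContent = JSON.parse(fullContent)

// Parsed fields are spread at the root and provider metadata is preserved,
// matching the shape of a non-streaming (manual) execution.
const structuredOutput = {
  ...parsedContent,
  tokens: { prompt: 12, completion: 8, total: 20 }, // illustrative
  toolCalls: undefined,
  providerTiming: { total: 850 }, // illustrative
  cost: { total: 0.0004 }, // illustrative
}
// Both blockState.output and the matching blockLog.output are replaced with structuredOutput;
// if JSON.parse throws, the code falls back to blockState.output.content = fullContent.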
@@ -269,3 +269,15 @@ export interface Tool<P = any, O = Record<string, any>> {
 export interface ToolRegistry {
   [key: string]: Tool
 }
+
+/**
+ * Interface for a stream processor that can process a stream based on a response format.
+ */
+export interface ResponseFormatStreamProcessor {
+  processStream(
+    originalStream: ReadableStream,
+    blockId: string,
+    selectedOutputIds: string[],
+    responseFormat?: any
+  ): ReadableStream
+}

apps/sim/executor/utils.test.ts (new file, 354 lines)
@@ -0,0 +1,354 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { StreamingResponseFormatProcessor, streamingResponseFormatProcessor } from './utils'
|
||||
|
||||
vi.mock('@/lib/logs/console-logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue({
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
warn: vi.fn(),
|
||||
error: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
|
||||
describe('StreamingResponseFormatProcessor', () => {
|
||||
let processor: StreamingResponseFormatProcessor
|
||||
|
||||
beforeEach(() => {
|
||||
processor = new StreamingResponseFormatProcessor()
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('processStream', () => {
|
||||
it.concurrent('should return original stream when no response format selection', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode('{"content": "test"}'))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const result = processor.processStream(
|
||||
mockStream,
|
||||
'block-1',
|
||||
['block-1.content'], // No underscore, not response format
|
||||
{ schema: { properties: { username: { type: 'string' } } } }
|
||||
)
|
||||
|
||||
expect(result).toBe(mockStream)
|
||||
})
|
||||
|
||||
it.concurrent('should return original stream when no response format provided', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode('{"content": "test"}'))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const result = processor.processStream(
|
||||
mockStream,
|
||||
'block-1',
|
||||
['block-1_username'], // Has underscore but no response format
|
||||
undefined
|
||||
)
|
||||
|
||||
expect(result).toBe(mockStream)
|
||||
})
|
||||
|
||||
it.concurrent('should process stream and extract single selected field', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode('{"username": "alice", "age": 25}'))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(mockStream, 'block-1', ['block-1_username'], {
|
||||
schema: { properties: { username: { type: 'string' }, age: { type: 'number' } } },
|
||||
})
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('alice')
|
||||
})
|
||||
|
||||
it.concurrent('should process stream and extract multiple selected fields', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(
|
||||
new TextEncoder().encode('{"username": "bob", "age": 30, "email": "bob@test.com"}')
|
||||
)
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(
|
||||
mockStream,
|
||||
'block-1',
|
||||
['block-1_username', 'block-1_age'],
|
||||
{ schema: { properties: { username: { type: 'string' }, age: { type: 'number' } } } }
|
||||
)
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('bob\n30')
|
||||
})
|
||||
|
||||
it.concurrent('should handle non-string field values by JSON stringifying them', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(
|
||||
new TextEncoder().encode(
|
||||
'{"config": {"theme": "dark", "notifications": true}, "count": 42}'
|
||||
)
|
||||
)
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(
|
||||
mockStream,
|
||||
'block-1',
|
||||
['block-1_config', 'block-1_count'],
|
||||
{ schema: { properties: { config: { type: 'object' }, count: { type: 'number' } } } }
|
||||
)
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('{"theme":"dark","notifications":true}\n42')
|
||||
})
|
||||
|
||||
it.concurrent('should handle streaming JSON that comes in chunks', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
// Simulate streaming JSON in chunks
|
||||
controller.enqueue(new TextEncoder().encode('{"username": "charlie"'))
|
||||
controller.enqueue(new TextEncoder().encode(', "age": 35}'))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(mockStream, 'block-1', ['block-1_username'], {
|
||||
schema: { properties: { username: { type: 'string' }, age: { type: 'number' } } },
|
||||
})
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('charlie')
|
||||
})
|
||||
|
||||
it.concurrent('should handle missing fields gracefully', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode('{"username": "diana"}'))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(
|
||||
mockStream,
|
||||
'block-1',
|
||||
['block-1_username', 'block-1_missing_field'],
|
||||
{ schema: { properties: { username: { type: 'string' } } } }
|
||||
)
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('diana')
|
||||
})
|
||||
|
||||
it.concurrent('should handle invalid JSON gracefully', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode('invalid json'))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(mockStream, 'block-1', ['block-1_username'], {
|
||||
schema: { properties: { username: { type: 'string' } } },
|
||||
})
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('')
|
||||
})
|
||||
|
||||
it.concurrent('should filter selected fields for correct block ID', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode('{"username": "eve", "age": 28}'))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(
|
||||
mockStream,
|
||||
'block-1',
|
||||
['block-1_username', 'block-2_age'], // Different block ID should be filtered out
|
||||
{ schema: { properties: { username: { type: 'string' }, age: { type: 'number' } } } }
|
||||
)
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('eve')
|
||||
})
|
||||
|
||||
it.concurrent('should handle empty result when no matching fields', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode('{"other_field": "value"}'))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(mockStream, 'block-1', ['block-1_username'], {
|
||||
schema: { properties: { username: { type: 'string' } } },
|
||||
})
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
describe('singleton instance', () => {
|
||||
it.concurrent('should export a singleton instance', () => {
|
||||
expect(streamingResponseFormatProcessor).toBeInstanceOf(StreamingResponseFormatProcessor)
|
||||
})
|
||||
|
||||
it.concurrent('should return the same instance on multiple imports', () => {
|
||||
const instance1 = streamingResponseFormatProcessor
|
||||
const instance2 = streamingResponseFormatProcessor
|
||||
expect(instance1).toBe(instance2)
|
||||
})
|
||||
})
|
||||
|
||||
describe('edge cases', () => {
|
||||
it.concurrent('should handle empty stream', async () => {
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(mockStream, 'block-1', ['block-1_username'], {
|
||||
schema: { properties: { username: { type: 'string' } } },
|
||||
})
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('')
|
||||
})
|
||||
|
||||
it.concurrent('should handle very large JSON objects', async () => {
|
||||
const largeObject = {
|
||||
username: 'frank',
|
||||
data: 'x'.repeat(10000), // Large string
|
||||
nested: {
|
||||
deep: {
|
||||
value: 'test',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const mockStream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode(JSON.stringify(largeObject)))
|
||||
controller.close()
|
||||
},
|
||||
})
|
||||
|
||||
const processedStream = processor.processStream(mockStream, 'block-1', ['block-1_username'], {
|
||||
schema: { properties: { username: { type: 'string' } } },
|
||||
})
|
||||
|
||||
const reader = processedStream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let result = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
result += decoder.decode(value)
|
||||
}
|
||||
|
||||
expect(result).toBe('frank')
|
||||
})
|
||||
})
|
||||
})
|

apps/sim/executor/utils.ts (new file, 201 lines)
@@ -0,0 +1,201 @@
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import type { ResponseFormatStreamProcessor } from './types'
|
||||
|
||||
const logger = createLogger('ExecutorUtils')
|
||||
|
||||
/**
|
||||
* Processes a streaming response to extract only the selected response format fields
|
||||
* instead of streaming the full JSON wrapper.
|
||||
*/
|
||||
export class StreamingResponseFormatProcessor implements ResponseFormatStreamProcessor {
|
||||
processStream(
|
||||
originalStream: ReadableStream,
|
||||
blockId: string,
|
||||
selectedOutputIds: string[],
|
||||
responseFormat?: any
|
||||
): ReadableStream {
|
||||
// Check if this block has response format selected outputs
|
||||
const hasResponseFormatSelection = selectedOutputIds.some((outputId) => {
|
||||
const blockIdForOutput = outputId.includes('_')
|
||||
? outputId.split('_')[0]
|
||||
: outputId.split('.')[0]
|
||||
return blockIdForOutput === blockId && outputId.includes('_')
|
||||
})
|
||||
|
||||
// If no response format selection, return original stream unchanged
|
||||
if (!hasResponseFormatSelection || !responseFormat) {
|
||||
return originalStream
|
||||
}
|
||||
|
||||
// Get the selected field names for this block
|
||||
const selectedFields = selectedOutputIds
|
||||
.filter((outputId) => {
|
||||
const blockIdForOutput = outputId.includes('_')
|
||||
? outputId.split('_')[0]
|
||||
: outputId.split('.')[0]
|
||||
return blockIdForOutput === blockId && outputId.includes('_')
|
||||
})
|
||||
.map((outputId) => outputId.substring(blockId.length + 1))
|
||||
|
||||
    logger.info('Processing streaming response format', {
      blockId,
      selectedFields,
      hasResponseFormat: !!responseFormat,
      selectedFieldsCount: selectedFields.length,
    })

    return this.createProcessedStream(originalStream, selectedFields, blockId)
  }

  private createProcessedStream(
    originalStream: ReadableStream,
    selectedFields: string[],
    blockId: string
  ): ReadableStream {
    let buffer = ''
    let hasProcessedComplete = false // Track if we've already processed the complete JSON

    const self = this

    return new ReadableStream({
      async start(controller) {
        const reader = originalStream.getReader()
        const decoder = new TextDecoder()

        try {
          while (true) {
            const { done, value } = await reader.read()

            if (done) {
              // Handle any remaining buffer at the end only if we haven't processed complete JSON yet
              if (buffer.trim() && !hasProcessedComplete) {
                self.processCompleteJson(buffer, selectedFields, controller)
              }
              controller.close()
              break
            }

            const chunk = decoder.decode(value, { stream: true })
            buffer += chunk

            // Try to process the current buffer only if we haven't processed complete JSON yet
            if (!hasProcessedComplete) {
              const processedChunk = self.processStreamingChunk(buffer, selectedFields)

              if (processedChunk) {
                controller.enqueue(new TextEncoder().encode(processedChunk))
                hasProcessedComplete = true // Mark as processed to prevent duplicate processing
              }
            }
          }
        } catch (error) {
          logger.error('Error processing streaming response format:', { error, blockId })
          controller.error(error)
        } finally {
          reader.releaseLock()
        }
      },
    })
  }

  private processStreamingChunk(buffer: string, selectedFields: string[]): string | null {
    // For streaming response format, we need to parse the JSON as it comes in
    // and extract only the field values we care about

    // Try to parse as complete JSON first
    try {
      const parsed = JSON.parse(buffer.trim())
      if (typeof parsed === 'object' && parsed !== null) {
        // We have a complete JSON object, extract the selected fields
        // Process all selected fields and format them properly
        const results: string[] = []
        for (const field of selectedFields) {
          if (field in parsed) {
            const value = parsed[field]
            const formattedValue = typeof value === 'string' ? value : JSON.stringify(value)
            results.push(formattedValue)
          }
        }

        if (results.length > 0) {
          // Join multiple fields with newlines for readability
          const result = results.join('\n')
          return result
        }

        return null
      }
    } catch (e) {
      // Not complete JSON yet, continue buffering
    }

    // For real-time extraction during streaming, we'd need more sophisticated parsing
    // For now, let's handle the case where we receive chunks that might be partial JSON

    // Simple heuristic: if buffer contains what looks like a complete JSON object
    const openBraces = (buffer.match(/\{/g) || []).length
    const closeBraces = (buffer.match(/\}/g) || []).length

    if (openBraces > 0 && openBraces === closeBraces) {
      // Likely a complete JSON object
      try {
        const parsed = JSON.parse(buffer.trim())
        if (typeof parsed === 'object' && parsed !== null) {
          // Process all selected fields and format them properly
          const results: string[] = []
          for (const field of selectedFields) {
            if (field in parsed) {
              const value = parsed[field]
              const formattedValue = typeof value === 'string' ? value : JSON.stringify(value)
              results.push(formattedValue)
            }
          }

          if (results.length > 0) {
            // Join multiple fields with newlines for readability
            const result = results.join('\n')
            return result
          }

          return null
        }
      } catch (e) {
        // Still not valid JSON, continue
      }
    }

    return null
  }

  private processCompleteJson(
    buffer: string,
    selectedFields: string[],
    controller: ReadableStreamDefaultController
  ): void {
    try {
      const parsed = JSON.parse(buffer.trim())
      if (typeof parsed === 'object' && parsed !== null) {
        // Process all selected fields and format them properly
        const results: string[] = []
        for (const field of selectedFields) {
          if (field in parsed) {
            const value = parsed[field]
            const formattedValue = typeof value === 'string' ? value : JSON.stringify(value)
            results.push(formattedValue)
          }
        }

        if (results.length > 0) {
          // Join multiple fields with newlines for readability
          const result = results.join('\n')
          controller.enqueue(new TextEncoder().encode(result))
        }
      }
    } catch (error) {
      logger.warn('Failed to parse complete JSON in streaming processor:', { error })
    }
  }
}

// Create singleton instance
export const streamingResponseFormatProcessor = new StreamingResponseFormatProcessor()
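As a sanity check on the pattern above, here is a small self-contained sketch of the same buffer-until-braces-balance extraction written against the Web Streams API; the source stream, payload, and field name are illustrative and not part of the codebase.

// Sketch: accumulate chunks, wait until braces balance, then pull out the selected fields.
async function demoExtract(): Promise<string | null> {
  const encoder = new TextEncoder()
  const source = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(encoder.encode('{"answer":"42",'))
      controller.enqueue(encoder.encode('"confidence":0.9}'))
      controller.close()
    },
  })

  const selectedFields = ['answer'] // hypothetical field selection
  const decoder = new TextDecoder()
  const reader = source.getReader()
  let buffer = ''

  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })

    const open = (buffer.match(/\{/g) || []).length
    const close = (buffer.match(/\}/g) || []).length
    if (open > 0 && open === close) {
      const parsed = JSON.parse(buffer.trim())
      return selectedFields
        .filter((field) => field in parsed)
        .map((field) => (typeof parsed[field] === 'string' ? parsed[field] : JSON.stringify(parsed[field])))
        .join('\n')
    }
  }
  return null
}

demoExtract().then((result) => console.log(result)) // logs "42"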
185  apps/sim/lib/response-format.ts  Normal file
@@ -0,0 +1,185 @@
import { createLogger } from '@/lib/logs/console-logger'

const logger = createLogger('ResponseFormatUtils')

// Type definitions for component data structures
export interface Field {
  name: string
  type: string
  description?: string
}

/**
 * Helper function to extract fields from JSON Schema
 * Handles both legacy format with fields array and new JSON Schema format
 */
export function extractFieldsFromSchema(schema: any): Field[] {
  if (!schema || typeof schema !== 'object') {
    return []
  }

  // Handle legacy format with fields array
  if (Array.isArray(schema.fields)) {
    return schema.fields
  }

  // Handle new JSON Schema format
  const schemaObj = schema.schema || schema
  if (!schemaObj || !schemaObj.properties || typeof schemaObj.properties !== 'object') {
    return []
  }

  // Extract fields from schema properties
  return Object.entries(schemaObj.properties).map(([name, prop]: [string, any]) => {
    // Handle array format like ['string', 'array']
    if (Array.isArray(prop)) {
      return {
        name,
        type: prop.includes('array') ? 'array' : prop[0] || 'string',
        description: undefined,
      }
    }

    // Handle object format like { type: 'string', description: '...' }
    return {
      name,
      type: prop.type || 'string',
      description: prop.description,
    }
  })
}

/**
 * Helper function to safely parse response format
 * Handles both string and object formats
 */
export function parseResponseFormatSafely(responseFormatValue: any, blockId: string): any {
  if (!responseFormatValue) {
    return null
  }

  try {
    if (typeof responseFormatValue === 'string') {
      return JSON.parse(responseFormatValue)
    }
    return responseFormatValue
  } catch (error) {
    logger.warn(`Failed to parse response format for block ${blockId}:`, error)
    return null
  }
}

/**
 * Extract field values from a parsed JSON object based on selected output paths
 * Used for both workspace and chat client field extraction
 */
export function extractFieldValues(
  parsedContent: any,
  selectedOutputIds: string[],
  blockId: string
): Record<string, any> {
  const extractedValues: Record<string, any> = {}

  for (const outputId of selectedOutputIds) {
    const blockIdForOutput = extractBlockIdFromOutputId(outputId)

    if (blockIdForOutput !== blockId) {
      continue
    }

    const path = extractPathFromOutputId(outputId, blockIdForOutput)

    if (path) {
      const pathParts = path.split('.')
      let current = parsedContent

      for (const part of pathParts) {
        if (current && typeof current === 'object' && part in current) {
          current = current[part]
        } else {
          current = undefined
          break
        }
      }

      if (current !== undefined) {
        extractedValues[path] = current
      }
    }
  }

  return extractedValues
}

/**
 * Format extracted field values for display
 * Returns formatted string representation of field values
 */
export function formatFieldValues(extractedValues: Record<string, any>): string {
  const formattedValues: string[] = []

  for (const [fieldName, value] of Object.entries(extractedValues)) {
    const formattedValue = typeof value === 'string' ? value : JSON.stringify(value)
    formattedValues.push(formattedValue)
  }

  return formattedValues.join('\n')
}

/**
 * Extract block ID from output ID
 * Handles both formats: "blockId" and "blockId_path" or "blockId.path"
 */
export function extractBlockIdFromOutputId(outputId: string): string {
  return outputId.includes('_') ? outputId.split('_')[0] : outputId.split('.')[0]
}

/**
 * Extract path from output ID after the block ID
 */
export function extractPathFromOutputId(outputId: string, blockId: string): string {
  return outputId.substring(blockId.length + 1)
}

/**
 * Parse JSON content from output safely
 * Handles both string and object formats with proper error handling
 */
export function parseOutputContentSafely(output: any): any {
  if (!output?.content) {
    return output
  }

  if (typeof output.content === 'string') {
    try {
      return JSON.parse(output.content)
    } catch (e) {
      // Fallback to original structure if parsing fails
      return output
    }
  }

  return output
}

/**
 * Check if a set of output IDs contains response format selections for a specific block
 */
export function hasResponseFormatSelection(selectedOutputIds: string[], blockId: string): boolean {
  return selectedOutputIds.some((outputId) => {
    const blockIdForOutput = extractBlockIdFromOutputId(outputId)
    return blockIdForOutput === blockId && outputId.includes('_')
  })
}

/**
 * Get selected field names for a specific block from output IDs
 */
export function getSelectedFieldNames(selectedOutputIds: string[], blockId: string): string[] {
  return selectedOutputIds
    .filter((outputId) => {
      const blockIdForOutput = extractBlockIdFromOutputId(outputId)
      return blockIdForOutput === blockId && outputId.includes('_')
    })
    .map((outputId) => extractPathFromOutputId(outputId, blockId))
}
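To make the intended flow of these helpers concrete, a short usage sketch; the block ID, selected output IDs, and payload are hypothetical, and the '@/lib/response-format' import path is assumed from the file location above.

import {
  extractFieldValues,
  formatFieldValues,
  getSelectedFieldNames,
  parseOutputContentSafely,
} from '@/lib/response-format'

// Hypothetical selections: one output in the "blockId_path" form for this block, one for another block.
const blockId = 'agent-1'
const selectedOutputIds = ['agent-1_answer', 'other-block_summary']
const output = { content: '{"answer": "42", "confidence": 0.9}' }

const parsed = parseOutputContentSafely(output) // -> { answer: '42', confidence: 0.9 }
const fieldNames = getSelectedFieldNames(selectedOutputIds, blockId) // -> ['answer']
const values = extractFieldValues(parsed, selectedOutputIds, blockId) // -> { answer: '42' }

console.log(fieldNames, formatFieldValues(values)) // ['answer'] '42'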
@@ -78,7 +78,7 @@ export function processStreamingBlockLog(log: BlockLog, streamedContent: string)
log.output.cost = result.cost
log.output.model = result.model

logTokenizationDetails(`✅ Streaming tokenization completed for ${log.blockType}`, {
logTokenizationDetails(`Streaming tokenization completed for ${log.blockType}`, {
blockId: log.blockId,
blockType: log.blockType,
model: result.model,
@@ -92,7 +92,7 @@ export function processStreamingBlockLog(log: BlockLog, streamedContent: string)

return true
} catch (error) {
logger.error(`❌ Streaming tokenization failed for block ${log.blockId}`, {
logger.error(`Streaming tokenization failed for block ${log.blockId}`, {
blockType: log.blockType,
error: error instanceof Error ? error.message : String(error),
contentLength: streamedContent?.length || 0,
@@ -3,7 +3,6 @@
"version": "0.1.0",
"private": true,
"license": "Apache-2.0",
"type": "module",
"engines": {
"bun": ">=1.2.13",
"node": ">=20.0.0"
@@ -27,10 +26,8 @@
},
"dependencies": {
"@anthropic-ai/sdk": "^0.39.0",
"@aws-sdk/client-apigatewayv2": "3.840.0",
"@aws-sdk/client-lambda": "3.840.0",
"@aws-sdk/client-s3": "3.842.0",
"@aws-sdk/s3-request-presigner": "3.842.0",
"@aws-sdk/client-s3": "^3.779.0",
"@aws-sdk/s3-request-presigner": "^3.779.0",
"@azure/storage-blob": "12.27.0",
"@better-auth/stripe": "^1.2.9",
"@browserbasehq/stagehand": "^2.0.0",
@@ -71,7 +68,6 @@
"@vercel/og": "^0.6.5",
"@vercel/speed-insights": "^1.2.0",
"ai": "^4.3.2",
"archiver": "7.0.1",
"better-auth": "^1.2.9",
"browser-image-compression": "^2.0.2",
"class-variance-authority": "^0.7.1",
@@ -89,7 +85,6 @@
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
"jose": "6.0.11",
"jszip": "^3.10.1",
"jwt-decode": "^4.0.0",
"lenis": "^1.2.3",
"lucide-react": "^0.479.0",
@@ -28,7 +28,5 @@ export function setupConnectionHandlers(
roomManager.cleanupUserFromRoom(socket.id, workflowId)
roomManager.broadcastPresenceUpdate(workflowId)
}

roomManager.clearPendingOperations(socket.id)
})
}

@@ -75,11 +75,6 @@ export class RoomManager {
this.userSessions.delete(socketId)
}

// This would be used if we implement operation queuing
clearPendingOperations(socketId: string) {
logger.debug(`Cleared pending operations for socket ${socketId}`)
}

handleWorkflowDeletion(workflowId: string) {
logger.info(`Handling workflow deletion notification for ${workflowId}`)
@@ -193,7 +193,10 @@ export const useConsoleStore = create<ConsoleStore>()(
updatedEntry.output = newOutput
}

if (update.output !== undefined) {
if (update.replaceOutput !== undefined) {
// Complete replacement of output
updatedEntry.output = update.replaceOutput
} else if (update.output !== undefined) {
const existingOutput = entry.output || {}
updatedEntry.output = {
...existingOutput,

@@ -20,6 +20,7 @@ export interface ConsoleEntry {
export interface ConsoleUpdate {
content?: string
output?: Partial<NormalizedBlockOutput>
replaceOutput?: NormalizedBlockOutput // New field for complete replacement
error?: string
warning?: string
success?: boolean
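For context on the hunk above, the two update shapes behave differently: a plain output is spread over the entry's existing output, while replaceOutput swaps it out wholesale. A minimal illustration with hypothetical values (the NormalizedBlockOutput fields shown are illustrative, not the real type definition):

// Merged into the existing entry.output by the store logic above
const mergeUpdate = {
  output: { content: 'partial streamed text' },
}

// Replaces entry.output entirely, e.g. after response-format extraction rewrites it
const replaceUpdate = {
  replaceOutput: { content: '{"answer": "42"}' },
}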
@@ -1,159 +0,0 @@
import type { ToolConfig } from '../types'

interface AWSLambdaDeployInput {
  accessKeyId: string
  secretAccessKey: string
  region: string
  role: string
  functionName: string
  handler?: string
  runtime: string
  code: Record<string, string>
  timeout?: number
  memorySize?: number
  environmentVariables: Record<string, string>
  tags: Record<string, string>
}

interface AWSLambdaDeployOutput {
  functionArn: string
  functionName: string
  runtime: string
  region: string
  status: string
  lastModified: string
  codeSize: number
  description: string
  timeout: number
  memorySize: number
  environment: Record<string, string>
  tags: Record<string, string>
}

export const awsLambdaDeployTool: ToolConfig<AWSLambdaDeployInput, AWSLambdaDeployOutput> = {
  id: 'aws_lambda_deploy',
  name: 'AWS Lambda Deploy',
  description:
    'Make sure to satisfy the user request. Deploy or update an AWS Lambda function with the specified configuration. This tool can create a new Lambda function or update an existing one with any changes you specify. It accepts function code as a JSON object where keys are file paths and values are file contents. For Node.js functions, typically include an index.js file with the handler function. The tool will package and deploy the code to AWS Lambda with the specified runtime, memory, timeout, and environment variables. When updating an existing function, this tool can make whatever changes you want to the function configuration and code.',
  version: '1.0.0',

  params: {
    // Common AWS parameters (always at the top)
    accessKeyId: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description: 'AWS Access Key ID for authentication. This is required to access AWS services.',
    },
    secretAccessKey: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'AWS Secret Access Key for authentication. This is required to access AWS services.',
    },
    region: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'AWS region where the Lambda function will be deployed. Examples: us-east-1, eu-west-1, ap-southeast-2',
    },
    role: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'IAM Role ARN that the Lambda function will assume during execution. This role must have appropriate permissions for the function to operate correctly.',
    },
    // Operation-specific parameters
    functionName: {
      type: 'string',
      required: true,
      optionalToolInput: true,
      description:
        'Name of the Lambda function to create or update. If the function already exists, it will be updated with any changes you specify to the configuration and code.',
    },
    handler: {
      type: 'string',
      required: true,
      optionalToolInput: true,
      description:
        'Function handler that Lambda calls to start execution. Format varies by runtime: index.handler (Node.js), lambda_function.lambda_handler (Python), etc. If not provided, a default will be used based on the runtime.',
    },
    runtime: {
      type: 'string',
      required: true,
      optionalToolInput: true,
      description:
        'Lambda runtime environment. Common values: nodejs18.x, python3.11, java11, go1.x, dotnet6, ruby2.7. This determines the execution environment for your function.',
    },
    code: {
      type: 'json',
      required: true,
      description:
        'Function code files as JSON object with file paths as keys and code content as values. For Node.js, typically include {"index.js": "exports.handler = async (event) => { return { statusCode: 200, body: JSON.stringify({ message: \"Hello World\" }) }; };"}. For Python, include {"lambda_function.py": "def lambda_handler(event, context): return { \"statusCode\": 200, \"body\": \"Hello World\" }"}. The code object must contain at least one file with non-empty string content.',
    },

    timeout: {
      type: 'number',
      required: true,
      optionalToolInput: true,
      description:
        'Function timeout in seconds. Must be between 1 and 900 seconds (15 minutes). Default is 3 seconds.',
      default: 3,
    },
    memorySize: {
      type: 'number',
      required: true,
      optionalToolInput: true,
      description:
        'Function memory size in MB. Must be between 128 and 10240 MB. More memory also means more CPU power. Default is 128 MB.',
      default: 128,
    },
    environmentVariables: {
      type: 'object',
      required: false,
      description:
        'Environment variables for the function. These will be available to your function during execution. Example: {"API_KEY": "your-api-key", "ENVIRONMENT": "production"}.',
      default: {},
    },
    tags: {
      type: 'object',
      required: false,
      description:
        'Tags for the function. Useful for organization and cost tracking. Example: {"Environment": "production", "Project": "my-app"}.',
      default: {},
    },
    endpointName: {
      type: 'string',
      required: true,
      optionalToolInput: true,
      description:
        'Name of the API Gateway endpoint to create or update. This will be used to create the API Gateway and will appear in the endpoint URL.',
    },
  },

  request: {
    url: '/api/tools/aws-lambda/deploy',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params: AWSLambdaDeployInput) => ({
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      region: params.region,
      role: params.role,
      functionName: params.functionName,
      handler: params.handler,
      runtime: params.runtime,
      code: typeof params.code === 'string' ? params.code : JSON.stringify(params.code),

      timeout: params.timeout || 30,
      memorySize: params.memorySize || 128,
      environmentVariables: params.environmentVariables || {},
      tags: params.tags || {},
    }),
  },
}
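For reference, the code parameter shape the description above asks for, as a minimal Node.js example (the file name and handler body are the illustrative ones from the description):

// One file per key; values are the raw file contents the tool packages and uploads.
const code: Record<string, string> = {
  'index.js': `exports.handler = async (event) => {
  return { statusCode: 200, body: JSON.stringify({ message: 'Hello World' }) };
};`,
}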
@@ -1,95 +0,0 @@
import type { ToolConfig } from '../types'

interface AWSLambdaDeployEndpointParams {
  accessKeyId: string
  secretAccessKey: string
  region: string
  role: string
  functionName: string
  endpointName: string
}

interface AWSLambdaDeployEndpointResponse {
  functionArn: string
  functionName: string
  endpointName: string
  endpointUrl: string
  region: string
  status: string
  lastModified: string
  apiGatewayId: string
  stageName: string
}

export const awsLambdaDeployEndpointTool: ToolConfig<
  AWSLambdaDeployEndpointParams,
  AWSLambdaDeployEndpointResponse
> = {
  id: 'aws_lambda_deploy_endpoint',
  name: 'AWS Lambda Deploy Endpoint',
  description:
    'Deploy an AWS Lambda function as an HTTP endpoint using API Gateway. This tool creates or updates an API Gateway REST API and connects it to the specified Lambda function, making it accessible via HTTP requests. The endpoint will be publicly accessible and can handle GET, POST, PUT, DELETE, and other HTTP methods. This is useful for creating web APIs, webhooks, or any HTTP-based service using Lambda functions.',
  version: '1.0.0',

  params: {
    // Common AWS parameters (always at the top)
    accessKeyId: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description: 'AWS Access Key ID for authentication. This is required to access AWS services.',
    },
    secretAccessKey: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'AWS Secret Access Key for authentication. This is required to access AWS services.',
    },
    region: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'AWS region where the Lambda function and API Gateway will be deployed. Examples: us-east-1, eu-west-1, ap-southeast-2',
    },
    role: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'IAM Role ARN that the Lambda function will assume during execution. This role must have appropriate permissions for the function to operate correctly and be invoked by API Gateway.',
    },
    // Operation-specific parameters
    functionName: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'Name of the existing Lambda function to deploy as an endpoint. This function must already exist in the specified region and be properly configured to handle HTTP requests.',
    },
    endpointName: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'Name for the API Gateway endpoint. This will be used to create the API Gateway REST API and will appear in the endpoint URL. Should be descriptive and unique within your AWS account.',
    },
  },

  request: {
    url: '/api/tools/aws-lambda/deploy-endpoint',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params: AWSLambdaDeployEndpointParams) => ({
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      region: params.region,
      role: params.role,
      functionName: params.functionName,
      endpointName: params.endpointName,
    }),
  },
}
@@ -1,89 +0,0 @@
import type { ToolConfig } from '../types'

interface AWSLambdaFetchParams {
  accessKeyId: string
  secretAccessKey: string
  region: string
  functionName: string
  role: string
}

interface AWSLambdaFetchResponse {
  functionArn: string
  functionName: string
  runtime: string
  region: string
  status: string
  lastModified: string
  codeSize: number
  description: string
  timeout: number
  memorySize: number
  environment: Record<string, string>
  tags: Record<string, string>
  codeFiles: Record<string, string>
  handler: string
  role: string
}

export const awsLambdaFetchTool: ToolConfig<AWSLambdaFetchParams, AWSLambdaFetchResponse> = {
  id: 'aws_lambda_fetch',
  name: 'AWS Lambda Fetch',
  description:
    'Fetch AWS Lambda function details, configuration, and code files. Use this to retrieve information about an existing Lambda function including its runtime, handler, timeout, memory settings, environment variables, tags, and actual code files. This is used to understand the current state of a function before making changes. The fetch operation is read-only and does not modify the function.',
  version: '1.0.0',

  params: {
    // Common AWS parameters (always at the top)
    accessKeyId: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description: 'AWS Access Key ID for authentication. This is required to access AWS services.',
    },
    secretAccessKey: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'AWS Secret Access Key for authentication. This is required to access AWS services.',
    },
    region: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'AWS region where the Lambda function is located. Examples: us-east-1, eu-west-1, ap-southeast-2',
    },
    role: {
      type: 'string',
      required: true,
      requiredForToolCall: true,
      description:
        'IAM Role ARN that the Lambda function will assume during execution. This role must have appropriate permissions for the function to operate correctly.',
    },
    // Operation-specific parameters
    functionName: {
      type: 'string',
      required: true,
      optionalToolInput: true,
      description:
        'Name of the existing Lambda function to fetch and understand. This must be the exact name of a function that already exists in the specified region. Use this to retrieve the current state before making changes.',
    },
  },

  request: {
    url: '/api/tools/aws-lambda/fetch',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params: AWSLambdaFetchParams) => ({
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      region: params.region,
      functionName: params.functionName,
      role: params.role,
    }),
  },
}
@@ -1,32 +0,0 @@
import type { ToolConfig } from '../types'

type AWSLambdaGetPromptsParams = {}

interface AWSLambdaGetPromptsResponse {
  systemPrompt: string
  schema: Record<string, any>
}

export const awsLambdaGetPromptsTool: ToolConfig<
  AWSLambdaGetPromptsParams,
  AWSLambdaGetPromptsResponse
> = {
  id: 'aws_lambda_get_prompts',
  name: 'AWS Lambda Get Prompts',
  description:
    'Get system prompt and schema for AWS Lambda operations. This tool provides AI assistance prompts and schemas to help with Lambda function development, including best practices, common patterns, and code examples.',
  version: '1.0.0',

  params: {
    // No parameters needed for this operation
  },

  request: {
    url: '/api/tools/aws-lambda/get-prompts',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: () => ({}), // No body needed
  },
}
@@ -1,4 +0,0 @@
export { awsLambdaDeployTool } from './deploy'
export { awsLambdaDeployEndpointTool } from './deploy_endpoint'
export { awsLambdaFetchTool } from './fetch'
export { awsLambdaGetPromptsTool } from './get_prompts'
@@ -5,12 +5,6 @@ import {
airtableUpdateRecordTool,
} from './airtable'
import { autoblocksPromptManagerTool } from './autoblocks'
import {
  awsLambdaDeployEndpointTool,
  awsLambdaDeployTool,
  awsLambdaFetchTool,
  awsLambdaGetPromptsTool,
} from './aws_lambda'
import { browserUseRunTaskTool } from './browser_use'
import { clayPopulateTool } from './clay'
import { confluenceRetrieveTool, confluenceUpdateTool } from './confluence'
@@ -229,8 +223,4 @@ export const tools: Record<string, ToolConfig> = {
google_calendar_quick_add: googleCalendarQuickAddTool,
google_calendar_invite: googleCalendarInviteTool,
workflow_executor: workflowExecutorTool,
aws_lambda_deploy: awsLambdaDeployTool,
aws_lambda_deploy_endpoint: awsLambdaDeployEndpointTool,
aws_lambda_fetch: awsLambdaFetchTool,
aws_lambda_get_prompts: awsLambdaGetPromptsTool,
}
@@ -1,10 +1,10 @@
import path, { resolve } from 'path'
/// <reference types="vitest" />
import nextEnv from '@next/env'
import react from '@vitejs/plugin-react'
import { configDefaults, defineConfig } from 'vitest/config'

const { loadEnvConfig } = nextEnv
const nextEnv = require('@next/env')
const { loadEnvConfig } = nextEnv.default || nextEnv

const projectDir = process.cwd()
loadEnvConfig(projectDir)
166  bun.lock
@@ -57,10 +57,8 @@
"version": "0.1.0",
"dependencies": {
"@anthropic-ai/sdk": "^0.39.0",
"@aws-sdk/client-apigatewayv2": "3.840.0",
"@aws-sdk/client-lambda": "3.840.0",
"@aws-sdk/client-s3": "3.842.0",
"@aws-sdk/s3-request-presigner": "3.842.0",
"@aws-sdk/client-s3": "^3.779.0",
"@aws-sdk/s3-request-presigner": "^3.779.0",
"@azure/storage-blob": "12.27.0",
"@better-auth/stripe": "^1.2.9",
"@browserbasehq/stagehand": "^2.0.0",
@@ -101,7 +99,6 @@
"@vercel/og": "^0.6.5",
"@vercel/speed-insights": "^1.2.0",
"ai": "^4.3.2",
"archiver": "7.0.1",
"better-auth": "^1.2.9",
"browser-image-compression": "^2.0.2",
"class-variance-authority": "^0.7.1",
@@ -119,7 +116,6 @@
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
"jose": "6.0.11",
"jszip": "^3.10.1",
"jwt-decode": "^4.0.0",
"lenis": "^1.2.3",
"lucide-react": "^0.479.0",
@@ -278,73 +274,69 @@
"@aws-crypto/util": ["@aws-crypto/util@5.2.0", "", { "dependencies": { "@aws-sdk/types": "^3.222.0", "@smithy/util-utf8": "^2.0.0", "tslib": "^2.6.2" } }, "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ=="],
"@aws-sdk/client-apigatewayv2": ["@aws-sdk/client-apigatewayv2@3.840.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.840.0", "@aws-sdk/credential-provider-node": "3.840.0", "@aws-sdk/middleware-host-header": "3.840.0", "@aws-sdk/middleware-logger": "3.840.0", "@aws-sdk/middleware-recursion-detection": "3.840.0", "@aws-sdk/middleware-user-agent": "3.840.0", "@aws-sdk/region-config-resolver": "3.840.0", "@aws-sdk/types": "3.840.0", "@aws-sdk/util-endpoints": "3.840.0", "@aws-sdk/util-user-agent-browser": "3.840.0", "@aws-sdk/util-user-agent-node": "3.840.0", "@smithy/config-resolver": "^4.1.4", "@smithy/core": "^3.6.0", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/hash-node": "^4.0.4", "@smithy/invalid-dependency": "^4.0.4", "@smithy/middleware-content-length": "^4.0.4", "@smithy/middleware-endpoint": "^4.1.13", "@smithy/middleware-retry": "^4.1.14", "@smithy/middleware-serde": "^4.0.8", "@smithy/middleware-stack": "^4.0.4", "@smithy/node-config-provider": "^4.1.3", "@smithy/node-http-handler": "^4.0.6", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.21", "@smithy/util-defaults-mode-node": "^4.0.21", "@smithy/util-endpoints": "^3.0.6", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.6", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-r9lIhflnQrhLRX1Z1U5lb2a/fhyG/qs8KZQ7kBVYsXEJAV36pIQTjRlyigDHF0gbV/je9NJpfSA2MdgeRgOyuA=="],
"@aws-sdk/client-s3": ["@aws-sdk/client-s3@3.832.0", "", { "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.826.0", "@aws-sdk/credential-provider-node": "3.830.0", "@aws-sdk/middleware-bucket-endpoint": "3.830.0", "@aws-sdk/middleware-expect-continue": "3.821.0", "@aws-sdk/middleware-flexible-checksums": "3.826.0", "@aws-sdk/middleware-host-header": "3.821.0", "@aws-sdk/middleware-location-constraint": "3.821.0", "@aws-sdk/middleware-logger": "3.821.0", "@aws-sdk/middleware-recursion-detection": "3.821.0", "@aws-sdk/middleware-sdk-s3": "3.826.0", "@aws-sdk/middleware-ssec": "3.821.0", "@aws-sdk/middleware-user-agent": "3.828.0", "@aws-sdk/region-config-resolver": "3.821.0", "@aws-sdk/signature-v4-multi-region": "3.826.0", "@aws-sdk/types": "3.821.0", "@aws-sdk/util-endpoints": "3.828.0", "@aws-sdk/util-user-agent-browser": "3.821.0", "@aws-sdk/util-user-agent-node": "3.828.0", "@aws-sdk/xml-builder": "3.821.0", "@smithy/config-resolver": "^4.1.4", "@smithy/core": "^3.5.3", "@smithy/eventstream-serde-browser": "^4.0.4", "@smithy/eventstream-serde-config-resolver": "^4.1.2", "@smithy/eventstream-serde-node": "^4.0.4", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/hash-blob-browser": "^4.0.4", "@smithy/hash-node": "^4.0.4", "@smithy/hash-stream-node": "^4.0.4", "@smithy/invalid-dependency": "^4.0.4", "@smithy/md5-js": "^4.0.4", "@smithy/middleware-content-length": "^4.0.4", "@smithy/middleware-endpoint": "^4.1.11", "@smithy/middleware-retry": "^4.1.12", "@smithy/middleware-serde": "^4.0.8", "@smithy/middleware-stack": "^4.0.4", "@smithy/node-config-provider": "^4.1.3", "@smithy/node-http-handler": "^4.0.6", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.19", "@smithy/util-defaults-mode-node": "^4.0.19", "@smithy/util-endpoints": "^3.0.6", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.5", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "@smithy/util-waiter": "^4.0.5", "@types/uuid": "^9.0.1", "tslib": "^2.6.2", "uuid": "^9.0.1" } }, "sha512-S+md1zCe71SEuaRDuLHq4mzhYYkVxR1ENa8NwrgInfYoC4xo8/pESoR6i0ZZpcLs0Jw4EyVInWYs4GgDHW70qQ=="],
"@aws-sdk/client-lambda": ["@aws-sdk/client-lambda@3.840.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.840.0", "@aws-sdk/credential-provider-node": "3.840.0", "@aws-sdk/middleware-host-header": "3.840.0", "@aws-sdk/middleware-logger": "3.840.0", "@aws-sdk/middleware-recursion-detection": "3.840.0", "@aws-sdk/middleware-user-agent": "3.840.0", "@aws-sdk/region-config-resolver": "3.840.0", "@aws-sdk/types": "3.840.0", "@aws-sdk/util-endpoints": "3.840.0", "@aws-sdk/util-user-agent-browser": "3.840.0", "@aws-sdk/util-user-agent-node": "3.840.0", "@smithy/config-resolver": "^4.1.4", "@smithy/core": "^3.6.0", "@smithy/eventstream-serde-browser": "^4.0.4", "@smithy/eventstream-serde-config-resolver": "^4.1.2", "@smithy/eventstream-serde-node": "^4.0.4", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/hash-node": "^4.0.4", "@smithy/invalid-dependency": "^4.0.4", "@smithy/middleware-content-length": "^4.0.4", "@smithy/middleware-endpoint": "^4.1.13", "@smithy/middleware-retry": "^4.1.14", "@smithy/middleware-serde": "^4.0.8", "@smithy/middleware-stack": "^4.0.4", "@smithy/node-config-provider": "^4.1.3", "@smithy/node-http-handler": "^4.0.6", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.21", "@smithy/util-defaults-mode-node": "^4.0.21", "@smithy/util-endpoints": "^3.0.6", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.6", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "@smithy/util-waiter": "^4.0.6", "tslib": "^2.6.2" } }, "sha512-aUKHKWW4Z1nxQ0q/shHkSA278oyv+lRJSvpin1GJXQumDdMKcOuXktmufOCZzjbl6UVw/Pqaw6V1Vo2gda6RdQ=="],
"@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.830.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.826.0", "@aws-sdk/middleware-host-header": "3.821.0", "@aws-sdk/middleware-logger": "3.821.0", "@aws-sdk/middleware-recursion-detection": "3.821.0", "@aws-sdk/middleware-user-agent": "3.828.0", "@aws-sdk/region-config-resolver": "3.821.0", "@aws-sdk/types": "3.821.0", "@aws-sdk/util-endpoints": "3.828.0", "@aws-sdk/util-user-agent-browser": "3.821.0", "@aws-sdk/util-user-agent-node": "3.828.0", "@smithy/config-resolver": "^4.1.4", "@smithy/core": "^3.5.3", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/hash-node": "^4.0.4", "@smithy/invalid-dependency": "^4.0.4", "@smithy/middleware-content-length": "^4.0.4", "@smithy/middleware-endpoint": "^4.1.11", "@smithy/middleware-retry": "^4.1.12", "@smithy/middleware-serde": "^4.0.8", "@smithy/middleware-stack": "^4.0.4", "@smithy/node-config-provider": "^4.1.3", "@smithy/node-http-handler": "^4.0.6", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.19", "@smithy/util-defaults-mode-node": "^4.0.19", "@smithy/util-endpoints": "^3.0.6", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.5", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-5zCEpfI+zwX2SIa258L+TItNbBoAvQQ6w74qdFM6YJufQ1F9tvwjTX8T+eSTT9nsFIvfYnUaGalWwJVfmJUgVQ=="],
"@aws-sdk/client-s3": ["@aws-sdk/client-s3@3.842.0", "", { "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.840.0", "@aws-sdk/credential-provider-node": "3.840.0", "@aws-sdk/middleware-bucket-endpoint": "3.840.0", "@aws-sdk/middleware-expect-continue": "3.840.0", "@aws-sdk/middleware-flexible-checksums": "3.840.0", "@aws-sdk/middleware-host-header": "3.840.0", "@aws-sdk/middleware-location-constraint": "3.840.0", "@aws-sdk/middleware-logger": "3.840.0", "@aws-sdk/middleware-recursion-detection": "3.840.0", "@aws-sdk/middleware-sdk-s3": "3.840.0", "@aws-sdk/middleware-ssec": "3.840.0", "@aws-sdk/middleware-user-agent": "3.840.0", "@aws-sdk/region-config-resolver": "3.840.0", "@aws-sdk/signature-v4-multi-region": "3.840.0", "@aws-sdk/types": "3.840.0", "@aws-sdk/util-endpoints": "3.840.0", "@aws-sdk/util-user-agent-browser": "3.840.0", "@aws-sdk/util-user-agent-node": "3.840.0", "@aws-sdk/xml-builder": "3.821.0", "@smithy/config-resolver": "^4.1.4", "@smithy/core": "^3.6.0", "@smithy/eventstream-serde-browser": "^4.0.4", "@smithy/eventstream-serde-config-resolver": "^4.1.2", "@smithy/eventstream-serde-node": "^4.0.4", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/hash-blob-browser": "^4.0.4", "@smithy/hash-node": "^4.0.4", "@smithy/hash-stream-node": "^4.0.4", "@smithy/invalid-dependency": "^4.0.4", "@smithy/md5-js": "^4.0.4", "@smithy/middleware-content-length": "^4.0.4", "@smithy/middleware-endpoint": "^4.1.13", "@smithy/middleware-retry": "^4.1.14", "@smithy/middleware-serde": "^4.0.8", "@smithy/middleware-stack": "^4.0.4", "@smithy/node-config-provider": "^4.1.3", "@smithy/node-http-handler": "^4.0.6", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.21", "@smithy/util-defaults-mode-node": "^4.0.21", "@smithy/util-endpoints": "^3.0.6", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.6", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "@smithy/util-waiter": "^4.0.6", "@types/uuid": "^9.0.1", "tslib": "^2.6.2", "uuid": "^9.0.1" } }, "sha512-T5Rh72Rcq1xIaM8KkTr1Wpr7/WPCYO++KrM+/Em0rq2jxpjMMhj77ITpgH7eEmNxWmwIndTwqpgfmbpNfk7Gbw=="],
"@aws-sdk/core": ["@aws-sdk/core@3.826.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@aws-sdk/xml-builder": "3.821.0", "@smithy/core": "^3.5.3", "@smithy/node-config-provider": "^4.1.3", "@smithy/property-provider": "^4.0.4", "@smithy/protocol-http": "^5.1.2", "@smithy/signature-v4": "^5.1.2", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "@smithy/util-utf8": "^4.0.0", "fast-xml-parser": "4.4.1", "tslib": "^2.6.2" } }, "sha512-BGbQYzWj3ps+dblq33FY5tz/SsgJCcXX0zjQlSC07tYvU1jHTUvsefphyig+fY38xZ4wdKjbTop+KUmXUYrOXw=="],
"@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.840.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.840.0", "@aws-sdk/middleware-host-header": "3.840.0", "@aws-sdk/middleware-logger": "3.840.0", "@aws-sdk/middleware-recursion-detection": "3.840.0", "@aws-sdk/middleware-user-agent": "3.840.0", "@aws-sdk/region-config-resolver": "3.840.0", "@aws-sdk/types": "3.840.0", "@aws-sdk/util-endpoints": "3.840.0", "@aws-sdk/util-user-agent-browser": "3.840.0", "@aws-sdk/util-user-agent-node": "3.840.0", "@smithy/config-resolver": "^4.1.4", "@smithy/core": "^3.6.0", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/hash-node": "^4.0.4", "@smithy/invalid-dependency": "^4.0.4", "@smithy/middleware-content-length": "^4.0.4", "@smithy/middleware-endpoint": "^4.1.13", "@smithy/middleware-retry": "^4.1.14", "@smithy/middleware-serde": "^4.0.8", "@smithy/middleware-stack": "^4.0.4", "@smithy/node-config-provider": "^4.1.3", "@smithy/node-http-handler": "^4.0.6", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.21", "@smithy/util-defaults-mode-node": "^4.0.21", "@smithy/util-endpoints": "^3.0.6", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.6", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-3Zp+FWN2hhmKdpS0Ragi5V2ZPsZNScE3jlbgoJjzjI/roHZqO+e3/+XFN4TlM0DsPKYJNp+1TAjmhxN6rOnfYA=="],
"@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.826.0", "", { "dependencies": { "@aws-sdk/core": "3.826.0", "@aws-sdk/types": "3.821.0", "@smithy/property-provider": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-DK3pQY8+iKK3MGDdC3uOZQ2psU01obaKlTYhEwNu4VWzgwQL4Vi3sWj4xSWGEK41vqZxiRLq6fOq7ysRI+qEZA=="],
"@aws-sdk/core": ["@aws-sdk/core@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@aws-sdk/xml-builder": "3.821.0", "@smithy/core": "^3.6.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/property-provider": "^4.0.4", "@smithy/protocol-http": "^5.1.2", "@smithy/signature-v4": "^5.1.2", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "@smithy/util-utf8": "^4.0.0", "fast-xml-parser": "4.4.1", "tslib": "^2.6.2" } }, "sha512-x3Zgb39tF1h2XpU+yA4OAAQlW6LVEfXNlSedSYJ7HGKXqA/E9h3rWQVpYfhXXVVsLdYXdNw5KBUkoAoruoZSZA=="],
"@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.826.0", "", { "dependencies": { "@aws-sdk/core": "3.826.0", "@aws-sdk/types": "3.821.0", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/node-http-handler": "^4.0.6", "@smithy/property-provider": "^4.0.4", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "@smithy/util-stream": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-N+IVZBh+yx/9GbMZTKO/gErBi/FYZQtcFRItoLbY+6WU+0cSWyZYfkoeOxHmQV3iX9k65oljERIWUmL9x6OSQg=="],
"@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.840.0", "", { "dependencies": { "@aws-sdk/core": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/property-provider": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-EzF6VcJK7XvQ/G15AVEfJzN2mNXU8fcVpXo4bRyr1S6t2q5zx6UPH/XjDbn18xyUmOq01t+r8gG+TmHEVo18fA=="],
"@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.830.0", "", { "dependencies": { "@aws-sdk/core": "3.826.0", "@aws-sdk/credential-provider-env": "3.826.0", "@aws-sdk/credential-provider-http": "3.826.0", "@aws-sdk/credential-provider-process": "3.826.0", "@aws-sdk/credential-provider-sso": "3.830.0", "@aws-sdk/credential-provider-web-identity": "3.830.0", "@aws-sdk/nested-clients": "3.830.0", "@aws-sdk/types": "3.821.0", "@smithy/credential-provider-imds": "^4.0.6", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-zeQenzvh8JRY5nULd8izdjVGoCM1tgsVVsrLSwDkHxZTTW0hW/bmOmXfvdaE0wDdomXW7m2CkQDSmP7XdvNXZg=="],
"@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.840.0", "", { "dependencies": { "@aws-sdk/core": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/node-http-handler": "^4.0.6", "@smithy/property-provider": "^4.0.4", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "@smithy/util-stream": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-wbnUiPGLVea6mXbUh04fu+VJmGkQvmToPeTYdHE8eRZq3NRDi3t3WltT+jArLBKD/4NppRpMjf2ju4coMCz91g=="],
"@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.830.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.826.0", "@aws-sdk/credential-provider-http": "3.826.0", "@aws-sdk/credential-provider-ini": "3.830.0", "@aws-sdk/credential-provider-process": "3.826.0", "@aws-sdk/credential-provider-sso": "3.830.0", "@aws-sdk/credential-provider-web-identity": "3.830.0", "@aws-sdk/types": "3.821.0", "@smithy/credential-provider-imds": "^4.0.6", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-X/2LrTgwtK1pkWrvofxQBI8VTi6QVLtSMpsKKPPnJQ0vgqC0e4czSIs3ZxiEsOkCBaQ2usXSiKyh0ccsQ6k2OA=="],
"@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.840.0", "", { "dependencies": { "@aws-sdk/core": "3.840.0", "@aws-sdk/credential-provider-env": "3.840.0", "@aws-sdk/credential-provider-http": "3.840.0", "@aws-sdk/credential-provider-process": "3.840.0", "@aws-sdk/credential-provider-sso": "3.840.0", "@aws-sdk/credential-provider-web-identity": "3.840.0", "@aws-sdk/nested-clients": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/credential-provider-imds": "^4.0.6", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-7F290BsWydShHb+7InXd+IjJc3mlEIm9I0R57F/Pjl1xZB69MdkhVGCnuETWoBt4g53ktJd6NEjzm/iAhFXFmw=="],
"@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.826.0", "", { "dependencies": { "@aws-sdk/core": "3.826.0", "@aws-sdk/types": "3.821.0", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-kURrc4amu3NLtw1yZw7EoLNEVhmOMRUTs+chaNcmS+ERm3yK0nKjaJzmKahmwlTQTSl3wJ8jjK7x962VPo+zWw=="],
"@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.840.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.840.0", "@aws-sdk/credential-provider-http": "3.840.0", "@aws-sdk/credential-provider-ini": "3.840.0", "@aws-sdk/credential-provider-process": "3.840.0", "@aws-sdk/credential-provider-sso": "3.840.0", "@aws-sdk/credential-provider-web-identity": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/credential-provider-imds": "^4.0.6", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-KufP8JnxA31wxklLm63evUPSFApGcH8X86z3mv9SRbpCm5ycgWIGVCTXpTOdgq6rPZrwT9pftzv2/b4mV/9clg=="],
"@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.830.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.830.0", "@aws-sdk/core": "3.826.0", "@aws-sdk/token-providers": "3.830.0", "@aws-sdk/types": "3.821.0", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-+VdRpZmfekzpySqZikAKx6l5ndnLGluioIgUG4ZznrButgFD/iogzFtGmBDFB3ZLViX1l4pMXru0zFwJEZT21Q=="],
"@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.840.0", "", { "dependencies": { "@aws-sdk/core": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-HkDQWHy8tCI4A0Ps2NVtuVYMv9cB4y/IuD/TdOsqeRIAT12h8jDb98BwQPNLAImAOwOWzZJ8Cu0xtSpX7CQhMw=="],
"@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.830.0", "", { "dependencies": { "@aws-sdk/core": "3.826.0", "@aws-sdk/nested-clients": "3.830.0", "@aws-sdk/types": "3.821.0", "@smithy/property-provider": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-hPYrKsZeeOdLROJ59T6Y8yZ0iwC/60L3qhZXjapBFjbqBtMaQiMTI645K6xVXBioA6vxXq7B4aLOhYqk6Fy/Ww=="],
"@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.840.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.840.0", "@aws-sdk/core": "3.840.0", "@aws-sdk/token-providers": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-2qgdtdd6R0Z1y0KL8gzzwFUGmhBHSUx4zy85L2XV1CXhpRNwV71SVWJqLDVV5RVWVf9mg50Pm3AWrUC0xb0pcA=="],
"@aws-sdk/middleware-bucket-endpoint": ["@aws-sdk/middleware-bucket-endpoint@3.830.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@aws-sdk/util-arn-parser": "3.804.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "@smithy/util-config-provider": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-ElVeCReZSH5Ds+/pkL5ebneJjuo8f49e9JXV1cYizuH0OAOQfYaBU9+M+7+rn61pTttOFE8W//qKzrXBBJhfMg=="],
"@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.840.0", "", { "dependencies": { "@aws-sdk/core": "3.840.0", "@aws-sdk/nested-clients": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/property-provider": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-dpEeVXG8uNZSmVXReE4WP0lwoioX2gstk4RnUgrdUE3YaPq8A+hJiVAyc3h+cjDeIqfbsQbZm9qFetKC2LF9dQ=="],
"@aws-sdk/middleware-expect-continue": ["@aws-sdk/middleware-expect-continue@3.821.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-zAOoSZKe1njOrtynvK6ZORU57YGv5I7KP4+rwOvUN3ZhJbQ7QPf8gKtFUCYAPRMegaXCKF/ADPtDZBAmM+zZ9g=="],
"@aws-sdk/middleware-bucket-endpoint": ["@aws-sdk/middleware-bucket-endpoint@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@aws-sdk/util-arn-parser": "3.804.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "@smithy/util-config-provider": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-+gkQNtPwcSMmlwBHFd4saVVS11In6ID1HczNzpM3MXKXRBfSlbZJbCt6wN//AZ8HMklZEik4tcEOG0qa9UY8SQ=="],
"@aws-sdk/middleware-flexible-checksums": ["@aws-sdk/middleware-flexible-checksums@3.826.0", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", "@aws-sdk/core": "3.826.0", "@aws-sdk/types": "3.821.0", "@smithy/is-array-buffer": "^4.0.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "@smithy/util-middleware": "^4.0.4", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-Fz9w8CFYPfSlHEB6feSsi06hdS+s+FB8k5pO4L7IV0tUa78mlhxF/VNlAJaVWYyOkZXl4HPH2K48aapACSQOXw=="],
"@aws-sdk/middleware-expect-continue": ["@aws-sdk/middleware-expect-continue@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-iJg2r6FKsKKvdiU4oCOuCf7Ro/YE0Q2BT/QyEZN3/Rt8Nr4SAZiQOlcBXOCpGvuIKOEAhvDOUnW3aDHL01PdVw=="],
"@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.821.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-xSMR+sopSeWGx5/4pAGhhfMvGBHioVBbqGvDs6pG64xfNwM5vq5s5v6D04e2i+uSTj4qGa71dLUs5I0UzAK3sw=="],
"@aws-sdk/middleware-flexible-checksums": ["@aws-sdk/middleware-flexible-checksums@3.840.0", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", "@aws-sdk/core": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/is-array-buffer": "^4.0.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "@smithy/util-middleware": "^4.0.4", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-Kg/o2G6o72sdoRH0J+avdcf668gM1bp6O4VeEXpXwUj/urQnV5qiB2q1EYT110INHUKWOLXPND3sQAqh6sTqHw=="],
"@aws-sdk/middleware-location-constraint": ["@aws-sdk/middleware-location-constraint@3.821.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-sKrm80k0t3R0on8aA/WhWFoMaAl4yvdk+riotmMElLUpcMcRXAd1+600uFVrxJqZdbrKQ0mjX0PjT68DlkYXLg=="],
"@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-ub+hXJAbAje94+Ya6c6eL7sYujoE8D4Bumu1NUI8TXjUhVVn0HzVWQjpRLshdLsUp1AW7XyeJaxyajRaJQ8+Xg=="],
"@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.821.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-0cvI0ipf2tGx7fXYEEN5fBeZDz2RnHyb9xftSgUsEq7NBxjV0yTZfLJw6Za5rjE6snC80dRN8+bTNR1tuG89zA=="],
"@aws-sdk/middleware-location-constraint": ["@aws-sdk/middleware-location-constraint@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-KVLD0u0YMF3aQkVF8bdyHAGWSUY6N1Du89htTLgqCcIhSxxAJ9qifrosVZ9jkAzqRW99hcufyt2LylcVU2yoKQ=="],
"@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.821.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-efmaifbhBoqKG3bAoEfDdcM8hn1psF+4qa7ykWuYmfmah59JBeqHLfz5W9m9JoTwoKPkFcVLWZxnyZzAnVBOIg=="],
"@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-lSV8FvjpdllpGaRspywss4CtXV8M7NNNH+2/j86vMH+YCOZ6fu2T/TyFd/tHwZ92vDfHctWkRbQxg0bagqwovA=="],
"@aws-sdk/middleware-sdk-s3": ["@aws-sdk/middleware-sdk-s3@3.826.0", "", { "dependencies": { "@aws-sdk/core": "3.826.0", "@aws-sdk/types": "3.821.0", "@aws-sdk/util-arn-parser": "3.804.0", "@smithy/core": "^3.5.3", "@smithy/node-config-provider": "^4.1.3", "@smithy/protocol-http": "^5.1.2", "@smithy/signature-v4": "^5.1.2", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "@smithy/util-config-provider": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-8F0qWaYKfvD/de1AKccXuigM+gb/IZSncCqxdnFWqd+TFzo9qI9Hh+TpUhWOMYSgxsMsYQ8ipmLzlD/lDhjrmA=="],
"@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-Gu7lGDyfddyhIkj1Z1JtrY5NHb5+x/CRiB87GjaSrKxkDaydtX2CU977JIABtt69l9wLbcGDIQ+W0uJ5xPof7g=="],
"@aws-sdk/middleware-ssec": ["@aws-sdk/middleware-ssec@3.821.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-YYi1Hhr2AYiU/24cQc8HIB+SWbQo6FBkMYojVuz/zgrtkFmALxENGF/21OPg7f/QWd+eadZJRxCjmRwh5F2Cxg=="],
"@aws-sdk/middleware-sdk-s3": ["@aws-sdk/middleware-sdk-s3@3.840.0", "", { "dependencies": { "@aws-sdk/core": "3.840.0", "@aws-sdk/types": "3.840.0", "@aws-sdk/util-arn-parser": "3.804.0", "@smithy/core": "^3.6.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/protocol-http": "^5.1.2", "@smithy/signature-v4": "^5.1.2", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "@smithy/util-config-provider": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-rOUji7CayWN3O09zvvgLzDVQe0HiJdZkxoTS6vzOS3WbbdT7joGdVtAJHtn+x776QT3hHzbKU5gnfhel0o6gQA=="],
"@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.828.0", "", { "dependencies": { "@aws-sdk/core": "3.826.0", "@aws-sdk/types": "3.821.0", "@aws-sdk/util-endpoints": "3.828.0", "@smithy/core": "^3.5.3", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-nixvI/SETXRdmrVab4D9LvXT3lrXkwAWGWk2GVvQvzlqN1/M/RfClj+o37Sn4FqRkGH9o9g7Fqb1YqZ4mqDAtA=="],
"@aws-sdk/middleware-ssec": ["@aws-sdk/middleware-ssec@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-CBZP9t1QbjDFGOrtnUEHL1oAvmnCUUm7p0aPNbIdSzNtH42TNKjPRN3TuEIJDGjkrqpL3MXyDSmNayDcw/XW7Q=="],
"@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.830.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.826.0", "@aws-sdk/middleware-host-header": "3.821.0", "@aws-sdk/middleware-logger": "3.821.0", "@aws-sdk/middleware-recursion-detection": "3.821.0", "@aws-sdk/middleware-user-agent": "3.828.0", "@aws-sdk/region-config-resolver": "3.821.0", "@aws-sdk/types": "3.821.0", "@aws-sdk/util-endpoints": "3.828.0", "@aws-sdk/util-user-agent-browser": "3.821.0", "@aws-sdk/util-user-agent-node": "3.828.0", "@smithy/config-resolver": "^4.1.4", "@smithy/core": "^3.5.3", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/hash-node": "^4.0.4", "@smithy/invalid-dependency": "^4.0.4", "@smithy/middleware-content-length": "^4.0.4", "@smithy/middleware-endpoint": "^4.1.11", "@smithy/middleware-retry": "^4.1.12", "@smithy/middleware-serde": "^4.0.8", "@smithy/middleware-stack": "^4.0.4", "@smithy/node-config-provider": "^4.1.3", "@smithy/node-http-handler": "^4.0.6", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.19", "@smithy/util-defaults-mode-node": "^4.0.19", "@smithy/util-endpoints": "^3.0.6", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.5", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-5N5YTlBr1vtxf7+t+UaIQ625KEAmm7fY9o1e3MgGOi/paBoI0+axr3ud24qLIy0NSzFlAHEaxUSWxcERNjIoZw=="],
"@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.840.0", "", { "dependencies": { "@aws-sdk/core": "3.840.0", "@aws-sdk/types": "3.840.0", "@aws-sdk/util-endpoints": "3.840.0", "@smithy/core": "^3.6.0", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-hiiMf7BP5ZkAFAvWRcK67Mw/g55ar7OCrvrynC92hunx/xhMkrgSLM0EXIZ1oTn3uql9kH/qqGF0nqsK6K555A=="],
"@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.821.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/types": "^4.3.1", "@smithy/util-config-provider": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "tslib": "^2.6.2" } }, "sha512-t8og+lRCIIy5nlId0bScNpCkif8sc0LhmtaKsbm0ZPm3sCa/WhCbSZibjbZ28FNjVCV+p0D9RYZx0VDDbtWyjw=="],
"@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.840.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.840.0", "@aws-sdk/middleware-host-header": "3.840.0", "@aws-sdk/middleware-logger": "3.840.0", "@aws-sdk/middleware-recursion-detection": "3.840.0", "@aws-sdk/middleware-user-agent": "3.840.0", "@aws-sdk/region-config-resolver": "3.840.0", "@aws-sdk/types": "3.840.0", "@aws-sdk/util-endpoints": "3.840.0", "@aws-sdk/util-user-agent-browser": "3.840.0", "@aws-sdk/util-user-agent-node": "3.840.0", "@smithy/config-resolver": "^4.1.4", "@smithy/core": "^3.6.0", "@smithy/fetch-http-handler": "^5.0.4", "@smithy/hash-node": "^4.0.4", "@smithy/invalid-dependency": "^4.0.4", "@smithy/middleware-content-length": "^4.0.4", "@smithy/middleware-endpoint": "^4.1.13", "@smithy/middleware-retry": "^4.1.14", "@smithy/middleware-serde": "^4.0.8", "@smithy/middleware-stack": "^4.0.4", "@smithy/node-config-provider": "^4.1.3", "@smithy/node-http-handler": "^4.0.6", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-body-length-node": "^4.0.0", "@smithy/util-defaults-mode-browser": "^4.0.21", "@smithy/util-defaults-mode-node": "^4.0.21", "@smithy/util-endpoints": "^3.0.6", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.6", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-LXYYo9+n4hRqnRSIMXLBb+BLz+cEmjMtTudwK1BF6Bn2RfdDv29KuyeDRrPCS3TwKl7ZKmXUmE9n5UuHAPfBpA=="],
"@aws-sdk/s3-request-presigner": ["@aws-sdk/s3-request-presigner@3.832.0", "", { "dependencies": { "@aws-sdk/signature-v4-multi-region": "3.826.0", "@aws-sdk/types": "3.821.0", "@aws-sdk/util-format-url": "3.821.0", "@smithy/middleware-endpoint": "^4.1.11", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-zXuwfaAYu99LUF7/6iBr3UlKCMaMImBwfmLXJQlvtE3ebrERXQuISME9Vjd2oG+hJ6XcX6RJqkeIvZBytMzvRw=="],
"@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/types": "^4.3.1", "@smithy/util-config-provider": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "tslib": "^2.6.2" } }, "sha512-Qjnxd/yDv9KpIMWr90ZDPtRj0v75AqGC92Lm9+oHXZ8p1MjG5JE2CW0HL8JRgK9iKzgKBL7pPQRXI8FkvEVfrA=="],
"@aws-sdk/signature-v4-multi-region": ["@aws-sdk/signature-v4-multi-region@3.826.0", "", { "dependencies": { "@aws-sdk/middleware-sdk-s3": "3.826.0", "@aws-sdk/types": "3.821.0", "@smithy/protocol-http": "^5.1.2", "@smithy/signature-v4": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-3fEi/zy6tpMzomYosksGtu7jZqGFcdBXoL7YRsG7OEeQzBbOW9B+fVaQZ4jnsViSjzA/yKydLahMrfPnt+iaxg=="],
"@aws-sdk/s3-request-presigner": ["@aws-sdk/s3-request-presigner@3.842.0", "", { "dependencies": { "@aws-sdk/signature-v4-multi-region": "3.840.0", "@aws-sdk/types": "3.840.0", "@aws-sdk/util-format-url": "3.840.0", "@smithy/middleware-endpoint": "^4.1.13", "@smithy/protocol-http": "^5.1.2", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-daS69IJ20X+BzsiEtj3XuyyM765iFOdZ648lrptHncQHRWdpzahk67/nP/SKYhWvnNrQ4pw2vYlVxpOs9vl1yg=="],
"@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.830.0", "", { "dependencies": { "@aws-sdk/core": "3.826.0", "@aws-sdk/nested-clients": "3.830.0", "@aws-sdk/types": "3.821.0", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-aJ4guFwj92nV9D+EgJPaCFKK0I3y2uMchiDfh69Zqnmwfxxxfxat6F79VA7PS0BdbjRfhLbn+Ghjftnomu2c1g=="],
"@aws-sdk/signature-v4-multi-region": ["@aws-sdk/signature-v4-multi-region@3.840.0", "", { "dependencies": { "@aws-sdk/middleware-sdk-s3": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/protocol-http": "^5.1.2", "@smithy/signature-v4": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-8AoVgHrkSfhvGPtwx23hIUO4MmMnux2pjnso1lrLZGqxfElM6jm2w4jTNLlNXk8uKHGyX89HaAIuT0lL6dJj9g=="],
"@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.840.0", "", { "dependencies": { "@aws-sdk/core": "3.840.0", "@aws-sdk/nested-clients": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/property-provider": "^4.0.4", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-6BuTOLTXvmgwjK7ve7aTg9JaWFdM5UoMolLVPMyh3wTv9Ufalh8oklxYHUBIxsKkBGO2WiHXytveuxH6tAgTYg=="],
"@aws-sdk/types": ["@aws-sdk/types@3.840.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-xliuHaUFZxEx1NSXeLLZ9Dyu6+EJVQKEoD+yM+zqUo3YDZ7medKJWY6fIOKiPX/N7XbLdBYwajb15Q7IL8KkeA=="],
"@aws-sdk/types": ["@aws-sdk/types@3.821.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-Znroqdai1a90TlxGaJ+FK1lwC0fHpo97Xjsp5UKGR5JODYm7f9+/fF17ebO1KdoBr/Rm0UIFiF5VmI8ts9F1eA=="],
"@aws-sdk/util-arn-parser": ["@aws-sdk/util-arn-parser@3.804.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-wmBJqn1DRXnZu3b4EkE6CWnoWMo1ZMvlfkqU5zPz67xx1GMaXlDCchFvKAXMjk4jn/L1O3tKnoFDNsoLV1kgNQ=="],
"@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/types": "^4.3.1", "@smithy/util-endpoints": "^3.0.6", "tslib": "^2.6.2" } }, "sha512-eqE9ROdg/Kk0rj3poutyRCFauPDXIf/WSvCqFiRDDVi6QOnCv/M0g2XW8/jSvkJlOyaXkNCptapIp6BeeFFGYw=="],
"@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.828.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/types": "^4.3.1", "@smithy/util-endpoints": "^3.0.6", "tslib": "^2.6.2" } }, "sha512-RvKch111SblqdkPzg3oCIdlGxlQs+k+P7Etory9FmxPHyPDvsP1j1c74PmgYqtzzMWmoXTjd+c9naUHh9xG8xg=="],
"@aws-sdk/util-format-url": ["@aws-sdk/util-format-url@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/querystring-builder": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-VB1PWyI1TQPiPvg4w7tgUGGQER1xxXPNUqfh3baxUSFi1Oh8wHrDnFywkxLm3NMmgDmnLnSZ5Q326qAoyqKLSg=="],
"@aws-sdk/util-format-url": ["@aws-sdk/util-format-url@3.821.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/querystring-builder": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-h+xqmPToxDrZ0a7rxE1a8Oh4zpWfZe9oiQUphGtfiGFA6j75UiURH5J3MmGHa/G4t15I3iLLbYtUXxvb1i7evg=="],
"@aws-sdk/util-locate-window": ["@aws-sdk/util-locate-window@3.804.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-zVoRfpmBVPodYlnMjgVjfGoEZagyRF5IPn3Uo6ZvOZp24chnW/FRstH7ESDHDDRga4z3V+ElUQHKpFDXWyBW5A=="],
"@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.840.0", "", { "dependencies": { "@aws-sdk/types": "3.840.0", "@smithy/types": "^4.3.1", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-JdyZM3EhhL4PqwFpttZu1afDpPJCCc3eyZOLi+srpX11LsGj6sThf47TYQN75HT1CarZ7cCdQHGzP2uy3/xHfQ=="],
"@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.821.0", "", { "dependencies": { "@aws-sdk/types": "3.821.0", "@smithy/types": "^4.3.1", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-irWZHyM0Jr1xhC+38OuZ7JB6OXMLPZlj48thElpsO1ZSLRkLZx5+I7VV6k3sp2yZ7BYbKz/G2ojSv4wdm7XTLw=="],
"@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.840.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.840.0", "@aws-sdk/types": "3.840.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-Fy5JUEDQU1tPm2Yw/YqRYYc27W5+QD/J4mYvQvdWjUGZLB5q3eLFMGD35Uc28ZFoGMufPr4OCxK/bRfWROBRHQ=="],
"@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.828.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.828.0", "@aws-sdk/types": "3.821.0", "@smithy/node-config-provider": "^4.1.3", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-LdN6fTBzTlQmc8O8f1wiZN0qF3yBWVGis7NwpWK7FUEzP9bEZRxYfIkV9oV9zpt6iNRze1SedK3JQVB/udxBoA=="],
"@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.821.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-DIIotRnefVL6DiaHtO6/21DhJ4JZnnIwdNbpwiAhdt/AVbttcE4yw925gsjur0OGv5BTYXQXU3YnANBYnZjuQA=="],
@@ -1074,7 +1066,7 @@
"@smithy/config-resolver": ["@smithy/config-resolver@4.1.4", "", { "dependencies": { "@smithy/node-config-provider": "^4.1.3", "@smithy/types": "^4.3.1", "@smithy/util-config-provider": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "tslib": "^2.6.2" } }, "sha512-prmU+rDddxHOH0oNcwemL+SwnzcG65sBF2yXRO7aeXIn/xTlq2pX7JLVbkBnVLowHLg4/OL4+jBmv9hVrVGS+w=="],
"@smithy/core": ["@smithy/core@3.6.0", "", { "dependencies": { "@smithy/middleware-serde": "^4.0.8", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-Pgvfb+TQ4wUNLyHzvgCP4aYZMh16y7GcfF59oirRHcgGgkH1e/s9C0nv/v3WP+Quymyr5je71HeFQCwh+44XLg=="],
"@smithy/core": ["@smithy/core@3.5.3", "", { "dependencies": { "@smithy/middleware-serde": "^4.0.8", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "@smithy/util-base64": "^4.0.0", "@smithy/util-body-length-browser": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "@smithy/util-stream": "^4.2.2", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-xa5byV9fEguZNofCclv6v9ra0FYh5FATQW/da7FQUVTic94DfrN/NvmKZjrMyzbpqfot9ZjBaO8U1UeTbmSLuA=="],
"@smithy/credential-provider-imds": ["@smithy/credential-provider-imds@4.0.6", "", { "dependencies": { "@smithy/node-config-provider": "^4.1.3", "@smithy/property-provider": "^4.0.4", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "tslib": "^2.6.2" } }, "sha512-hKMWcANhUiNbCJouYkZ9V3+/Qf9pteR1dnwgdyzR09R4ODEYx8BbUysHwRSyex4rZ9zapddZhLFTnT4ZijR4pw=="],
@@ -1104,9 +1096,9 @@
"@smithy/middleware-content-length": ["@smithy/middleware-content-length@4.0.4", "", { "dependencies": { "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-F7gDyfI2BB1Kc+4M6rpuOLne5LOcEknH1n6UQB69qv+HucXBR1rkzXBnQTB2q46sFy1PM/zuSJOB532yc8bg3w=="],
"@smithy/middleware-endpoint": ["@smithy/middleware-endpoint@4.1.13", "", { "dependencies": { "@smithy/core": "^3.6.0", "@smithy/middleware-serde": "^4.0.8", "@smithy/node-config-provider": "^4.1.3", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-middleware": "^4.0.4", "tslib": "^2.6.2" } }, "sha512-xg3EHV/Q5ZdAO5b0UiIMj3RIOCobuS40pBBODguUDVdko6YK6QIzCVRrHTogVuEKglBWqWenRnZ71iZnLL3ZAQ=="],
"@smithy/middleware-endpoint": ["@smithy/middleware-endpoint@4.1.11", "", { "dependencies": { "@smithy/core": "^3.5.3", "@smithy/middleware-serde": "^4.0.8", "@smithy/node-config-provider": "^4.1.3", "@smithy/shared-ini-file-loader": "^4.0.4", "@smithy/types": "^4.3.1", "@smithy/url-parser": "^4.0.4", "@smithy/util-middleware": "^4.0.4", "tslib": "^2.6.2" } }, "sha512-zDogwtRLzKl58lVS8wPcARevFZNBOOqnmzWWxVe9XiaXU2CADFjvJ9XfNibgkOWs08sxLuSr81NrpY4mgp9OwQ=="],
"@smithy/middleware-retry": ["@smithy/middleware-retry@4.1.14", "", { "dependencies": { "@smithy/node-config-provider": "^4.1.3", "@smithy/protocol-http": "^5.1.2", "@smithy/service-error-classification": "^4.0.6", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.6", "tslib": "^2.6.2", "uuid": "^9.0.1" } }, "sha512-eoXaLlDGpKvdmvt+YBfRXE7HmIEtFF+DJCbTPwuLunP0YUnrydl+C4tS+vEM0+nyxXrX3PSUFqC+lP1+EHB1Tw=="],
"@smithy/middleware-retry": ["@smithy/middleware-retry@4.1.12", "", { "dependencies": { "@smithy/node-config-provider": "^4.1.3", "@smithy/protocol-http": "^5.1.2", "@smithy/service-error-classification": "^4.0.5", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "@smithy/util-middleware": "^4.0.4", "@smithy/util-retry": "^4.0.5", "tslib": "^2.6.2", "uuid": "^9.0.1" } }, "sha512-wvIH70c4e91NtRxdaLZF+mbLZ/HcC6yg7ySKUiufL6ESp6zJUSnJucZ309AvG9nqCFHSRB5I6T3Ez1Q9wCh0Ww=="],
"@smithy/middleware-serde": ["@smithy/middleware-serde@4.0.8", "", { "dependencies": { "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-iSSl7HJoJaGyMIoNn2B7czghOVwJ9nD7TMvLhMWeSB5vt0TnEYyRRqPJu/TqW76WScaNvYYB8nRoiBHR9S1Ddw=="],
@@ -1124,13 +1116,13 @@
"@smithy/querystring-parser": ["@smithy/querystring-parser@4.0.4", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-6yZf53i/qB8gRHH/l2ZwUG5xgkPgQF15/KxH0DdXMDHjesA9MeZje/853ifkSY0x4m5S+dfDZ+c4x439PF0M2w=="],
"@smithy/service-error-classification": ["@smithy/service-error-classification@4.0.6", "", { "dependencies": { "@smithy/types": "^4.3.1" } }, "sha512-RRoTDL//7xi4tn5FrN2NzH17jbgmnKidUqd4KvquT0954/i6CXXkh1884jBiunq24g9cGtPBEXlU40W6EpNOOg=="],
"@smithy/service-error-classification": ["@smithy/service-error-classification@4.0.5", "", { "dependencies": { "@smithy/types": "^4.3.1" } }, "sha512-LvcfhrnCBvCmTee81pRlh1F39yTS/+kYleVeLCwNtkY8wtGg8V/ca9rbZZvYIl8OjlMtL6KIjaiL/lgVqHD2nA=="],
"@smithy/shared-ini-file-loader": ["@smithy/shared-ini-file-loader@4.0.4", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-63X0260LoFBjrHifPDs+nM9tV0VMkOTl4JRMYNuKh/f5PauSjowTfvF3LogfkWdcPoxsA9UjqEOgjeYIbhb7Nw=="],
"@smithy/signature-v4": ["@smithy/signature-v4@5.1.2", "", { "dependencies": { "@smithy/is-array-buffer": "^4.0.0", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "@smithy/util-hex-encoding": "^4.0.0", "@smithy/util-middleware": "^4.0.4", "@smithy/util-uri-escape": "^4.0.0", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-d3+U/VpX7a60seHziWnVZOHuEgJlclufjkS6zhXvxcJgkJq4UWdH5eOBLzHRMx6gXjsdT9h6lfpmLzbrdupHgQ=="],
"@smithy/smithy-client": ["@smithy/smithy-client@4.4.5", "", { "dependencies": { "@smithy/core": "^3.6.0", "@smithy/middleware-endpoint": "^4.1.13", "@smithy/middleware-stack": "^4.0.4", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "@smithy/util-stream": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-+lynZjGuUFJaMdDYSTMnP/uPBBXXukVfrJlP+1U/Dp5SFTEI++w6NMga8DjOENxecOF71V9Z2DllaVDYRnGlkg=="],
"@smithy/smithy-client": ["@smithy/smithy-client@4.4.3", "", { "dependencies": { "@smithy/core": "^3.5.3", "@smithy/middleware-endpoint": "^4.1.11", "@smithy/middleware-stack": "^4.0.4", "@smithy/protocol-http": "^5.1.2", "@smithy/types": "^4.3.1", "@smithy/util-stream": "^4.2.2", "tslib": "^2.6.2" } }, "sha512-xxzNYgA0HD6ETCe5QJubsxP0hQH3QK3kbpJz3QrosBCuIWyEXLR/CO5hFb2OeawEKUxMNhz3a1nuJNN2np2RMA=="],
"@smithy/types": ["@smithy/types@4.3.1", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-UqKOQBL2x6+HWl3P+3QqFD4ncKq0I8Nuz9QItGv5WuKuMHuuwlhvqcZCoXGfc+P1QmfJE7VieykoYYmrOoFJxA=="],
@@ -1146,9 +1138,9 @@
"@smithy/util-config-provider": ["@smithy/util-config-provider@4.0.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-L1RBVzLyfE8OXH+1hsJ8p+acNUSirQnWQ6/EgpchV88G6zGBTDPdXiiExei6Z1wR2RxYvxY/XLw6AMNCCt8H3w=="],
"@smithy/util-defaults-mode-browser": ["@smithy/util-defaults-mode-browser@4.0.21", "", { "dependencies": { "@smithy/property-provider": "^4.0.4", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-wM0jhTytgXu3wzJoIqpbBAG5U6BwiubZ6QKzSbP7/VbmF1v96xlAbX2Am/mz0Zep0NLvLh84JT0tuZnk3wmYQA=="],
"@smithy/util-defaults-mode-browser": ["@smithy/util-defaults-mode-browser@4.0.19", "", { "dependencies": { "@smithy/property-provider": "^4.0.4", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-mvLMh87xSmQrV5XqnUYEPoiFFeEGYeAKIDDKdhE2ahqitm8OHM3aSvhqL6rrK6wm1brIk90JhxDf5lf2hbrLbQ=="],
"@smithy/util-defaults-mode-node": ["@smithy/util-defaults-mode-node@4.0.21", "", { "dependencies": { "@smithy/config-resolver": "^4.1.4", "@smithy/credential-provider-imds": "^4.0.6", "@smithy/node-config-provider": "^4.1.3", "@smithy/property-provider": "^4.0.4", "@smithy/smithy-client": "^4.4.5", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-/F34zkoU0GzpUgLJydHY8Rxu9lBn8xQC/s/0M0U9lLBkYbA1htaAFjWYJzpzsbXPuri5D1H8gjp2jBum05qBrA=="],
"@smithy/util-defaults-mode-node": ["@smithy/util-defaults-mode-node@4.0.19", "", { "dependencies": { "@smithy/config-resolver": "^4.1.4", "@smithy/credential-provider-imds": "^4.0.6", "@smithy/node-config-provider": "^4.1.3", "@smithy/property-provider": "^4.0.4", "@smithy/smithy-client": "^4.4.3", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-8tYnx+LUfj6m+zkUUIrIQJxPM1xVxfRBvoGHua7R/i6qAxOMjqR6CpEpDwKoIs1o0+hOjGvkKE23CafKL0vJ9w=="],
"@smithy/util-endpoints": ["@smithy/util-endpoints@3.0.6", "", { "dependencies": { "@smithy/node-config-provider": "^4.1.3", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-YARl3tFL3WgPuLzljRUnrS2ngLiUtkwhQtj8PAL13XZSyUiNLQxwG3fBBq3QXFqGFUXepIN73pINp3y8c2nBmA=="],
@@ -1156,7 +1148,7 @@
"@smithy/util-middleware": ["@smithy/util-middleware@4.0.4", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-9MLKmkBmf4PRb0ONJikCbCwORACcil6gUWojwARCClT7RmLzF04hUR4WdRprIXal7XVyrddadYNfp2eF3nrvtQ=="],
"@smithy/util-retry": ["@smithy/util-retry@4.0.6", "", { "dependencies": { "@smithy/service-error-classification": "^4.0.6", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-+YekoF2CaSMv6zKrA6iI/N9yva3Gzn4L6n35Luydweu5MMPYpiGZlWqehPHDHyNbnyaYlz/WJyYAZnC+loBDZg=="],
"@smithy/util-retry": ["@smithy/util-retry@4.0.5", "", { "dependencies": { "@smithy/service-error-classification": "^4.0.5", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-V7MSjVDTlEt/plmOFBn1762Dyu5uqMrV2Pl2X0dYk4XvWfdWJNe9Bs5Bzb56wkCuiWjSfClVMGcsuKrGj7S/yg=="],
"@smithy/util-stream": ["@smithy/util-stream@4.2.2", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.0.4", "@smithy/node-http-handler": "^4.0.6", "@smithy/types": "^4.3.1", "@smithy/util-base64": "^4.0.0", "@smithy/util-buffer-from": "^4.0.0", "@smithy/util-hex-encoding": "^4.0.0", "@smithy/util-utf8": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-aI+GLi7MJoVxg24/3J1ipwLoYzgkB4kUfogZfnslcYlynj3xsQ0e7vk4TnTro9hhsS5PvX1mwmkRqqHQjwcU7w=="],
@@ -1164,7 +1156,7 @@
"@smithy/util-utf8": ["@smithy/util-utf8@4.0.0", "", { "dependencies": { "@smithy/util-buffer-from": "^4.0.0", "tslib": "^2.6.2" } }, "sha512-b+zebfKCfRdgNJDknHCob3O7FpeYQN6ZG6YLExMcasDHsCXlsXCEuiPZeLnJLpwa5dvPetGlnGCiMHuLwGvFow=="],
"@smithy/util-waiter": ["@smithy/util-waiter@4.0.6", "", { "dependencies": { "@smithy/abort-controller": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-slcr1wdRbX7NFphXZOxtxRNA7hXAAtJAXJDE/wdoMAos27SIquVCKiSqfB6/28YzQ8FCsB5NKkhdM5gMADbqxg=="],
"@smithy/util-waiter": ["@smithy/util-waiter@4.0.5", "", { "dependencies": { "@smithy/abort-controller": "^4.0.4", "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-4QvC49HTteI1gfemu0I1syWovJgPvGn7CVUoN9ZFkdvr/cCFkrEL7qNCdx/2eICqDWEGnnr68oMdSIPCLAriSQ=="],
"@socket.io/component-emitter": ["@socket.io/component-emitter@3.1.2", "", {}, "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA=="],
@@ -1488,10 +1480,6 @@
"anymatch": ["anymatch@3.1.3", "", { "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="],
"archiver": ["archiver@7.0.1", "", { "dependencies": { "archiver-utils": "^5.0.2", "async": "^3.2.4", "buffer-crc32": "^1.0.0", "readable-stream": "^4.0.0", "readdir-glob": "^1.1.2", "tar-stream": "^3.0.0", "zip-stream": "^6.0.1" } }, "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ=="],
"archiver-utils": ["archiver-utils@5.0.2", "", { "dependencies": { "glob": "^10.0.0", "graceful-fs": "^4.2.0", "is-stream": "^2.0.1", "lazystream": "^1.0.0", "lodash": "^4.17.15", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" } }, "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA=="],
"arg": ["arg@5.0.2", "", {}, "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg=="],
"argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="],
@@ -1510,20 +1498,14 @@
"astring": ["astring@1.9.0", "", { "bin": { "astring": "bin/astring" } }, "sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg=="],
"async": ["async@3.2.6", "", {}, "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="],
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
"atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="],
"b4a": ["b4a@1.6.7", "", {}, "sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg=="],
"bail": ["bail@2.0.2", "", {}, "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw=="],
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],
"bare-events": ["bare-events@2.5.4", "", {}, "sha512-+gFfDkR8pj4/TrWCGUGWmJIkBwuxPS5F+a5yWjOHQt2hHvNZd5YLzadjmDUtFmMM4y429bnKLa8bYBMHcYdnQA=="],
"base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="],
"base64id": ["base64id@2.0.0", "", {}, "sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog=="],
@@ -1552,9 +1534,7 @@
"browserslist": ["browserslist@4.25.0", "", { "dependencies": { "caniuse-lite": "^1.0.30001718", "electron-to-chromium": "^1.5.160", "node-releases": "^2.0.19", "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" } }, "sha512-PJ8gYKeS5e/whHBh8xrwYK+dAvEj7JXtz6uTucnMRB8OiGTsKccFekoRrjajPBHV8oOY+2tI4uxeceSimKwMFA=="],
"buffer": ["buffer@6.0.3", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.2.1" } }, "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA=="],
"buffer-crc32": ["buffer-crc32@1.0.0", "", {}, "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w=="],
"buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="],
"buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="],
@@ -1658,8 +1638,6 @@
"commondir": ["commondir@1.0.1", "", {}, "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg=="],
"compress-commons": ["compress-commons@6.0.2", "", { "dependencies": { "crc-32": "^1.2.0", "crc32-stream": "^6.0.0", "is-stream": "^2.0.1", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" } }, "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg=="],
"compute-scroll-into-view": ["compute-scroll-into-view@3.1.1", "", {}, "sha512-VRhuHOLoKYOy4UbilLbUzbYg93XLjv2PncJC50EuTWPA3gaja1UjBsUP/D/9/juV3vQFr6XBEzn9KCAHdUvOHw=="],
"concurrently": ["concurrently@9.1.2", "", { "dependencies": { "chalk": "^4.1.2", "lodash": "^4.17.21", "rxjs": "^7.8.1", "shell-quote": "^1.8.1", "supports-color": "^8.1.1", "tree-kill": "^1.2.2", "yargs": "^17.7.2" }, "bin": { "conc": "dist/bin/concurrently.js", "concurrently": "dist/bin/concurrently.js" } }, "sha512-H9MWcoPsYddwbOGM6difjVwVZHl63nwMEwDJG/L7VGtuaJhb12h2caPG2tVPWs7emuYix252iGfqOyrz1GczTQ=="],
@@ -1678,8 +1656,6 @@
"crc-32": ["crc-32@1.2.2", "", { "bin": { "crc32": "bin/crc32.njs" } }, "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ=="],
"crc32-stream": ["crc32-stream@6.0.0", "", { "dependencies": { "crc-32": "^1.2.0", "readable-stream": "^4.0.0" } }, "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g=="],
"critters": ["critters@0.0.23", "", { "dependencies": { "chalk": "^4.1.0", "css-select": "^5.1.0", "dom-serializer": "^2.0.0", "domhandler": "^5.0.2", "htmlparser2": "^8.0.2", "postcss": "^8.4.23", "postcss-media-query-parser": "^0.2.3" } }, "sha512-/MCsQbuzTPA/ZTOjjyr2Na5o3lRpr8vd0MZE8tMP0OBNg/VrLxWHteVKalQ8KR+fBmUadbJLdoyEz9sT+q84qg=="],
"croner": ["croner@9.1.0", "", {}, "sha512-p9nwwR4qyT5W996vBZhdvBCnMhicY5ytZkR4D1Xj0wuTDEiMnjwR57Q3RXYY/s0EpX6Ay3vgIcfaR+ewGHsi+g=="],
@@ -1918,8 +1894,6 @@
"fast-equals": ["fast-equals@5.2.2", "", {}, "sha512-V7/RktU11J3I36Nwq2JnZEM7tNm17eBJz+u25qdxBZeCKiX6BkVSZQjwWIr+IobgnZy+ag73tTZgZi7tr0LrBw=="],
"fast-fifo": ["fast-fifo@1.3.2", "", {}, "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ=="],
"fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="],
"fast-redact": ["fast-redact@3.5.0", "", {}, "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A=="],
@@ -2192,8 +2166,6 @@
"kysely": ["kysely@0.28.2", "", {}, "sha512-4YAVLoF0Sf0UTqlhgQMFU9iQECdah7n+13ANkiuVfRvlK+uI0Etbgd7bVP36dKlG+NXWbhGua8vnGt+sdhvT7A=="],
"lazystream": ["lazystream@1.0.1", "", { "dependencies": { "readable-stream": "^2.0.5" } }, "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw=="],
"leac": ["leac@0.6.0", "", {}, "sha512-y+SqErxb8h7nE/fiEX07jsbuhrpO9lL8eca7/Y1nuWV2moNlXhyd59iDGcRf6moVyDMbmTNzL40SUyrFU/yDpg=="],
"lenis": ["lenis@1.3.4", "", { "peerDependencies": { "@nuxt/kit": ">=3.0.0", "react": ">=17.0.0", "vue": ">=3.0.0" }, "optionalPeers": ["@nuxt/kit", "react", "vue"] }, "sha512-WIGk8wiV2ABm/T7M+NC+tAV8fjzNJD1J4z11aZ3mTtx7WAZX/4QdCNhBO0g/TqXISA+/3hTbzrPC4FW1nhoNMQ=="],
@@ -2602,7 +2574,7 @@
"prismjs": ["prismjs@1.30.0", "", {}, "sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw=="],
"process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="],
"process": ["process@0.10.1", "", {}, "sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA=="],
"process-nextick-args": ["process-nextick-args@2.0.1", "", {}, "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="],
@@ -2674,9 +2646,7 @@
"read-cache": ["read-cache@1.0.0", "", { "dependencies": { "pify": "^2.3.0" } }, "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA=="],
"readable-stream": ["readable-stream@4.7.0", "", { "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10", "string_decoder": "^1.3.0" } }, "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg=="],
"readdir-glob": ["readdir-glob@1.1.3", "", { "dependencies": { "minimatch": "^5.1.0" } }, "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA=="],
"readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],
"readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],
@@ -2856,8 +2826,6 @@
"streamsearch": ["streamsearch@1.1.0", "", {}, "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg=="],
"streamx": ["streamx@2.22.1", "", { "dependencies": { "fast-fifo": "^1.3.2", "text-decoder": "^1.1.0" }, "optionalDependencies": { "bare-events": "^2.2.0" } }, "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA=="],
"string-argv": ["string-argv@0.3.2", "", {}, "sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q=="],
"string-template": ["string-template@0.2.1", "", {}, "sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw=="],
@@ -2868,7 +2836,7 @@
"string.prototype.codepointat": ["string.prototype.codepointat@0.2.1", "", {}, "sha512-2cBVCj6I4IOvEnjgO/hWqXjqBGsY+zwPmHl12Srk9IXSZ56Jwwmy+66XO5Iut/oQVR7t5ihYdLB0GMa4alEUcg=="],
"string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="],
"string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="],
"stringify-entities": ["stringify-entities@4.0.4", "", { "dependencies": { "character-entities-html4": "^2.0.0", "character-entities-legacy": "^3.0.0" } }, "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg=="],
@@ -2912,16 +2880,12 @@
"tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="],
"tar-stream": ["tar-stream@3.1.7", "", { "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ=="],
"terser": ["terser@5.43.1", "", { "dependencies": { "@jridgewell/source-map": "^0.3.3", "acorn": "^8.14.0", "commander": "^2.20.0", "source-map-support": "~0.5.20" }, "bin": { "terser": "bin/terser" } }, "sha512-+6erLbBm0+LROX2sPXlUYx/ux5PyE9K/a92Wrt6oA+WDAoFTdpHE5tCYCI5PNzq2y8df4rA+QgHLJuR4jNymsg=="],
"terser-webpack-plugin": ["terser-webpack-plugin@5.3.14", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "jest-worker": "^27.4.5", "schema-utils": "^4.3.0", "serialize-javascript": "^6.0.2", "terser": "^5.31.1" }, "peerDependencies": { "webpack": "^5.1.0" } }, "sha512-vkZjpUjb6OMS7dhV+tILUW6BhpDR7P2L/aQSAv+Uwk+m8KATX9EccViHTJR2qDtACKPIYndLGCyl3FMo+r2LMw=="],
"test-exclude": ["test-exclude@7.0.1", "", { "dependencies": { "@istanbuljs/schema": "^0.1.2", "glob": "^10.4.1", "minimatch": "^9.0.4" } }, "sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg=="],
"text-decoder": ["text-decoder@1.2.3", "", { "dependencies": { "b4a": "^1.6.4" } }, "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA=="],
"thenify": ["thenify@3.3.1", "", { "dependencies": { "any-promise": "^1.0.0" } }, "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw=="],
"thenify-all": ["thenify-all@1.6.0", "", { "dependencies": { "thenify": ">= 3.1.0 < 4" } }, "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA=="],
@@ -3124,8 +3088,6 @@
"yoga-wasm-web": ["yoga-wasm-web@0.3.3", "", {}, "sha512-N+d4UJSJbt/R3wqY7Coqs5pcV0aUj2j9IaQ3rNj9bVCLld8tTGKRa2USARjnvZJWVx1NDmQev8EknoczaOQDOA=="],
"zip-stream": ["zip-stream@6.0.1", "", { "dependencies": { "archiver-utils": "^5.0.0", "compress-commons": "^6.0.2", "readable-stream": "^4.0.0" } }, "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA=="],
"zod": ["zod@3.25.67", "", {}, "sha512-idA2YXwpCdqUSKRCACDE6ItZD9TZzy3OZMtpfLoh6oPR47lipysRrJfjzMqFxQ3uJuUPyUeWe1r9vLH33xO/Qw=="],
"zod-to-json-schema": ["zod-to-json-schema@3.24.5", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g=="],
@@ -3142,22 +3104,10 @@
"@asamuzakjp/css-color/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
"@aws-crypto/crc32/@aws-sdk/types": ["@aws-sdk/types@3.821.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-Znroqdai1a90TlxGaJ+FK1lwC0fHpo97Xjsp5UKGR5JODYm7f9+/fF17ebO1KdoBr/Rm0UIFiF5VmI8ts9F1eA=="],
"@aws-crypto/crc32c/@aws-sdk/types": ["@aws-sdk/types@3.821.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-Znroqdai1a90TlxGaJ+FK1lwC0fHpo97Xjsp5UKGR5JODYm7f9+/fF17ebO1KdoBr/Rm0UIFiF5VmI8ts9F1eA=="],
"@aws-crypto/sha1-browser/@aws-sdk/types": ["@aws-sdk/types@3.821.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-Znroqdai1a90TlxGaJ+FK1lwC0fHpo97Xjsp5UKGR5JODYm7f9+/fF17ebO1KdoBr/Rm0UIFiF5VmI8ts9F1eA=="],
"@aws-crypto/sha1-browser/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="],
"@aws-crypto/sha256-browser/@aws-sdk/types": ["@aws-sdk/types@3.821.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-Znroqdai1a90TlxGaJ+FK1lwC0fHpo97Xjsp5UKGR5JODYm7f9+/fF17ebO1KdoBr/Rm0UIFiF5VmI8ts9F1eA=="],
"@aws-crypto/sha256-browser/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="],
"@aws-crypto/sha256-js/@aws-sdk/types": ["@aws-sdk/types@3.821.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-Znroqdai1a90TlxGaJ+FK1lwC0fHpo97Xjsp5UKGR5JODYm7f9+/fF17ebO1KdoBr/Rm0UIFiF5VmI8ts9F1eA=="],
"@aws-crypto/util/@aws-sdk/types": ["@aws-sdk/types@3.821.0", "", { "dependencies": { "@smithy/types": "^4.3.1", "tslib": "^2.6.2" } }, "sha512-Znroqdai1a90TlxGaJ+FK1lwC0fHpo97Xjsp5UKGR5JODYm7f9+/fF17ebO1KdoBr/Rm0UIFiF5VmI8ts9F1eA=="],
"@aws-crypto/util/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="],
"@aws-sdk/client-s3/uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="],
@@ -3456,12 +3406,8 @@
"anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"archiver-utils/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="],
"better-auth/jose": ["jose@5.10.0", "", {}, "sha512-s+3Al/p9g32Iq+oqXxkW//7jk2Vig6FF1CFqzVXoTUXt2qz89YWbL+OwS17NFYEvxC35n0FKeGO2LGYSxeM2Gg=="],
"bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="],
"bl/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
"chalk/supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="],
@@ -3512,8 +3458,6 @@
"groq-sdk/node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
"hexer/process": ["process@0.10.1", "", {}, "sha512-dyIett8dgGIZ/TXKUzeYExt7WA6ldDzys9vTDU/cCA9L17Ypme+KzS+NjQCjpn9xsvi/shbMC+yP/BcFMBz0NA=="],
"hoist-non-react-statics/react-is": ["react-is@16.13.1", "", {}, "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="],
"htmlparser2/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
@@ -3532,10 +3476,6 @@
"jsondiffpatch/chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="],
"jszip/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],
"lazystream/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="],
"linebreak/base64-js": ["base64-js@0.0.8", "", {}, "sha512-3XSA2cR/h/73EzlXXdU6YNycmYI7+kicTxks4eJg2g39biHR84slg2+des+p7iHYhbRg/udIS4TD53WabcOUkw=="],
"lint-staged/chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="],
@@ -3608,8 +3548,6 @@
"react-email/commander": ["commander@13.1.0", "", {}, "sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw=="],
"readdir-glob/minimatch": ["minimatch@5.1.6", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g=="],
"readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"resend/@react-email/render": ["@react-email/render@1.1.2", "", { "dependencies": { "html-to-text": "^9.0.5", "prettier": "^3.5.3", "react-promise-suspense": "^0.3.4" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-RnRehYN3v9gVlNMehHPHhyp2RQo7+pSkHDtXPvg3s0GbzM9SQMW4Qrf8GRNvtpLC4gsI+Wt0VatNRUFqjvevbw=="],
@@ -3644,8 +3582,6 @@
"string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],
"string_decoder/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
"sucrase/commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="],
"sucrase/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="],
@@ -3832,12 +3768,6 @@
"accepts/mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"archiver-utils/glob/jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="],
"archiver-utils/glob/path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="],
"bl/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="],
"cli-truncate/string-width/emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="],
"cli-truncate/string-width/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
@@ -3862,10 +3792,6 @@
"jest-diff/pretty-format/react-is": ["react-is@17.0.2", "", {}, "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w=="],
"jszip/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="],
"lazystream/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="],
"lint-staged/listr2/cli-truncate": ["cli-truncate@4.0.0", "", { "dependencies": { "slice-ansi": "^5.0.0", "string-width": "^7.0.0" } }, "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA=="],
"lint-staged/listr2/eventemitter3": ["eventemitter3@5.0.1", "", {}, "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA=="],
@@ -3964,8 +3890,6 @@
"@sentry/cli/node-fetch/whatwg-url/webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
"archiver-utils/glob/path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="],
"cli-truncate/string-width/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="],
"gaxios/node-fetch/whatwg-url/tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
package-lock.json (generated, 20938 lines): file diff suppressed because it is too large.