Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-10 07:27:57 -05:00)
feat(bedrock): added aws bedrock as a model provider (#2722)
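This change threads AWS Bedrock credentials (bedrockAccessKeyId, bedrockSecretKey, bedrockRegion) through the chat API route, the block configs, the executor handlers, and a new Bedrock provider built on the AWS Converse API. As a rough orientation before the diff, here is a minimal sketch of exercising the new provider directly; the model id and prompts are placeholders, and in the app requests reach the provider through the provider registry and block handlers rather than a direct call:

// Sketch only (not part of the commit): calling the new Bedrock provider in isolation.
import { bedrockProvider } from '@/providers/bedrock'

async function demo() {
  return bedrockProvider.executeRequest({
    model: 'bedrock/anthropic.claude-sonnet', // placeholder; real ids come from getProviderModels('bedrock')
    systemPrompt: 'You are a helpful assistant.',
    context: 'Say hello.',
    temperature: 0.7,
    maxTokens: 1024,
    // Credential fields introduced by this commit:
    bedrockAccessKeyId: process.env.AWS_ACCESS_KEY_ID ?? '',
    bedrockSecretKey: process.env.AWS_SECRET_ACCESS_KEY ?? '',
    bedrockRegion: 'us-east-1',
  })
}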
@@ -41,6 +41,9 @@ export async function POST(request: NextRequest) {
vertexProject,
vertexLocation,
vertexCredential,
bedrockAccessKeyId,
bedrockSecretKey,
bedrockRegion,
responseFormat,
workflowId,
workspaceId,
@@ -67,6 +70,9 @@ export async function POST(request: NextRequest) {
hasVertexProject: !!vertexProject,
hasVertexLocation: !!vertexLocation,
hasVertexCredential: !!vertexCredential,
hasBedrockAccessKeyId: !!bedrockAccessKeyId,
hasBedrockSecretKey: !!bedrockSecretKey,
hasBedrockRegion: !!bedrockRegion,
hasResponseFormat: !!responseFormat,
workflowId,
stream: !!stream,
@@ -116,6 +122,9 @@ export async function POST(request: NextRequest) {
azureApiVersion,
vertexProject,
vertexLocation,
bedrockAccessKeyId,
bedrockSecretKey,
bedrockRegion,
responseFormat,
workflowId,
workspaceId,
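For orientation, the hunks above extend the route's JSON body with the three Bedrock credential fields alongside the existing Vertex ones. A rough sketch of the added body fields (field names taken from the destructuring above; the endpoint path and the rest of the payload are omitted and assumed):

// Sketch only: the extra request-body fields this route now reads.
const body = {
  // ...existing fields (model, messages, workflowId, workspaceId, ...)
  bedrockAccessKeyId: 'AKIA...',   // AWS access key id
  bedrockSecretKey: '<secret>',    // AWS secret access key
  bedrockRegion: 'us-east-1',      // the provider falls back to us-east-1 when omitted
}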
@@ -94,7 +94,6 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
placeholder: 'Type or select a model...',
required: true,
defaultValue: 'claude-sonnet-4-5',
searchable: true,
options: () => {
const providersState = useProvidersStore.getState()
const baseModels = providersState.providers.base.models
@@ -329,6 +328,43 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
value: providers.vertex.models,
},
},
{
id: 'bedrockAccessKeyId',
title: 'AWS Access Key ID',
type: 'short-input',
password: true,
placeholder: 'Enter your AWS Access Key ID',
connectionDroppable: false,
required: true,
condition: {
field: 'model',
value: providers.bedrock.models,
},
},
{
id: 'bedrockSecretKey',
title: 'AWS Secret Access Key',
type: 'short-input',
password: true,
placeholder: 'Enter your AWS Secret Access Key',
connectionDroppable: false,
required: true,
condition: {
field: 'model',
value: providers.bedrock.models,
},
},
{
id: 'bedrockRegion',
title: 'AWS Region',
type: 'short-input',
placeholder: 'us-east-1',
connectionDroppable: false,
condition: {
field: 'model',
value: providers.bedrock.models,
},
},
{
id: 'tools',
title: 'Tools',
@@ -343,11 +379,11 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
password: true,
connectionDroppable: false,
required: true,
// Hide API key for hosted models, Ollama models, vLLM models, and Vertex models (uses OAuth)
// Hide API key for hosted models, Ollama models, vLLM models, Vertex models (uses OAuth), and Bedrock (uses AWS credentials)
condition: isHosted
? {
field: 'model',
value: [...getHostedModels(), ...providers.vertex.models],
value: [...getHostedModels(), ...providers.vertex.models, ...providers.bedrock.models],
not: true, // Show for all models EXCEPT those listed
}
: () => ({
@@ -356,8 +392,9 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
...getCurrentOllamaModels(),
...getCurrentVLLMModels(),
...providers.vertex.models,
...providers.bedrock.models,
],
not: true, // Show for all models EXCEPT Ollama, vLLM, and Vertex models
not: true, // Show for all models EXCEPT Ollama, vLLM, Vertex, and Bedrock models
}),
},
{
@@ -634,6 +671,9 @@ Example 3 (Array Input):
azureApiVersion: { type: 'string', description: 'Azure API version' },
vertexProject: { type: 'string', description: 'Google Cloud project ID for Vertex AI' },
vertexLocation: { type: 'string', description: 'Google Cloud location for Vertex AI' },
bedrockAccessKeyId: { type: 'string', description: 'AWS Access Key ID for Bedrock' },
bedrockSecretKey: { type: 'string', description: 'AWS Secret Access Key for Bedrock' },
bedrockRegion: { type: 'string', description: 'AWS region for Bedrock' },
responseFormat: {
type: 'json',
description: 'JSON response format schema',
@@ -1,27 +1,14 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { ChartBarIcon } from '@/components/icons'
|
||||
import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
import type { BlockConfig, ParamType } from '@/blocks/types'
|
||||
import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
|
||||
import type { ProviderId } from '@/providers/types'
|
||||
import {
|
||||
getBaseModelProviders,
|
||||
getHostedModels,
|
||||
getProviderIcon,
|
||||
providers,
|
||||
} from '@/providers/utils'
|
||||
import { getBaseModelProviders, getProviderIcon } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers/store'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('EvaluatorBlock')
|
||||
|
||||
const getCurrentOllamaModels = () => {
|
||||
return useProvidersStore.getState().providers.ollama.models
|
||||
}
|
||||
|
||||
const getCurrentVLLMModels = () => {
|
||||
return useProvidersStore.getState().providers.vllm.models
|
||||
}
|
||||
|
||||
interface Metric {
|
||||
name: string
|
||||
description: string
|
||||
@@ -204,91 +191,7 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
|
||||
})
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexCredential',
|
||||
title: 'Google Cloud Account',
|
||||
type: 'oauth-input',
|
||||
serviceId: 'vertex-ai',
|
||||
requiredScopes: ['https://www.googleapis.com/auth/cloud-platform'],
|
||||
placeholder: 'Select Google Cloud account',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your API key',
|
||||
password: true,
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
// Hide API key for hosted models, Ollama models, vLLM models, and Vertex models (uses OAuth)
|
||||
condition: isHosted
|
||||
? {
|
||||
field: 'model',
|
||||
value: [...getHostedModels(), ...providers.vertex.models],
|
||||
not: true, // Show for all models EXCEPT those listed
|
||||
}
|
||||
: () => ({
|
||||
field: 'model',
|
||||
value: [
|
||||
...getCurrentOllamaModels(),
|
||||
...getCurrentVLLMModels(),
|
||||
...providers.vertex.models,
|
||||
],
|
||||
not: true, // Show for all models EXCEPT Ollama, vLLM, and Vertex models
|
||||
}),
|
||||
},
|
||||
{
|
||||
id: 'azureEndpoint',
|
||||
title: 'Azure OpenAI Endpoint',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'https://your-resource.openai.azure.com',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'azureApiVersion',
|
||||
title: 'Azure API Version',
|
||||
type: 'short-input',
|
||||
placeholder: '2024-07-01-preview',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexProject',
|
||||
title: 'Vertex AI Project',
|
||||
type: 'short-input',
|
||||
placeholder: 'your-gcp-project-id',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexLocation',
|
||||
title: 'Vertex AI Location',
|
||||
type: 'short-input',
|
||||
placeholder: 'us-central1',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
...getProviderCredentialSubBlocks(),
|
||||
{
|
||||
id: 'temperature',
|
||||
title: 'Temperature',
|
||||
@@ -403,21 +306,7 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
|
||||
},
|
||||
},
|
||||
model: { type: 'string' as ParamType, description: 'AI model to use' },
|
||||
apiKey: { type: 'string' as ParamType, description: 'Provider API key' },
|
||||
azureEndpoint: { type: 'string' as ParamType, description: 'Azure OpenAI endpoint URL' },
|
||||
azureApiVersion: { type: 'string' as ParamType, description: 'Azure API version' },
|
||||
vertexProject: {
|
||||
type: 'string' as ParamType,
|
||||
description: 'Google Cloud project ID for Vertex AI',
|
||||
},
|
||||
vertexLocation: {
|
||||
type: 'string' as ParamType,
|
||||
description: 'Google Cloud location for Vertex AI',
|
||||
},
|
||||
vertexCredential: {
|
||||
type: 'string' as ParamType,
|
||||
description: 'Google Cloud OAuth credential ID for Vertex AI',
|
||||
},
|
||||
...PROVIDER_CREDENTIAL_INPUTS,
|
||||
temperature: {
|
||||
type: 'number' as ParamType,
|
||||
description: 'Response randomness level (low for consistent evaluation)',
|
||||
|
||||
@@ -1,15 +1,10 @@
|
||||
import { ShieldCheckIcon } from '@/components/icons'
|
||||
import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { getHostedModels, getProviderIcon } from '@/providers/utils'
|
||||
import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
|
||||
import { getProviderIcon } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers/store'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
const getCurrentOllamaModels = () => {
|
||||
const providersState = useProvidersStore.getState()
|
||||
return providersState.providers.ollama.models
|
||||
}
|
||||
|
||||
export interface GuardrailsResponse extends ToolResponse {
|
||||
output: {
|
||||
passed: boolean
|
||||
@@ -120,8 +115,11 @@ Return ONLY the regex pattern - no explanations, no quotes, no forward slashes,
|
||||
const providersState = useProvidersStore.getState()
|
||||
const baseModels = providersState.providers.base.models
|
||||
const ollamaModels = providersState.providers.ollama.models
|
||||
const vllmModels = providersState.providers.vllm.models
|
||||
const openrouterModels = providersState.providers.openrouter.models
|
||||
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
|
||||
const allModels = Array.from(
|
||||
new Set([...baseModels, ...ollamaModels, ...vllmModels, ...openrouterModels])
|
||||
)
|
||||
|
||||
return allModels.map((model) => {
|
||||
const icon = getProviderIcon(model)
|
||||
@@ -160,44 +158,19 @@ Return ONLY the regex pattern - no explanations, no quotes, no forward slashes,
|
||||
value: ['hallucination'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your API key',
|
||||
password: true,
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
// Show API key field only for hallucination validation
|
||||
// Hide for hosted models and Ollama models
|
||||
condition: () => {
|
||||
const baseCondition = {
|
||||
field: 'validationType' as const,
|
||||
value: ['hallucination'],
|
||||
}
|
||||
|
||||
if (isHosted) {
|
||||
// In hosted mode, hide for hosted models
|
||||
return {
|
||||
...baseCondition,
|
||||
and: {
|
||||
field: 'model' as const,
|
||||
value: getHostedModels(),
|
||||
not: true, // Show for all models EXCEPT hosted ones
|
||||
},
|
||||
// Provider credential subblocks - only shown for hallucination validation
|
||||
...getProviderCredentialSubBlocks().map((subBlock) => ({
|
||||
...subBlock,
|
||||
// Combine with hallucination condition
|
||||
condition: subBlock.condition
|
||||
? {
|
||||
field: 'validationType' as const,
|
||||
value: ['hallucination'],
|
||||
and:
|
||||
typeof subBlock.condition === 'function' ? subBlock.condition() : subBlock.condition,
|
||||
}
|
||||
}
|
||||
// In self-hosted mode, hide for Ollama models
|
||||
return {
|
||||
...baseCondition,
|
||||
and: {
|
||||
field: 'model' as const,
|
||||
value: getCurrentOllamaModels(),
|
||||
not: true, // Show for all models EXCEPT Ollama ones
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
: { field: 'validationType' as const, value: ['hallucination'] },
|
||||
})),
|
||||
{
|
||||
id: 'piiEntityTypes',
|
||||
title: 'PII Types to Detect',
|
||||
@@ -332,10 +305,7 @@ Return ONLY the regex pattern - no explanations, no quotes, no forward slashes,
|
||||
type: 'string',
|
||||
description: 'LLM model for hallucination scoring (default: gpt-4o-mini)',
|
||||
},
|
||||
apiKey: {
|
||||
type: 'string',
|
||||
description: 'API key for LLM provider (optional if using hosted)',
|
||||
},
|
||||
...PROVIDER_CREDENTIAL_INPUTS,
|
||||
piiEntityTypes: {
|
||||
type: 'json',
|
||||
description: 'PII entity types to detect (array of strings, empty = detect all)',
|
||||
|
||||
@@ -1,24 +1,11 @@
|
||||
import { ConnectIcon } from '@/components/icons'
|
||||
import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
import { AuthMode, type BlockConfig } from '@/blocks/types'
|
||||
import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
|
||||
import type { ProviderId } from '@/providers/types'
|
||||
import {
|
||||
getBaseModelProviders,
|
||||
getHostedModels,
|
||||
getProviderIcon,
|
||||
providers,
|
||||
} from '@/providers/utils'
|
||||
import { getBaseModelProviders, getProviderIcon } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers/store'
|
||||
import type { ToolResponse } from '@/tools/types'
|
||||
|
||||
const getCurrentOllamaModels = () => {
|
||||
return useProvidersStore.getState().providers.ollama.models
|
||||
}
|
||||
|
||||
const getCurrentVLLMModels = () => {
|
||||
return useProvidersStore.getState().providers.vllm.models
|
||||
}
|
||||
|
||||
interface RouterResponse extends ToolResponse {
|
||||
output: {
|
||||
prompt: string
|
||||
@@ -168,23 +155,6 @@ const getModelOptions = () => {
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to get API key condition for both router versions.
|
||||
*/
|
||||
const getApiKeyCondition = () => {
|
||||
return isHosted
|
||||
? {
|
||||
field: 'model',
|
||||
value: [...getHostedModels(), ...providers.vertex.models],
|
||||
not: true,
|
||||
}
|
||||
: () => ({
|
||||
field: 'model',
|
||||
value: [...getCurrentOllamaModels(), ...getCurrentVLLMModels(), ...providers.vertex.models],
|
||||
not: true,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Legacy Router Block (block-based routing).
|
||||
* Hidden from toolbar but still supported for existing workflows.
|
||||
@@ -221,76 +191,7 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
|
||||
defaultValue: 'claude-sonnet-4-5',
|
||||
options: getModelOptions,
|
||||
},
|
||||
{
|
||||
id: 'vertexCredential',
|
||||
title: 'Google Cloud Account',
|
||||
type: 'oauth-input',
|
||||
serviceId: 'vertex-ai',
|
||||
requiredScopes: ['https://www.googleapis.com/auth/cloud-platform'],
|
||||
placeholder: 'Select Google Cloud account',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your API key',
|
||||
password: true,
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: getApiKeyCondition(),
|
||||
},
|
||||
{
|
||||
id: 'azureEndpoint',
|
||||
title: 'Azure OpenAI Endpoint',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'https://your-resource.openai.azure.com',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'azureApiVersion',
|
||||
title: 'Azure API Version',
|
||||
type: 'short-input',
|
||||
placeholder: '2024-07-01-preview',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexProject',
|
||||
title: 'Vertex AI Project',
|
||||
type: 'short-input',
|
||||
placeholder: 'your-gcp-project-id',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexLocation',
|
||||
title: 'Vertex AI Location',
|
||||
type: 'short-input',
|
||||
placeholder: 'us-central1',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
...getProviderCredentialSubBlocks(),
|
||||
{
|
||||
id: 'temperature',
|
||||
title: 'Temperature',
|
||||
@@ -335,15 +236,7 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
|
||||
inputs: {
|
||||
prompt: { type: 'string', description: 'Routing prompt content' },
|
||||
model: { type: 'string', description: 'AI model to use' },
|
||||
apiKey: { type: 'string', description: 'Provider API key' },
|
||||
azureEndpoint: { type: 'string', description: 'Azure OpenAI endpoint URL' },
|
||||
azureApiVersion: { type: 'string', description: 'Azure API version' },
|
||||
vertexProject: { type: 'string', description: 'Google Cloud project ID for Vertex AI' },
|
||||
vertexLocation: { type: 'string', description: 'Google Cloud location for Vertex AI' },
|
||||
vertexCredential: {
|
||||
type: 'string',
|
||||
description: 'Google Cloud OAuth credential ID for Vertex AI',
|
||||
},
|
||||
...PROVIDER_CREDENTIAL_INPUTS,
|
||||
temperature: {
|
||||
type: 'number',
|
||||
description: 'Response randomness level (low for consistent routing)',
|
||||
@@ -422,76 +315,7 @@ export const RouterV2Block: BlockConfig<RouterV2Response> = {
|
||||
defaultValue: 'claude-sonnet-4-5',
|
||||
options: getModelOptions,
|
||||
},
|
||||
{
|
||||
id: 'vertexCredential',
|
||||
title: 'Google Cloud Account',
|
||||
type: 'oauth-input',
|
||||
serviceId: 'vertex-ai',
|
||||
requiredScopes: ['https://www.googleapis.com/auth/cloud-platform'],
|
||||
placeholder: 'Select Google Cloud account',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your API key',
|
||||
password: true,
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: getApiKeyCondition(),
|
||||
},
|
||||
{
|
||||
id: 'azureEndpoint',
|
||||
title: 'Azure OpenAI Endpoint',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'https://your-resource.openai.azure.com',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'azureApiVersion',
|
||||
title: 'Azure API Version',
|
||||
type: 'short-input',
|
||||
placeholder: '2024-07-01-preview',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexProject',
|
||||
title: 'Vertex AI Project',
|
||||
type: 'short-input',
|
||||
placeholder: 'your-gcp-project-id',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexLocation',
|
||||
title: 'Vertex AI Location',
|
||||
type: 'short-input',
|
||||
placeholder: 'us-central1',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
...getProviderCredentialSubBlocks(),
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
@@ -520,15 +344,7 @@ export const RouterV2Block: BlockConfig<RouterV2Response> = {
|
||||
context: { type: 'string', description: 'Context for routing decision' },
|
||||
routes: { type: 'json', description: 'Route definitions with descriptions' },
|
||||
model: { type: 'string', description: 'AI model to use' },
|
||||
apiKey: { type: 'string', description: 'Provider API key' },
|
||||
azureEndpoint: { type: 'string', description: 'Azure OpenAI endpoint URL' },
|
||||
azureApiVersion: { type: 'string', description: 'Azure API version' },
|
||||
vertexProject: { type: 'string', description: 'Google Cloud project ID for Vertex AI' },
|
||||
vertexLocation: { type: 'string', description: 'Google Cloud location for Vertex AI' },
|
||||
vertexCredential: {
|
||||
type: 'string',
|
||||
description: 'Google Cloud OAuth credential ID for Vertex AI',
|
||||
},
|
||||
...PROVIDER_CREDENTIAL_INPUTS,
|
||||
},
|
||||
outputs: {
|
||||
context: { type: 'string', description: 'Context used for routing' },
|
||||
|
||||
@@ -1,17 +1,9 @@
|
||||
import { TranslateIcon } from '@/components/icons'
|
||||
import { isHosted } from '@/lib/core/config/feature-flags'
|
||||
import { AuthMode, type BlockConfig } from '@/blocks/types'
|
||||
import { getHostedModels, getProviderIcon, providers } from '@/providers/utils'
|
||||
import { getProviderCredentialSubBlocks, PROVIDER_CREDENTIAL_INPUTS } from '@/blocks/utils'
|
||||
import { getProviderIcon } from '@/providers/utils'
|
||||
import { useProvidersStore } from '@/stores/providers/store'
|
||||
|
||||
const getCurrentOllamaModels = () => {
|
||||
return useProvidersStore.getState().providers.ollama.models
|
||||
}
|
||||
|
||||
const getCurrentVLLMModels = () => {
|
||||
return useProvidersStore.getState().providers.vllm.models
|
||||
}
|
||||
|
||||
const getTranslationPrompt = (targetLanguage: string) =>
|
||||
`Translate the following text into ${targetLanguage || 'English'}. Output ONLY the translated text with no additional commentary, explanations, or notes.`
|
||||
|
||||
@@ -59,91 +51,7 @@ export const TranslateBlock: BlockConfig = {
|
||||
})
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexCredential',
|
||||
title: 'Google Cloud Account',
|
||||
type: 'oauth-input',
|
||||
serviceId: 'vertex-ai',
|
||||
requiredScopes: ['https://www.googleapis.com/auth/cloud-platform'],
|
||||
placeholder: 'Select Google Cloud account',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your API key',
|
||||
password: true,
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
// Hide API key for hosted models, Ollama models, vLLM models, and Vertex models (uses OAuth)
|
||||
condition: isHosted
|
||||
? {
|
||||
field: 'model',
|
||||
value: [...getHostedModels(), ...providers.vertex.models],
|
||||
not: true, // Show for all models EXCEPT those listed
|
||||
}
|
||||
: () => ({
|
||||
field: 'model',
|
||||
value: [
|
||||
...getCurrentOllamaModels(),
|
||||
...getCurrentVLLMModels(),
|
||||
...providers.vertex.models,
|
||||
],
|
||||
not: true, // Show for all models EXCEPT Ollama, vLLM, and Vertex models
|
||||
}),
|
||||
},
|
||||
{
|
||||
id: 'azureEndpoint',
|
||||
title: 'Azure OpenAI Endpoint',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'https://your-resource.openai.azure.com',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'azureApiVersion',
|
||||
title: 'Azure API Version',
|
||||
type: 'short-input',
|
||||
placeholder: '2024-07-01-preview',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexProject',
|
||||
title: 'Vertex AI Project',
|
||||
type: 'short-input',
|
||||
placeholder: 'your-gcp-project-id',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexLocation',
|
||||
title: 'Vertex AI Location',
|
||||
type: 'short-input',
|
||||
placeholder: 'us-central1',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
...getProviderCredentialSubBlocks(),
|
||||
{
|
||||
id: 'systemPrompt',
|
||||
title: 'System Prompt',
|
||||
@@ -168,21 +76,15 @@ export const TranslateBlock: BlockConfig = {
|
||||
vertexProject: params.vertexProject,
|
||||
vertexLocation: params.vertexLocation,
|
||||
vertexCredential: params.vertexCredential,
|
||||
bedrockRegion: params.bedrockRegion,
|
||||
bedrockSecretKey: params.bedrockSecretKey,
|
||||
}),
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
context: { type: 'string', description: 'Text to translate' },
|
||||
targetLanguage: { type: 'string', description: 'Target language' },
|
||||
apiKey: { type: 'string', description: 'Provider API key' },
|
||||
azureEndpoint: { type: 'string', description: 'Azure OpenAI endpoint URL' },
|
||||
azureApiVersion: { type: 'string', description: 'Azure API version' },
|
||||
vertexProject: { type: 'string', description: 'Google Cloud project ID for Vertex AI' },
|
||||
vertexLocation: { type: 'string', description: 'Google Cloud location for Vertex AI' },
|
||||
vertexCredential: {
|
||||
type: 'string',
|
||||
description: 'Google Cloud OAuth credential ID for Vertex AI',
|
||||
},
|
||||
...PROVIDER_CREDENTIAL_INPUTS,
|
||||
systemPrompt: { type: 'string', description: 'Translation instructions' },
|
||||
},
|
||||
outputs: {
|
||||
|
||||
@@ -1,4 +1,7 @@
import { isHosted } from '@/lib/core/config/feature-flags'
import type { BlockOutput, OutputFieldDefinition, SubBlockConfig } from '@/blocks/types'
import { getHostedModels, providers } from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'

/**
* Checks if a field is included in the dependsOn config.
@@ -37,3 +40,177 @@ export function resolveOutputType(

return resolvedOutputs
}

/**
* Helper to get current Ollama models from store
*/
const getCurrentOllamaModels = () => {
return useProvidersStore.getState().providers.ollama.models
}

/**
* Helper to get current vLLM models from store
*/
const getCurrentVLLMModels = () => {
return useProvidersStore.getState().providers.vllm.models
}

/**
* Get the API key condition for provider credential subblocks.
* Handles hosted vs self-hosted environments and excludes providers that don't need API key.
*/
export function getApiKeyCondition() {
return isHosted
? {
field: 'model',
value: [...getHostedModels(), ...providers.vertex.models, ...providers.bedrock.models],
not: true,
}
: () => ({
field: 'model',
value: [
...getCurrentOllamaModels(),
...getCurrentVLLMModels(),
...providers.vertex.models,
...providers.bedrock.models,
],
not: true,
})
}
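In practice the helper above yields a static condition object on hosted deployments and a thunk on self-hosted ones, so locally discovered Ollama/vLLM models are re-read each time. A short sketch of consuming it (shapes illustrative, matching how blocks already unwrap function conditions):

// Hosted: { field: 'model', value: [/* hosted + Vertex + Bedrock models */], not: true }
// Self-hosted: () => ({ field: 'model', value: [/* Ollama + vLLM + Vertex + Bedrock */], not: true })
const condition = getApiKeyCondition()
const resolved = typeof condition === 'function' ? condition() : condition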
/**
|
||||
* Returns the standard provider credential subblocks used by LLM-based blocks.
|
||||
* This includes: Vertex AI OAuth, API Key, Azure OpenAI, Vertex AI config, and Bedrock config.
|
||||
*
|
||||
* Usage: Spread into your block's subBlocks array after block-specific fields
|
||||
*/
|
||||
export function getProviderCredentialSubBlocks(): SubBlockConfig[] {
|
||||
return [
|
||||
{
|
||||
id: 'vertexCredential',
|
||||
title: 'Google Cloud Account',
|
||||
type: 'oauth-input',
|
||||
serviceId: 'vertex-ai',
|
||||
requiredScopes: ['https://www.googleapis.com/auth/cloud-platform'],
|
||||
placeholder: 'Select Google Cloud account',
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your API key',
|
||||
password: true,
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: getApiKeyCondition(),
|
||||
},
|
||||
{
|
||||
id: 'azureEndpoint',
|
||||
title: 'Azure OpenAI Endpoint',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'https://your-resource.openai.azure.com',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'azureApiVersion',
|
||||
title: 'Azure API Version',
|
||||
type: 'short-input',
|
||||
placeholder: '2024-07-01-preview',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers['azure-openai'].models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexProject',
|
||||
title: 'Vertex AI Project',
|
||||
type: 'short-input',
|
||||
placeholder: 'your-gcp-project-id',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'vertexLocation',
|
||||
title: 'Vertex AI Location',
|
||||
type: 'short-input',
|
||||
placeholder: 'us-central1',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.vertex.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'bedrockAccessKeyId',
|
||||
title: 'AWS Access Key ID',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'Enter your AWS Access Key ID',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.bedrock.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'bedrockSecretKey',
|
||||
title: 'AWS Secret Access Key',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'Enter your AWS Secret Access Key',
|
||||
connectionDroppable: false,
|
||||
required: true,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.bedrock.models,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'bedrockRegion',
|
||||
title: 'AWS Region',
|
||||
type: 'short-input',
|
||||
placeholder: 'us-east-1',
|
||||
connectionDroppable: false,
|
||||
condition: {
|
||||
field: 'model',
|
||||
value: providers.bedrock.models,
|
||||
},
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
/**
* Returns the standard input definitions for provider credentials.
* Use this in your block's inputs definition.
*/
export const PROVIDER_CREDENTIAL_INPUTS = {
apiKey: { type: 'string', description: 'Provider API key' },
azureEndpoint: { type: 'string', description: 'Azure OpenAI endpoint URL' },
azureApiVersion: { type: 'string', description: 'Azure API version' },
vertexProject: { type: 'string', description: 'Google Cloud project ID for Vertex AI' },
vertexLocation: { type: 'string', description: 'Google Cloud location for Vertex AI' },
vertexCredential: {
type: 'string',
description: 'Google Cloud OAuth credential ID for Vertex AI',
},
bedrockAccessKeyId: { type: 'string', description: 'AWS Access Key ID for Bedrock' },
bedrockSecretKey: { type: 'string', description: 'AWS Secret Access Key for Bedrock' },
bedrockRegion: { type: 'string', description: 'AWS region for Bedrock' },
} as const
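Together with getProviderCredentialSubBlocks() above, a block picks up every provider credential field by spreading the two helpers, which is how the Agent, Evaluator, Router, Guardrails, and Translate blocks in this commit consume them. A minimal sketch with a hypothetical block (not part of the commit):

// Hypothetical block config illustrating the intended usage of both helpers.
const exampleBlock = {
  subBlocks: [
    // ...block-specific fields (e.g. the model selector) come first
    ...getProviderCredentialSubBlocks(), // Vertex OAuth, API key, Azure, Vertex, and Bedrock sub-blocks
  ],
  inputs: {
    model: { type: 'string', description: 'AI model to use' },
    ...PROVIDER_CREDENTIAL_INPUTS, // apiKey, azure*, vertex*, bedrock* input definitions
  },
}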
@@ -4575,3 +4575,22 @@ export function FirefliesIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function BedrockIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient id='bedrock_gradient' x1='80%' x2='20%' y1='20%' y2='80%'>
|
||||
<stop offset='0%' stopColor='#6350FB' />
|
||||
<stop offset='50%' stopColor='#3D8FFF' />
|
||||
<stop offset='100%' stopColor='#9AD8F8' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<path
|
||||
d='M13.05 15.513h3.08c.214 0 .389.177.389.394v1.82a1.704 1.704 0 011.296 1.661c0 .943-.755 1.708-1.685 1.708-.931 0-1.686-.765-1.686-1.708 0-.807.554-1.484 1.297-1.662v-1.425h-2.69v4.663a.395.395 0 01-.188.338l-2.69 1.641a.385.385 0 01-.405-.002l-4.926-3.086a.395.395 0 01-.185-.336V16.3L2.196 14.87A.395.395 0 012 14.555L2 14.528V9.406c0-.14.073-.27.192-.34l2.465-1.462V4.448c0-.129.062-.249.165-.322l.021-.014L9.77 1.058a.385.385 0 01.407 0l2.69 1.675a.395.395 0 01.185.336V7.6h3.856V5.683a1.704 1.704 0 01-1.296-1.662c0-.943.755-1.708 1.685-1.708.931 0 1.685.765 1.685 1.708 0 .807-.553 1.484-1.296 1.662v2.311a.391.391 0 01-.389.394h-4.245v1.806h6.624a1.69 1.69 0 011.64-1.313c.93 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708a1.69 1.69 0 01-1.64-1.314H13.05v1.937h4.953l.915 1.18a1.66 1.66 0 01.84-.227c.931 0 1.685.764 1.685 1.707 0 .943-.754 1.708-1.685 1.708-.93 0-1.685-.765-1.685-1.708 0-.346.102-.668.276-.937l-.724-.935H13.05v1.806zM9.973 1.856L7.93 3.122V6.09h-.778V3.604L5.435 4.669v2.945l2.11 1.36L9.712 7.61V5.334h.778V7.83c0 .136-.07.263-.184.335L7.963 9.638v2.081l1.422 1.009-.446.646-1.406-.998-1.53 1.005-.423-.66 1.605-1.055v-1.99L5.038 8.29l-2.26 1.34v1.676l1.972-1.189.398.677-2.37 1.429V14.3l2.166 1.258 2.27-1.368.397.677-2.176 1.311V19.3l1.876 1.175 2.365-1.426.398.678-2.017 1.216 1.918 1.201 2.298-1.403v-5.78l-4.758 2.893-.4-.675 5.158-3.136V3.289L9.972 1.856zM16.13 18.47a.913.913 0 00-.908.92c0 .507.406.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zm3.63-3.81a.913.913 0 00-.908.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92zm1.555-4.99a.913.913 0 00-.908.92c0 .507.407.918.908.918a.913.913 0 00.907-.919.913.913 0 00-.907-.92zM17.296 3.1a.913.913 0 00-.907.92c0 .508.406.92.907.92a.913.913 0 00.908-.92.913.913 0 00-.908-.92z'
|
||||
fill='url(#bedrock_gradient)'
|
||||
fillRule='nonzero'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -339,7 +339,7 @@ export class BlockExecutor {
|
||||
|
||||
if (isTrigger) {
|
||||
const filtered: NormalizedBlockOutput = {}
|
||||
const internalKeys = ['webhook', 'workflowId', 'input']
|
||||
const internalKeys = ['webhook', 'workflowId']
|
||||
for (const [key, value] of Object.entries(output)) {
|
||||
if (internalKeys.includes(key)) continue
|
||||
filtered[key] = value
|
||||
|
||||
@@ -928,6 +928,9 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
vertexProject: inputs.vertexProject,
|
||||
vertexLocation: inputs.vertexLocation,
|
||||
vertexCredential: inputs.vertexCredential,
|
||||
bedrockAccessKeyId: inputs.bedrockAccessKeyId,
|
||||
bedrockSecretKey: inputs.bedrockSecretKey,
|
||||
bedrockRegion: inputs.bedrockRegion,
|
||||
responseFormat,
|
||||
workflowId: ctx.workflowId,
|
||||
workspaceId: ctx.workspaceId,
|
||||
@@ -1029,6 +1032,9 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
azureApiVersion: providerRequest.azureApiVersion,
|
||||
vertexProject: providerRequest.vertexProject,
|
||||
vertexLocation: providerRequest.vertexLocation,
|
||||
bedrockAccessKeyId: providerRequest.bedrockAccessKeyId,
|
||||
bedrockSecretKey: providerRequest.bedrockSecretKey,
|
||||
bedrockRegion: providerRequest.bedrockRegion,
|
||||
responseFormat: providerRequest.responseFormat,
|
||||
workflowId: providerRequest.workflowId,
|
||||
workspaceId: ctx.workspaceId,
|
||||
|
||||
@@ -22,6 +22,9 @@ export interface AgentInputs {
|
||||
vertexProject?: string
|
||||
vertexLocation?: string
|
||||
vertexCredential?: string
|
||||
bedrockAccessKeyId?: string
|
||||
bedrockSecretKey?: string
|
||||
bedrockRegion?: string
|
||||
reasoningEffort?: string
|
||||
verbosity?: string
|
||||
}
|
||||
|
||||
@@ -32,6 +32,9 @@ export class EvaluatorBlockHandler implements BlockHandler {
|
||||
vertexProject: inputs.vertexProject,
|
||||
vertexLocation: inputs.vertexLocation,
|
||||
vertexCredential: inputs.vertexCredential,
|
||||
bedrockAccessKeyId: inputs.bedrockAccessKeyId,
|
||||
bedrockSecretKey: inputs.bedrockSecretKey,
|
||||
bedrockRegion: inputs.bedrockRegion,
|
||||
}
|
||||
const providerId = getProviderFromModel(evaluatorConfig.model)
|
||||
|
||||
@@ -128,6 +131,12 @@ export class EvaluatorBlockHandler implements BlockHandler {
|
||||
providerRequest.azureApiVersion = inputs.azureApiVersion
|
||||
}
|
||||
|
||||
if (providerId === 'bedrock') {
|
||||
providerRequest.bedrockAccessKeyId = evaluatorConfig.bedrockAccessKeyId
|
||||
providerRequest.bedrockSecretKey = evaluatorConfig.bedrockSecretKey
|
||||
providerRequest.bedrockRegion = evaluatorConfig.bedrockRegion
|
||||
}
|
||||
|
||||
const response = await fetch(url.toString(), {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
|
||||
@@ -68,6 +68,9 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
vertexProject: inputs.vertexProject,
|
||||
vertexLocation: inputs.vertexLocation,
|
||||
vertexCredential: inputs.vertexCredential,
|
||||
bedrockAccessKeyId: inputs.bedrockAccessKeyId,
|
||||
bedrockSecretKey: inputs.bedrockSecretKey,
|
||||
bedrockRegion: inputs.bedrockRegion,
|
||||
}
|
||||
|
||||
const providerId = getProviderFromModel(routerConfig.model)
|
||||
@@ -104,6 +107,12 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
providerRequest.azureApiVersion = inputs.azureApiVersion
|
||||
}
|
||||
|
||||
if (providerId === 'bedrock') {
|
||||
providerRequest.bedrockAccessKeyId = routerConfig.bedrockAccessKeyId
|
||||
providerRequest.bedrockSecretKey = routerConfig.bedrockSecretKey
|
||||
providerRequest.bedrockRegion = routerConfig.bedrockRegion
|
||||
}
|
||||
|
||||
const response = await fetch(url.toString(), {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
@@ -197,6 +206,9 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
vertexProject: inputs.vertexProject,
|
||||
vertexLocation: inputs.vertexLocation,
|
||||
vertexCredential: inputs.vertexCredential,
|
||||
bedrockAccessKeyId: inputs.bedrockAccessKeyId,
|
||||
bedrockSecretKey: inputs.bedrockSecretKey,
|
||||
bedrockRegion: inputs.bedrockRegion,
|
||||
}
|
||||
|
||||
const providerId = getProviderFromModel(routerConfig.model)
|
||||
@@ -233,6 +245,12 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
providerRequest.azureApiVersion = inputs.azureApiVersion
|
||||
}
|
||||
|
||||
if (providerId === 'bedrock') {
|
||||
providerRequest.bedrockAccessKeyId = routerConfig.bedrockAccessKeyId
|
||||
providerRequest.bedrockSecretKey = routerConfig.bedrockSecretKey
|
||||
providerRequest.bedrockRegion = routerConfig.bedrockRegion
|
||||
}
|
||||
|
||||
const response = await fetch(url.toString(), {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
|
||||
@@ -66,6 +66,11 @@ export async function getApiKeyWithBYOK(
|
||||
return { apiKey: userProvidedKey || 'empty', isBYOK: false }
|
||||
}
|
||||
|
||||
const isBedrockModel = provider === 'bedrock' || model.startsWith('bedrock/')
|
||||
if (isBedrockModel) {
|
||||
return { apiKey: 'bedrock-uses-own-credentials', isBYOK: false }
|
||||
}
|
||||
|
||||
const isOpenAIModel = provider === 'openai'
|
||||
const isClaudeModel = provider === 'anthropic'
|
||||
const isGeminiModel = provider === 'google'
|
||||
|
||||
@@ -24,6 +24,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@anthropic-ai/sdk": "^0.39.0",
|
||||
"@aws-sdk/client-bedrock-runtime": "3.940.0",
|
||||
"@aws-sdk/client-dynamodb": "3.940.0",
|
||||
"@aws-sdk/client-rds-data": "3.940.0",
|
||||
"@aws-sdk/client-s3": "^3.779.0",
|
||||
|
||||
apps/sim/providers/bedrock/index.ts (new file, 905 lines)
@@ -0,0 +1,905 @@
|
||||
import {
|
||||
type Message as BedrockMessage,
|
||||
BedrockRuntimeClient,
|
||||
type ContentBlock,
|
||||
type ConversationRole,
|
||||
ConverseCommand,
|
||||
ConverseStreamCommand,
|
||||
type SystemContentBlock,
|
||||
type Tool,
|
||||
type ToolConfiguration,
|
||||
type ToolResultBlock,
|
||||
type ToolUseBlock,
|
||||
} from '@aws-sdk/client-bedrock-runtime'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { StreamingExecution } from '@/executor/types'
|
||||
import { MAX_TOOL_ITERATIONS } from '@/providers'
|
||||
import {
|
||||
checkForForcedToolUsage,
|
||||
createReadableStreamFromBedrockStream,
|
||||
generateToolUseId,
|
||||
getBedrockInferenceProfileId,
|
||||
} from '@/providers/bedrock/utils'
|
||||
import { getProviderDefaultModel, getProviderModels } from '@/providers/models'
|
||||
import type {
|
||||
ProviderConfig,
|
||||
ProviderRequest,
|
||||
ProviderResponse,
|
||||
TimeSegment,
|
||||
} from '@/providers/types'
|
||||
import {
|
||||
calculateCost,
|
||||
prepareToolExecution,
|
||||
prepareToolsWithUsageControl,
|
||||
} from '@/providers/utils'
|
||||
import { executeTool } from '@/tools'
|
||||
|
||||
const logger = createLogger('BedrockProvider')
|
||||
|
||||
export const bedrockProvider: ProviderConfig = {
|
||||
id: 'bedrock',
|
||||
name: 'AWS Bedrock',
|
||||
description: 'AWS Bedrock foundation models',
|
||||
version: '1.0.0',
|
||||
models: getProviderModels('bedrock'),
|
||||
defaultModel: getProviderDefaultModel('bedrock'),
|
||||
|
||||
executeRequest: async (
|
||||
request: ProviderRequest
|
||||
): Promise<ProviderResponse | StreamingExecution> => {
|
||||
if (!request.bedrockAccessKeyId) {
|
||||
throw new Error('AWS Access Key ID is required for Bedrock')
|
||||
}
|
||||
|
||||
if (!request.bedrockSecretKey) {
|
||||
throw new Error('AWS Secret Access Key is required for Bedrock')
|
||||
}
|
||||
|
||||
const region = request.bedrockRegion || 'us-east-1'
|
||||
const bedrockModelId = getBedrockInferenceProfileId(request.model, region)
|
||||
|
||||
logger.info('Bedrock request', {
|
||||
requestModel: request.model,
|
||||
inferenceProfileId: bedrockModelId,
|
||||
region,
|
||||
})
|
||||
|
||||
const client = new BedrockRuntimeClient({
|
||||
region,
|
||||
credentials: {
|
||||
accessKeyId: request.bedrockAccessKeyId || '',
|
||||
secretAccessKey: request.bedrockSecretKey || '',
|
||||
},
|
||||
})
|
||||
|
||||
const messages: BedrockMessage[] = []
|
||||
const systemContent: SystemContentBlock[] = []
|
||||
|
||||
if (request.systemPrompt) {
|
||||
systemContent.push({ text: request.systemPrompt })
|
||||
}
|
||||
|
||||
if (request.context) {
|
||||
messages.push({
|
||||
role: 'user' as ConversationRole,
|
||||
content: [{ text: request.context }],
|
||||
})
|
||||
}
|
||||
|
||||
if (request.messages) {
|
||||
for (const msg of request.messages) {
|
||||
if (msg.role === 'function' || msg.role === 'tool') {
|
||||
const toolResultBlock: ToolResultBlock = {
|
||||
toolUseId: msg.tool_call_id || msg.name || generateToolUseId('tool'),
|
||||
content: [{ text: msg.content || '' }],
|
||||
}
|
||||
messages.push({
|
||||
role: 'user' as ConversationRole,
|
||||
content: [{ toolResult: toolResultBlock }],
|
||||
})
|
||||
} else if (msg.function_call || msg.tool_calls) {
|
||||
const toolCall = msg.function_call || msg.tool_calls?.[0]?.function
|
||||
if (toolCall) {
|
||||
const toolUseBlock: ToolUseBlock = {
|
||||
toolUseId: msg.tool_calls?.[0]?.id || generateToolUseId(toolCall.name),
|
||||
name: toolCall.name,
|
||||
input: JSON.parse(toolCall.arguments),
|
||||
}
|
||||
messages.push({
|
||||
role: 'assistant' as ConversationRole,
|
||||
content: [{ toolUse: toolUseBlock }],
|
||||
})
|
||||
}
|
||||
} else {
|
||||
const role: ConversationRole = msg.role === 'assistant' ? 'assistant' : 'user'
|
||||
messages.push({
|
||||
role,
|
||||
content: [{ text: msg.content || '' }],
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (messages.length === 0) {
|
||||
messages.push({
|
||||
role: 'user' as ConversationRole,
|
||||
content: [{ text: request.systemPrompt || 'Hello' }],
|
||||
})
|
||||
systemContent.length = 0
|
||||
}
|
||||
|
||||
let structuredOutputTool: Tool | undefined
|
||||
const structuredOutputToolName = 'structured_output'
|
||||
|
||||
if (request.responseFormat) {
|
||||
const schema = request.responseFormat.schema || request.responseFormat
|
||||
const schemaName = request.responseFormat.name || 'response'
|
||||
|
||||
structuredOutputTool = {
|
||||
toolSpec: {
|
||||
name: structuredOutputToolName,
|
||||
description: `Output the response as structured JSON matching the ${schemaName} schema. You MUST call this tool to provide your final response.`,
|
||||
inputSchema: {
|
||||
json: schema,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
logger.info(`Using Tool Use approach for structured outputs: ${schemaName}`)
|
||||
}
|
||||
|
||||
let bedrockTools: Tool[] | undefined
|
||||
let toolChoice: any = { auto: {} }
|
||||
let preparedTools: ReturnType<typeof prepareToolsWithUsageControl> | null = null
|
||||
|
||||
if (request.tools?.length) {
|
||||
bedrockTools = request.tools.map((tool) => ({
|
||||
toolSpec: {
|
||||
name: tool.id,
|
||||
description: tool.description,
|
||||
inputSchema: {
|
||||
json: {
|
||||
type: 'object',
|
||||
properties: tool.parameters.properties,
|
||||
required: tool.parameters.required,
|
||||
},
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
try {
|
||||
preparedTools = prepareToolsWithUsageControl(
|
||||
bedrockTools.map((t) => ({
|
||||
name: t.toolSpec?.name || '',
|
||||
description: t.toolSpec?.description || '',
|
||||
input_schema: t.toolSpec?.inputSchema?.json,
|
||||
})),
|
||||
request.tools,
|
||||
logger,
|
||||
'bedrock'
|
||||
)
|
||||
|
||||
const { tools: filteredTools, toolChoice: tc } = preparedTools
|
||||
|
||||
if (filteredTools?.length) {
|
||||
bedrockTools = filteredTools.map((t: any) => ({
|
||||
toolSpec: {
|
||||
name: t.name,
|
||||
description: t.description,
|
||||
inputSchema: { json: t.input_schema },
|
||||
},
|
||||
}))
|
||||
|
||||
if (typeof tc === 'object' && tc !== null) {
|
||||
if (tc.type === 'tool' && tc.name) {
|
||||
toolChoice = { tool: { name: tc.name } }
|
||||
logger.info(`Using Bedrock tool_choice format: force tool "${tc.name}"`)
|
||||
} else if (tc.type === 'function' && tc.function?.name) {
|
||||
toolChoice = { tool: { name: tc.function.name } }
|
||||
logger.info(`Using Bedrock tool_choice format: force tool "${tc.function.name}"`)
|
||||
} else {
|
||||
toolChoice = { auto: {} }
|
||||
}
|
||||
} else if (tc === 'none') {
|
||||
toolChoice = undefined
|
||||
bedrockTools = undefined
|
||||
} else {
|
||||
toolChoice = { auto: {} }
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error in prepareToolsWithUsageControl:', { error })
|
||||
toolChoice = { auto: {} }
|
||||
}
|
||||
} else if (structuredOutputTool) {
|
||||
bedrockTools = [structuredOutputTool]
|
||||
toolChoice = { tool: { name: structuredOutputToolName } }
|
||||
logger.info('Using structured_output tool as only tool (forced)')
|
||||
}
|
||||
|
||||
const hasToolContentInMessages = messages.some((msg) =>
|
||||
msg.content?.some(
|
||||
(block) =>
|
||||
('toolUse' in block && block.toolUse) || ('toolResult' in block && block.toolResult)
|
||||
)
|
||||
)
|
||||
|
||||
const toolConfig: ToolConfiguration | undefined = bedrockTools?.length
|
||||
? {
|
||||
tools: bedrockTools,
|
||||
toolChoice,
|
||||
}
|
||||
: hasToolContentInMessages && request.tools?.length
|
||||
? {
|
||||
tools: request.tools.map((tool) => ({
|
||||
toolSpec: {
|
||||
name: tool.id,
|
||||
description: tool.description,
|
||||
inputSchema: {
|
||||
json: {
|
||||
type: 'object',
|
||||
properties: tool.parameters.properties,
|
||||
required: tool.parameters.required,
|
||||
},
|
||||
},
|
||||
},
|
||||
})),
|
||||
toolChoice: { auto: {} },
|
||||
}
|
||||
: undefined
|
||||
|
||||
if (hasToolContentInMessages && !toolConfig) {
|
||||
throw new Error(
|
||||
'Messages contain tool use/result blocks but no tools were provided. ' +
|
||||
'Bedrock requires toolConfig when processing messages with tool content.'
|
||||
)
|
||||
}
|
||||
|
||||
const systemPromptWithSchema = systemContent
|
||||
|
||||
const inferenceConfig = {
|
||||
temperature: Number.parseFloat(String(request.temperature ?? 0.7)),
|
||||
maxTokens: Number.parseInt(String(request.maxTokens)) || 4096,
|
||||
}
|
||||
|
||||
const shouldStreamToolCalls = request.streamToolCalls ?? false
|
||||
|
||||
if (request.stream && (!bedrockTools || bedrockTools.length === 0)) {
|
||||
logger.info('Using streaming response for Bedrock request (no tools)')
|
||||
|
||||
const providerStartTime = Date.now()
|
||||
const providerStartTimeISO = new Date(providerStartTime).toISOString()
|
||||
|
||||
const command = new ConverseStreamCommand({
|
||||
modelId: bedrockModelId,
|
||||
messages,
|
||||
system: systemPromptWithSchema.length > 0 ? systemPromptWithSchema : undefined,
|
||||
inferenceConfig,
|
||||
})
|
||||
|
||||
const streamResponse = await client.send(command)
|
||||
|
||||
if (!streamResponse.stream) {
|
||||
throw new Error('No stream returned from Bedrock')
|
||||
}
|
||||
|
||||
const streamingResult = {
|
||||
stream: createReadableStreamFromBedrockStream(streamResponse.stream, (content, usage) => {
|
||||
streamingResult.execution.output.content = content
|
||||
streamingResult.execution.output.tokens = {
|
||||
input: usage.inputTokens,
|
||||
output: usage.outputTokens,
|
||||
total: usage.inputTokens + usage.outputTokens,
|
||||
}
|
||||
|
||||
const costResult = calculateCost(request.model, usage.inputTokens, usage.outputTokens)
|
||||
streamingResult.execution.output.cost = {
|
||||
input: costResult.input,
|
||||
output: costResult.output,
|
||||
total: costResult.total,
|
||||
}
|
||||
|
||||
const streamEndTime = Date.now()
|
||||
const streamEndTimeISO = new Date(streamEndTime).toISOString()
|
||||
|
||||
if (streamingResult.execution.output.providerTiming) {
|
||||
streamingResult.execution.output.providerTiming.endTime = streamEndTimeISO
|
||||
streamingResult.execution.output.providerTiming.duration =
|
||||
streamEndTime - providerStartTime
|
||||
|
||||
if (streamingResult.execution.output.providerTiming.timeSegments?.[0]) {
|
||||
streamingResult.execution.output.providerTiming.timeSegments[0].endTime =
|
||||
streamEndTime
|
||||
streamingResult.execution.output.providerTiming.timeSegments[0].duration =
|
||||
streamEndTime - providerStartTime
|
||||
}
|
||||
}
|
||||
}),
|
||||
execution: {
|
||||
success: true,
|
||||
output: {
|
||||
content: '',
|
||||
model: request.model,
|
||||
tokens: { input: 0, output: 0, total: 0 },
|
||||
toolCalls: undefined,
|
||||
providerTiming: {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: new Date().toISOString(),
|
||||
duration: Date.now() - providerStartTime,
|
||||
timeSegments: [
|
||||
{
|
||||
type: 'model',
|
||||
name: 'Streaming response',
|
||||
startTime: providerStartTime,
|
||||
endTime: Date.now(),
|
||||
duration: Date.now() - providerStartTime,
|
||||
},
|
||||
],
|
||||
},
|
||||
cost: {
|
||||
total: 0.0,
|
||||
input: 0.0,
|
||||
output: 0.0,
|
||||
},
|
||||
},
|
||||
logs: [],
|
||||
metadata: {
|
||||
startTime: providerStartTimeISO,
|
||||
endTime: new Date().toISOString(),
|
||||
duration: Date.now() - providerStartTime,
|
||||
},
|
||||
isStreaming: true,
|
||||
},
|
||||
}
|
||||
|
||||
return streamingResult as StreamingExecution
|
||||
}
|
||||
|
||||
const providerStartTime = Date.now()
|
||||
const providerStartTimeISO = new Date(providerStartTime).toISOString()
|
||||
|
||||
try {
|
||||
const initialCallTime = Date.now()
|
||||
const originalToolChoice = toolChoice
|
||||
const forcedTools = preparedTools?.forcedTools || []
|
||||
let usedForcedTools: string[] = []
|
||||
|
||||
const command = new ConverseCommand({
|
||||
modelId: bedrockModelId,
|
||||
messages,
|
||||
system: systemPromptWithSchema.length > 0 ? systemPromptWithSchema : undefined,
|
||||
inferenceConfig,
|
||||
toolConfig,
|
||||
})
|
||||
|
||||
let currentResponse = await client.send(command)
|
||||
const firstResponseTime = Date.now() - initialCallTime
|
||||
|
||||
let content = ''
|
||||
let hasExtractedStructuredOutput = false
|
||||
if (currentResponse.output?.message?.content) {
|
||||
const structuredOutputCall = currentResponse.output.message.content.find(
|
||||
(block): block is ContentBlock & { toolUse: ToolUseBlock } =>
|
||||
'toolUse' in block && block.toolUse?.name === structuredOutputToolName
|
||||
)
|
||||
|
||||
if (structuredOutputCall && structuredOutputTool) {
|
||||
content = JSON.stringify(structuredOutputCall.toolUse.input, null, 2)
|
||||
hasExtractedStructuredOutput = true
|
||||
logger.info('Extracted structured output from tool call')
|
||||
} else {
|
||||
const textBlocks = currentResponse.output.message.content.filter(
|
||||
(block): block is ContentBlock & { text: string } => 'text' in block
|
||||
)
|
||||
content = textBlocks.map((block) => block.text).join('\n')
|
||||
}
|
||||
}
|
||||
|
||||
const tokens = {
|
||||
input: currentResponse.usage?.inputTokens || 0,
|
||||
output: currentResponse.usage?.outputTokens || 0,
|
||||
total:
|
||||
(currentResponse.usage?.inputTokens || 0) + (currentResponse.usage?.outputTokens || 0),
|
||||
}
|
||||
|
||||
const initialCost = calculateCost(
|
||||
request.model,
|
||||
currentResponse.usage?.inputTokens || 0,
|
||||
currentResponse.usage?.outputTokens || 0
|
||||
)
|
||||
const cost = {
|
||||
input: initialCost.input,
|
||||
output: initialCost.output,
|
||||
total: initialCost.total,
|
||||
}
|
||||
|
||||
const toolCalls: any[] = []
|
||||
const toolResults: any[] = []
|
||||
const currentMessages = [...messages]
|
||||
let iterationCount = 0
|
||||
let hasUsedForcedTool = false
|
||||
let modelTime = firstResponseTime
|
||||
let toolsTime = 0
|
||||
|
||||
const timeSegments: TimeSegment[] = [
|
||||
{
|
||||
type: 'model',
|
||||
name: 'Initial response',
|
||||
startTime: initialCallTime,
|
||||
endTime: initialCallTime + firstResponseTime,
|
||||
duration: firstResponseTime,
|
||||
},
|
||||
]
|
||||
|
||||
const initialToolUseContentBlocks = (currentResponse.output?.message?.content || []).filter(
|
||||
(block): block is ContentBlock & { toolUse: ToolUseBlock } => 'toolUse' in block
|
||||
)
|
||||
const toolUseBlocks = initialToolUseContentBlocks.map((block) => ({
|
||||
name: block.toolUse.name || '',
|
||||
}))
|
||||
|
||||
const firstCheckResult = checkForForcedToolUsage(
|
||||
toolUseBlocks,
|
||||
originalToolChoice,
|
||||
forcedTools,
|
||||
usedForcedTools
|
||||
)
|
||||
if (firstCheckResult) {
|
||||
hasUsedForcedTool = firstCheckResult.hasUsedForcedTool
|
||||
usedForcedTools = firstCheckResult.usedForcedTools
|
||||
}
|
||||
|
||||
        while (iterationCount < MAX_TOOL_ITERATIONS) {
          const textContentBlocks = (currentResponse.output?.message?.content || []).filter(
            (block): block is ContentBlock & { text: string } => 'text' in block
          )
          const textContent = textContentBlocks.map((block) => block.text).join('\n')

          if (textContent) {
            content = textContent
          }

          const toolUseContentBlocks = (currentResponse.output?.message?.content || []).filter(
            (block): block is ContentBlock & { toolUse: ToolUseBlock } => 'toolUse' in block
          )
          const currentToolUses = toolUseContentBlocks.map((block) => block.toolUse)

          if (!currentToolUses || currentToolUses.length === 0) {
            break
          }

          const toolsStartTime = Date.now()

          const toolExecutionPromises = currentToolUses.map(async (toolUse: ToolUseBlock) => {
            const toolCallStartTime = Date.now()
            const toolName = toolUse.name || ''
            const toolArgs = (toolUse.input as Record<string, any>) || {}
            const toolUseId = toolUse.toolUseId || generateToolUseId(toolName)

            try {
              const tool = request.tools?.find((t) => t.id === toolName)
              if (!tool) return null

              const { toolParams, executionParams } = prepareToolExecution(tool, toolArgs, request)
              const result = await executeTool(toolName, executionParams, true)
              const toolCallEndTime = Date.now()

              return {
                toolUseId,
                toolName,
                toolArgs,
                toolParams,
                result,
                startTime: toolCallStartTime,
                endTime: toolCallEndTime,
                duration: toolCallEndTime - toolCallStartTime,
              }
            } catch (error) {
              const toolCallEndTime = Date.now()
              logger.error('Error processing tool call:', { error, toolName })

              return {
                toolUseId,
                toolName,
                toolArgs,
                toolParams: {},
                result: {
                  success: false,
                  output: undefined,
                  error: error instanceof Error ? error.message : 'Tool execution failed',
                },
                startTime: toolCallStartTime,
                endTime: toolCallEndTime,
                duration: toolCallEndTime - toolCallStartTime,
              }
            }
          })

          const executionResults = await Promise.allSettled(toolExecutionPromises)

          const assistantContent: ContentBlock[] = currentToolUses.map((toolUse: ToolUseBlock) => ({
            toolUse: {
              toolUseId: toolUse.toolUseId,
              name: toolUse.name,
              input: toolUse.input,
            },
          }))
          currentMessages.push({
            role: 'assistant' as ConversationRole,
            content: assistantContent,
          })

          const toolResultContent: ContentBlock[] = []

          for (const settledResult of executionResults) {
            if (settledResult.status === 'rejected' || !settledResult.value) continue

            const {
              toolUseId,
              toolName,
              toolArgs,
              toolParams,
              result,
              startTime,
              endTime,
              duration,
            } = settledResult.value

            timeSegments.push({
              type: 'tool',
              name: toolName,
              startTime,
              endTime,
              duration,
            })

            let resultContent: any
            if (result.success) {
              toolResults.push(result.output)
              resultContent = result.output
            } else {
              resultContent = {
                error: true,
                message: result.error || 'Tool execution failed',
                tool: toolName,
              }
            }

            toolCalls.push({
              name: toolName,
              arguments: toolParams,
              startTime: new Date(startTime).toISOString(),
              endTime: new Date(endTime).toISOString(),
              duration,
              result: resultContent,
              success: result.success,
            })

            const toolResultBlock: ToolResultBlock = {
              toolUseId,
              content: [{ text: JSON.stringify(resultContent) }],
            }
            toolResultContent.push({ toolResult: toolResultBlock })
          }

          if (toolResultContent.length > 0) {
            currentMessages.push({
              role: 'user' as ConversationRole,
              content: toolResultContent,
            })
          }

          const thisToolsTime = Date.now() - toolsStartTime
          toolsTime += thisToolsTime

          let nextToolChoice = toolChoice
          if (typeof originalToolChoice === 'object' && hasUsedForcedTool && forcedTools.length > 0) {
            const remainingTools = forcedTools.filter((tool) => !usedForcedTools.includes(tool))

            if (remainingTools.length > 0) {
              nextToolChoice = { tool: { name: remainingTools[0] } }
              logger.info(`Forcing next tool: ${remainingTools[0]}`)
            } else {
              nextToolChoice = { auto: {} }
              logger.info('All forced tools have been used, switching to auto')
            }
          } else if (hasUsedForcedTool && typeof originalToolChoice === 'object') {
            nextToolChoice = { auto: {} }
            logger.info('Switching to auto tool choice after forced tool was used')
          }

          const nextModelStartTime = Date.now()

          const nextCommand = new ConverseCommand({
            modelId: bedrockModelId,
            messages: currentMessages,
            system: systemPromptWithSchema.length > 0 ? systemPromptWithSchema : undefined,
            inferenceConfig,
            toolConfig: bedrockTools?.length
              ? { tools: bedrockTools, toolChoice: nextToolChoice }
              : undefined,
          })

          currentResponse = await client.send(nextCommand)

          const nextToolUseContentBlocks = (currentResponse.output?.message?.content || []).filter(
            (block): block is ContentBlock & { toolUse: ToolUseBlock } => 'toolUse' in block
          )
          const nextToolUseBlocks = nextToolUseContentBlocks.map((block) => ({
            name: block.toolUse.name || '',
          }))

          const nextCheckResult = checkForForcedToolUsage(
            nextToolUseBlocks,
            nextToolChoice,
            forcedTools,
            usedForcedTools
          )
          if (nextCheckResult) {
            hasUsedForcedTool = nextCheckResult.hasUsedForcedTool
            usedForcedTools = nextCheckResult.usedForcedTools
          }

          const nextModelEndTime = Date.now()
          const thisModelTime = nextModelEndTime - nextModelStartTime

          timeSegments.push({
            type: 'model',
            name: `Model response (iteration ${iterationCount + 1})`,
            startTime: nextModelStartTime,
            endTime: nextModelEndTime,
            duration: thisModelTime,
          })

          modelTime += thisModelTime

          if (currentResponse.usage) {
            tokens.input += currentResponse.usage.inputTokens || 0
            tokens.output += currentResponse.usage.outputTokens || 0
            tokens.total +=
              (currentResponse.usage.inputTokens || 0) + (currentResponse.usage.outputTokens || 0)

            const iterationCost = calculateCost(
              request.model,
              currentResponse.usage.inputTokens || 0,
              currentResponse.usage.outputTokens || 0
            )
            cost.input += iterationCost.input
            cost.output += iterationCost.output
            cost.total += iterationCost.total
          }

          iterationCount++
        }

        if (structuredOutputTool && request.tools?.length) {
          logger.info('Making final call with forced structured_output tool')

          const structuredOutputStartTime = Date.now()

          const structuredOutputCommand = new ConverseCommand({
            modelId: bedrockModelId,
            messages: currentMessages,
            system: systemPromptWithSchema.length > 0 ? systemPromptWithSchema : undefined,
            inferenceConfig,
            toolConfig: {
              tools: [structuredOutputTool],
              toolChoice: { tool: { name: structuredOutputToolName } },
            },
          })

          const structuredResponse = await client.send(structuredOutputCommand)
          const structuredOutputEndTime = Date.now()

          timeSegments.push({
            type: 'model',
            name: 'Structured output extraction',
            startTime: structuredOutputStartTime,
            endTime: structuredOutputEndTime,
            duration: structuredOutputEndTime - structuredOutputStartTime,
          })

          modelTime += structuredOutputEndTime - structuredOutputStartTime

          const structuredOutputCall = structuredResponse.output?.message?.content?.find(
            (block): block is ContentBlock & { toolUse: ToolUseBlock } =>
              'toolUse' in block && block.toolUse?.name === structuredOutputToolName
          )

          if (structuredOutputCall) {
            content = JSON.stringify(structuredOutputCall.toolUse.input, null, 2)
            hasExtractedStructuredOutput = true
            logger.info('Extracted structured output from forced tool call')
          } else {
            logger.warn('Structured output tool was forced but no tool call found in response')
          }

          if (structuredResponse.usage) {
            tokens.input += structuredResponse.usage.inputTokens || 0
            tokens.output += structuredResponse.usage.outputTokens || 0
            tokens.total +=
              (structuredResponse.usage.inputTokens || 0) +
              (structuredResponse.usage.outputTokens || 0)

            const structuredCost = calculateCost(
              request.model,
              structuredResponse.usage.inputTokens || 0,
              structuredResponse.usage.outputTokens || 0
            )
            cost.input += structuredCost.input
            cost.output += structuredCost.output
            cost.total += structuredCost.total
          }
        }

        const providerEndTime = Date.now()
        const providerEndTimeISO = new Date(providerEndTime).toISOString()
        const totalDuration = providerEndTime - providerStartTime

        if (request.stream && !shouldStreamToolCalls && !hasExtractedStructuredOutput) {
          logger.info('Using streaming for final Bedrock response after tool processing')

          const messagesHaveToolContent = currentMessages.some((msg) =>
            msg.content?.some(
              (block) =>
                ('toolUse' in block && block.toolUse) || ('toolResult' in block && block.toolResult)
            )
          )

          const streamToolConfig: ToolConfiguration | undefined =
            messagesHaveToolContent && request.tools?.length
              ? {
                  tools: request.tools.map((tool) => ({
                    toolSpec: {
                      name: tool.id,
                      description: tool.description,
                      inputSchema: {
                        json: {
                          type: 'object',
                          properties: tool.parameters.properties,
                          required: tool.parameters.required,
                        },
                      },
                    },
                  })),
                  toolChoice: { auto: {} },
                }
              : undefined

          const streamCommand = new ConverseStreamCommand({
            modelId: bedrockModelId,
            messages: currentMessages,
            system: systemPromptWithSchema.length > 0 ? systemPromptWithSchema : undefined,
            inferenceConfig,
            toolConfig: streamToolConfig,
          })

          const streamResponse = await client.send(streamCommand)

          if (!streamResponse.stream) {
            throw new Error('No stream returned from Bedrock')
          }

          const streamingResult = {
            stream: createReadableStreamFromBedrockStream(
              streamResponse.stream,
              (streamContent, usage) => {
                streamingResult.execution.output.content = streamContent
                streamingResult.execution.output.tokens = {
                  input: tokens.input + usage.inputTokens,
                  output: tokens.output + usage.outputTokens,
                  total: tokens.total + usage.inputTokens + usage.outputTokens,
                }

                const streamCost = calculateCost(request.model, usage.inputTokens, usage.outputTokens)
                streamingResult.execution.output.cost = {
                  input: cost.input + streamCost.input,
                  output: cost.output + streamCost.output,
                  total: cost.total + streamCost.total,
                }

                const streamEndTime = Date.now()
                const streamEndTimeISO = new Date(streamEndTime).toISOString()

                if (streamingResult.execution.output.providerTiming) {
                  streamingResult.execution.output.providerTiming.endTime = streamEndTimeISO
                  streamingResult.execution.output.providerTiming.duration =
                    streamEndTime - providerStartTime
                }
              }
            ),
            execution: {
              success: true,
              output: {
                content: '',
                model: request.model,
                tokens: {
                  input: tokens.input,
                  output: tokens.output,
                  total: tokens.total,
                },
                toolCalls:
                  toolCalls.length > 0
                    ? {
                        list: toolCalls,
                        count: toolCalls.length,
                      }
                    : undefined,
                providerTiming: {
                  startTime: providerStartTimeISO,
                  endTime: new Date().toISOString(),
                  duration: Date.now() - providerStartTime,
                  modelTime,
                  toolsTime,
                  firstResponseTime,
                  iterations: iterationCount + 1,
                  timeSegments,
                },
                cost: {
                  input: cost.input,
                  output: cost.output,
                  total: cost.total,
                },
              },
              logs: [],
              metadata: {
                startTime: providerStartTimeISO,
                endTime: new Date().toISOString(),
                duration: Date.now() - providerStartTime,
              },
              isStreaming: true,
            },
          }

          return streamingResult as StreamingExecution
        }

        return {
          content,
          model: request.model,
          tokens,
          toolCalls:
            toolCalls.length > 0
              ? toolCalls.map((tc) => ({
                  name: tc.name,
                  arguments: tc.arguments as Record<string, any>,
                  startTime: tc.startTime,
                  endTime: tc.endTime,
                  duration: tc.duration,
                  result: tc.result,
                }))
              : undefined,
          toolResults: toolResults.length > 0 ? toolResults : undefined,
          timing: {
            startTime: providerStartTimeISO,
            endTime: providerEndTimeISO,
            duration: totalDuration,
            modelTime,
            toolsTime,
            firstResponseTime,
            iterations: iterationCount + 1,
            timeSegments,
          },
        }
      } catch (error) {
        const providerEndTime = Date.now()
        const providerEndTimeISO = new Date(providerEndTime).toISOString()
        const totalDuration = providerEndTime - providerStartTime

        logger.error('Error in Bedrock request:', {
          error,
          duration: totalDuration,
        })

        const enhancedError = new Error(error instanceof Error ? error.message : String(error))
        // @ts-ignore
        enhancedError.timing = {
          startTime: providerStartTimeISO,
          endTime: providerEndTimeISO,
          duration: totalDuration,
        }

        throw enhancedError
      }
    },
}

apps/sim/providers/bedrock/utils.ts (new file, 108 lines)
@@ -0,0 +1,108 @@
import type { ConverseStreamOutput } from '@aws-sdk/client-bedrock-runtime'
import { createLogger } from '@sim/logger'
import { trackForcedToolUsage } from '@/providers/utils'

const logger = createLogger('BedrockUtils')

export interface BedrockStreamUsage {
  inputTokens: number
  outputTokens: number
}

export function createReadableStreamFromBedrockStream(
  bedrockStream: AsyncIterable<ConverseStreamOutput>,
  onComplete?: (content: string, usage: BedrockStreamUsage) => void
): ReadableStream<Uint8Array> {
  let fullContent = ''
  let inputTokens = 0
  let outputTokens = 0

  return new ReadableStream({
    async start(controller) {
      try {
        for await (const event of bedrockStream) {
          if (event.contentBlockDelta?.delta?.text) {
            const text = event.contentBlockDelta.delta.text
            fullContent += text
            controller.enqueue(new TextEncoder().encode(text))
          } else if (event.metadata?.usage) {
            inputTokens = event.metadata.usage.inputTokens ?? 0
            outputTokens = event.metadata.usage.outputTokens ?? 0
          }
        }

        if (onComplete) {
          onComplete(fullContent, { inputTokens, outputTokens })
        }

        controller.close()
      } catch (err) {
        controller.error(err)
      }
    },
  })
}

export function checkForForcedToolUsage(
  toolUseBlocks: Array<{ name: string }>,
  toolChoice: any,
  forcedTools: string[],
  usedForcedTools: string[]
): { hasUsedForcedTool: boolean; usedForcedTools: string[] } | null {
  if (typeof toolChoice === 'object' && toolChoice !== null && toolUseBlocks.length > 0) {
    const adaptedToolCalls = toolUseBlocks.map((tool) => ({ name: tool.name }))
    const adaptedToolChoice = toolChoice.tool
      ? { function: { name: toolChoice.tool.name } }
      : toolChoice

    return trackForcedToolUsage(
      adaptedToolCalls,
      adaptedToolChoice,
      logger,
      'bedrock',
      forcedTools,
      usedForcedTools
    )
  }
  return null
}

export function generateToolUseId(toolName: string): string {
  return `${toolName}-${Date.now()}-${Math.random().toString(36).substring(2, 7)}`
}

/**
 * Converts a model ID to the Bedrock inference profile format.
 * AWS Bedrock requires inference profile IDs (e.g., us.anthropic.claude-...)
 * for on-demand invocation of newer models.
 *
 * @param modelId - The model ID (e.g., "bedrock/anthropic.claude-sonnet-4-5-20250929-v1:0")
 * @param region - The AWS region (e.g., "us-east-1")
 * @returns The inference profile ID (e.g., "us.anthropic.claude-sonnet-4-5-20250929-v1:0")
 */
export function getBedrockInferenceProfileId(modelId: string, region: string): string {
  const baseModelId = modelId.startsWith('bedrock/') ? modelId.slice(8) : modelId

  if (/^(us-gov|us|eu|apac|au|ca|jp|global)\./.test(baseModelId)) {
    return baseModelId
  }

  let inferencePrefix: string
  if (region.startsWith('us-gov-')) {
    inferencePrefix = 'us-gov'
  } else if (region.startsWith('us-') || region.startsWith('ca-')) {
    inferencePrefix = 'us'
  } else if (region.startsWith('eu-') || region === 'il-central-1') {
    inferencePrefix = 'eu'
  } else if (region.startsWith('ap-') || region.startsWith('me-')) {
    inferencePrefix = 'apac'
  } else if (region.startsWith('sa-')) {
    inferencePrefix = 'us'
  } else if (region.startsWith('af-')) {
    inferencePrefix = 'eu'
  } else {
    inferencePrefix = 'us'
  }

  return `${inferencePrefix}.${baseModelId}`
}
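For illustration only (not part of the commit), the mapping this helper implements, using the IDs from the JSDoc above as hypothetical call sites:

// Plain model IDs get a regional inference-profile prefix derived from the AWS region.
getBedrockInferenceProfileId('bedrock/anthropic.claude-sonnet-4-5-20250929-v1:0', 'us-east-1')
// => 'us.anthropic.claude-sonnet-4-5-20250929-v1:0'
getBedrockInferenceProfileId('bedrock/anthropic.claude-sonnet-4-5-20250929-v1:0', 'eu-west-1')
// => 'eu.anthropic.claude-sonnet-4-5-20250929-v1:0'
// IDs that already carry a profile prefix are returned unchanged, regardless of region.
getBedrockInferenceProfileId('us.anthropic.claude-sonnet-4-5-20250929-v1:0', 'ap-southeast-1')
// => 'us.anthropic.claude-sonnet-4-5-20250929-v1:0'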
@@ -11,6 +11,7 @@ import type React from 'react'
import {
  AnthropicIcon,
  AzureIcon,
  BedrockIcon,
  CerebrasIcon,
  DeepseekIcon,
  GeminiIcon,
@@ -1632,6 +1633,408 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
    contextInformationAvailable: false,
    models: [], // Populated dynamically
  },
  bedrock: {
    id: 'bedrock',
    name: 'AWS Bedrock',
    description: 'AWS Bedrock foundation models',
    defaultModel: 'bedrock/anthropic.claude-sonnet-4-5-20250929-v1:0',
    modelPatterns: [/^bedrock\//],
    icon: BedrockIcon,
    capabilities: {
      temperature: { min: 0, max: 1 },
      toolUsageControl: true,
    },
    models: [
      {
        id: 'bedrock/anthropic.claude-opus-4-5-20251101-v1:0',
        pricing: { input: 5.0, output: 25.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 }, nativeStructuredOutputs: true },
        contextWindow: 200000,
      },
      {
        id: 'bedrock/anthropic.claude-sonnet-4-5-20250929-v1:0',
        pricing: { input: 3.0, output: 15.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 }, nativeStructuredOutputs: true },
        contextWindow: 200000,
      },
      {
        id: 'bedrock/anthropic.claude-haiku-4-5-20251001-v1:0',
        pricing: { input: 1.0, output: 5.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 }, nativeStructuredOutputs: true },
        contextWindow: 200000,
      },
      {
        id: 'bedrock/anthropic.claude-opus-4-1-20250805-v1:0',
        pricing: { input: 15.0, output: 75.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 }, nativeStructuredOutputs: true },
        contextWindow: 200000,
      },
      {
        id: 'bedrock/amazon.nova-2-pro-v1:0',
        pricing: { input: 1.0, output: 4.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 1000000,
      },
      {
        id: 'bedrock/amazon.nova-2-lite-v1:0',
        pricing: { input: 0.08, output: 0.32, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 1000000,
      },
      {
        id: 'bedrock/amazon.nova-premier-v1:0',
        pricing: { input: 2.5, output: 10.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 1000000,
      },
      {
        id: 'bedrock/amazon.nova-pro-v1:0',
        pricing: { input: 0.8, output: 3.2, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 300000,
      },
      {
        id: 'bedrock/amazon.nova-lite-v1:0',
        pricing: { input: 0.06, output: 0.24, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 300000,
      },
      {
        id: 'bedrock/amazon.nova-micro-v1:0',
        pricing: { input: 0.035, output: 0.14, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/meta.llama4-maverick-17b-instruct-v1:0',
        pricing: { input: 0.24, output: 0.97, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 1000000,
      },
      {
        id: 'bedrock/meta.llama4-scout-17b-instruct-v1:0',
        pricing: { input: 0.18, output: 0.72, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 3500000,
      },
      {
        id: 'bedrock/meta.llama3-3-70b-instruct-v1:0',
        pricing: { input: 0.72, output: 0.72, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/meta.llama3-2-90b-instruct-v1:0',
        pricing: { input: 2.0, output: 2.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/meta.llama3-2-11b-instruct-v1:0',
        pricing: { input: 0.16, output: 0.16, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/meta.llama3-2-3b-instruct-v1:0',
        pricing: { input: 0.15, output: 0.15, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/meta.llama3-2-1b-instruct-v1:0',
        pricing: { input: 0.1, output: 0.1, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/meta.llama3-1-405b-instruct-v1:0',
        pricing: { input: 5.32, output: 16.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/meta.llama3-1-70b-instruct-v1:0',
        pricing: { input: 2.65, output: 3.5, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/meta.llama3-1-8b-instruct-v1:0',
        pricing: { input: 0.3, output: 0.6, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/mistral.mistral-large-3-675b-instruct',
        pricing: { input: 2.0, output: 6.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/mistral.mistral-large-2411-v1:0',
        pricing: { input: 2.0, output: 6.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/mistral.mistral-large-2407-v1:0',
        pricing: { input: 4.0, output: 12.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/mistral.pixtral-large-2502-v1:0',
        pricing: { input: 2.0, output: 6.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/mistral.magistral-small-2509',
        pricing: { input: 0.5, output: 1.5, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/mistral.ministral-3-14b-instruct',
        pricing: { input: 0.2, output: 0.2, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/mistral.ministral-3-8b-instruct',
        pricing: { input: 0.1, output: 0.1, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/mistral.ministral-3-3b-instruct',
        pricing: { input: 0.04, output: 0.04, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/mistral.mixtral-8x7b-instruct-v0:1',
        pricing: { input: 0.45, output: 0.7, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 32000,
      },
      {
        id: 'bedrock/amazon.titan-text-premier-v1:0',
        pricing: { input: 0.5, output: 1.5, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 32000,
      },
      {
        id: 'bedrock/cohere.command-r-plus-v1:0',
        pricing: { input: 3.0, output: 15.0, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
      {
        id: 'bedrock/cohere.command-r-v1:0',
        pricing: { input: 0.5, output: 1.5, updatedAt: '2026-01-07' },
        capabilities: { temperature: { min: 0, max: 1 } },
        contextWindow: 128000,
      },
    ],
  },
}

export function getProviderModels(providerId: string): string[] {

@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { anthropicProvider } from '@/providers/anthropic'
import { azureOpenAIProvider } from '@/providers/azure-openai'
import { bedrockProvider } from '@/providers/bedrock'
import { cerebrasProvider } from '@/providers/cerebras'
import { deepseekProvider } from '@/providers/deepseek'
import { googleProvider } from '@/providers/google'
@@ -30,6 +31,7 @@ const providerRegistry: Record<ProviderId, ProviderConfig> = {
  'azure-openai': azureOpenAIProvider,
  openrouter: openRouterProvider,
  ollama: ollamaProvider,
  bedrock: bedrockProvider,
}

export async function getProviderExecutor(

@@ -14,10 +14,8 @@ export type ProviderId =
  | 'ollama'
  | 'openrouter'
  | 'vllm'
  | 'bedrock'

/**
 * Model pricing information per million tokens
 */
export interface ModelPricing {
  input: number // Per 1M tokens
  cachedInput?: number // Per 1M tokens (if supported)
@@ -163,6 +161,9 @@ export interface ProviderRequest {
  azureApiVersion?: string
  vertexProject?: string
  vertexLocation?: string
  bedrockAccessKeyId?: string
  bedrockSecretKey?: string
  bedrockRegion?: string
  reasoningEffort?: string
  verbosity?: string
  thinkingLevel?: string

@@ -88,6 +88,7 @@ export const providers: Record<ProviderId, ProviderMetadata> = {
  'azure-openai': buildProviderMetadata('azure-openai'),
  openrouter: buildProviderMetadata('openrouter'),
  ollama: buildProviderMetadata('ollama'),
  bedrock: buildProviderMetadata('bedrock'),
}

export function updateOllamaProviderModels(models: string[]): void {
@@ -622,6 +623,12 @@ export function getApiKey(provider: string, model: string, userProvidedKey?: str
    return userProvidedKey || 'empty'
  }

  // Bedrock uses its own credentials (bedrockAccessKeyId/bedrockSecretKey), not apiKey
  const isBedrockModel = provider === 'bedrock' || model.startsWith('bedrock/')
  if (isBedrockModel) {
    return 'bedrock-uses-own-credentials'
  }

  const isOpenAIModel = provider === 'openai'
  const isClaudeModel = provider === 'anthropic'
  const isGeminiModel = provider === 'google'
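As an aside (not part of the diff), the guard above means a hypothetical call such as:

getApiKey('bedrock', 'bedrock/amazon.nova-pro-v1:0')
// => 'bedrock-uses-own-credentials'

short-circuits before the per-provider key rotation further down; Bedrock requests authenticate with the bedrockAccessKeyId/bedrockSecretKey pair instead of an API key.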
@@ -16,6 +16,9 @@ interface LLMChatParams {
  vertexProject?: string
  vertexLocation?: string
  vertexCredential?: string
  bedrockAccessKeyId?: string
  bedrockSecretKey?: string
  bedrockRegion?: string
}

interface LLMChatResponse extends ToolResponse {
@@ -98,6 +101,24 @@ export const llmChatTool: ToolConfig<LLMChatParams, LLMChatResponse> = {
      visibility: 'hidden',
      description: 'Google Cloud OAuth credential ID for Vertex AI',
    },
    bedrockAccessKeyId: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'AWS Access Key ID for Bedrock',
    },
    bedrockSecretKey: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'AWS Secret Access Key for Bedrock',
    },
    bedrockRegion: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'AWS region for Bedrock (defaults to us-east-1)',
    },
  },

  request: {
@@ -122,6 +143,9 @@ export const llmChatTool: ToolConfig<LLMChatParams, LLMChatResponse> = {
        vertexProject: params.vertexProject,
        vertexLocation: params.vertexLocation,
        vertexCredential: params.vertexCredential,
        bedrockAccessKeyId: params.bedrockAccessKeyId,
        bedrockSecretKey: params.bedrockSecretKey,
        bedrockRegion: params.bedrockRegion,
      }
    },
  },

bun.lock (11 lines changed)
@@ -53,6 +53,7 @@
    "version": "0.1.0",
    "dependencies": {
      "@anthropic-ai/sdk": "^0.39.0",
      "@aws-sdk/client-bedrock-runtime": "3.940.0",
      "@aws-sdk/client-dynamodb": "3.940.0",
      "@aws-sdk/client-rds-data": "3.940.0",
      "@aws-sdk/client-s3": "^3.779.0",
@@ -377,6 +378,8 @@
"@aws-crypto/util": ["@aws-crypto/util@5.2.0", "", { "dependencies": { "@aws-sdk/types": "^3.222.0", "@smithy/util-utf8": "^2.0.0", "tslib": "^2.6.2" } }, "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ=="],
"@aws-sdk/client-bedrock-runtime": ["@aws-sdk/client-bedrock-runtime@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/eventstream-handler-node": "3.936.0", "@aws-sdk/middleware-eventstream": "3.936.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/middleware-websocket": "3.936.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/token-providers": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/eventstream-serde-browser": "^4.2.5", "@smithy/eventstream-serde-config-resolver": "^4.3.5", "@smithy/eventstream-serde-node": "^4.2.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-stream": "^4.5.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-Gs6UUQP1zt8vahOxJ3BADcb3B+2KldUNA3bKa+KdK58de7N7tLJFJfZuXhFGGtwyNPh1aw6phtdP6dauq3OLWA=="],
"@aws-sdk/client-dynamodb": ["@aws-sdk/client-dynamodb@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-endpoint-discovery": "3.936.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "@smithy/util-waiter": "^4.2.5", "tslib": "^2.6.2" } }, "sha512-u2sXsNJazJbuHeWICvsj6RvNyJh3isedEfPvB21jK/kxcriK+dE/izlKC2cyxUjERCmku0zTFNzY9FhrLbYHjQ=="],
"@aws-sdk/client-rds-data": ["@aws-sdk/client-rds-data@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/credential-provider-node": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-68NH61MvS48CVPfzBNCPdCG4KnNjM+Uj/3DSw7rT9PJvdML9ARS4M2Uqco9POPw+Aj20KBumsEUd6FMVcYBXAA=="],
@@ -411,12 +414,16 @@
"@aws-sdk/endpoint-cache": ["@aws-sdk/endpoint-cache@3.893.0", "", { "dependencies": { "mnemonist": "0.38.3", "tslib": "^2.6.2" } }, "sha512-KSwTfyLZyNLszz5f/yoLC+LC+CRKpeJii/+zVAy7JUOQsKhSykiRUPYUx7o2Sdc4oJfqqUl26A/jSttKYnYtAA=="],
"@aws-sdk/eventstream-handler-node": ["@aws-sdk/eventstream-handler-node@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/eventstream-codec": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-4zIbhdRmol2KosIHmU31ATvNP0tkJhDlRj9GuawVJoEnMvJA1pd2U3SRdiOImJU3j8pT46VeS4YMmYxfjGHByg=="],
"@aws-sdk/lib-dynamodb": ["@aws-sdk/lib-dynamodb@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/util-dynamodb": "3.940.0", "@smithy/core": "^3.18.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" }, "peerDependencies": { "@aws-sdk/client-dynamodb": "^3.940.0" } }, "sha512-5ApYAix2wvJuMszj1lrpg8lm4ipoZMFO8crxtzsdAvxM8TV5bKSRQQ2GA3CMIODrBuSzpXvWueHHrfkx05ZAQw=="],
"@aws-sdk/middleware-bucket-endpoint": ["@aws-sdk/middleware-bucket-endpoint@3.957.0", "", { "dependencies": { "@aws-sdk/types": "3.957.0", "@aws-sdk/util-arn-parser": "3.957.0", "@smithy/node-config-provider": "^4.3.7", "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-iczcn/QRIBSpvsdAS/rbzmoBpleX1JBjXvCynMbDceVLBIcVrwT1hXECrhtIC2cjh4HaLo9ClAbiOiWuqt+6MA=="],
"@aws-sdk/middleware-endpoint-discovery": ["@aws-sdk/middleware-endpoint-discovery@3.936.0", "", { "dependencies": { "@aws-sdk/endpoint-cache": "3.893.0", "@aws-sdk/types": "3.936.0", "@smithy/node-config-provider": "^4.3.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wNJZ8PDw0eQK2x4z1q8JqiDvw9l9xd36EoklVT2CIBt8FnqGdrMGjAx93RRbH3G6Fmvwoe+D3VJXbWHBlhD0Bw=="],
"@aws-sdk/middleware-eventstream": ["@aws-sdk/middleware-eventstream@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-XQSH8gzLkk8CDUDxyt4Rdm9owTpRIPdtg2yw9Y2Wl5iSI55YQSiC3x8nM3c4Y4WqReJprunFPK225ZUDoYCfZA=="],
"@aws-sdk/middleware-expect-continue": ["@aws-sdk/middleware-expect-continue@3.957.0", "", { "dependencies": { "@aws-sdk/types": "3.957.0", "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-AlbK3OeVNwZZil0wlClgeI/ISlOt/SPUxBsIns876IFaVu/Pj3DgImnYhpcJuFRek4r4XM51xzIaGQXM6GDHGg=="],
"@aws-sdk/middleware-flexible-checksums": ["@aws-sdk/middleware-flexible-checksums@3.957.0", "", { "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", "@aws-sdk/core": "3.957.0", "@aws-sdk/crc64-nvme": "3.957.0", "@aws-sdk/types": "3.957.0", "@smithy/is-array-buffer": "^4.2.0", "@smithy/node-config-provider": "^4.3.7", "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "@smithy/util-middleware": "^4.2.7", "@smithy/util-stream": "^4.5.8", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-iJpeVR5V8se1hl2pt+k8bF/e9JO4KWgPCMjg8BtRspNtKIUGy7j6msYvbDixaKZaF2Veg9+HoYcOhwnZumjXSA=="],
@@ -437,6 +444,8 @@
"@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.940.0", "", { "dependencies": { "@aws-sdk/core": "3.940.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@smithy/core": "^3.18.5", "@smithy/protocol-http": "^5.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-nJbLrUj6fY+l2W2rIB9P4Qvpiy0tnTdg/dmixRxrU1z3e8wBdspJlyE+AZN4fuVbeL6rrRrO/zxQC1bB3cw5IA=="],
"@aws-sdk/middleware-websocket": ["@aws-sdk/middleware-websocket@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@aws-sdk/util-format-url": "3.936.0", "@smithy/eventstream-codec": "^4.2.5", "@smithy/eventstream-serde-browser": "^4.2.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/protocol-http": "^5.3.5", "@smithy/signature-v4": "^5.3.5", "@smithy/types": "^4.9.0", "@smithy/util-hex-encoding": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-bPe3rqeugyj/MmjP0yBSZox2v1Wa8Dv39KN+RxVbQroLO8VUitBo6xyZ0oZebhZ5sASwSg58aDcMlX0uFLQnTA=="],
"@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.940.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.940.0", "@aws-sdk/middleware-host-header": "3.936.0", "@aws-sdk/middleware-logger": "3.936.0", "@aws-sdk/middleware-recursion-detection": "3.936.0", "@aws-sdk/middleware-user-agent": "3.940.0", "@aws-sdk/region-config-resolver": "3.936.0", "@aws-sdk/types": "3.936.0", "@aws-sdk/util-endpoints": "3.936.0", "@aws-sdk/util-user-agent-browser": "3.936.0", "@aws-sdk/util-user-agent-node": "3.940.0", "@smithy/config-resolver": "^4.4.3", "@smithy/core": "^3.18.5", "@smithy/fetch-http-handler": "^5.3.6", "@smithy/hash-node": "^4.2.5", "@smithy/invalid-dependency": "^4.2.5", "@smithy/middleware-content-length": "^4.2.5", "@smithy/middleware-endpoint": "^4.3.12", "@smithy/middleware-retry": "^4.4.12", "@smithy/middleware-serde": "^4.2.6", "@smithy/middleware-stack": "^4.2.5", "@smithy/node-config-provider": "^4.3.5", "@smithy/node-http-handler": "^4.4.5", "@smithy/protocol-http": "^5.3.5", "@smithy/smithy-client": "^4.9.8", "@smithy/types": "^4.9.0", "@smithy/url-parser": "^4.2.5", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.11", "@smithy/util-defaults-mode-node": "^4.2.14", "@smithy/util-endpoints": "^3.2.5", "@smithy/util-middleware": "^4.2.5", "@smithy/util-retry": "^4.2.5", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-x0mdv6DkjXqXEcQj3URbCltEzW6hoy/1uIL+i8gExP6YKrnhiZ7SzuB4gPls2UOpK5UqLiqXjhRLfBb1C9i4Dw=="],
"@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/config-resolver": "^4.4.3", "@smithy/node-config-provider": "^4.3.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw=="],
@@ -3649,6 +3658,8 @@
"@aws-sdk/middleware-ssec/@aws-sdk/types": ["@aws-sdk/types@3.957.0", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-wzWC2Nrt859ABk6UCAVY/WYEbAd7FjkdrQL6m24+tfmWYDNRByTJ9uOgU/kw9zqLCAwb//CPvrJdhqjTznWXAg=="],
"@aws-sdk/middleware-websocket/@aws-sdk/util-format-url": ["@aws-sdk/util-format-url@3.936.0", "", { "dependencies": { "@aws-sdk/types": "3.936.0", "@smithy/querystring-builder": "^4.2.5", "@smithy/types": "^4.9.0", "tslib": "^2.6.2" } }, "sha512-MS5eSEtDUFIAMHrJaMERiHAvDPdfxc/T869ZjDNFAIiZhyc037REw0aoTNeimNXDNy2txRNZJaAUn/kE4RwN+g=="],
"@aws-sdk/s3-request-presigner/@aws-sdk/types": ["@aws-sdk/types@3.957.0", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-wzWC2Nrt859ABk6UCAVY/WYEbAd7FjkdrQL6m24+tfmWYDNRByTJ9uOgU/kw9zqLCAwb//CPvrJdhqjTznWXAg=="],
"@aws-sdk/signature-v4-multi-region/@aws-sdk/types": ["@aws-sdk/types@3.957.0", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-wzWC2Nrt859ABk6UCAVY/WYEbAd7FjkdrQL6m24+tfmWYDNRByTJ9uOgU/kw9zqLCAwb//CPvrJdhqjTznWXAg=="],