feat(llms): added additional params to llm-based blocks for alternative models (#1223)

* feat(llms): added additional params to llm-based blocks for alternative models

* add hidden temp param to other LLM-based blocks
Author: Waleed
Date: 2025-09-02 13:29:03 -07:00
Committed by: GitHub
Parent: 3db73ff721
Commit: 9de0d91f9a
4 changed files with 216 additions and 32 deletions
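
What the diff below amounts to: in the Evaluator, Router, and Translate blocks the model picker changes from a fixed dropdown to a combobox whose options merge the built-in providers with dynamically discovered Ollama and OpenRouter models; all three blocks gain Azure OpenAI endpoint and API-version fields that only appear when an Azure OpenAI model is selected; and the Evaluator and Router also get a hidden, low-default temperature slider. The sketch below condenses that shape for reference. It is illustrative rather than the literal patch: the names modelOptions and extraLlmSubBlocks are invented here, while getBaseModelProviders, getProviderIcon, providers, and useProvidersStore are the project utilities already used in the diff.

// Illustrative sketch only; names noted above as invented are not part of the commit.
import { getBaseModelProviders, getProviderIcon, providers } from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'

// Combobox options: built-in providers merged with dynamically discovered
// Ollama and OpenRouter models, deduplicated, each tagged with a provider icon.
const modelOptions = () => {
  const state = useProvidersStore.getState()
  const allModels = Array.from(
    new Set([
      ...Object.keys(getBaseModelProviders()),
      ...state.providers.ollama.models,
      ...state.providers.openrouter.models,
    ])
  )
  return allModels.map((model) => {
    const icon = getProviderIcon(model)
    return { label: model, id: model, ...(icon && { icon }) }
  })
}

// Extra sub-block fields added alongside the existing model/apiKey fields.
const extraLlmSubBlocks = [
  {
    id: 'azureEndpoint',
    title: 'Azure OpenAI Endpoint',
    type: 'short-input',
    layout: 'full',
    password: true,
    placeholder: 'https://your-resource.openai.azure.com',
    connectionDroppable: false,
    // Visible only when an Azure OpenAI model is selected.
    condition: { field: 'model', value: providers['azure-openai'].models },
  },
  {
    id: 'azureApiVersion',
    title: 'Azure API Version',
    type: 'short-input',
    layout: 'full',
    placeholder: '2024-07-01-preview',
    connectionDroppable: false,
    condition: { field: 'model', value: providers['azure-openai'].models },
  },
  {
    // Hidden slider pinning a low temperature for consistent evaluation/routing output.
    id: 'temperature',
    title: 'Temperature',
    type: 'slider',
    layout: 'half',
    min: 0,
    max: 2,
    value: () => '0.1',
    hidden: true,
  },
]

In the actual files these entries are written out inline in each block's configuration rather than shared between blocks.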

View File

@@ -14,7 +14,6 @@ import {
supportsTemperature,
} from '@/providers/utils'
// Get current Ollama models dynamically
const getCurrentOllamaModels = () => {
return useProvidersStore.getState().providers.ollama.models
}

View File

@@ -3,12 +3,22 @@ import { isHosted } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockConfig, ParamType } from '@/blocks/types'
import type { ProviderId } from '@/providers/types'
-import { getAllModelProviders, getBaseModelProviders, getHostedModels } from '@/providers/utils'
+import {
+getAllModelProviders,
+getBaseModelProviders,
+getHostedModels,
+getProviderIcon,
+providers,
+} from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'
import type { ToolResponse } from '@/tools/types'
const logger = createLogger('EvaluatorBlock')
+const getCurrentOllamaModels = () => {
+return useProvidersStore.getState().providers.ollama.models
+}
interface Metric {
name: string
description: string
@@ -173,16 +183,21 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
{
id: 'model',
title: 'Model',
-type: 'dropdown',
+type: 'combobox',
layout: 'half',
+placeholder: 'Type or select a model...',
+required: true,
options: () => {
-const ollamaModels = useProvidersStore.getState().providers.ollama.models
+const providersState = useProvidersStore.getState()
+const ollamaModels = providersState.providers.ollama.models
+const openrouterModels = providersState.providers.openrouter.models
const baseModels = Object.keys(getBaseModelProviders())
-return [...baseModels, ...ollamaModels].map((model) => ({
-label: model,
-id: model,
-}))
+const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
+return allModels.map((model) => {
+const icon = getProviderIcon(model)
+return { label: model, id: model, ...(icon && { icon }) }
+})
},
},
{
@@ -198,9 +213,48 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
? {
field: 'model',
value: getHostedModels(),
-not: true,
+not: true, // Show for all models EXCEPT those listed
}
-: undefined,
+: () => ({
+field: 'model',
+value: getCurrentOllamaModels(),
+not: true, // Show for all models EXCEPT Ollama models
+}),
},
+{
+id: 'azureEndpoint',
+title: 'Azure OpenAI Endpoint',
+type: 'short-input',
+layout: 'full',
+password: true,
+placeholder: 'https://your-resource.openai.azure.com',
+connectionDroppable: false,
+condition: {
+field: 'model',
+value: providers['azure-openai'].models,
+},
+},
+{
+id: 'azureApiVersion',
+title: 'Azure API Version',
+type: 'short-input',
+layout: 'full',
+placeholder: '2024-07-01-preview',
+connectionDroppable: false,
+condition: {
+field: 'model',
+value: providers['azure-openai'].models,
+},
+},
+{
+id: 'temperature',
+title: 'Temperature',
+type: 'slider',
+layout: 'half',
+min: 0,
+max: 2,
+value: () => '0.1',
+hidden: true,
+},
{
id: 'systemPrompt',
@@ -310,6 +364,12 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
},
model: { type: 'string' as ParamType, description: 'AI model to use' },
apiKey: { type: 'string' as ParamType, description: 'Provider API key' },
+azureEndpoint: { type: 'string' as ParamType, description: 'Azure OpenAI endpoint URL' },
+azureApiVersion: { type: 'string' as ParamType, description: 'Azure API version' },
+temperature: {
+type: 'number' as ParamType,
+description: 'Response randomness level (low for consistent evaluation)',
+},
content: { type: 'string' as ParamType, description: 'Content to evaluate' },
},
outputs: {

View File

@@ -2,10 +2,20 @@ import { ConnectIcon } from '@/components/icons'
import { isHosted } from '@/lib/environment'
import type { BlockConfig } from '@/blocks/types'
import type { ProviderId } from '@/providers/types'
-import { getAllModelProviders, getBaseModelProviders, getHostedModels } from '@/providers/utils'
+import {
+getAllModelProviders,
+getBaseModelProviders,
+getHostedModels,
+getProviderIcon,
+providers,
+} from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'
import type { ToolResponse } from '@/tools/types'
+const getCurrentOllamaModels = () => {
+return useProvidersStore.getState().providers.ollama.models
+}
interface RouterResponse extends ToolResponse {
output: {
content: string
@@ -116,17 +126,22 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
{
id: 'model',
title: 'Model',
-type: 'dropdown',
+type: 'combobox',
layout: 'half',
-options: () => {
-const ollamaModels = useProvidersStore.getState().providers.ollama.models
-const baseModels = Object.keys(getBaseModelProviders())
-return [...baseModels, ...ollamaModels].map((model) => ({
-label: model,
-id: model,
-}))
-},
+placeholder: 'Type or select a model...',
+required: true,
+options: () => {
+const providersState = useProvidersStore.getState()
+const ollamaModels = providersState.providers.ollama.models
+const openrouterModels = providersState.providers.openrouter.models
+const baseModels = Object.keys(getBaseModelProviders())
+const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
+return allModels.map((model) => {
+const icon = getProviderIcon(model)
+return { label: model, id: model, ...(icon && { icon }) }
+})
+},
},
{
id: 'apiKey',
@@ -137,14 +152,53 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
password: true,
connectionDroppable: false,
required: true,
-// Hide API key for all hosted models when running on hosted version
+// Hide API key for hosted models and Ollama models
condition: isHosted
? {
field: 'model',
value: getHostedModels(),
not: true, // Show for all models EXCEPT those listed
}
-: undefined, // Show for all models in non-hosted environments
+: () => ({
+field: 'model',
+value: getCurrentOllamaModels(),
+not: true, // Show for all models EXCEPT Ollama models
+}),
},
+{
+id: 'azureEndpoint',
+title: 'Azure OpenAI Endpoint',
+type: 'short-input',
+layout: 'full',
+password: true,
+placeholder: 'https://your-resource.openai.azure.com',
+connectionDroppable: false,
+condition: {
+field: 'model',
+value: providers['azure-openai'].models,
+},
+},
+{
+id: 'azureApiVersion',
+title: 'Azure API Version',
+type: 'short-input',
+layout: 'full',
+placeholder: '2024-07-01-preview',
+connectionDroppable: false,
+condition: {
+field: 'model',
+value: providers['azure-openai'].models,
+},
+},
+{
+id: 'temperature',
+title: 'Temperature',
+type: 'slider',
+layout: 'half',
+hidden: true,
+min: 0,
+max: 2,
+value: () => '0.1',
+},
{
id: 'systemPrompt',
@@ -184,6 +238,12 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
prompt: { type: 'string', description: 'Routing prompt content' },
model: { type: 'string', description: 'AI model to use' },
apiKey: { type: 'string', description: 'Provider API key' },
+azureEndpoint: { type: 'string', description: 'Azure OpenAI endpoint URL' },
+azureApiVersion: { type: 'string', description: 'Azure API version' },
+temperature: {
+type: 'number',
+description: 'Response randomness level (low for consistent routing)',
+},
},
outputs: {
content: { type: 'string', description: 'Routing response content' },

View File

@@ -1,7 +1,18 @@
import { TranslateIcon } from '@/components/icons'
import { isHosted } from '@/lib/environment'
import type { BlockConfig } from '@/blocks/types'
import type { ProviderId } from '@/providers/types'
-import { getBaseModelProviders } from '@/providers/utils'
+import {
+getAllModelProviders,
+getBaseModelProviders,
+getHostedModels,
+getProviderIcon,
+providers,
+} from '@/providers/utils'
+import { useProvidersStore } from '@/stores/providers/store'
+const getCurrentOllamaModels = () => {
+return useProvidersStore.getState().providers.ollama.models
+}
const getTranslationPrompt = (
targetLanguage: string
@@ -44,10 +55,22 @@ export const TranslateBlock: BlockConfig = {
{
id: 'model',
title: 'Model',
-type: 'dropdown',
+type: 'combobox',
layout: 'half',
-options: Object.keys(getBaseModelProviders()).map((key) => ({ label: key, id: key })),
+placeholder: 'Type or select a model...',
+required: true,
+options: () => {
+const providersState = useProvidersStore.getState()
+const ollamaModels = providersState.providers.ollama.models
+const openrouterModels = providersState.providers.openrouter.models
+const baseModels = Object.keys(getBaseModelProviders())
+const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
+return allModels.map((model) => {
+const icon = getProviderIcon(model)
+return { label: model, id: model, ...(icon && { icon }) }
+})
+},
},
{
id: 'apiKey',
@@ -58,6 +81,43 @@ export const TranslateBlock: BlockConfig = {
password: true,
connectionDroppable: false,
required: true,
+// Hide API key for hosted models and Ollama models
+condition: isHosted
+? {
+field: 'model',
+value: getHostedModels(),
+not: true, // Show for all models EXCEPT those listed
+}
+: () => ({
+field: 'model',
+value: getCurrentOllamaModels(),
+not: true, // Show for all models EXCEPT Ollama models
+}),
},
+{
+id: 'azureEndpoint',
+title: 'Azure OpenAI Endpoint',
+type: 'short-input',
+layout: 'full',
+password: true,
+placeholder: 'https://your-resource.openai.azure.com',
+connectionDroppable: false,
+condition: {
+field: 'model',
+value: providers['azure-openai'].models,
+},
+},
+{
+id: 'azureApiVersion',
+title: 'Azure API Version',
+type: 'short-input',
+layout: 'full',
+placeholder: '2024-07-01-preview',
+connectionDroppable: false,
+condition: {
+field: 'model',
+value: providers['azure-openai'].models,
+},
+},
{
id: 'systemPrompt',
@@ -71,21 +131,24 @@ export const TranslateBlock: BlockConfig = {
},
],
tools: {
-access: ['openai_chat', 'anthropic_chat', 'google_chat'],
+access: [
+'openai_chat',
+'anthropic_chat',
+'google_chat',
+'xai_chat',
+'deepseek_chat',
+'deepseek_reasoner',
+],
config: {
tool: (params: Record<string, any>) => {
const model = params.model || 'gpt-4o'
if (!model) {
throw new Error('No model selected')
}
-const tool = getBaseModelProviders()[model as ProviderId]
+const tool = getAllModelProviders()[model]
if (!tool) {
throw new Error(`Invalid model selected: ${model}`)
}
return tool
},
},
@@ -94,6 +157,8 @@ export const TranslateBlock: BlockConfig = {
context: { type: 'string', description: 'Text to translate' },
targetLanguage: { type: 'string', description: 'Target language' },
apiKey: { type: 'string', description: 'Provider API key' },
+azureEndpoint: { type: 'string', description: 'Azure OpenAI endpoint URL' },
+azureApiVersion: { type: 'string', description: 'Azure API version' },
systemPrompt: { type: 'string', description: 'Translation instructions' },
},
outputs: {