improvement(models): system to blacklist models (#1687)

* improvement(models): system to blacklist models

* move base models endpoint to right place

* remove tngtech models too
This commit is contained in:
Vikhyath Mondreti
2025-10-18 12:09:18 -07:00
committed by GitHub
parent 728a4c82c6
commit de1ac9a704
11 changed files with 94 additions and 24 deletions

View File

@@ -0,0 +1,11 @@
import { NextResponse } from 'next/server'
import { getBaseModelProviders } from '@/providers/utils'
/**
 * GET /api/providers/base/models
 *
 * Returns the IDs of all base-provider models (already filtered for
 * blacklisted entries inside getBaseModelProviders).
 *
 * Response shape: { models: string[] } — on failure, an empty list plus
 * an error marker with HTTP 500.
 */
export async function GET() {
  try {
    const modelIds = Object.keys(getBaseModelProviders())
    return NextResponse.json({ models: modelIds })
  } catch (error) {
    // Degrade gracefully: callers always receive a models array.
    return NextResponse.json({ models: [], error: 'Failed to fetch models' }, { status: 500 })
  }
}

View File

@@ -1,10 +1,19 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { filterBlacklistedModels } from '@/providers/utils'
const logger = createLogger('OpenRouterModelsAPI')
export const dynamic = 'force-dynamic'
interface OpenRouterModel {
id: string
}
interface OpenRouterResponse {
data: OpenRouterModel[]
}
export async function GET(_request: NextRequest) {
try {
const response = await fetch('https://openrouter.ai/api/v1/models', {
@@ -20,20 +29,13 @@ export async function GET(_request: NextRequest) {
return NextResponse.json({ models: [] })
}
const data = await response.json()
const models = Array.isArray(data?.data)
? Array.from(
new Set(
data.data
.map((m: any) => m?.id)
.filter((id: unknown): id is string => typeof id === 'string' && id.length > 0)
.map((id: string) => `openrouter/${id}`)
)
)
: []
const data = (await response.json()) as OpenRouterResponse
const allModels = Array.from(new Set(data.data?.map((model) => `openrouter/${model.id}`) ?? []))
const models = filterBlacklistedModels(allModels)
logger.info('Successfully fetched OpenRouter models', {
count: models.length,
filtered: allModels.length - models.length,
})
return NextResponse.json({ models })

View File

@@ -5,7 +5,6 @@ import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import {
getAllModelProviders,
getBaseModelProviders,
getHostedModels,
getMaxTemperature,
getProviderIcon,
@@ -164,9 +163,9 @@ Create a system prompt appropriately detailed for the request, using clear langu
required: true,
options: () => {
const providersState = useProvidersStore.getState()
const baseModels = providersState.providers.base.models
const ollamaModels = providersState.providers.ollama.models
const openrouterModels = providersState.providers.openrouter.models
const baseModels = Object.keys(getBaseModelProviders())
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
return allModels.map((model) => {

View File

@@ -5,7 +5,6 @@ import type { BlockConfig, ParamType } from '@/blocks/types'
import type { ProviderId } from '@/providers/types'
import {
getAllModelProviders,
getBaseModelProviders,
getHostedModels,
getProviderIcon,
providers,
@@ -189,9 +188,9 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
required: true,
options: () => {
const providersState = useProvidersStore.getState()
const baseModels = providersState.providers.base.models
const ollamaModels = providersState.providers.ollama.models
const openrouterModels = providersState.providers.openrouter.models
const baseModels = Object.keys(getBaseModelProviders())
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
return allModels.map((model) => {

View File

@@ -1,7 +1,7 @@
import { ShieldCheckIcon } from '@/components/icons'
import { isHosted } from '@/lib/environment'
import type { BlockConfig } from '@/blocks/types'
import { getBaseModelProviders, getHostedModels, getProviderIcon } from '@/providers/utils'
import { getHostedModels, getProviderIcon } from '@/providers/utils'
import { useProvidersStore } from '@/stores/providers/store'
import type { ToolResponse } from '@/tools/types'
@@ -99,9 +99,9 @@ export const GuardrailsBlock: BlockConfig<GuardrailsResponse> = {
required: true,
options: () => {
const providersState = useProvidersStore.getState()
const baseModels = providersState.providers.base.models
const ollamaModels = providersState.providers.ollama.models
const openrouterModels = providersState.providers.openrouter.models
const baseModels = Object.keys(getBaseModelProviders())
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
return allModels.map((model) => {

View File

@@ -4,7 +4,6 @@ import { AuthMode, type BlockConfig } from '@/blocks/types'
import type { ProviderId } from '@/providers/types'
import {
getAllModelProviders,
getBaseModelProviders,
getHostedModels,
getProviderIcon,
providers,
@@ -136,9 +135,9 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
required: true,
options: () => {
const providersState = useProvidersStore.getState()
const baseModels = providersState.providers.base.models
const ollamaModels = providersState.providers.ollama.models
const openrouterModels = providersState.providers.openrouter.models
const baseModels = Object.keys(getBaseModelProviders())
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
return allModels.map((model) => {

View File

@@ -3,7 +3,6 @@ import { isHosted } from '@/lib/environment'
import { AuthMode, type BlockConfig } from '@/blocks/types'
import {
getAllModelProviders,
getBaseModelProviders,
getHostedModels,
getProviderIcon,
providers,
@@ -61,9 +60,9 @@ export const TranslateBlock: BlockConfig = {
required: true,
options: () => {
const providersState = useProvidersStore.getState()
const baseModels = providersState.providers.base.models
const ollamaModels = providersState.providers.ollama.models
const openrouterModels = providersState.providers.openrouter.models
const baseModels = Object.keys(getBaseModelProviders())
const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))
return allModels.map((model) => {

View File

@@ -79,6 +79,7 @@ export const env = createEnv({
OLLAMA_URL: z.string().url().optional(), // Ollama local LLM server URL
ELEVENLABS_API_KEY: z.string().min(1).optional(), // ElevenLabs API key for text-to-speech in deployed chat
SERPER_API_KEY: z.string().min(1).optional(), // Serper API key for online search
DEEPSEEK_MODELS_ENABLED: z.boolean().optional().default(false), // Enable Deepseek models in UI (defaults to false for compliance)
// Azure Configuration - Shared credentials with feature-specific models
AZURE_OPENAI_ENDPOINT: z.string().url().optional(), // Shared Azure OpenAI service endpoint

View File

@@ -1,3 +1,4 @@
import { getEnv, isTruthy } from '@/lib/env'
import { isHosted } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { anthropicProvider } from '@/providers/anthropic'
@@ -129,8 +130,8 @@ export async function updateOpenRouterProviderModels(models: string[]): Promise<
}
export function getBaseModelProviders(): Record<string, ProviderId> {
return Object.entries(providers)
.filter(([providerId]) => providerId !== 'ollama')
const allProviders = Object.entries(providers)
.filter(([providerId]) => providerId !== 'ollama' && providerId !== 'openrouter')
.reduce(
(map, [providerId, config]) => {
config.models.forEach((model) => {
@@ -140,6 +141,20 @@ export function getBaseModelProviders(): Record<string, ProviderId> {
},
{} as Record<string, ProviderId>
)
return filterBlacklistedModelsFromProviderMap(allProviders)
}
/**
 * Removes blacklisted entries from a model → provider map.
 *
 * @param providerMap map keyed by model ID with its owning provider ID
 * @returns a new map containing only the models that pass isModelBlacklisted;
 *          provider IDs are passed through untouched
 */
function filterBlacklistedModelsFromProviderMap(
  providerMap: Record<string, ProviderId>
): Record<string, ProviderId> {
  return Object.fromEntries(
    Object.entries(providerMap).filter(([model]) => !isModelBlacklisted(model))
  )
}
export function getAllModelProviders(): Record<string, ProviderId> {
@@ -197,6 +212,44 @@ export function getProviderModels(providerId: ProviderId): string[] {
return getProviderModelsFromDefinitions(providerId)
}
// One group of blocked models. All comparisons against these entries are
// case-insensitive (isModelBlacklisted lowercases the model ID first).
interface ModelBlacklist {
  // Exact model IDs to block (stored lowercase).
  models: string[]
  // Model-ID prefixes to block, e.g. 'openrouter/deepseek' also blocks
  // 'openrouter/deepseek-ai/...'.
  prefixes: string[]
  // Optional env var name: when its value is truthy, this whole group is
  // skipped and the models become available again.
  envOverride?: string
}
// Groups of models withheld from the UI/model lists unless explicitly
// re-enabled via their env override.
const MODEL_BLACKLISTS: ModelBlacklist[] = [
  {
    models: ['deepseek-chat', 'deepseek-v3', 'deepseek-r1'],
    prefixes: ['openrouter/deepseek', 'openrouter/tngtech'],
    envOverride: 'DEEPSEEK_MODELS_ENABLED',
  },
]
/**
 * Returns true when the given model matches any active blacklist group.
 *
 * Matching is case-insensitive and succeeds on either an exact ID match or a
 * prefix match. A group whose env override resolves to a truthy value is
 * ignored entirely, re-enabling its models.
 */
function isModelBlacklisted(model: string): boolean {
  const normalized = model.toLowerCase()
  return MODEL_BLACKLISTS.some((blacklist) => {
    // A truthy override switches this blacklist group off.
    if (blacklist.envOverride && isTruthy(getEnv(blacklist.envOverride))) {
      return false
    }
    return (
      blacklist.models.includes(normalized) ||
      blacklist.prefixes.some((prefix) => normalized.startsWith(prefix))
    )
  })
}
/**
 * Drops every blacklisted model from the given list.
 *
 * @param models candidate model IDs
 * @returns a new array with blacklisted IDs removed, original order preserved
 */
export function filterBlacklistedModels(models: string[]): string[] {
  const allowed: string[] = []
  for (const model of models) {
    if (!isModelBlacklisted(model)) {
      allowed.push(model)
    }
  }
  return allowed
}
/**
* Get provider icon for a given model
*/

View File

@@ -6,6 +6,11 @@ import type { ProviderConfig, ProviderName, ProvidersStore } from './types'
const logger = createLogger('ProvidersStore')
const PROVIDER_CONFIGS: Record<ProviderName, ProviderConfig> = {
base: {
apiEndpoint: '/api/providers/base/models',
dedupeModels: true,
updateFunction: () => {},
},
ollama: {
apiEndpoint: '/api/providers/ollama/models',
updateFunction: updateOllamaProviderModels,
@@ -42,6 +47,7 @@ const fetchProviderModels = async (provider: ProviderName): Promise<string[]> =>
export const useProvidersStore = create<ProvidersStore>((set, get) => ({
providers: {
base: { models: [], isLoading: false },
ollama: { models: [], isLoading: false },
openrouter: { models: [], isLoading: false },
},
@@ -120,6 +126,7 @@ export const useProvidersStore = create<ProvidersStore>((set, get) => ({
if (typeof window !== 'undefined') {
setTimeout(() => {
const store = useProvidersStore.getState()
store.fetchModels('base')
store.fetchModels('ollama')
store.fetchModels('openrouter')
}, 1000)

View File

@@ -1,4 +1,4 @@
export type ProviderName = 'ollama' | 'openrouter'
export type ProviderName = 'ollama' | 'openrouter' | 'base'
export interface ProviderState {
models: string[]