From 92b53c88760664f363c0235fd370a9a6de0764a6 Mon Sep 17 00:00:00 2001
From: Kayvan Sylvan
Date: Thu, 12 Feb 2026 13:33:20 -0800
Subject: [PATCH 1/2] feat: add dynamic Abacus RouteLLM model fetching and
 update static model lists

- Add `fetchAbacusModels()` to dynamically retrieve models from the Abacus
  RouteLLM API
- Fall back to the static model list when the Abacus API fetch fails
- Add GPT-5 Codex variants (gpt-5-codex, gpt-5.1-codex, gpt-5.1-codex-max,
  gpt-5.2-codex) to the static models
- Add gpt-5.2 and gpt-5.2-chat-latest to the static model list
- Add claude-opus-4-6 and gemini-3-flash-preview to the static models
- Add kimi-k2.5, glm-4.7, and glm-5 to the static model list
- Rename `qwen/qwen3-Max` to `qwen3-max` (lowercased, vendor prefix dropped)
- Handle `static:abacus` as a special case in `ListModels` routing
---
 .../ai/openai_compatible/providers_config.go | 67 ++++++++++++++++++-
 1 file changed, 66 insertions(+), 1 deletion(-)

diff --git a/internal/plugins/ai/openai_compatible/providers_config.go b/internal/plugins/ai/openai_compatible/providers_config.go
index 39748f09..f3712c23 100644
--- a/internal/plugins/ai/openai_compatible/providers_config.go
+++ b/internal/plugins/ai/openai_compatible/providers_config.go
@@ -2,13 +2,18 @@ package openai_compatible
 
 import (
 	"context"
+	"encoding/json"
 	"fmt"
+	"net/http"
 	"os"
 	"strings"
+	"time"
 
 	"github.com/danielmiessler/fabric/internal/plugins/ai/openai"
 )
 
+const abacusRouteLLMModelsURL = "https://routellm.abacus.ai/api/v0/_listRouteLLMModels"
+
 // ProviderConfig defines the configuration for an OpenAI-compatible API provider
 type ProviderConfig struct {
 	Name string
@@ -41,6 +46,14 @@ func NewClient(providerConfig ProviderConfig) *Client {
 func (c *Client) ListModels() ([]string, error) {
 	// If a custom models URL is provided, handle it
 	if c.modelsURL != "" {
+		if c.modelsURL == "static:abacus" {
+			models, err := c.fetchAbacusModels()
+			if err == nil && len(models) > 0 {
+				return models, nil
+			}
+			return c.getStaticModels(c.modelsURL)
+		}
+
 		// Check for static model list
 		if strings.HasPrefix(c.modelsURL, "static:") {
 			return c.getStaticModels(c.modelsURL)
@@ -60,6 +73,47 @@ func (c *Client) ListModels() ([]string, error) {
 	return c.DirectlyGetModels(context.Background())
 }
 
+func (c *Client) fetchAbacusModels() ([]string, error) {
+	req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, abacusRouteLLMModelsURL, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	req.Header.Set("Accept", "application/json")
+	if c.Client.ApiKey.Value != "" {
+		req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.Client.ApiKey.Value))
+	}
+
+	httpClient := &http.Client{Timeout: 10 * time.Second}
+	resp, err := httpClient.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		return nil, fmt.Errorf("abacus models endpoint returned status %d", resp.StatusCode)
+	}
+
+	var response struct {
+		Result []struct {
+			Name string `json:"name"`
+		} `json:"result"`
+	}
+	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
+		return nil, err
+	}
+
+	models := make([]string, 0, len(response.Result))
+	for _, item := range response.Result {
+		if item.Name != "" {
+			models = append(models, item.Name)
+		}
+	}
+
+	return models, nil
+}
+
 // NeedsRawMode overrides the parent implementation to handle provider-specific raw mode requirements
 func (c *Client) NeedsRawMode(modelName string) bool {
 	// MiniMax models require raw mode for proper message formatting
@@ -88,8 +142,14 @@ func (c *Client) getStaticModels(modelsKey string) ([]string, error) {
 			"gpt-5",
"gpt-5-mini", "gpt-5-nano", + "gpt-5-codex", "gpt-5.1", + "gpt-5.1-codex", + "gpt-5.1-codex-max", "gpt-5.1-chat-latest", + "gpt-5.2", + "gpt-5.2-chat-latest", + "gpt-5.2-codex", "openai/gpt-oss-120b", "claude-3-7-sonnet-20250219", "claude-sonnet-4-20250514", @@ -98,6 +158,7 @@ func (c *Client) getStaticModels(modelsKey string) ([]string, error) { "claude-sonnet-4-5-20250929", "claude-haiku-4-5-20251001", "claude-opus-4-5-20251101", + "claude-opus-4-6", "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8", "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo", "meta-llama/Meta-Llama-3.1-70B-Instruct", @@ -108,24 +169,28 @@ func (c *Client) getStaticModels(modelsKey string) ([]string, error) { "gemini-2.5-pro", "gemini-2.5-flash", "gemini-3-pro-preview", + "gemini-3-flash-preview", "qwen-2.5-coder-32b", "Qwen/Qwen2.5-72B-Instruct", "Qwen/QwQ-32B", "Qwen/Qwen3-235B-A22B-Instruct-2507", "Qwen/Qwen3-32B", "qwen/qwen3-coder-480b-a35b-instruct", - "qwen/qwen3-Max", + "qwen3-max", "grok-4-0709", "grok-4-fast-non-reasoning", "grok-4-1-fast-non-reasoning", "grok-code-fast-1", "kimi-k2-turbo-preview", + "kimi-k2.5", "deepseek/deepseek-v3.1", "deepseek-ai/DeepSeek-V3.1-Terminus", "deepseek-ai/DeepSeek-R1", "deepseek-ai/DeepSeek-V3.2", "zai-org/glm-4.5", "zai-org/glm-4.6", + "zai-org/glm-4.7", + "zai-org/glm-5", }, nil case "static:minimax": return []string{ From 7cd8f9a434e554216442f8bcb62c45d156a9c52a Mon Sep 17 00:00:00 2001 From: Kayvan Sylvan Date: Thu, 12 Feb 2026 13:38:08 -0800 Subject: [PATCH 2/2] chore: incoming 1993 changelog entry --- cmd/generate_changelog/incoming/1993.txt | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 cmd/generate_changelog/incoming/1993.txt diff --git a/cmd/generate_changelog/incoming/1993.txt b/cmd/generate_changelog/incoming/1993.txt new file mode 100644 index 00000000..5bb045eb --- /dev/null +++ b/cmd/generate_changelog/incoming/1993.txt @@ -0,0 +1,7 @@ +### PR [#1993](https://github.com/danielmiessler/Fabric/pull/1993) by [ksylvan](https://github.com/ksylvan): Add dynamic Abacus RouteLLM model list and update fallback static list + +- Add dynamic model fetching from Abacus RouteLLM API with `fetchAbacusModels()` function and fallback to static list on failure +- Add GPT-5 Codex variants (5, 5.1, 5.1-max, 5.2) and GPT-5.2-chat-latest to static model list +- Add claude-opus-4-6, gemini-3-flash-preview, kimi-k2.5, glm-4.7, and glm-5 to static models +- Rename `qwen/qwen3-Max` to lowercase `qwen3-max` for consistency +- Handle `static:abacus` as special case in `ListModels` routing logic