Compare commits

...

13 Commits

Author SHA1 Message Date
github-actions[bot]
31e4e42a94 Update version to v1.4.180 and commit 2025-04-22 11:40:37 +00:00
Eugen Eisler
196db04fc2 Merge pull request #1435 from ksylvan/0421-fix-raw-input-with-stratetgies
chore: Fix user input handling when using raw mode and `--strategy` flag
2025-04-22 13:39:22 +02:00
Kayvan Sylvan
b3b1b5a471 chore: unify raw mode message handling and preserve env vars in extension executor
## CHANGES

- refactor BuildSession raw mode to prepend system to user content
- ensure raw mode messages always have User role
- keep existing user message when no systemMessage provided
- append systemMessage separately in non-raw mode sessions
- store original cmd.Env before context-based exec command creation
- recreate exec command with context then restore originalEnv
- add comments clarifying raw vs non-raw handling behavior
2025-04-21 17:04:11 -07:00
github-actions[bot]
ba2e178e03 Update version to v1.4.179 and commit 2025-04-21 18:08:47 +00:00
Eugen Eisler
ed298bcedd Merge pull request #1432 from ksylvan/0421-fix-tools-selection-in-setup
chore: fix fabric setup mess-up introduced by sorting lists (tools and models)
2025-04-21 20:07:33 +02:00
Kayvan Sylvan
6b04e6e674 chore: sort AI models alphabetically for consistent listing
CHANGES
*   Import `sort` and `strings` packages for sorting functionality.
*   Sort retrieved AI model names alphabetically, ignoring case.
*   Ensure consistent ordering of AI models in lists.
2025-04-21 10:41:41 -07:00
Kayvan Sylvan
04c0f6a0a5 chore: alphabetize the order of plugin tools 2025-04-21 10:26:04 -07:00
github-actions[bot]
f7ab484510 Update version to v1.4.178 and commit 2025-04-21 13:21:52 +00:00
Eugen Eisler
f50a14305a Merge pull request #1427 from ksylvan/0420-refactor-openai-compatible-providers
Refactor OpenAI-compatible AI providers and add `--listvendors` flag
2025-04-21 15:20:33 +02:00
github-actions[bot]
d5f0cd7616 Update version to v1.4.177 and commit 2025-04-21 07:10:40 +00:00
Eugen Eisler
67c658f5b4 Merge pull request #1428 from ksylvan/0420-sorted-group-lists
feat: Alphabetical case-insensitive sorting for groups and items
2025-04-21 09:09:21 +02:00
Kayvan Sylvan
8b2174897a feat: add alphabetical sorting to groups and items in Print method
### CHANGES
- Import `sort` and `strings` packages for sorting functionality.
- Create a copy of groups for stable sorting.
- Sort groups alphabetically in a case-insensitive manner.
- Create a copy of items within each group for sorting.
- Sort items alphabetically in a case-insensitive manner.
- Iterate over sorted groups and items for display.
2025-04-20 10:56:52 -07:00
Kayvan Sylvan
ac5eab0563 feat: add --listvendors command to list AI vendors
### CHANGES
- Introduce `--listvendors` flag to display all AI vendors.
- Refactor OpenAI-compatible providers into a unified configuration.
- Remove individual vendor packages for streamlined management.
- Add sorting for consistent vendor listing output.
- Update documentation to include new `--listvendors` option.
2025-04-20 08:53:20 -07:00
26 changed files with 216 additions and 230 deletions

View File

@@ -76,6 +76,7 @@ Fabric is graciously supported by…
- [Clipboard Support](#clipboard-support)
- [Meta](#meta)
- [Primary contributors](#primary-contributors)
- [Contributors](#contributors)
<br />
@@ -475,6 +476,7 @@ Application Options:
--rmextension= Remove a registered extension by name
--strategy= Choose a strategy from the available strategies
--liststrategies List all strategies
--listvendors List all vendors
Help Options:
-h, --help Show this help message
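The new flag can be exercised directly; a minimal usage sketch (vendor names will vary with the build, but per the `ListVendors` implementation further down, the output is an "Available Vendors:" header followed by one name per line, sorted case-insensitively):

```bash
# List every registered vendor, including all OpenAI-compatible providers
fabric --listvendors
```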

View File

@@ -1,20 +1,25 @@
# YAML Configuration Support
## Overview
Fabric now supports YAML configuration files for commonly used options. This allows users to persist settings and share configurations across multiple runs.
## Usage
Use the `--config` flag to specify a YAML configuration file:
```bash
fabric --config ~/.config/fabric/config.yaml "Tell me about APIs"
```
## Configuration Precedence
1. CLI flags (highest priority)
2. YAML config values
3. Default values (lowest priority)
## Supported Configuration Options
```yaml
# Model selection
model: gpt-4
@@ -36,6 +41,7 @@ raw: false
```
## Rules and Behavior
- Only long flag names are supported in YAML (e.g., `temperature` not `-t`)
- CLI flags always override YAML values
- Unknown YAML declarations are ignored
@@ -43,12 +49,15 @@ raw: false
- The order of YAML declarations doesn't matter
## Type Conversions
The following string-to-type conversions are supported:
- String to number: `"42"` → `42`
- String to float: `"42.5"` → `42.5`
- String to boolean: `"true"` → `true`
## Example Config
```yaml
# ~/.config/fabric/config.yaml
model: gpt-4
@@ -61,8 +70,8 @@ frequencypenalty: 0.2
```
## CLI Override Example
```bash
# Override temperature from config
fabric --config ~/.config/fabric/config.yaml --temperature 0.9 "Query"
```
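To make the type-conversion rules above concrete, here is a small, purely illustrative snippet; `temperature` and `raw` appear in the documentation above, while `seed` is used only as an integer-valued placeholder:

```yaml
# Quoted scalars are coerced to the flag's native type
temperature: "0.7"   # string -> float 0.7
seed: "42"           # string -> number 42
raw: "true"          # string -> boolean true
```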

View File

@@ -164,6 +164,11 @@ func Cli(version string) (err error) {
return
}
if currentFlags.ListVendors {
err = registry.ListVendors(os.Stdout)
return
}
// if the interactive flag is set, run the interactive function
// if currentFlags.Interactive {
// interactive.Interactive()
@@ -211,7 +216,9 @@ func Cli(version string) (err error) {
return
}
messageTools, err = processYoutubeVideo(currentFlags, registry, videoId)
if messageTools, err = processYoutubeVideo(currentFlags, registry, videoId); err != nil {
return
}
if !currentFlags.IsChatRequest() {
err = currentFlags.WriteOutput(messageTools)
return

View File

@@ -72,6 +72,7 @@ type Flags struct {
RemoveExtension string `long:"rmextension" description:"Remove a registered extension by name"`
Strategy string `long:"strategy" description:"Choose a strategy from the available strategies" default:""`
ListStrategies bool `long:"liststrategies" description:"List all strategies"`
ListVendors bool `long:"listvendors" description:"List all vendors"`
}
var debug = false
@@ -334,7 +335,6 @@ func (o *Flags) BuildChatRequest(Meta string) (ret *common.ChatRequest, err erro
func (o *Flags) AppendMessage(message string) {
o.Message = AppendMessage(o.Message, message)
return
}
func (o *Flags) IsChatRequest() (ret bool) {
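For context on how the new `ListVendors` field above becomes a CLI switch, here is a minimal sketch assuming the `jessevdk/go-flags` parser that these struct tags correspond to (the trimmed struct is illustrative, not the real `Flags` type):

```go
package main

import (
	"fmt"

	flags "github.com/jessevdk/go-flags"
)

// opts keeps only the newly added field for illustration.
type opts struct {
	ListVendors bool `long:"listvendors" description:"List all vendors"`
}

func main() {
	var o opts
	// ParseArgs maps --listvendors onto the bool field via its struct tag.
	if _, err := flags.ParseArgs(&o, []string{"--listvendors"}); err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println("ListVendors:", o.ListVendors) // ListVendors: true
}
```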

View File

@@ -2,6 +2,8 @@ package common
import (
"fmt"
"sort"
"strings"
"github.com/samber/lo"
)
@@ -71,15 +73,31 @@ func (o *GroupsItemsSelector[I]) Print() {
fmt.Printf("\n%v:\n", o.SelectionLabel)
var currentItemIndex int
for _, groupItems := range o.GroupsItems {
// Create a copy of groups to sort
sortedGroupsItems := make([]*GroupItems[I], len(o.GroupsItems))
copy(sortedGroupsItems, o.GroupsItems)
// Sort groups alphabetically case-insensitive
sort.SliceStable(sortedGroupsItems, func(i, j int) bool {
return strings.ToLower(sortedGroupsItems[i].Group) < strings.ToLower(sortedGroupsItems[j].Group)
})
for _, groupItems := range sortedGroupsItems {
fmt.Println()
fmt.Printf("%s\n", groupItems.Group)
fmt.Println()
for _, item := range groupItems.Items {
// Create a copy of items to sort
sortedItems := make([]I, len(groupItems.Items))
copy(sortedItems, groupItems.Items)
// Sort items alphabetically case-insensitive
sort.SliceStable(sortedItems, func(i, j int) bool {
return strings.ToLower(o.GetItemKey(sortedItems[i])) < strings.ToLower(o.GetItemKey(sortedItems[j]))
})
for _, item := range sortedItems {
currentItemIndex++
fmt.Printf("\t[%d]\t%s\n", currentItemIndex, o.GetItemKey(item))
}
}
}
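The sorting pattern used above (copy the slice, then `sort.SliceStable` with a `strings.ToLower` comparison) in an isolated, runnable form, with invented sample data:

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	groups := []string{"Tools", "anthropic", "OpenAI", "gemini"}

	// Work on a copy so the caller's ordering is left untouched.
	sorted := make([]string, len(groups))
	copy(sorted, groups)

	// Case-insensitive and stable: keys that compare equal keep their relative order.
	sort.SliceStable(sorted, func(i, j int) bool {
		return strings.ToLower(sorted[i]) < strings.ToLower(sorted[j])
	})

	fmt.Println(sorted) // [anthropic gemini OpenAI Tools]
}
```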

View File

@@ -192,17 +192,19 @@ func (o *Chatter) BuildSession(request *common.ChatRequest, raw bool) (session *
}
if raw {
if request.Message != nil {
if systemMessage != "" {
request.Message.Content = systemMessage
// system contains pattern which contains user input
// In raw mode, combine system message (potentially with strategy) and user message into a single user message
if systemMessage != "" {
if request.Message != nil {
// Prepend system message to user content, ensuring user input is preserved
request.Message.Content = fmt.Sprintf("%s\n\n%s", systemMessage, request.Message.Content)
request.Message.Role = goopenai.ChatMessageRoleUser // Ensure role is User in raw mode
} else {
// If no user message, create one with the system content, marked as User role
request.Message = &goopenai.ChatCompletionMessage{Role: goopenai.ChatMessageRoleUser, Content: systemMessage}
}
} else {
if systemMessage != "" {
request.Message = &goopenai.ChatCompletionMessage{Role: goopenai.ChatMessageRoleSystem, Content: systemMessage}
}
}
} // else: no system message, user message (if any) remains unchanged
} else {
// Not raw mode, append system message separately if it exists
if systemMessage != "" {
session.Append(&goopenai.ChatCompletionMessage{Role: goopenai.ChatMessageRoleSystem, Content: systemMessage})
}
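To make the new raw-mode behaviour easier to follow, here is a self-contained sketch of the message shaping (the struct is simplified; the real code uses the go-openai message type):

```go
package main

import "fmt"

type message struct {
	Role    string
	Content string
}

// buildRawMessage mirrors the raw-mode branch above: any system/pattern text is
// prepended to the user's input, and the result is always sent with the User role.
func buildRawMessage(systemMessage string, userMsg *message) *message {
	if systemMessage == "" {
		return userMsg // nothing to merge; the user message passes through unchanged
	}
	if userMsg != nil {
		return &message{
			Role:    "user",
			Content: fmt.Sprintf("%s\n\n%s", systemMessage, userMsg.Content),
		}
	}
	// No user message: the system content itself becomes the user message.
	return &message{Role: "user", Content: systemMessage}
}

func main() {
	out := buildRawMessage("summarize the following", &message{Role: "user", Content: "raw input text"})
	fmt.Printf("%s: %q\n", out.Role, out.Content)
}
```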

View File

@@ -3,12 +3,14 @@ package core
import (
"bytes"
"fmt"
"io"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"github.com/danielmiessler/fabric/plugins/ai/exolab"
"github.com/danielmiessler/fabric/plugins/ai/grokai"
"github.com/danielmiessler/fabric/plugins/strategy"
"github.com/samber/lo"
@@ -18,18 +20,12 @@ import (
"github.com/danielmiessler/fabric/plugins/ai"
"github.com/danielmiessler/fabric/plugins/ai/anthropic"
"github.com/danielmiessler/fabric/plugins/ai/azure"
"github.com/danielmiessler/fabric/plugins/ai/cerebras"
"github.com/danielmiessler/fabric/plugins/ai/deepseek"
"github.com/danielmiessler/fabric/plugins/ai/dryrun"
"github.com/danielmiessler/fabric/plugins/ai/gemini"
"github.com/danielmiessler/fabric/plugins/ai/groq"
"github.com/danielmiessler/fabric/plugins/ai/litellm"
"github.com/danielmiessler/fabric/plugins/ai/lmstudio"
"github.com/danielmiessler/fabric/plugins/ai/mistral"
"github.com/danielmiessler/fabric/plugins/ai/ollama"
"github.com/danielmiessler/fabric/plugins/ai/openai"
"github.com/danielmiessler/fabric/plugins/ai/openrouter"
"github.com/danielmiessler/fabric/plugins/ai/siliconcloud"
"github.com/danielmiessler/fabric/plugins/ai/openai_compatible"
"github.com/danielmiessler/fabric/plugins/db/fsdb"
"github.com/danielmiessler/fabric/plugins/template"
"github.com/danielmiessler/fabric/plugins/tools"
@@ -58,29 +54,49 @@ func NewPluginRegistry(db *fsdb.Db) (ret *PluginRegistry, err error) {
ret.Defaults = tools.NeeDefaults(ret.GetModels)
ret.VendorsAll.AddVendors(
// Create a vendors slice to hold all vendors (order doesn't matter initially)
vendors := []ai.Vendor{}
// Add non-OpenAI compatible clients
vendors = append(vendors,
openai.NewClient(),
ollama.NewClient(),
azure.NewClient(),
groq.NewClient(),
gemini.NewClient(),
//gemini_openai.NewClient(),
anthropic.NewClient(),
siliconcloud.NewClient(),
openrouter.NewClient(),
lmstudio.NewClient(),
mistral.NewClient(),
deepseek.NewClient(),
exolab.NewClient(),
litellm.NewClient(),
grokai.NewClient(),
cerebras.NewClient(),
)
// Add all OpenAI-compatible providers
for providerName := range openai_compatible.ProviderMap {
provider, _ := openai_compatible.GetProviderByName(providerName)
vendors = append(vendors, openai_compatible.NewClient(provider))
}
// Sort vendors by name for consistent ordering (case-insensitive)
sort.Slice(vendors, func(i, j int) bool {
return strings.ToLower(vendors[i].GetName()) < strings.ToLower(vendors[j].GetName())
})
// Add all sorted vendors to VendorsAll
ret.VendorsAll.AddVendors(vendors...)
_ = ret.Configure()
return
}
func (o *PluginRegistry) ListVendors(out io.Writer) error {
vendors := lo.Map(o.VendorsAll.Vendors, func(vendor ai.Vendor, _ int) string {
return vendor.GetName()
})
fmt.Fprint(out, "Available Vendors:\n\n")
for _, vendor := range vendors {
fmt.Fprintf(out, "%s\n", vendor)
}
return nil
}
type PluginRegistry struct {
Db *fsdb.Db
@@ -133,7 +149,7 @@ func (o *PluginRegistry) Setup() (err error) {
return vendor
})...)
groupsPlugins.AddGroupItems("Tools", o.Defaults, o.PatternsLoader, o.YouTube, o.Language, o.Jina, o.Strategies)
groupsPlugins.AddGroupItems("Tools", o.Defaults, o.Jina, o.Language, o.PatternsLoader, o.Strategies, o.YouTube)
for {
groupsPlugins.Print()
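With the providers consolidated behind `ProviderMap`, wiring in another OpenAI-compatible service is a single map entry rather than a new package; a hypothetical example (provider name and endpoint are invented for illustration):

```go
package main

import (
	"fmt"

	"github.com/danielmiessler/fabric/plugins/ai/openai_compatible"
)

func main() {
	// Hypothetical provider, added only to illustrate the mechanism.
	openai_compatible.ProviderMap["ExampleAI"] = openai_compatible.ProviderConfig{
		Name:    "ExampleAI",
		BaseURL: "https://api.example-ai.dev/v1",
	}

	// CreateClient resolves the entry with an O(1) lookup, just like the
	// built-in providers the registry iterates over at startup.
	if client, ok := openai_compatible.CreateClient("ExampleAI"); ok {
		fmt.Println("created client:", client.GetName())
	}
}
```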

View File

@@ -1 +1 @@
"1.4.176"
"1.4.180"

View File

@@ -1,18 +0,0 @@
// File: plugins/ai/cerebras/cerebras.go
package cerebras
import (
"github.com/danielmiessler/fabric/plugins/ai/openai"
)
// NewClient initializes and returns a new Cerebras Client.
func NewClient() (ret *Client) {
ret = &Client{}
ret.Client = openai.NewClientCompatible("Cerebras", "https://api.cerebras.ai/v1", nil)
return
}
// Client wraps the openai.Client to provide additional functionality specific to Cerebras.
type Client struct {
*openai.Client
}

View File

@@ -1,27 +0,0 @@
// File: plugins/ai/cerebras/cerebras_test.go
package cerebras
import (
"testing"
)
// Test the client initialization
func TestNewClient_EmbeddedClientNotNil(t *testing.T) {
client := NewClient()
if client.Client == nil {
t.Fatalf("Expected embedded openai.Client to be non-nil, got nil")
}
}
// Test the client name and URL configuration
func TestNewClient_ConfiguredCorrectly(t *testing.T) {
client := NewClient()
if client.GetName() != "Cerebras" {
t.Errorf("Expected client name to be 'Cerebras', got '%s'", client.GetName())
}
// Check if the ApiBaseURL is set correctly
if client.ApiBaseURL.Value != "https://api.cerebras.ai/v1" {
t.Errorf("Expected base URL to be 'https://api.cerebras.ai/v1', got '%s'", client.ApiBaseURL.Value)
}
}

View File

@@ -1,15 +0,0 @@
package deepseek
import (
"github.com/danielmiessler/fabric/plugins/ai/openai"
)
func NewClient() (ret *Client) {
ret = &Client{}
ret.Client = openai.NewClientCompatible("DeepSeek", "https://api.deepseek.com", nil)
return
}
type Client struct {
*openai.Client
}

View File

@@ -1,13 +0,0 @@
package deepseek
// Test generated using Keploy
import (
"testing"
)
func TestNewClient_EmbeddedClientNotNil(t *testing.T) {
client := NewClient()
if client.Client == nil {
t.Fatalf("Expected embedded openai.Client to be non-nil, got nil")
}
}

View File

@@ -1,15 +0,0 @@
package grokai
import (
"github.com/danielmiessler/fabric/plugins/ai/openai"
)
func NewClient() (ret *Client) {
ret = &Client{}
ret.Client = openai.NewClientCompatible("GrokAI", "https://api.x.ai/v1", nil)
return
}
type Client struct {
*openai.Client
}

View File

@@ -1,13 +0,0 @@
package grokai
// Test generated using Keploy
import (
"testing"
)
func TestNewClient_EmbeddedClientNotNil(t *testing.T) {
client := NewClient()
if client.Client == nil {
t.Fatalf("Expected embedded openai.Client to be non-nil, got nil")
}
}

View File

@@ -1,17 +0,0 @@
package groq
import (
"github.com/danielmiessler/fabric/plugins/ai/openai"
)
// NewClient initializes and returns a new Groq Client.
func NewClient() (ret *Client) {
ret = &Client{}
ret.Client = openai.NewClientCompatible("Groq", "https://api.groq.com/openai/v1", nil)
return
}
// Client wraps the openai.Client to provide additional functionality specific to Groq.
type Client struct {
*openai.Client
}

View File

@@ -1,13 +0,0 @@
package groq
// Test generated using Keploy
import (
"testing"
)
func TestNewClientEmbeddedClientNotNil(t *testing.T) {
client := NewClient()
if client.Client == nil {
t.Fatalf("Expected embedded openai.Client to be non-nil, got nil")
}
}

View File

@@ -1,15 +0,0 @@
package litellm
import (
"github.com/danielmiessler/fabric/plugins/ai/openai"
)
func NewClient() (ret *Client) {
ret = &Client{}
ret.Client = openai.NewClientCompatible("LiteLLM", "http://localhost:4000", nil)
return
}
type Client struct {
*openai.Client
}

View File

@@ -1,15 +0,0 @@
package mistral
import (
"github.com/danielmiessler/fabric/plugins/ai/openai"
)
func NewClient() (ret *Client) {
ret = &Client{}
ret.Client = openai.NewClientCompatible("Mistral", "https://api.mistral.ai/v1", nil)
return
}
type Client struct {
*openai.Client
}

View File

@@ -0,0 +1,74 @@
package openai_compatible
import (
"github.com/danielmiessler/fabric/plugins/ai/openai"
)
// ProviderConfig defines the configuration for an OpenAI-compatible API provider
type ProviderConfig struct {
Name string
BaseURL string
}
// Client is the common structure for all OpenAI-compatible providers
type Client struct {
*openai.Client
}
// NewClient creates a new OpenAI-compatible client for the specified provider
func NewClient(providerConfig ProviderConfig) *Client {
client := &Client{}
client.Client = openai.NewClientCompatible(providerConfig.Name, providerConfig.BaseURL, nil)
return client
}
// ProviderMap is a map of provider name to ProviderConfig for O(1) lookup
var ProviderMap = map[string]ProviderConfig{
"Mistral": {
Name: "Mistral",
BaseURL: "https://api.mistral.ai/v1",
},
"LiteLLM": {
Name: "LiteLLM",
BaseURL: "http://localhost:4000",
},
"Groq": {
Name: "Groq",
BaseURL: "https://api.groq.com/openai/v1",
},
"GrokAI": {
Name: "GrokAI",
BaseURL: "https://api.x.ai/v1",
},
"DeepSeek": {
Name: "DeepSeek",
BaseURL: "https://api.deepseek.com",
},
"Cerebras": {
Name: "Cerebras",
BaseURL: "https://api.cerebras.ai/v1",
},
"OpenRouter": {
Name: "OpenRouter",
BaseURL: "https://openrouter.ai/api/v1",
},
"SiliconCloud": {
Name: "SiliconCloud",
BaseURL: "https://api.siliconflow.cn/v1",
},
}
// GetProviderByName returns the provider configuration for a given name with O(1) lookup
func GetProviderByName(name string) (ProviderConfig, bool) {
provider, found := ProviderMap[name]
return provider, found
}
// CreateClient creates a new client for a provider by name
func CreateClient(providerName string) (*Client, bool) {
providerConfig, found := GetProviderByName(providerName)
if !found {
return nil, false
}
return NewClient(providerConfig), true
}

View File

@@ -0,0 +1,42 @@
package openai_compatible
import (
"testing"
)
func TestCreateClient(t *testing.T) {
testCases := []struct {
name string
provider string
exists bool
}{
{
name: "Existing provider - Mistral",
provider: "Mistral",
exists: true,
},
{
name: "Existing provider - Groq",
provider: "Groq",
exists: true,
},
{
name: "Non-existent provider",
provider: "NonExistent",
exists: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
client, exists := CreateClient(tc.provider)
if exists != tc.exists {
t.Errorf("Expected exists=%v for provider %s, got %v",
tc.exists, tc.provider, exists)
}
if exists && client == nil {
t.Errorf("Expected non-nil client for provider %s", tc.provider)
}
})
}
}

View File

@@ -1,16 +0,0 @@
package openrouter
import (
"github.com/danielmiessler/fabric/plugins/ai/openai"
)
func NewClient() (ret *Client) {
ret = &Client{}
ret.Client = openai.NewClientCompatible("OpenRouter", "https://openrouter.ai/api/v1", nil)
return
}
type Client struct {
*openai.Client
}

View File

@@ -1 +0,0 @@
package openrouter

View File

@@ -1,15 +0,0 @@
package siliconcloud
import (
"github.com/danielmiessler/fabric/plugins/ai/openai"
)
func NewClient() (ret *Client) {
ret = &Client{}
ret.Client = openai.NewClientCompatible("SiliconCloud", "https://api.siliconflow.cn/v1", nil)
return
}
type Client struct {
*openai.Client
}

View File

@@ -4,6 +4,8 @@ import (
"bytes"
"context"
"fmt"
"sort"
"strings"
"sync"
"github.com/danielmiessler/fabric/plugins"
@@ -95,6 +97,9 @@ func (o *VendorsManager) readModels() (err error) {
if result.err != nil {
fmt.Println(result.vendorName, result.err)
} else {
sort.Slice(result.models, func(i, j int) bool {
return strings.ToLower(result.models[i]) < strings.ToLower(result.models[j])
})
o.Models.AddGroupItems(result.vendorName, result.models...)
}
}

View File

@@ -117,8 +117,12 @@ func (e *ExtensionExecutor) executeWithFile(cmd *exec.Cmd, ext *ExtensionDefinit
// Create context with timeout
ctx, cancel := context.WithTimeout(context.Background(), timeout)
defer cancel()
// Store the original environment
originalEnv := cmd.Env
// Create a new command with context. This might reset Env, depending on the Go version.
cmd = exec.CommandContext(ctx, cmd.Path, cmd.Args[1:]...)
cmd.Env = cmd.Env
// Restore the environment variables explicitly
cmd.Env = originalEnv
fileConfig := ext.GetFileConfig()
if fileConfig == nil {
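The environment fix above matters because `exec.CommandContext` returns a brand-new `*exec.Cmd`, so anything set on the previous value (such as `Env`) is silently dropped unless it is copied across; a minimal sketch of the save-and-restore pattern (command and variable are placeholders):

```go
package main

import (
	"context"
	"fmt"
	"os"
	"os/exec"
	"time"
)

func main() {
	// Original command carries a customized environment.
	cmd := exec.Command("env")
	cmd.Env = append(os.Environ(), "EXTENSION_VAR=example") // placeholder variable

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Recreating the command with a context yields a fresh *exec.Cmd, so the
	// environment must be saved beforehand and restored afterwards.
	originalEnv := cmd.Env
	cmd = exec.CommandContext(ctx, cmd.Path, cmd.Args[1:]...)
	cmd.Env = originalEnv

	out, err := cmd.Output()
	if err != nil {
		fmt.Println("run failed:", err)
		return
	}
	fmt.Printf("%s", out) // EXTENSION_VAR=example appears among the variables
}
```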

View File

@@ -1,3 +1,3 @@
package main
var version = "v1.4.176"
var version = "v1.4.180"