feat: add -V/--vendor flag and vendor-aware model selection

CHANGES
- Add -V/--vendor flag to specify model vendor
- Implement vendor-aware model resolution and availability validation
- Warn on ambiguous models; suggest --vendor to disambiguate
- Update bash, zsh, fish completions with vendor suggestions
- Extend --listmodels to print vendor|model when interactive
- Add VendorsModels.PrintWithVendor; sort vendors and models alphabetically
- Pass vendor through API; update server chat handler
- Standardize docs and errors to --yt-dlp-args="..." syntax
- Add test covering ambiguous model warning across multiple vendors
- Promote go-shellquote to direct dependency in go.mod
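Example usage (illustrative sketch: the vendor and model names come from the new help-text example; the sample warning and error text mirror the messages added in GetChatter; the second vendor named in the sample output is hypothetical):

```bash
# Pin the vendor explicitly when several vendors expose the same model name
pbpaste | fabric -V "LM Studio" -m openai/gpt-oss-20b --pattern summarize

# Without --vendor, an ambiguous model prints a warning on stderr and uses the
# first matching vendor, for example:
#   Warning: multiple vendors provide model openai/gpt-oss-20b: LM Studio, OpenAI. Using LM Studio. Specify --vendor to select a vendor.

# Naming a vendor that does not provide the model fails with an error such as:
#   model openai/gpt-oss-20b not available for vendor OpenAI
```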
Author: Kayvan Sylvan
Date: 2025-08-12 06:39:02 -07:00
Parent: 0f994d8136
Commit: cc3e4226d7
19 changed files with 205 additions and 56 deletions

View File

@@ -498,6 +498,7 @@ Application Options:
-U, --updatepatterns Update patterns
-c, --copy Copy to clipboard
-m, --model= Choose model
-V, --vendor= Specify vendor for chosen model (e.g., -V "LM Studio" -m openai/gpt-oss-20b)
--modelContextLength= Model context length (only affects ollama)
-o, --output= Output to file
--output-session Output the entire session (also a temporary one) to the output file

View File

@@ -17,6 +17,13 @@ _fabric_models() {
compadd -X "Models:" ${models}
}
_fabric_vendors() {
local -a vendors
local cmd=${words[1]}
vendors=(${(f)"$($cmd --listvendors --shell-complete-list 2>/dev/null)"})
compadd -X "Vendors:" ${vendors}
}
_fabric_contexts() {
local -a contexts
local cmd=${words[1]}
@@ -76,6 +83,7 @@ _fabric() {
'(-U --updatepatterns)'{-U,--updatepatterns}'[Update patterns]' \
'(-c --copy)'{-c,--copy}'[Copy to clipboard]' \
'(-m --model)'{-m,--model}'[Choose model]:model:_fabric_models' \
'(-V --vendor)'{-V,--vendor}'[Specify vendor for chosen model (e.g., -V "LM Studio" -m openai/gpt-oss-20b)]:vendor:_fabric_vendors' \
'(--modelContextLength)--modelContextLength[Model context length (only affects ollama)]:length:' \
'(-o --output)'{-o,--output}'[Output to file]:file:_files' \
'(--output-session)--output-session[Output the entire session to the output file]' \

View File

@@ -13,7 +13,7 @@ _fabric() {
_get_comp_words_by_ref -n : cur prev words cword
# Define all possible options/flags
local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --yt-dlp-args --language -g --scrape_url -u --scrape_question -q --seed -e --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --suppress-think --think-start-tag --think-end-tag --disable-responses-api --voice --list-gemini-voices --notification --notification-command --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"
local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --vendor -V --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --yt-dlp-args --language -g --scrape_url -u --scrape_question -q --seed -e --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --suppress-think --think-start-tag --think-end-tag --disable-responses-api --voice --list-gemini-voices --notification --notification-command --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"
# Helper function for dynamic completions
_fabric_get_list() {
@@ -38,6 +38,10 @@ _fabric() {
COMPREPLY=($(compgen -W "$(_fabric_get_list --listmodels)" -- "${cur}"))
return 0
;;
-V | --vendor)
COMPREPLY=($(compgen -W "$(_fabric_get_list --listvendors)" -- "${cur}"))
return 0
;;
-w | --wipecontext)
COMPREPLY=($(compgen -W "$(_fabric_get_list --listcontexts)" -- "${cur}"))
return 0

View File

@@ -17,6 +17,11 @@ function __fabric_get_models
$cmd --listmodels --shell-complete-list 2>/dev/null
end
function __fabric_get_vendors
set cmd (commandline -opc)[1]
$cmd --listvendors --shell-complete-list 2>/dev/null
end
function __fabric_get_contexts
set cmd (commandline -opc)[1]
$cmd --listcontexts --shell-complete-list 2>/dev/null
@@ -58,6 +63,7 @@ function __fabric_register_completions
complete -c $cmd -s P -l presencepenalty -d "Set presence penalty (default: 0.0)"
complete -c $cmd -s F -l frequencypenalty -d "Set frequency penalty (default: 0.0)"
complete -c $cmd -s m -l model -d "Choose model" -a "(__fabric_get_models)"
complete -c $cmd -s V -l vendor -d "Specify vendor for chosen model (e.g., -V \"LM Studio\" -m openai/gpt-oss-20b)" -a "(__fabric_get_vendors)"
complete -c $cmd -l modelContextLength -d "Model context length (only affects ollama)"
complete -c $cmd -s o -l output -d "Output to file" -r
complete -c $cmd -s n -l latest -d "Number of latest patterns to list (default: 0)"

View File

@@ -167,6 +167,8 @@ us the results in
Select the model to use. NOTE: Will not work if you
have set a default model. please use --clear to clear
persistence before using this flag
--vendor VENDOR, -V VENDOR
Specify vendor for the selected model (e.g., -V "LM Studio" -m openai/gpt-oss-20b)
--listmodels List all available models
--remoteOllamaServer REMOTEOLLAMASERVER
The URL of the remote ollamaserver to use. ONLY USE

View File

@@ -62,25 +62,25 @@ Pass additional arguments to yt-dlp for advanced functionality. **User-provided
```bash
# Use browser cookies for age-restricted or private videos
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args "--cookies-from-browser brave"
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--cookies-from-browser brave"
# Override language selection (takes precedence over -g flag)
fabric -g en -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args "--sub-langs es,fr"
fabric -g en -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--sub-langs es,fr"
# Use specific format
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args "--format best"
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--format best"
# Handle rate limiting (slow down requests)
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args "--sleep-requests 1"
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--sleep-requests 1"
# Multiple arguments (use quotes)
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args "--cookies-from-browser firefox --write-info-json"
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--cookies-from-browser firefox --write-info-json"
# Combine rate limiting with authentication
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args "--cookies-from-browser brave --sleep-requests 1"
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--cookies-from-browser brave --sleep-requests 1"
# Override subtitle format (takes precedence over built-in --sub-format vtt)
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args "--sub-format srt"
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--sub-format srt"
```
#### Argument Precedence
@@ -196,7 +196,7 @@ fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern write_blog_post
### Common Issues
1. **"yt-dlp not found"**: Install yt-dlp using pip or your package manager
2. **Age-restricted videos**: Use `--yt-dlp-args "--cookies-from-browser BROWSER"`
2. **Age-restricted videos**: Use `--yt-dlp-args="--cookies-from-browser BROWSER"`
3. **No subtitles available**: Some videos don't have auto-generated subtitles
4. **API rate limits**: YouTube API has daily quotas for comments/metadata
5. **HTTP 429 errors**: YouTube is rate limiting subtitle requests
@@ -208,8 +208,8 @@ fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern write_blog_post
- **"No transcript content found"**: Video may not have subtitles available
- **"HTTP Error 429: Too Many Requests"**: YouTube rate limit exceeded. This is increasingly common. Solutions:
- **Wait 10-30 minutes and try again** (most effective)
- Use longer sleep: `--yt-dlp-args "--sleep-requests 5"`
- Try with browser cookies: `--yt-dlp-args "--cookies-from-browser brave --sleep-requests 5"`
- Use longer sleep: `--yt-dlp-args="--sleep-requests 5"`
- Try with browser cookies: `--yt-dlp-args="--cookies-from-browser brave --sleep-requests 5"`
- **Try a different video** - some videos are less restricted
- **Use a VPN** - different IP address may help
- **Try without language specification** - let yt-dlp choose any available language
@@ -270,7 +270,7 @@ fabric -y "https://www.youtube.com/watch?v=dQw4w9WgXcQ" --pattern summarize --st
```bash
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" \
--yt-dlp-args "--cookies-from-browser chrome" \
--yt-dlp-args="--cookies-from-browser chrome" \
--transcript-with-timestamps \
--comments \
--pattern comprehensive_analysis \
@@ -291,7 +291,7 @@ fabric -y "https://www.youtube.com/playlist?list=PLrAXtmRdnEQy6nuLvVUxpDnx4C0823
```bash
# Built-in language selection (-g es) is overridden by user args
fabric -g es -y "https://www.youtube.com/watch?v=VIDEO_ID" \
--yt-dlp-args "--sub-langs fr,de,en" \
--yt-dlp-args="--sub-langs fr,de,en" \
--pattern translate
```

Binary file not shown (new image added, 1.1 MiB).

go.mod
View File

@@ -19,6 +19,7 @@ require (
github.com/hasura/go-graphql-client v0.14.4
github.com/jessevdk/go-flags v1.6.1
github.com/joho/godotenv v1.5.1
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51
github.com/mattn/go-sqlite3 v1.14.28
github.com/ollama/ollama v0.9.0
github.com/openai/openai-go v1.8.2
@@ -37,7 +38,6 @@ require (
require (
github.com/google/go-cmp v0.7.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
)
require (

go.sum
View File

@@ -17,8 +17,6 @@ github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kk
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
github.com/anthropics/anthropic-sdk-go v1.4.0 h1:fU1jKxYbQdQDiEXCxeW5XZRIOwKevn/PMg8Ay1nnUx0=
github.com/anthropics/anthropic-sdk-go v1.4.0/go.mod h1:AapDW22irxK2PSumZiQXYUFvsdQgkwIWlpESweWZI/c=
github.com/anthropics/anthropic-sdk-go v1.7.0 h1:5iVf5fG/2gqVsOce8mq02r/WdgqpokM/8DXg2Ue6C9Y=
github.com/anthropics/anthropic-sdk-go v1.7.0/go.mod h1:3qSNQ5NrAmjC8A2ykuruSQttfqfdEYNZY5o8c0XSHB8=
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA=

View File

@@ -21,7 +21,7 @@ func handleChatProcessing(currentFlags *Flags, registry *core.PluginRegistry, me
var chatter *core.Chatter
if chatter, err = registry.GetChatter(currentFlags.Model, currentFlags.ModelContextLength,
currentFlags.Strategy, currentFlags.Stream, currentFlags.DryRun); err != nil {
currentFlags.Vendor, currentFlags.Strategy, currentFlags.Stream, currentFlags.DryRun); err != nil {
return
}

View File

@@ -43,6 +43,7 @@ type Flags struct {
Message string `hidden:"true" description:"Messages to send to chat"`
Copy bool `short:"c" long:"copy" description:"Copy to clipboard"`
Model string `short:"m" long:"model" yaml:"model" description:"Choose model"`
Vendor string `short:"V" long:"vendor" yaml:"vendor" description:"Specify vendor for the selected model (e.g., -V \"LM Studio\" -m openai/gpt-oss-20b)"`
ModelContextLength int `long:"modelContextLength" yaml:"modelContextLength" description:"Model context length (only affects ollama)"`
Output string `short:"o" long:"output" description:"Output to file" default:""`
OutputSession bool `long:"output-session" description:"Output the entire session (also a temporary one) to the output file"`

View File

@@ -36,7 +36,11 @@ func handleListingCommands(currentFlags *Flags, fabricDb *fsdb.Db, registry *cor
if models, err = registry.VendorManager.GetModels(); err != nil {
return true, err
}
models.Print(currentFlags.ShellCompleteOutput)
if currentFlags.ShellCompleteOutput {
models.Print(true)
} else {
models.PrintWithVendor(false)
}
return true, nil
}

View File

@@ -288,7 +288,7 @@ func (o *PluginRegistry) Configure() (err error) {
return
}
func (o *PluginRegistry) GetChatter(model string, modelContextLength int, strategy string, stream bool, dryRun bool) (ret *Chatter, err error) {
func (o *PluginRegistry) GetChatter(model string, modelContextLength int, vendorName string, strategy string, stream bool, dryRun bool) (ret *Chatter, err error) {
ret = &Chatter{
db: o.Db,
Stream: stream,
@@ -317,14 +317,32 @@ func (o *PluginRegistry) GetChatter(model string, modelContextLength int, strate
ret.model = defaultModel
}
} else if model == "" {
ret.vendor = vendorManager.FindByName(defaultVendor)
if vendorName != "" {
ret.vendor = vendorManager.FindByName(vendorName)
} else {
ret.vendor = vendorManager.FindByName(defaultVendor)
}
ret.model = defaultModel
} else {
var models *ai.VendorsModels
if models, err = vendorManager.GetModels(); err != nil {
return
}
ret.vendor = vendorManager.FindByName(models.FindGroupsByItemFirst(model))
if vendorName != "" {
// ensure vendor exists and provides model
ret.vendor = vendorManager.FindByName(vendorName)
availableVendors := models.FindGroupsByItem(model)
if ret.vendor == nil || !lo.Contains(availableVendors, vendorName) {
err = fmt.Errorf("model %s not available for vendor %s", model, vendorName)
return
}
} else {
availableVendors := models.FindGroupsByItem(model)
if len(availableVendors) > 1 {
fmt.Fprintf(os.Stderr, "Warning: multiple vendors provide model %s: %s. Using %s. Specify --vendor to select a vendor.\n", model, strings.Join(availableVendors, ", "), availableVendors[0])
}
ret.vendor = vendorManager.FindByName(models.FindGroupsByItemFirst(model))
}
ret.model = model
}

View File

@@ -1,10 +1,19 @@
package core
import (
"bytes"
"context"
"io"
"os"
"strings"
"testing"
"github.com/danielmiessler/fabric/internal/chat"
"github.com/danielmiessler/fabric/internal/domain"
"github.com/danielmiessler/fabric/internal/plugins"
"github.com/danielmiessler/fabric/internal/plugins/ai"
"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
"github.com/danielmiessler/fabric/internal/tools"
)
func TestSaveEnvFile(t *testing.T) {
@@ -19,3 +28,63 @@ func TestSaveEnvFile(t *testing.T) {
t.Fatalf("SaveEnvFile() error = %v", err)
}
}
// testVendor implements ai.Vendor for testing purposes
type testVendor struct {
name string
models []string
}
func (m *testVendor) GetName() string { return m.name }
func (m *testVendor) GetSetupDescription() string { return m.name }
func (m *testVendor) IsConfigured() bool { return true }
func (m *testVendor) Configure() error { return nil }
func (m *testVendor) Setup() error { return nil }
func (m *testVendor) SetupFillEnvFileContent(*bytes.Buffer) {}
func (m *testVendor) ListModels() ([]string, error) { return m.models, nil }
func (m *testVendor) SendStream([]*chat.ChatCompletionMessage, *domain.ChatOptions, chan string) error {
return nil
}
func (m *testVendor) Send(context.Context, []*chat.ChatCompletionMessage, *domain.ChatOptions) (string, error) {
return "", nil
}
func (m *testVendor) NeedsRawMode(string) bool { return false }
func TestGetChatter_WarnsOnAmbiguousModel(t *testing.T) {
tempDir := t.TempDir()
db := fsdb.NewDb(tempDir)
vendorA := &testVendor{name: "VendorA", models: []string{"shared-model"}}
vendorB := &testVendor{name: "VendorB", models: []string{"shared-model"}}
vm := ai.NewVendorsManager()
vm.AddVendors(vendorA, vendorB)
defaults := &tools.Defaults{
PluginBase: &plugins.PluginBase{},
Vendor: &plugins.Setting{Value: "VendorA"},
Model: &plugins.SetupQuestion{Setting: &plugins.Setting{Value: "shared-model"}},
ModelContextLength: &plugins.SetupQuestion{Setting: &plugins.Setting{Value: "0"}},
}
registry := &PluginRegistry{Db: db, VendorManager: vm, Defaults: defaults}
r, w, _ := os.Pipe()
oldStderr := os.Stderr
os.Stderr = w
defer func() { os.Stderr = oldStderr }()
chatter, err := registry.GetChatter("shared-model", 0, "", "", false, false)
w.Close()
warning, _ := io.ReadAll(r)
if err != nil {
t.Fatalf("GetChatter() error = %v", err)
}
if chatter.vendor.GetName() != "VendorA" {
t.Fatalf("expected vendor VendorA, got %s", chatter.vendor.GetName())
}
if !strings.Contains(string(warning), "multiple vendors provide model shared-model") {
t.Fatalf("expected warning about multiple vendors, got %q", string(warning))
}
}

View File

@@ -1,6 +1,10 @@
package ai
import (
"fmt"
"sort"
"strings"
"github.com/danielmiessler/fabric/internal/util"
)
@@ -11,3 +15,35 @@ func NewVendorsModels() *VendorsModels {
type VendorsModels struct {
*util.GroupsItemsSelectorString
}
// PrintWithVendor prints models including their vendor on each line.
// When shellCompleteList is true, output is suitable for shell completion.
func (o *VendorsModels) PrintWithVendor(shellCompleteList bool) {
if !shellCompleteList {
fmt.Printf("\n%v:\n", o.SelectionLabel)
}
var currentItemIndex int
sortedGroups := make([]*util.GroupItems[string], len(o.GroupsItems))
copy(sortedGroups, o.GroupsItems)
sort.SliceStable(sortedGroups, func(i, j int) bool {
return strings.ToLower(sortedGroups[i].Group) < strings.ToLower(sortedGroups[j].Group)
})
for _, groupItems := range sortedGroups {
items := make([]string, len(groupItems.Items))
copy(items, groupItems.Items)
sort.SliceStable(items, func(i, j int) bool {
return strings.ToLower(items[i]) < strings.ToLower(items[j])
})
for _, item := range items {
currentItemIndex++
if shellCompleteList {
fmt.Printf("%s|%s\n", groupItems.Group, item)
} else {
fmt.Printf("\t[%d]\t%s|%s\n", currentItemIndex, groupItems.Group, item)
}
}
}
}
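As a rough illustration of the format strings above: the interactive branch (shellCompleteList false) prints the selection label followed by numbered vendor|model lines, while the shell-completion branch prints bare vendor|model pairs. Vendor and model names below are placeholders:

```
	[1]	LM Studio|openai/gpt-oss-20b
	[2]	Ollama|llama3.1:8b
	[3]	OpenAI|gpt-4o
```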

View File

@@ -105,7 +105,7 @@ func (h *ChatHandler) HandleChat(c *gin.Context) {
}
}
chatter, err := h.registry.GetChatter(p.Model, 2048, "", false, false)
chatter, err := h.registry.GetChatter(p.Model, 2048, p.Vendor, "", false, false)
if err != nil {
log.Printf("Error creating chatter: %v", err)
streamChan <- fmt.Sprintf("Error: %v", err)

View File

@@ -210,7 +210,7 @@ func (o *YouTube) tryMethodYtDlpInternal(videoId string, language string, additi
}
if strings.Contains(stderrStr, "Sign in to confirm you're not a bot") || strings.Contains(stderrStr, "Use --cookies-from-browser") {
err = fmt.Errorf("YouTube requires authentication (bot detection). Use --yt-dlp-args '--cookies-from-browser BROWSER' where BROWSER is chrome, firefox, brave, etc. Error: %v", err)
err = fmt.Errorf("YouTube requires authentication (bot detection). Use --yt-dlp-args='--cookies-from-browser BROWSER' where BROWSER is chrome, firefox, brave, etc. Error: %v", err)
return
}

View File

@@ -11,7 +11,7 @@ This is a web app for Fabric. It was built using [Svelte][svelte], [SkeletonUI][
The goal of this app is to not only provide a user interface for Fabric, but also an out-of-the-box website for those who want to get started with web development, blogging, or to just have a web interface for fabric. You can use this app as a GUI interface for Fabric, a ready to go blog-site, or a website template for your own projects.
![Preview](./static/preview.png)
![Preview](../docs/images/svelte-preview.png)
## Installing

View File

@@ -1,6 +1,6 @@
---
title: README
description: fabric is an open-source framework for augmenting humans using AI. It provides a modular framework for solving specific problems using a crowdsourced set of AI prompts that can be used anywhere.
description: fabric is an open-source framework for augmenting humans using AI. It provides a modular framework for solving specific problems using a crowd-sourced set of AI prompts that can be used anywhere.
aliases: Fabric/Docs
date: 2024-1-12
updated: 2024-11-22
@@ -12,7 +12,6 @@ updated: 2024-11-22
# `fabric`
<div class="justify-left flex gap-2">
<img src="https://img.shields.io/github/languages/top/danielmiessler/fabric" alt="Github top language">
<img src="https://img.shields.io/github/last-commit/danielmiessler/fabric" alt="GitHub last commit">
@@ -23,10 +22,10 @@ updated: 2024-11-22
<h4><code>fabric</code> is an open-source framework for augmenting humans using AI.</h4>
[Updates](#updates) •
[What and Why](#whatandwhy) •
[What and Why](#what-and-why) •
[Philosophy](#philosophy) •
[Installation](#Installation) •
[Usage](#Usage) •
[Installation](#installation) •
[Usage](#usage) •
[Examples](#examples) •
[Just Use the Patterns](#just-use-the-patterns) •
[Custom Patterns](#custom-patterns) •
@@ -42,8 +41,8 @@ updated: 2024-11-22
- [`fabric`](#fabric)
- [Navigation](#navigation)
- [Updates](#updates)
- [Intro videos](#intro-videos)
- [What and why](#what-and-why)
- [Intro videos](#intro-videos)
- [Philosophy](#philosophy)
- [Breaking problems into components](#breaking-problems-into-components)
- [Too many prompts](#too-many-prompts)
@@ -65,7 +64,9 @@ updated: 2024-11-22
- [`to_pdf`](#to_pdf)
- [`to_pdf` Installation](#to_pdf-installation)
- [pbpaste](#pbpaste)
- [Web Interface](#Web_Interface)
- [Web Interface](#web-interface)
- [Installing](#installing)
- [Streamlit UI](#streamlit-ui)
- [Meta](#meta)
- [Primary contributors](#primary-contributors)
@@ -76,7 +77,7 @@ updated: 2024-11-22
> [!NOTE]
> November 8, 2024
>
> - **Multimodal Support**: You can now use `-a` (attachment) for Multimodal submissions to OpenAI models that support it. Example: `fabric -a https://path/to/image "Give me a description of this image."`
> - **Multi-modal Support**: You can now use `-a` (attachment) for Multi-modal submissions to OpenAI models that support it. Example: `fabric -a https://path/to/image "Give me a description of this image."`
## What and why
@@ -90,7 +91,7 @@ Fabric was created to address this by enabling everyone to granularly apply AI t
## Intro videos
Keep in mind that many of these were recorded when Fabric was Python-based, so remember to use the current [install instructions](#Installation) below.
Keep in mind that many of these were recorded when Fabric was Python-based, so remember to use the current [install instructions](#installation) below.
- [Network Chuck](https://www.youtube.com/watch?v=UbDyjIIGaxQ)
- [David Bombal](https://www.youtube.com/watch?v=vF-MQmVxnCs)
@@ -223,7 +224,7 @@ This also creates a `yt` alias that allows you to use `yt https://www.youtube.co
#### Save your files in markdown using aliases
If in addition to the above aliases you would like to have the option to save the output to your favourite markdown note vault like Obsidian then instead of the above add the following to your `.zshrc` or `.bashrc` file:
If in addition to the above aliases you would like to have the option to save the output to your favorite markdown note vault like Obsidian then instead of the above add the following to your `.zshrc` or `.bashrc` file:
```bash
# Define the base directory for Obsidian notes
@@ -281,7 +282,7 @@ go install github.com/danielmiessler/fabric@latest
fabric --setup
```
Then [set your environmental variables](#environmental-variables) as shown above.
Then [set your environmental variables](#environment-variables) as shown above.
### Upgrading
@@ -324,6 +325,7 @@ Application Options:
-U, --updatepatterns Update patterns
-c, --copy Copy to clipboard
-m, --model= Choose model
-V, --vendor= Specify vendor for chosen model (e.g., -V "LM Studio" -m openai/gpt-oss-20b)
-o, --output= Output to file
--output-session Output the entire session (also a temporary one) to the output file
-n, --latest= Number of latest patterns to list (default: 0)
@@ -375,21 +377,21 @@ Now let's look at some things you can do with Fabric.
1. Run the `summarize` Pattern based on input from `stdin`. In this case, the body of an article.
```bash
pbpaste | fabric --pattern summarize
```
```bash
pbpaste | fabric --pattern summarize
```
2. Run the `analyze_claims` Pattern with the `--stream` option to get immediate and streaming results.
```bash
pbpaste | fabric --stream --pattern analyze_claims
```
```bash
pbpaste | fabric --stream --pattern analyze_claims
```
3. Run the `extract_wisdom` Pattern with the `--stream` option to get immediate and streaming results from any Youtube video (much like in the original introduction video).
```bash
fabric -y "https://youtube.com/watch?v=uXs-zPc63kM" --stream --pattern extract_wisdom
```
```bash
fabric -y "https://youtube.com/watch?v=uXs-zPc63kM" --stream --pattern extract_wisdom
```
4. Create patterns- you must create a .md file with the pattern and save it to ~/.config/fabric/patterns/[yourpatternname].
@@ -414,11 +416,7 @@ You may want to use Fabric to create your own custom Patterns—but not share th
Just make a directory in `~/.config/custompatterns/` (or wherever) and put your `.md` files in there.
When you're ready to use them, copy them into:
```
~/.config/fabric/patterns/
```
When you're ready to use them, copy them into: `~/.config/fabric/patterns/`
You can then use them like any other Patterns, but they won't be public unless you explicitly submit them as Pull Requests to the Fabric project. So don't worry—they're private to you.
@@ -462,7 +460,7 @@ The [examples](#examples) use the macOS program `pbpaste` to paste content from
On Windows, you can use the PowerShell command `Get-Clipboard` from a PowerShell command prompt. If you like, you can also alias it to `pbpaste`. If you are using classic PowerShell, edit the file `~\Documents\WindowsPowerShell\.profile.ps1`, or if you are using PowerShell Core, edit `~\Documents\PowerShell\.profile.ps1` and add the alias,
```
```powershell
Set-Alias pbpaste Get-Clipboard
```
@@ -481,17 +479,19 @@ alias pbpaste='xclip -selection clipboard -o'
## Web Interface
Fabric now includes a built-in web interface that provides a GUI alternative to the command-line interface and an out-of-the-box website for those who want to get started with web development or blogging.
You can use this app as a GUI interface for Fabric, a ready to go blog-site, or a website template for your own projects.
Fabric now includes a built-in web interface that provides a GUI alternative to the command-line interface and an out-of-the-box website for those who want to get started with web development or blogging.
You can use this app as a GUI interface for Fabric, a ready to go blog-site, or a website template for your own projects.
The `web/src/lib/content` directory includes starter `.obsidian/` and `templates/` directories, allowing you to open up the `web/src/lib/content/` directory as an [Obsidian.md](https://obsidian.md) vault. You can place your posts in the posts directory when you're ready to publish.
The `web/src/lib/content` directory includes starter `.obsidian/` and `templates/` directories, allowing you to open up the `web/src/lib/content/` directory as an [Obsidian.md](https://obsidian.md) vault. You can place your posts in the posts directory when you're ready to publish.
### Installing
The GUI can be installed by navigating to the `web` directory and using `npm install`, `pnpm install`, or your favorite package manager. Then simply run the development server to start the app. 
The GUI can be installed by navigating to the `web` directory and using `npm install`, `pnpm install`, or your favorite package manager. Then simply run the development server to start the app.
_You will need to run fabric in a separate terminal with the `fabric --serve` command._
_You will need to run fabric in a separate terminal with the `fabric --serve` command._
**From the fabric project `web/` directory:**
```shell
npm run dev
@@ -499,7 +499,7 @@ npm run dev
pnpm run dev
## or your equivalent
## or your equivalent
```
### Streamlit UI
@@ -515,10 +515,12 @@ streamlit run streamlit.py
```
The Streamlit UI provides a user-friendly interface for:
- Running and chaining patterns
- Managing pattern outputs
- Creating and editing patterns
- Analyzing pattern results
## Meta
> [!NOTE]