mirror of
https://github.com/danielmiessler/Fabric.git
synced 2026-01-10 23:08:06 -05:00
## CHANGES - Add disable-responses-api flag to CLI completions - Update zsh completion with new API flag - Update bash completion options list - Add fish shell completion for API flag - Add testpattern to VSCode spell checker dictionary - Configure disableResponsesAPI in example YAML config - Enable flag for llama-server compatibility
31 lines
566 B
YAML
# this is an example yaml config file for fabric

# use fabric pattern names
pattern: ai

# or use a filename
# pattern: ~/testpattern.md

model: phi3:latest

# for models that support context length
modelContextLength: 2048

frequencypenalty: 0.5
presencepenalty: 0.5
topp: 0.67
temperature: 0.88
seed: 42

stream: true
raw: false

# suppress vendor thinking output
suppressThink: false
thinkStartTag: "<think>"
thinkEndTag: "</think>"

# OpenAI Responses API settings
# (use this for llama-server or other OpenAI-compatible local servers)
disableResponsesAPI: true