Compare commits for a mirror of https://github.com/danielmiessler/Fabric.git (synced 2026-01-09 22:38:10 -05:00)

15 Commits
- 6b9f4c1fb8
- 4d2061a641
- 713f6e46fe
- efadc81974
- ea54f60dcc
- 4008125e37
- da94411bf3
- ab7b37be10
- 772337bf0d
- 1e30c4e136
- e12a40ad4f
- 97beaecbeb
- 7af6817bac
- 50ecc32d85
- ff1ef380a7
.vscode/settings.json (vendored): 4 changes
@@ -25,6 +25,7 @@
    "danielmiessler",
    "davidanson",
    "Debugf",
    "debuglog",
    "dedup",
    "deepseek",
    "Despina",

@@ -55,6 +56,7 @@
    "godotenv",
    "gofmt",
    "goimports",
    "golint",
    "gomod",
    "gonic",
    "goopenai",

@@ -131,7 +133,9 @@
    "seaborn",
    "semgrep",
    "sess",
    "sgaunet",
    "shellquote",
    "SSEHTTP",
    "storer",
    "Streamlit",
    "stretchr",
CHANGELOG.md: 50 changes
@@ -1,5 +1,55 @@

# Changelog

## v1.4.297 (2025-08-26)

### PR [#1729](https://github.com/danielmiessler/Fabric/pull/1729) by [ksylvan](https://github.com/ksylvan): Add GitHub Community Health Documents

- Add CODE_OF_CONDUCT defining respectful, collaborative community behavior
- Add CONTRIBUTING with setup, testing, PR, changelog requirements
- Add SECURITY policy with reporting process and response timelines
- Add SUPPORT guide for bugs, features, discussions, expectations
- Add docs README indexing guides, quick starts, contributor essentials

## v1.4.296 (2025-08-26)

### PR [#1728](https://github.com/danielmiessler/Fabric/pull/1728) by [ksylvan](https://github.com/ksylvan): Refactor Logging System to Use Centralized Debug Logger

- Replace fmt.Fprintf/os.Stderr with centralized debuglog.Log across CLI and add unconditional Log function for important messages
- Improve OAuth flow messaging and token refresh diagnostics with better error handling
- Update tests to capture debuglog output via SetOutput for better test coverage
- Convert Perplexity streaming errors to unified debug logging and emit file write notifications through debuglog
- Standardize extension registry warnings and announce large audio processing steps via centralized logger

## v1.4.295 (2025-08-24)

### PR [#1727](https://github.com/danielmiessler/Fabric/pull/1727) by [ksylvan](https://github.com/ksylvan): Standardize Anthropic Beta Failure Logging

- Refactor: route Anthropic beta failure logs through internal debug logger
- Replace fmt.Fprintf stderr with debuglog.Debug for beta failures
- Import internal log package and remove os dependency
- Standardize logging level to debuglog.Basic for beta errors
- Preserve fallback stream behavior when beta features fail

## v1.4.294 (2025-08-20)

### PR [#1723](https://github.com/danielmiessler/Fabric/pull/1723) by [ksylvan](https://github.com/ksylvan): docs: update README with Venice AI provider and Windows install script

- Add Venice AI provider configuration with API endpoint
- Document Venice AI as privacy-first open-source provider
- Include PowerShell installation script for Windows users
- Add debug levels section to table of contents
- Update recent major features with v1.4.294 release notes

## v1.4.293 (2025-08-19)

### PR [#1718](https://github.com/danielmiessler/Fabric/pull/1718) by [ksylvan](https://github.com/ksylvan): Implement Configurable Debug Logging Levels

- Add --debug flag controlling runtime logging verbosity levels
- Introduce internal/log package with Off, Basic, Detailed, Trace
- Replace ad-hoc Debugf and globals with centralized debug logger
- Wire debug level during early CLI argument parsing
- Add bash, zsh, fish completions for --debug levels

## v1.4.292 (2025-08-18)

### PR [#1717](https://github.com/danielmiessler/Fabric/pull/1717) by [ksylvan](https://github.com/ksylvan): Highlight default vendor/model in model listing
README.md: 23 changes
@@ -57,6 +57,7 @@ Below are the **new features and capabilities** we've added (newest first):

### Recent Major Features

- [v1.4.294](https://github.com/danielmiessler/fabric/releases/tag/v1.4.294) (Aug 20, 2025) — **Venice AI Support**: Added the Venice AI provider. Venice is a Privacy-First, Open-Source AI provider. See their ["About Venice"](https://docs.venice.ai/overview/about-venice) page for details.
- [v1.4.291](https://github.com/danielmiessler/fabric/releases/tag/v1.4.291) (Aug 18, 2025) — **Speech To Text**: Add OpenAI speech-to-text support with `--transcribe-file`, `--transcribe-model`, and `--split-media-file` flags.
- [v1.4.287](https://github.com/danielmiessler/fabric/releases/tag/v1.4.287) (Aug 16, 2025) — **AI Reasoning**: Add Thinking to Gemini models and introduce `readme_updates` python script
- [v1.4.286](https://github.com/danielmiessler/fabric/releases/tag/v1.4.286) (Aug 14, 2025) — **AI Reasoning**: Introduce Thinking Config Across Anthropic and OpenAI Providers

@@ -139,6 +140,7 @@ Keep in mind that many of these were recorded when Fabric was Python-based, so r

- [Bash Completion](#bash-completion)
- [Fish Completion](#fish-completion)
- [Usage](#usage)
- [Debug Levels](#debug-levels)
- [Our approach to prompting](#our-approach-to-prompting)
- [Examples](#examples)
- [Just use the Patterns](#just-use-the-patterns)

@@ -209,6 +211,17 @@ To install Fabric, you can use the latest release binaries or install it from th

`https://github.com/danielmiessler/fabric/releases/latest/download/fabric-windows-amd64.exe`

Or via PowerShell, just copy and paste and run the following snippet to install the binary into `{HOME}\.local\bin`. Please make sure that directory is included in your `PATH`.

```powershell
$ErrorActionPreference = "Stop"
$LATEST="https://github.com/danielmiessler/fabric/releases/latest/download/fabric-windows-amd64.exe"
$DIR="${HOME}\.local\bin"
New-Item -Path $DIR -ItemType Directory -Force
Invoke-WebRequest -URI "${LATEST}" -outfile "${DIR}\fabric.exe"
& "${DIR}\fabric.exe" /version
```

#### macOS (arm64)

`curl -L https://github.com/danielmiessler/fabric/releases/latest/download/fabric-darwin-arm64 > fabric && chmod +x fabric && ./fabric --version`

@@ -636,10 +649,20 @@ Application Options:

```
      --yt-dlp-args=    Additional arguments to pass to yt-dlp (e.g. '--cookies-from-browser brave')
      --thinking=       Set reasoning/thinking level (e.g., off, low, medium, high, or
                        numeric tokens for Anthropic or Google Gemini)
      --debug=          Set debug level (0: off, 1: basic, 2: detailed, 3: trace)

Help Options:
  -h, --help            Show this help message
```

### Debug Levels

Use the `--debug` flag to control runtime logging:

- `0`: off (default)
- `1`: basic debug info
- `2`: detailed debugging
- `3`: trace level

## Our approach to prompting

Fabric _Patterns_ are different than most prompts you'll see.
@@ -1,3 +1,3 @@
package main

-var version = "v1.4.292"
+var version = "v1.4.297"
Binary file not shown.
@@ -145,6 +145,7 @@ _fabric() {
    '(--transcribe-file)--transcribe-file[Audio or video file to transcribe]:audio file:_files -g "*.mp3 *.mp4 *.mpeg *.mpga *.m4a *.wav *.webm"' \
    '(--transcribe-model)--transcribe-model[Model to use for transcription (separate from chat model)]:transcribe model:_fabric_transcription_models' \
    '(--split-media-file)--split-media-file[Split audio/video files larger than 25MB using ffmpeg]' \
+   '(--debug)--debug[Set debug level (0=off, 1=basic, 2=detailed, 3=trace)]:debug level:(0 1 2 3)' \
    '(--notification)--notification[Send desktop notification when command completes]' \
    '(--notification-command)--notification-command[Custom command to run for notifications]:notification command:' \
    '(-h --help)'{-h,--help}'[Show this help message]' \
@@ -13,7 +13,7 @@ _fabric() {
    _get_comp_words_by_ref -n : cur prev words cword

    # Define all possible options/flags
-   local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --vendor -V --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --yt-dlp-args --language -g --scrape_url -u --scrape_question -q --seed -e --thinking --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --no-variable-replacement --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --suppress-think --think-start-tag --think-end-tag --disable-responses-api --transcribe-file --transcribe-model --split-media-file --voice --list-gemini-voices --notification --notification-command --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"
+   local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --vendor -V --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --yt-dlp-args --language -g --scrape_url -u --scrape_question -q --seed -e --thinking --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --no-variable-replacement --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --suppress-think --think-start-tag --think-end-tag --disable-responses-api --transcribe-file --transcribe-model --split-media-file --voice --list-gemini-voices --notification --notification-command --debug --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"

    # Helper function for dynamic completions
    _fabric_get_list() {

@@ -78,6 +78,10 @@ _fabric() {
        COMPREPLY=($(compgen -W "$(_fabric_get_list --list-transcription-models)" -- "${cur}"))
        return 0
        ;;
+   --debug)
+       COMPREPLY=($(compgen -W "0 1 2 3" -- "${cur}"))
+       return 0
+       ;;
    # Options requiring file/directory paths
    -a | --attachment | -o | --output | --config | --addextension | --image-file | --transcribe-file)
        _filedir
@@ -99,6 +99,7 @@ function __fabric_register_completions
    complete -c $cmd -l voice -d "TTS voice name for supported models (e.g., Kore, Charon, Puck)" -a "(__fabric_get_gemini_voices)"
    complete -c $cmd -l transcribe-file -d "Audio or video file to transcribe" -r -a "*.mp3 *.mp4 *.mpeg *.mpga *.m4a *.wav *.webm"
    complete -c $cmd -l transcribe-model -d "Model to use for transcription (separate from chat model)" -a "(__fabric_get_transcription_models)"
+   complete -c $cmd -l debug -d "Set debug level (0=off, 1=basic, 2=detailed, 3=trace)" -a "0 1 2 3"
    complete -c $cmd -l notification-command -d "Custom command to run for notifications (overrides built-in notifications)"

    # Boolean flags (no arguments)
docs/CODE_OF_CONDUCT.md: new file, 26 lines
@@ -0,0 +1,26 @@

# Code of Conduct

## Our Expectation

We expect all contributors and community members to act with basic human decency and common sense.

This project exists to help people augment their capabilities with AI, and we welcome contributions from anyone who shares this mission. We assume good faith and trust that everyone involved is here to build something valuable together.

## Guidelines

- **Be respectful**: Treat others as you'd want to be treated in a professional setting
- **Be constructive**: Focus on the work and help make the project better
- **Be collaborative**: We're all working toward the same goal - making Fabric more useful
- **Use good judgment**: If you're not sure whether something is appropriate, it probably isn't

## Reporting Issues

If someone is being genuinely disruptive or harmful, please email the maintainers directly. We'll address legitimate concerns promptly and fairly.

## Enforcement

Maintainers reserve the right to remove content and restrict access for anyone who consistently acts in bad faith or disrupts the community.

---

*This project assumes contributors are adults who can work together professionally. If you can't do that, this isn't the right place for you.*
docs/CONTRIBUTING.md: new file, 155 lines
@@ -0,0 +1,155 @@

# Contributing to Fabric

Thanks for contributing to Fabric! Here's what you need to know to get started quickly.

## Quick Setup

### Prerequisites

- Go 1.24+ installed
- Git configured with your details

### Getting Started

```bash
# Clone and setup
git clone https://github.com/danielmiessler/fabric.git
cd fabric
go build -o fabric ./cmd/fabric
./fabric --setup

# Run tests
go test ./...
```

## Development Guidelines

### Code Style

- Follow standard Go conventions (`gofmt`, `golint`)
- Use meaningful variable and function names
- Write tests for new functionality
- Keep functions focused and small

### Commit Messages

Use descriptive commit messages:

```text
feat: add new pattern for code analysis
fix: resolve OAuth token refresh issue
docs: update installation instructions
```

### Project Structure

- `cmd/` - Executable commands
- `internal/` - Private application code
- `data/patterns/` - AI patterns
- `docs/` - Documentation

## Pull Request Process

### Changelog Generation (REQUIRED)

Before submitting your PR, generate a changelog entry:

```bash
cd cmd/generate_changelog
go build -o generate_changelog .
./generate_changelog --incoming-pr YOUR_PR_NUMBER
```

**Requirements:**

- PR must be open and mergeable
- Working directory must be clean
- GitHub token available (GITHUB_TOKEN env var)

**Optional flags:**

- `--ai-summarize` - Enhanced AI-generated summaries
- `--push` - Auto-push the changelog commit

### PR Guidelines

1. Fork the repository
2. Create a feature branch
3. Make your changes
4. Write/update tests
5. Generate changelog entry (see above)
6. Submit PR with clear description

### Review Process

- PRs require maintainer review
- Address feedback promptly
- Keep PRs focused on single features/fixes
- Update changelog if you make significant changes

## Testing

### Run Tests

```bash
# All tests
go test ./...

# Specific package
go test ./internal/cli

# With coverage
go test -cover ./...
```

### Test Requirements

- Unit tests for core functionality
- Integration tests for external dependencies
- Examples in documentation

## Patterns

### Creating Patterns

Patterns go in `data/patterns/[pattern-name]/system.md`:

```markdown
# IDENTITY and PURPOSE

You are an expert at...

# STEPS

- Step 1
- Step 2

# OUTPUT

- Output format requirements

# EXAMPLE

Example output here
```

### Pattern Guidelines

- Use clear, actionable language
- Provide specific output formats
- Include examples when helpful
- Test with multiple AI providers

## Documentation

- Update README.md for new features
- Add docs to `docs/` for complex features
- Include usage examples
- Keep documentation current

## Getting Help

- Check existing issues first
- Ask questions in discussions
- Tag maintainers for urgent issues
- Be patient - maintainers are volunteers

## License

By contributing, you agree your contributions will be licensed under the MIT License.
docs/README.md: new file, 88 lines
@@ -0,0 +1,88 @@

# Fabric Documentation

Welcome to the Fabric documentation! This directory contains detailed guides and technical documentation for various features and components of Fabric.

## 📚 Available Documentation

### Core Features

**[Automated-Changelog-Usage.md](./Automated-Changelog-Usage.md)**
Complete guide for developers on using the automated changelog system. Covers the workflow for generating PR changelog entries during development, including setup, validation, and CI/CD integration.

**[YouTube-Processing.md](./YouTube-Processing.md)**
Comprehensive guide for processing YouTube videos and playlists with Fabric. Covers transcript extraction, comment processing, metadata retrieval, and advanced yt-dlp configurations.

**[Using-Speech-To-Text.md](./Using-Speech-To-Text.md)**
Documentation for Fabric's speech-to-text capabilities using OpenAI's Whisper models. Learn how to transcribe audio and video files and process them through Fabric patterns.

### User Interface & Experience

**[Desktop-Notifications.md](./Desktop-Notifications.md)**
Guide to setting up desktop notifications for Fabric commands. Useful for long-running tasks and multitasking scenarios with cross-platform notification support.

**[Shell-Completions.md](./Shell-Completions.md)**
Instructions for setting up intelligent tab completion for Fabric in Zsh, Bash, and Fish shells. Includes automated installation and manual setup options.

**[Gemini-TTS.md](./Gemini-TTS.md)**
Complete guide for using Google Gemini's text-to-speech features with Fabric. Covers voice selection, audio generation, and integration with Fabric patterns.

### Development & Architecture

**[Automated-ChangeLog.md](./Automated-ChangeLog.md)**
Technical documentation outlining the automated CHANGELOG system architecture for CI/CD integration. Details the infrastructure and workflow for maintainers.

**[Project-Restructured.md](./Project-Restructured.md)**
Project restructuring plan and architectural decisions. Documents the transition to standard Go conventions and project organization improvements.

**[NOTES.md](./NOTES.md)**
Development notes on refactoring efforts, model management improvements, and architectural changes. Includes technical details on vendor and model abstraction.

### Audio Resources

**[voices/README.md](./voices/README.md)**
Index of Gemini TTS voice samples demonstrating different AI voice characteristics available in Fabric.

## 🗂️ Additional Resources

### Configuration Files

- `./notification-config.yaml` - Example notification configuration

### Images

- `images/` - Screenshots and visual documentation assets
- `fabric-logo-gif.gif` - Animated Fabric logo
- `fabric-summarize.png` - Screenshot of summarization feature
- `svelte-preview.png` - Web interface preview

## 🚀 Quick Start

New to Fabric? Start with these essential docs:

1. **[../README.md](../README.md)** - Main project README with installation and basic usage
2. **[Shell-Completions.md](./Shell-Completions.md)** - Set up tab completion for better CLI experience
3. **[YouTube-Processing.md](./YouTube-Processing.md)** - Learn one of Fabric's most popular features
4. **[Desktop-Notifications.md](./Desktop-Notifications.md)** - Get notified when long tasks complete

## 🔧 For Contributors

Contributing to Fabric? These docs are essential:

1. **[./CONTRIBUTING.md](./CONTRIBUTING.md)** - Contribution guidelines and setup
2. **[Automated-Changelog-Usage.md](./Automated-Changelog-Usage.md)** - Required workflow for PR submissions
3. **[Project-Restructured.md](./Project-Restructured.md)** - Understanding project architecture
4. **[NOTES.md](./NOTES.md)** - Current development priorities and patterns

## 📝 Documentation Standards

When adding new documentation:

- Use clear, descriptive filenames
- Include practical examples and use cases
- Update this README index with your new docs
- Follow the established markdown formatting conventions
- Test all code examples before publication

---

*For general help and support, see [./SUPPORT.md](./SUPPORT.md)*
docs/SECURITY.md: new file, 158 lines
@@ -0,0 +1,158 @@

# Security Policy

## Supported Versions

We aim to provide security updates for the latest version of Fabric.

We recommend always using the latest version of Fabric for security fixes and improvements.

## Reporting Security Vulnerabilities

**Please DO NOT report security vulnerabilities through public GitHub issues.**

### Preferred Reporting Method

Send security reports directly to **<kayvan@sylvan.com>** and CC the project maintainer at **<daniel@danielmiessler.com>**.

### What to Include

Please provide the following information:

1. **Vulnerability Type**: What kind of security issue (e.g., injection, authentication bypass, etc.)
2. **Affected Components**: Which parts of Fabric are affected
3. **Impact Assessment**: What could an attacker accomplish
4. **Reproduction Steps**: Clear steps to reproduce the vulnerability
5. **Proposed Fix**: If you have suggestions for remediation
6. **Disclosure Timeline**: Your preferred timeline for public disclosure

### Example Report Format

```text
Subject: [SECURITY] Brief description of vulnerability

Vulnerability Type: SQL Injection
Affected Component: Pattern database queries
Impact: Potential data exposure
Severity: High

Reproduction Steps:
1. Navigate to...
2. Submit payload: ...
3. Observe...

Evidence:
[Screenshots, logs, or proof of concept]

Suggested Fix:
Use parameterized queries instead of string concatenation...
```

## Security Considerations

### API Keys and Secrets

- Never commit API keys to the repository
- Store secrets in environment variables or secure configuration
- Use the built-in setup process for key management
- Regularly rotate API keys

### Input Validation

- All user inputs are validated before processing
- Special attention to pattern definitions and user content
- URL validation for web scraping features

### AI Provider Integration

- Secure communication with AI providers (HTTPS/TLS)
- Token handling follows provider best practices
- No sensitive data logged or cached unencrypted

### Network Security

- Web server endpoints properly authenticated when required
- CORS policies appropriately configured
- Rate limiting implemented where necessary

## Vulnerability Response Process

1. **Report Received**: We'll acknowledge receipt within 24 hours
2. **Initial Assessment**: We'll evaluate severity and impact within 72 hours
3. **Investigation**: We'll investigate and develop fixes
4. **Fix Development**: We'll create and test patches
5. **Coordinated Disclosure**: We'll work with the reporter on a disclosure timeline
6. **Release**: We'll release a patched version with a security advisory

### Timeline Expectations

- **Critical**: 1-7 days
- **High**: 7-30 days
- **Medium**: 30-90 days
- **Low**: Next scheduled release

## Bug Bounty

We don't currently offer a formal bug bounty program, but we deeply appreciate security research and will:

- Acknowledge contributors in release notes
- Provide credit in security advisories
- Consider swag or small rewards for significant findings

## Security Best Practices for Users

### Installation

- Download Fabric only from official sources
- Verify checksums when available
- Keep installations up to date

### Configuration

- Use strong, unique API keys
- Don't share configuration files containing secrets
- Set appropriate file permissions on config directories

### Usage

- Be cautious with patterns that process sensitive data
- Review AI provider terms for data handling
- Consider using local models for sensitive content

## Known Security Limitations

### AI Provider Dependencies

Fabric relies on external AI providers. Security depends partly on:

- Provider security practices
- Data transmission security
- Provider data handling policies

### Pattern Execution

Custom patterns could potentially:

- Process sensitive inputs inappropriately
- Generate outputs containing sensitive information
- Be used for adversarial prompt injection

**Recommendation**: Review patterns carefully, especially those from untrusted sources.

## Security Updates

Security updates are distributed through:

- GitHub Releases with security tags
- Security advisories on GitHub
- Project documentation updates

Subscribe to the repository to receive notifications about security updates.

## Contact

For non-security issues, please use GitHub issues.

For security concerns, email **<kayvan@sylvan.com>** and CC **<daniel@danielmiessler.com>**.

---

*We take security seriously and appreciate the security research community's help in keeping Fabric secure.*
docs/SUPPORT.md: new file, 148 lines
@@ -0,0 +1,148 @@

# Support

## Getting Help with Fabric

Need help with Fabric? Here are the best ways to get assistance:

## 📖 Documentation First

Before reaching out, check these resources:

- **[README.md](../README.md)** - Installation, usage, and examples
- **[docs/](./README.md)** - Detailed documentation
- **[Patterns](../data/patterns/)** - Browse available AI patterns

## 🐛 Bug Reports

Found a bug? Please create an issue:

**[Report a Bug](https://github.com/danielmiessler/fabric/issues/new?template=bug.yml)**

Include:

- Fabric version (`fabric --version`)
- Operating system
- Steps to reproduce
- Expected vs actual behavior
- Error messages/logs

## 💡 Feature Requests

Have an idea for Fabric? We'd love to hear it:

**[Request a Feature](https://github.com/danielmiessler/fabric/issues/new)**

Describe:

- What you want to achieve
- Why it would be useful
- How you envision it working
- Any alternatives you've considered

## 🤔 Questions & Discussions

For general questions, usage help, or community discussion:

**[GitHub Discussions](https://github.com/danielmiessler/fabric/discussions)**

Great for:

- "How do I...?" questions
- Sharing patterns you've created
- Getting community advice
- Feature brainstorming

## 🏷️ Issue Labels

When creating issues, maintainers will add appropriate labels:

- `bug` - Something isn't working
- `enhancement` - New feature request
- `documentation` - Documentation improvements
- `help wanted` - Community contributions welcome
- `good first issue` - Great for new contributors
- `question` - General questions
- `pattern` - Related to AI patterns

## 📋 Issue Templates

We provide templates to help you create detailed reports:

- **Bug Report** - Structured bug reporting
- **Feature Request** - Detailed feature proposals
- **Pattern Submission** - New pattern contributions

## 🔒 Security Issues

**DO NOT create public issues for security vulnerabilities.**

See our [Security Policy](./SECURITY.md) for proper reporting procedures.

## ⚡ Response Times

We're a community-driven project with volunteer maintainers:

- **Bugs**: We aim to acknowledge within 48 hours
- **Features**: Response time varies based on complexity
- **Questions**: Community often responds quickly
- **Security**: See security policy for timelines

## 🛠️ Self-Help Tips

Before creating an issue, try:

1. **Update Fabric**: `go install github.com/danielmiessler/fabric/cmd/fabric@latest`
2. **Check existing issues**: Someone might have the same problem
3. **Run setup**: `fabric --setup` can fix configuration issues
4. **Test minimal example**: Isolate the problem

## 🤝 Community Guidelines

When asking for help:

- Be specific and provide context
- Include relevant details and error messages
- Be patient - maintainers are volunteers
- Help others when you can
- Say thanks when someone helps you

## 📞 Emergency Contact

For urgent security issues only:

- Email: <security@fabric.ai> (if available)
- Maintainer: <daniel@danielmiessler.com>

## 🎯 What We Can Help With

✅ **We can help with:**

- Installation and setup issues
- Usage questions and examples
- Bug reports and fixes
- Feature discussions
- Pattern creation guidance
- Integration questions

❌ **We cannot help with:**

- Custom development for your specific use case
- Troubleshooting your specific AI provider issues
- General AI or programming tutorials
- Commercial support agreements

## 💪 Contributing Back

The best way to get help is to help others:

- Answer questions in discussions
- Improve documentation
- Share useful patterns
- Report bugs clearly
- Review pull requests

See our [Contributing Guide](./CONTRIBUTING.md) for details.

---

*Remember: We're all here to make Fabric better. Be kind, be helpful, and let's build something amazing together!*
docs/Using-Speech-To-Text.md: new file, 139 lines
@@ -0,0 +1,139 @@

# Using Speech-To-Text (STT) with Fabric

Fabric supports speech-to-text transcription of audio and video files using OpenAI's transcription models. This feature allows you to convert spoken content into text that can then be processed through Fabric's patterns.

## Overview

The STT feature integrates OpenAI's Whisper and GPT-4o transcription models to convert audio/video files into text. The transcribed text is automatically passed as input to your chosen pattern or chat session.

## Requirements

- OpenAI API key configured in Fabric
- For files larger than 25MB: `ffmpeg` installed on your system
- Supported audio/video formats: `.mp3`, `.mp4`, `.mpeg`, `.mpga`, `.m4a`, `.wav`, `.webm`

## Basic Usage

### Simple Transcription

To transcribe an audio file and send the result to a pattern:

```bash
fabric --transcribe-file /path/to/audio.mp3 --transcribe-model whisper-1 --pattern summarize
```

### Transcription Only

To just transcribe a file without applying a pattern:

```bash
fabric --transcribe-file /path/to/audio.mp3 --transcribe-model whisper-1
```

## Command Line Flags

### Required Flags

- `--transcribe-file`: Path to the audio or video file to transcribe
- `--transcribe-model`: Model to use for transcription (required when using transcription)

### Optional Flags

- `--split-media-file`: Automatically split files larger than 25MB into chunks using ffmpeg

## Available Models

You can list all available transcription models with:

```bash
fabric --list-transcription-models
```

Currently supported models:

- `whisper-1`: OpenAI's Whisper model
- `gpt-4o-mini-transcribe`: GPT-4o Mini transcription model
- `gpt-4o-transcribe`: GPT-4o transcription model

## File Size Handling

### Files Under 25MB

Files under the 25MB limit are processed directly without any special handling.

### Files Over 25MB

For files exceeding OpenAI's 25MB limit, you have two options:

1. **Manual handling**: The command will fail with an error message suggesting to use `--split-media-file`
2. **Automatic splitting**: Use the `--split-media-file` flag to automatically split the file into chunks

```bash
fabric --transcribe-file large_recording.mp4 --transcribe-model whisper-1 --split-media-file --pattern summarize
```

When splitting is enabled:

- Fabric uses `ffmpeg` to split the file into 10-minute segments initially
- If segments are still too large, it reduces the segment time by half repeatedly
- All segments are transcribed and the results are concatenated
- Temporary files are automatically cleaned up after processing

## Integration with Patterns

The transcribed text is seamlessly integrated into Fabric's workflow:

1. File is transcribed using the specified model
2. Transcribed text becomes the input message
3. Text is sent to the specified pattern or chat session

### Example Workflows

**Meeting transcription and summarization:**

```bash
fabric --transcribe-file meeting.mp4 --transcribe-model gpt-4o-transcribe --pattern summarize
```

**Interview analysis:**

```bash
fabric --transcribe-file interview.mp3 --transcribe-model whisper-1 --pattern extract_insights
```

**Large video file processing:**

```bash
fabric --transcribe-file presentation.mp4 --transcribe-model gpt-4o-transcribe --split-media-file --pattern create_summary
```

## Error Handling

Common error scenarios:

- **Unsupported format**: Only the listed audio/video formats are supported
- **File too large**: Use `--split-media-file` for files over 25MB
- **Missing ffmpeg**: Install ffmpeg for automatic file splitting
- **Invalid model**: Use `--list-transcription-models` to see available models
- **Missing model**: The `--transcribe-model` flag is required when using `--transcribe-file`

## Technical Details

### Implementation

- Transcription is handled in `internal/cli/transcribe.go:14`
- OpenAI-specific implementation in `internal/plugins/ai/openai/openai_audio.go:41`
- File splitting uses ffmpeg with configurable segment duration
- Supports any vendor that implements the `transcriber` interface

### Processing Pipeline

1. CLI validates file format and size
2. If file > 25MB and splitting enabled, file is split using ffmpeg
3. Each file/segment is sent to OpenAI's transcription API
4. Results are concatenated with spaces between segments
5. Transcribed text is passed as input to the main Fabric pipeline

### Vendor Support

Currently, only OpenAI is supported for transcription, but the interface allows for future expansion to other vendors that provide transcription capabilities.
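The `transcriber` interface referenced above is not spelled out in this changeset. As a rough orientation only, here is a minimal sketch of what such a vendor contract could look like, inferred from the OpenAI client's `TranscribeFile` method that appears later in this diff; the interface name, parameter names, and return types are assumptions, not Fabric's definitive API.

```go
package ai

import "context"

// transcriber is a sketch of the vendor-side transcription contract.
// Inferred from the OpenAI client's TranscribeFile method in this changeset;
// the exact name and signature in the fabric source may differ.
type transcriber interface {
	// TranscribeFile converts the audio/video file at filePath to text using
	// the given model. When split is true, files over the provider's size
	// limit are chunked with ffmpeg and the chunk transcripts are concatenated.
	TranscribeFile(ctx context.Context, filePath, model string, split bool) (string, error)
}
```

Any vendor exposing a method with this shape could then, in principle, be wired into the same CLI flow that currently routes transcription to OpenAI.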
@@ -9,6 +9,7 @@ import (

    "github.com/danielmiessler/fabric/internal/core"
    "github.com/danielmiessler/fabric/internal/domain"
+   debuglog "github.com/danielmiessler/fabric/internal/log"
    "github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
    "github.com/danielmiessler/fabric/internal/tools/notifications"
)

@@ -135,7 +136,7 @@ func handleChatProcessing(currentFlags *Flags, registry *core.PluginRegistry, me
    if chatOptions.Notification {
        if err = sendNotification(chatOptions, chatReq.PatternName, result); err != nil {
            // Log notification error but don't fail the main command
-           fmt.Fprintf(os.Stderr, "Failed to send notification: %v\n", err)
+           debuglog.Log("Failed to send notification: %v\n", err)
        }
    }
@@ -3,10 +3,10 @@ package cli
import (
    "encoding/json"
    "fmt"
    "os"
    "strings"

    "github.com/danielmiessler/fabric/internal/core"
    debuglog "github.com/danielmiessler/fabric/internal/log"
    "github.com/danielmiessler/fabric/internal/plugins/ai/openai"
    "github.com/danielmiessler/fabric/internal/tools/converter"
    "github.com/danielmiessler/fabric/internal/tools/youtube"

@@ -34,7 +34,7 @@ func Cli(version string) (err error) {
    var registry, err2 = initializeFabric()
    if err2 != nil {
        if !currentFlags.Setup {
-           fmt.Fprintln(os.Stderr, err2.Error())
+           debuglog.Log("%s\n", err2.Error())
            currentFlags.Setup = true
        }
        // Return early if registry is nil to prevent panics in subsequent handlers
@@ -13,6 +13,7 @@ import (

    "github.com/danielmiessler/fabric/internal/chat"
    "github.com/danielmiessler/fabric/internal/domain"
+   debuglog "github.com/danielmiessler/fabric/internal/log"
    "github.com/danielmiessler/fabric/internal/util"
    "github.com/jessevdk/go-flags"
    "golang.org/x/text/language"

@@ -101,18 +102,12 @@ type Flags struct {
    Notification        bool                 `long:"notification" yaml:"notification" description:"Send desktop notification when command completes"`
    NotificationCommand string               `long:"notification-command" yaml:"notificationCommand" description:"Custom command to run for notifications (overrides built-in notifications)"`
    Thinking            domain.ThinkingLevel `long:"thinking" yaml:"thinking" description:"Set reasoning/thinking level (e.g., off, low, medium, high, or numeric tokens for Anthropic or Google Gemini)"`
-}
-
-var debug = false
-
-func Debugf(format string, a ...interface{}) {
-   if debug {
-       fmt.Printf("DEBUG: "+format, a...)
-   }
+   Debug int `long:"debug" description:"Set debug level (0=off, 1=basic, 2=detailed, 3=trace)" default:"0"`
}

// Init Initialize flags. returns a Flags struct and an error
func Init() (ret *Flags, err error) {
+   debuglog.SetLevel(debuglog.LevelFromInt(parseDebugLevel(os.Args[1:])))
    // Track which yaml-configured flags were set on CLI
    usedFlags := make(map[string]bool)
    yamlArgsScan := os.Args[1:]

@@ -128,11 +123,11 @@ func Init() (ret *Flags, err error) {
        shortTag := field.Tag.Get("short")
        if longTag != "" {
            flagToYamlTag[longTag] = yamlTag
-           Debugf("Mapped long flag %s to yaml tag %s\n", longTag, yamlTag)
+           debuglog.Debug(debuglog.Detailed, "Mapped long flag %s to yaml tag %s\n", longTag, yamlTag)
        }
        if shortTag != "" {
            flagToYamlTag[shortTag] = yamlTag
-           Debugf("Mapped short flag %s to yaml tag %s\n", shortTag, yamlTag)
+           debuglog.Debug(debuglog.Detailed, "Mapped short flag %s to yaml tag %s\n", shortTag, yamlTag)
        }
    }
}

@@ -144,7 +139,7 @@ func Init() (ret *Flags, err error) {
        if flag != "" {
            if yamlTag, exists := flagToYamlTag[flag]; exists {
                usedFlags[yamlTag] = true
-               Debugf("CLI flag used: %s (yaml: %s)\n", flag, yamlTag)
+               debuglog.Debug(debuglog.Detailed, "CLI flag used: %s (yaml: %s)\n", flag, yamlTag)
            }
        }
    }

@@ -156,6 +151,7 @@ func Init() (ret *Flags, err error) {
    if args, err = parser.Parse(); err != nil {
        return
    }
+   debuglog.SetLevel(debuglog.LevelFromInt(ret.Debug))

    // Check to see if a ~/.config/fabric/config.yaml config file exists (only when user didn't specify a config)
    if ret.Config == "" {

@@ -163,7 +159,7 @@ func Init() (ret *Flags, err error) {
        if defaultConfigPath, err := util.GetDefaultConfigPath(); err == nil && defaultConfigPath != "" {
            ret.Config = defaultConfigPath
        } else if err != nil {
-           Debugf("Could not determine default config path: %v\n", err)
+           debuglog.Debug(debuglog.Detailed, "Could not determine default config path: %v\n", err)
        }
    }

@@ -188,13 +184,13 @@ func Init() (ret *Flags, err error) {
            if flagField.CanSet() {
                if yamlField.Type() != flagField.Type() {
                    if err := assignWithConversion(flagField, yamlField); err != nil {
-                       Debugf("Type conversion failed for %s: %v\n", yamlTag, err)
+                       debuglog.Debug(debuglog.Detailed, "Type conversion failed for %s: %v\n", yamlTag, err)
                        continue
                    }
                } else {
                    flagField.Set(yamlField)
                }
-               Debugf("Applied YAML value for %s: %v\n", yamlTag, yamlField.Interface())
+               debuglog.Debug(debuglog.Detailed, "Applied YAML value for %s: %v\n", yamlTag, yamlField.Interface())
            }
        }
    }

@@ -220,6 +216,22 @@ func Init() (ret *Flags, err error) {
    return
}

+func parseDebugLevel(args []string) int {
+   for i := 0; i < len(args); i++ {
+       arg := args[i]
+       if arg == "--debug" && i+1 < len(args) {
+           if lvl, err := strconv.Atoi(args[i+1]); err == nil {
+               return lvl
+           }
+       } else if strings.HasPrefix(arg, "--debug=") {
+           if lvl, err := strconv.Atoi(strings.TrimPrefix(arg, "--debug=")); err == nil {
+               return lvl
+           }
+       }
+   }
+   return 0
+}

func extractFlag(arg string) string {
    var flag string
    if strings.HasPrefix(arg, "--") {

@@ -289,7 +301,7 @@ func loadYAMLConfig(configPath string) (*Flags, error) {
        return nil, fmt.Errorf("error parsing config file: %w", err)
    }

-   Debugf("Config: %v\n", config)
+   debuglog.Debug(debuglog.Detailed, "Config: %v\n", config)

    return config, nil
}
@@ -7,6 +7,7 @@ import (
    "strings"

    "github.com/atotto/clipboard"
+   debuglog "github.com/danielmiessler/fabric/internal/log"
)

func CopyToClipboard(message string) (err error) {

@@ -30,7 +31,7 @@ func CreateOutputFile(message string, fileName string) (err error) {
    if _, err = file.WriteString(message); err != nil {
        err = fmt.Errorf("error writing to file: %v", err)
    } else {
-       fmt.Fprintf(os.Stderr, "\n\n[Output also written to %s]\n", fileName)
+       debuglog.Log("\n\n[Output also written to %s]\n", fileName)
    }
    return
}
@@ -10,6 +10,7 @@ import (
    "strconv"
    "strings"

+   debuglog "github.com/danielmiessler/fabric/internal/log"
    "github.com/danielmiessler/fabric/internal/plugins/ai/anthropic"
    "github.com/danielmiessler/fabric/internal/plugins/ai/azure"
    "github.com/danielmiessler/fabric/internal/plugins/ai/bedrock"

@@ -20,7 +21,7 @@ import (
    "github.com/danielmiessler/fabric/internal/plugins/ai/ollama"
    "github.com/danielmiessler/fabric/internal/plugins/ai/openai"
    "github.com/danielmiessler/fabric/internal/plugins/ai/openai_compatible"
-   "github.com/danielmiessler/fabric/internal/plugins/ai/perplexity" // Added Perplexity plugin
+   "github.com/danielmiessler/fabric/internal/plugins/ai/perplexity"
    "github.com/danielmiessler/fabric/internal/plugins/strategy"

    "github.com/samber/lo"

@@ -339,7 +340,7 @@ func (o *PluginRegistry) GetChatter(model string, modelContextLength int, vendor
    } else {
        availableVendors := models.FindGroupsByItem(model)
        if len(availableVendors) > 1 {
-           fmt.Fprintf(os.Stderr, "Warning: multiple vendors provide model %s: %s. Using %s. Specify --vendor to select a vendor.\n", model, strings.Join(availableVendors, ", "), availableVendors[0])
+           debuglog.Log("Warning: multiple vendors provide model %s: %s. Using %s. Specify --vendor to select a vendor.\n", model, strings.Join(availableVendors, ", "), availableVendors[0])
        }
        ret.vendor = vendorManager.FindByName(models.FindGroupsByItemFirst(model))
    }

@@ -10,6 +10,7 @@ import (

    "github.com/danielmiessler/fabric/internal/chat"
    "github.com/danielmiessler/fabric/internal/domain"
+   debuglog "github.com/danielmiessler/fabric/internal/log"
    "github.com/danielmiessler/fabric/internal/plugins"
    "github.com/danielmiessler/fabric/internal/plugins/ai"
    "github.com/danielmiessler/fabric/internal/plugins/db/fsdb"

@@ -72,7 +73,12 @@ func TestGetChatter_WarnsOnAmbiguousModel(t *testing.T) {
    r, w, _ := os.Pipe()
    oldStderr := os.Stderr
    os.Stderr = w
-   defer func() { os.Stderr = oldStderr }()
+   // Redirect log output to our pipe to capture unconditional log messages
+   debuglog.SetOutput(w)
+   defer func() {
+       os.Stderr = oldStderr
+       debuglog.SetOutput(oldStderr)
+   }()

    chatter, err := registry.GetChatter("shared-model", 0, "", "", false, false)
    w.Close()
internal/log/log.go: new file, 78 lines
@@ -0,0 +1,78 @@

package log

import (
    "fmt"
    "io"
    "os"
    "sync"
)

// Level represents the debug verbosity.
type Level int

const (
    // Off disables all debug output.
    Off Level = iota
    // Basic provides minimal debugging information.
    Basic
    // Detailed provides more verbose debugging.
    Detailed
    // Trace is the most verbose level.
    Trace
)

var (
    mu     sync.RWMutex
    level  Level     = Off
    output io.Writer = os.Stderr
)

// SetLevel sets the global debug level.
func SetLevel(l Level) {
    mu.Lock()
    level = l
    mu.Unlock()
}

// LevelFromInt converts an int to a Level.
func LevelFromInt(i int) Level {
    switch {
    case i <= 0:
        return Off
    case i == 1:
        return Basic
    case i == 2:
        return Detailed
    case i >= 3:
        return Trace
    default:
        return Off
    }
}

// Debug writes a debug message if the global level permits.
func Debug(l Level, format string, a ...interface{}) {
    mu.RLock()
    current := level
    w := output
    mu.RUnlock()
    if current >= l {
        fmt.Fprintf(w, "DEBUG: "+format, a...)
    }
}

// Log writes a message unconditionally to stderr.
// This is for important messages that should always be shown regardless of debug level.
func Log(format string, a ...interface{}) {
    mu.RLock()
    w := output
    mu.RUnlock()
    fmt.Fprintf(w, format, a...)
}

// SetOutput allows overriding the output destination for debug logs.
func SetOutput(w io.Writer) {
    mu.Lock()
    output = w
    mu.Unlock()
}
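As a quick orientation, here is a minimal usage sketch of this package, which the rest of the changeset imports under the alias `debuglog`. It only compiles from code inside the fabric module because the package sits under `internal/`; the level value and message strings below are illustrative, not taken from the source.

```go
package main

import (
	debuglog "github.com/danielmiessler/fabric/internal/log"
)

func main() {
	// Map a CLI-style integer (e.g. the value of --debug) to a Level.
	debuglog.SetLevel(debuglog.LevelFromInt(2)) // 2 maps to Detailed

	// Printed: the global level (Detailed) is >= Basic.
	debuglog.Debug(debuglog.Basic, "loaded %d patterns\n", 42)

	// Suppressed: Trace is above the current level.
	debuglog.Debug(debuglog.Trace, "raw request payload: %q\n", "...")

	// Always printed, regardless of level (user-facing notices and warnings).
	debuglog.Log("Warning: something the user should always see: %v\n", "example")
}
```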
@@ -4,7 +4,6 @@ import (
    "context"
    "fmt"
    "net/http"
-   "os"
    "strconv"
    "strings"

@@ -12,6 +11,7 @@ import (
    "github.com/anthropics/anthropic-sdk-go/option"
    "github.com/danielmiessler/fabric/internal/chat"
    "github.com/danielmiessler/fabric/internal/domain"
+   debuglog "github.com/danielmiessler/fabric/internal/log"
    "github.com/danielmiessler/fabric/internal/plugins"
    "github.com/danielmiessler/fabric/internal/util"
)

@@ -195,7 +195,7 @@ func (an *Client) SendStream(
    }
    stream := an.client.Messages.NewStreaming(ctx, params, reqOpts...)
    if stream.Err() != nil && len(betas) > 0 {
-       fmt.Fprintf(os.Stderr, "Anthropic beta feature %s failed: %v\n", strings.Join(betas, ","), stream.Err())
+       debuglog.Debug(debuglog.Basic, "Anthropic beta feature %s failed: %v\n", strings.Join(betas, ","), stream.Err())
        stream = an.client.Messages.NewStreaming(ctx, params)
    }

@@ -289,7 +289,7 @@ func (an *Client) Send(ctx context.Context, msgs []*chat.ChatCompletionMessage,
    }
    if message, err = an.client.Messages.New(ctx, params, reqOpts...); err != nil {
        if len(betas) > 0 {
-           fmt.Fprintf(os.Stderr, "Anthropic beta feature %s failed: %v\n", strings.Join(betas, ","), err)
+           debuglog.Debug(debuglog.Basic, "Anthropic beta feature %s failed: %v\n", strings.Join(betas, ","), err)
            if message, err = an.client.Messages.New(ctx, params); err != nil {
                return
            }
@@ -9,11 +9,11 @@ import (
    "fmt"
    "io"
    "net/http"
    "os"
    "os/exec"
    "strings"
    "time"

    debuglog "github.com/danielmiessler/fabric/internal/log"
    "github.com/danielmiessler/fabric/internal/util"
    "golang.org/x/oauth2"
)

@@ -77,7 +77,7 @@ func (t *OAuthTransport) getValidToken(tokenIdentifier string) (string, error) {
    }
    // If no token exists, run OAuth flow
    if token == nil {
-       fmt.Fprintln(os.Stderr, "No OAuth token found, initiating authentication...")
+       debuglog.Log("No OAuth token found, initiating authentication...\n")
        newAccessToken, err := RunOAuthFlow(tokenIdentifier)
        if err != nil {
            return "", fmt.Errorf("failed to authenticate: %w", err)

@@ -87,11 +87,11 @@ func (t *OAuthTransport) getValidToken(tokenIdentifier string) (string, error) {

    // Check if token needs refresh (5 minute buffer)
    if token.IsExpired(5) {
-       fmt.Fprintln(os.Stderr, "OAuth token expired, refreshing...")
+       debuglog.Log("OAuth token expired, refreshing...\n")
        newAccessToken, err := RefreshToken(tokenIdentifier)
        if err != nil {
            // If refresh fails, try re-authentication
-           fmt.Fprintln(os.Stderr, "Token refresh failed, re-authenticating...")
+           debuglog.Log("Token refresh failed, re-authenticating...\n")
            newAccessToken, err = RunOAuthFlow(tokenIdentifier)
            if err != nil {
                return "", fmt.Errorf("failed to refresh or re-authenticate: %w", err)

@@ -143,13 +143,13 @@ func RunOAuthFlow(tokenIdentifier string) (token string, err error) {
    if err == nil && existingToken != nil {
        // If token exists but is expired, try refreshing first
        if existingToken.IsExpired(5) {
-           fmt.Fprintln(os.Stderr, "Found expired OAuth token, attempting refresh...")
+           debuglog.Log("Found expired OAuth token, attempting refresh...\n")
            refreshedToken, refreshErr := RefreshToken(tokenIdentifier)
            if refreshErr == nil {
-               fmt.Fprintln(os.Stderr, "Token refresh successful")
+               debuglog.Log("Token refresh successful\n")
                return refreshedToken, nil
            }
-           fmt.Fprintf(os.Stderr, "Token refresh failed (%v), proceeding with full OAuth flow...\n", refreshErr)
+           debuglog.Log("Token refresh failed (%v), proceeding with full OAuth flow...\n", refreshErr)
        } else {
            // Token exists and is still valid
            return existingToken.AccessToken, nil

@@ -176,10 +176,10 @@ func RunOAuthFlow(tokenIdentifier string) (token string, err error) {
        oauth2.SetAuthURLParam("state", verifier),
    )

-   fmt.Fprintln(os.Stderr, "Open the following URL in your browser. Fabric would like to authorize:")
-   fmt.Fprintln(os.Stderr, authURL)
+   debuglog.Log("Open the following URL in your browser. Fabric would like to authorize:\n")
+   debuglog.Log("%s\n", authURL)
    openBrowser(authURL)
-   fmt.Fprint(os.Stderr, "Paste the authorization code here: ")
+   debuglog.Log("Paste the authorization code here: ")
    var code string
    fmt.Scanln(&code)
    parts := strings.SplitN(code, "#", 2)
@@ -11,6 +11,8 @@ import (
    "sort"
    "strings"

+   debuglog "github.com/danielmiessler/fabric/internal/log"

    openai "github.com/openai/openai-go"
)

@@ -56,18 +58,14 @@ func (o *Client) TranscribeFile(ctx context.Context, filePath, model string, spl
        return "", err
    }

-   debug := os.Getenv("FABRIC_STT_DEBUG") != ""
-
    var files []string
    var cleanup func()
    if info.Size() > MaxAudioFileSize {
        if !split {
            return "", fmt.Errorf("file %s exceeds 25MB limit; use --split-media-file to enable automatic splitting", filePath)
        }
-       if debug {
-           fmt.Fprintf(os.Stderr, "File %s is larger than the size limit... breaking it up into chunks...\n", filePath)
-       }
-       if files, cleanup, err = splitAudioFile(filePath, ext, MaxAudioFileSize, debug); err != nil {
+       debuglog.Log("File %s is larger than the size limit... breaking it up into chunks...\n", filePath)
+       if files, cleanup, err = splitAudioFile(filePath, ext, MaxAudioFileSize); err != nil {
            return "", err
        }
        defer cleanup()

@@ -77,9 +75,7 @@ func (o *Client) TranscribeFile(ctx context.Context, filePath, model string, spl

    var builder strings.Builder
    for i, f := range files {
-       if debug {
-           fmt.Fprintf(os.Stderr, "Using model %s to transcribe part %d (file name: %s)...\n", model, i+1, f)
-       }
+       debuglog.Log("Using model %s to transcribe part %d (file name: %s)...\n", model, i+1, f)
        var chunk *os.File
        if chunk, err = os.Open(f); err != nil {
            return "", err

@@ -105,7 +101,7 @@ func (o *Client) TranscribeFile(ctx context.Context, filePath, model string, spl

// splitAudioFile splits the source file into chunks smaller than maxSize using ffmpeg.
// It returns the list of chunk file paths and a cleanup function.
-func splitAudioFile(src, ext string, maxSize int64, debug bool) (files []string, cleanup func(), err error) {
+func splitAudioFile(src, ext string, maxSize int64) (files []string, cleanup func(), err error) {
    if _, err = exec.LookPath("ffmpeg"); err != nil {
        return nil, nil, fmt.Errorf("ffmpeg not found: please install it")
    }

@@ -119,9 +115,7 @@ func splitAudioFile(src, ext string, maxSize int64, debug bool) (files []string,
    segmentTime := 600 // start with 10 minutes
    for {
        pattern := filepath.Join(dir, "chunk-%03d"+ext)
-       if debug {
-           fmt.Fprintf(os.Stderr, "Running ffmpeg to split audio into %d-second chunks...\n", segmentTime)
-       }
+       debuglog.Log("Running ffmpeg to split audio into %d-second chunks...\n", segmentTime)
        cmd := exec.Command("ffmpeg", "-y", "-i", src, "-f", "segment", "-segment_time", fmt.Sprintf("%d", segmentTime), "-c", "copy", pattern)
        var stderr bytes.Buffer
        cmd.Stderr = &stderr
@@ -102,6 +102,11 @@ var ProviderMap = map[string]ProviderConfig{
        BaseURL:             "https://api.together.xyz/v1",
        ImplementsResponses: false,
    },
+   "Venice AI": {
+       Name:                "Venice AI",
+       BaseURL:             "https://api.venice.ai/api/v1",
+       ImplementsResponses: false,
+   },
}

// GetProviderByName returns the provider configuration for a given name with O(1) lookup
@@ -4,9 +4,10 @@ import (
    "context"
    "fmt"
    "os"
-   "sync" // Added sync package
+   "sync"

    "github.com/danielmiessler/fabric/internal/domain"
+   debuglog "github.com/danielmiessler/fabric/internal/log"
    "github.com/danielmiessler/fabric/internal/plugins"
    perplexity "github.com/sgaunet/perplexity-go/v2"

@@ -171,7 +172,7 @@ func (c *Client) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.Cha
    if err != nil {
        // Log error, can't send to string channel directly.
        // Consider a mechanism to propagate this error if needed.
-       fmt.Fprintf(os.Stderr, "perplexity streaming error: %v\\n", err) // Corrected capitalization
+       debuglog.Log("perplexity streaming error: %v\n", err)
        // If the error occurs during stream setup, the channel might not have been closed by the receiver loop.
        // However, closing it here might cause a panic if the receiver loop also tries to close it.
        // close(channel) // Caution: Uncommenting this may cause panic, as channel is closed in the receiver goroutine.
@@ -148,7 +148,6 @@ func (o *VendorsManager) setupVendorTo(vendor Vendor, configuredVendors map[stri
        delete(configuredVendors, vendor.GetName())
        fmt.Printf("[%v] skipped\n", vendor.GetName())
    }
    return
}

type modelResult struct {
@@ -10,8 +10,9 @@ import (
    "strings"
    "time"

    debuglog "github.com/danielmiessler/fabric/internal/log"

    "gopkg.in/yaml.v3"
    // Add this import
)

@@ -87,9 +88,7 @@ func NewExtensionRegistry(configDir string) *ExtensionRegistry {
    r.ensureConfigDir()

    if err := r.loadRegistry(); err != nil {
-       if Debug {
-           fmt.Printf("Warning: could not load extension registry: %v\n", err)
-       }
+       debuglog.Log("Warning: could not load extension registry: %v\n", err)
    }

    return r
@@ -6,6 +6,8 @@ import (
    "path/filepath"
    "regexp"
    "strings"

+   debuglog "github.com/danielmiessler/fabric/internal/log"
)

@@ -14,7 +16,6 @@ var (
    filePlugin  = &FilePlugin{}
    fetchPlugin = &FetchPlugin{}
    sysPlugin   = &SysPlugin{}
-   Debug       = false // Debug flag
)

var extensionManager *ExtensionManager

@@ -33,9 +34,7 @@ var pluginPattern = regexp.MustCompile(`\{\{plugin:([^:]+):([^:]+)(?::([^}]+))?\
var extensionPattern = regexp.MustCompile(`\{\{ext:([^:]+):([^:]+)(?::([^}]+))?\}\}`)

func debugf(format string, a ...interface{}) {
-   if Debug {
-       fmt.Printf(format, a...)
-   }
+   debuglog.Debug(debuglog.Trace, format, a...)
}

func ApplyTemplate(content string, variables map[string]string, input string) (string, error) {
@@ -7,6 +7,7 @@ import (
    "sort"
    "strings"

+   debuglog "github.com/danielmiessler/fabric/internal/log"
    "github.com/danielmiessler/fabric/internal/plugins"
    "github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
    "github.com/danielmiessler/fabric/internal/tools/githelper"

@@ -335,9 +336,9 @@ func (o *PatternsLoader) createUniquePatternsFile() (err error) {
                patternNamesMap[entry.Name()] = true
            }
        }
-       fmt.Fprintf(os.Stderr, "📂 Also included patterns from custom directory: %s\n", o.Patterns.CustomPatternsDir)
+       debuglog.Log("📂 Also included patterns from custom directory: %s\n", o.Patterns.CustomPatternsDir)
    } else {
-       fmt.Fprintf(os.Stderr, "Warning: Could not read custom patterns directory %s: %v\n", o.Patterns.CustomPatternsDir, customErr)
+       debuglog.Log("Warning: Could not read custom patterns directory %s: %v\n", o.Patterns.CustomPatternsDir, customErr)
    }
}
@@ -1 +1 @@
-"1.4.292"
+"1.4.297"