mirror of https://github.com/danielmiessler/Fabric.git (synced 2026-01-09 22:38:10 -05:00)
Compare commits
280 Commits
SHA1 (author and date columns not shown):

```text
09d2d7efc5  4c2ebf25fa  b1b748dc9c  cc3e4226d7  0f994d8136
298a9007ad  b36e5d3372  d1b8eb10ce  6000e7469e  88d3fe65f3
558e7f877d  f33d27f836  1694324261  3a3f5c50a8  b1abfd71c2
f5b7279225  b974e1bfd5  8dda68b3b9  33c24e0cb2  8fb0c5b8a8
d82122b624  f5966af95a  9470ee1655  9a118cf637  d69757908f
30525ef1c0  8414e72545  caca366511  261eb30951  bdb36ee296
1351f138fb  8da51968dc  30d23f15be  0a718be622  21f258caa4
3584f83b30  056791233a  dc435dcc6e  6edbc9dd38  fd60d66c0d
08ec89bbe1  836557f41c  f7c5c6d344  9d18ad523e  efcd7dcac2
768e87879e  3c51cad614  bc642904e0  fa135036f4  2d414ec394
9e72df9c6c  1a933e1c9a  d5431f9843  e2dabc406d  31f7f22629
29aaf430ca  9ef3518a07  0b40bad986  34ff4d30f2  2b195f204d
1d9596bf3d  72d099d40a  7ab6fe3baa  198964df82  f0998d3686
75875ba9f5  ea009ff64b  3c317f088b  f91ee2ce3c  98968d972f
8ea264e96c  5203cba5a7  f5fba12360  d7cc3ff8f1  4887cdc353
6aa38d2abc  737e37f00e  42bb72ab65  612ae4e3b5  27f9134912
c02718855d  4f16222b31  8c27b34d0f  0b71b54698  614b1322d5
eab335873e  577dc9896d  3a4bb4b9b2  c766915764  71c08648c6
95e2e6a5ac  5cdf297d85  5d7137804a  8b6b8fbd44  3e75aa260f
92aca524a4  f70eff2e41  489c481acc  3a1eaf375f  52246dda28
3c200e2883  bda6505d5c  a241c98837  12d7803044  d37a1acc9b
7254571501  c300262804  e8ba57be90  15fad3da87  e2b0d3c368
3de85eb50e  58e635c873  dde21d2337  e3fcbcb12b  839296e3ba
5b97b0e56a  38ff2288da  771a1ac2e6  f6fd6f535a  f548ca5f82
616f51748e  db5aaf9da6  a922032756  a415409a48  19d95b9014
73c7a8c147  4dc84bd64d  dd96014f9b  3cf2557af3  fcda0338cb
ac19c81ef0  a83d57065f  055ed32ab8  8d62165444  63bc7a7e79
f2b2501767  be1e2485ee  38c4211649  71e6355c10  64411cdc02
9a2ff983a4  a522d4a411  9bdd77c277  cc68dddfe8  07ee7f8b21
15a355f08a  c50486b611  edaca7a045  28432a50f0  8ab891fcff
cab6df88ea  42afd92f31  76d6b1721e  7d562096d1  91c1aca0dd
b8008a34fb  482759ae72  b0d096d0ea  e56ecfb7ae  951bd134eb
7ff04658f3  272f04dd32  29cb3796bf  f51f9e75a9  63475784c7
1a7bb27370  4badaa4c85  bf6be964fd  cdbcb0a512  f81cf193a2
cba56fcde6  72cbd13917  dc722f9724  1a35f32a48  65bd2753c2
570c9a9404  15151fe9ee  2aad4caf9b  289fda8c74  fd40778472
bc1641a68c  5cf15d22d3  2b2a25daaa  75a7f25642  8bab58f225
8ec006e02c  2508dc6397  7670df35ad  3b9782f942  3fca3489fb
bb2d58eae0  87df7dc383  1d69afa1c9  96c18b4c99  dd5173963b
da1c8ec979  ac97f9984f  181b812eaf  fe94165d31  16e92690aa
1c33799aa8  9559e618c3  ac32e8e64a  82340e6126  5dec53726a
b0eb136cbb  63f4370ff1  b3cc2c737d  e43b4191e4  744c565120
1473ac1465  c38c16f0db  a4b1db4193  d44bc19a84  a2e618e11c
cb90379b30  4868687746  85780fee76  497b1ed682  135433b749
f185dedb37  c74a157dcf  91a336e870  5212fbcc37  6d8eb3d2b9
d3bba5d026  699762b694  f2a6f1bd98  3176adf59b  7e29966622
0af0ab683d  e72e67de71  414b6174e7  f63e0dfc05  4ef8578e47
12ee690ae4  cc378be485  06fc8d8732  9e4ed8ecb3  c369425708
cf074d3411  47f75237ff  fad0a065d4  a59a3517d8  04c3c0c512
cb837bde2d  2ad454b6dc  c0ea25f816  87796d4fa9  e1945a0b62
ecac2b4c34  7ed4de269e  6bd305906d  6aeca6e4da  b34f249e24
b187a80275  a6fc54a991  b9f4b9837a  2bedf35957  b9df64a0d8
6b07b33ff2  ff245edd51  2e0a4da876  1f3befbbbc  8988206fbe
1bd5f9d7e4  832fd2f718  dd0935fb70  e64bdd849c  be82b4b013
6e2f00090c  7d6505fe98  23c1437794  dd5e57477f  2c2b374664
b884c529bd  137aff2268  42d3f45c57  f8ada0b148  cb5fa50f68
```
6 .github/workflows/patterns.yaml vendored

```diff
@@ -3,7 +3,7 @@ name: Patterns Artifact
 on:
   push:
     paths:
-      - "patterns/**" # Trigger only on changes to files in the patterns folder
+      - "data/patterns/**" # Trigger only on changes to files in the patterns folder
 
 jobs:
   zip-and-upload:
@@ -18,13 +18,13 @@ jobs:
       - name: Verify Changes in Patterns Folder
         run: |
           git fetch origin
-          if git diff --quiet HEAD~1 -- patterns; then
+          if git diff --quiet HEAD~1 -- data/patterns; then
            echo "No changes detected in patterns folder."
            exit 1
          fi
 
       - name: Zip the Patterns Folder
-        run: zip -r patterns.zip patterns/
+        run: zip -r patterns.zip data/patterns/
 
       - name: Upload Patterns Artifact
         uses: actions/upload-artifact@v4
```
100 .github/workflows/release.yml vendored

```diff
@@ -27,8 +27,39 @@ jobs:
       - name: Run tests
         run: go test -v ./...
 
+  get_version:
+    name: Get version
+    runs-on: ubuntu-latest
+    outputs:
+      latest_tag: ${{ steps.get_version.outputs.latest_tag }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Get version from source
+        id: get_version
+        shell: bash
+        run: |
+          if [ ! -f "nix/pkgs/fabric/version.nix" ]; then
+            echo "Error: version.nix file not found"
+            exit 1
+          fi
+          version=$(cat nix/pkgs/fabric/version.nix | tr -d '"' | tr -cd '0-9.')
+          if [ -z "$version" ]; then
+            echo "Error: version is empty"
+            exit 1
+          fi
+          if ! echo "$version" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+' > /dev/null; then
+            echo "Error: Invalid version format: $version"
+            exit 1
+          fi
+          echo "latest_tag=v$version" >> $GITHUB_OUTPUT
+
   build:
     name: Build binaries for Windows, macOS, and Linux
+    needs: [test, get_version]
     runs-on: ${{ matrix.os }}
     permissions:
       contents: write
@@ -51,25 +82,14 @@ jobs:
         with:
           go-version-file: ./go.mod
 
-      - name: Determine OS Name
-        id: os-name
-        run: |
-          if [ "${{ matrix.os }}" == "ubuntu-latest" ]; then
-            echo "OS=linux" >> $GITHUB_ENV
-          elif [ "${{ matrix.os }}" == "macos-latest" ]; then
-            echo "OS=darwin" >> $GITHUB_ENV
-          else
-            echo "OS=windows" >> $GITHUB_ENV
-          fi
-        shell: bash
-
       - name: Build binary on Linux and macOS
         if: matrix.os != 'windows-latest'
         env:
-          GOOS: ${{ env.OS }}
+          GOOS: ${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}
           GOARCH: ${{ matrix.arch }}
         run: |
-          go build -o fabric-${OS}-${{ matrix.arch }} ./cmd/fabric
+          OS_NAME="${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}"
+          go build -o fabric-${OS_NAME}-${{ matrix.arch }} ./cmd/fabric
 
       - name: Build binary on Windows
         if: matrix.os == 'windows-latest'
@@ -83,8 +103,8 @@ jobs:
         if: matrix.os != 'windows-latest'
         uses: actions/upload-artifact@v4
         with:
-          name: fabric-${OS}-${{ matrix.arch }}
-          path: fabric-${OS}-${{ matrix.arch }}
+          name: fabric-${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}-${{ matrix.arch }}
+          path: fabric-${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}-${{ matrix.arch }}
 
       - name: Upload build artifact
         if: matrix.os == 'windows-latest'
@@ -93,29 +113,15 @@ jobs:
           name: fabric-windows-${{ matrix.arch }}.exe
           path: fabric-windows-${{ matrix.arch }}.exe
 
-      - name: Get latest tag
-        if: matrix.os != 'windows-latest'
-        id: get_latest_tag
-        run: |
-          latest_tag=$(git tag --sort=-creatordate | head -n 1)
-          echo "latest_tag=$latest_tag" >> $GITHUB_ENV
-
-      - name: Get latest tag
-        if: matrix.os == 'windows-latest'
-        id: get_latest_tag_windows
-        run: |
-          $latest_tag = git tag --sort=-creatordate | Select-Object -First 1
-          Add-Content -Path $env:GITHUB_ENV -Value "latest_tag=$latest_tag"
-
       - name: Create release if it doesn't exist
         shell: bash
         env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
-          if ! gh release view ${{ env.latest_tag }} >/dev/null 2>&1; then
-            gh release create ${{ env.latest_tag }} --title "Release ${{ env.latest_tag }}" --notes "Automated release for ${{ env.latest_tag }}"
+          if ! gh release view ${{ needs.get_version.outputs.latest_tag }} >/dev/null 2>&1; then
+            gh release create ${{ needs.get_version.outputs.latest_tag }} --title "Release ${{ needs.get_version.outputs.latest_tag }}" --notes "Automated release for ${{ needs.get_version.outputs.latest_tag }}"
           else
-            echo "Release ${{ env.latest_tag }} already exists."
+            echo "Release ${{ needs.get_version.outputs.latest_tag }} already exists."
           fi
 
       - name: Upload release artifact
@@ -123,11 +129,35 @@
         env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
-          gh release upload ${{ env.latest_tag }} fabric-windows-${{ matrix.arch }}.exe
+          gh release upload ${{ needs.get_version.outputs.latest_tag }} fabric-windows-${{ matrix.arch }}.exe
 
       - name: Upload release artifact
         if: matrix.os != 'windows-latest'
         env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
-          gh release upload ${{ env.latest_tag }} fabric-${OS}-${{ matrix.arch }}
+          OS_NAME="${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}"
+          gh release upload ${{ needs.get_version.outputs.latest_tag }} fabric-${OS_NAME}-${{ matrix.arch }}
+
+  update_release_notes:
+    needs: [build, get_version]
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Set up Go
+        uses: actions/setup-go@v4
+        with:
+          go-version-file: ./go.mod
+
+      - name: Update release description
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          go run ./cmd/generate_changelog --sync-db
+          go run ./cmd/generate_changelog --release ${{ needs.get_version.outputs.latest_tag }}
```
(file name not shown)

```diff
@@ -7,6 +7,11 @@ on:
     paths-ignore:
       - "data/patterns/**"
       - "**/*.md"
+      - "data/strategies/**"
+      - "cmd/generate_changelog/*.db"
+      - "cmd/generate_changelog/incoming/*.txt"
+      - "scripts/pattern_descriptions/*.json"
+      - "web/static/data/pattern_descriptions.json"
 
 permissions:
   contents: write # Ensure the workflow has write permissions
@@ -79,14 +84,24 @@ jobs:
         run: |
           nix run .#gomod2nix -- --outdir nix/pkgs/fabric
 
+      - name: Generate Changelog Entry
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          go run ./cmd/generate_changelog --process-prs ${{ env.new_tag }}
+          go run ./cmd/generate_changelog --sync-db
+
       - name: Commit changes
         run: |
           # These files are modified by the version bump process
-          git add cmd/fabric/version.go
-          git add nix/pkgs/fabric/version.nix
-          git add nix/pkgs/fabric/gomod2nix.toml
+          git add .
+
+          # The changelog tool is responsible for staging CHANGELOG.md, changelog.db,
+          # and removing the incoming/ directory.
 
           if ! git diff --staged --quiet; then
-            git commit -m "Update version to ${{ env.new_tag }} and commit $commit_hash"
+            git commit -m "chore(release): Update version to ${{ env.new_tag }}"
           else
             echo "No changes to commit."
           fi
```
10 .gitignore vendored

```diff
@@ -131,9 +131,7 @@ celerybeat.pid
 # Environments
 .env
 .venv
 env/
 venv/
 ENV/
 env.bak/
 venv.bak/
 
@@ -349,5 +347,9 @@ web/package-lock.json
 .gitignore_backup
 web/static/*.png
 
-# Local VSCode project settings
-.vscode/
+# Local tmp directory
+.tmp/
+tmp/
+
+# Ignore .claude/
+.claude/
```
3 .vscode/extensions.json vendored (Normal file)

```json
{
  "recommendations": ["davidanson.vscode-markdownlint"]
}
```
180 .vscode/settings.json vendored (Normal file)

```json
{
  "cSpell.words": [
    "Achird", "addextension", "adduser", "AIML", "anthropics", "Aoede",
    "atotto", "Autonoe", "badfile", "Behrens", "blindspots", "Bombal",
    "Callirhoe", "Callirrhoe", "Cerebras", "compadd", "compdef", "compinit",
    "creatordate", "curcontext", "custompatterns", "danielmiessler",
    "davidanson", "Debugf", "dedup", "deepseek", "Despina", "direnv",
    "dryrun", "dsrp", "editability", "Eisler", "elif", "envrc", "Erinome",
    "Errorf", "eugeis", "Eugen", "excalidraw", "exolab", "fabriclogo",
    "flac", "fpath", "frequencypenalty", "fsdb", "gantt", "genai",
    "githelper", "gjson", "GOARCH", "godotenv", "gofmt", "goimports",
    "gomod", "gonic", "goopenai", "GOPATH", "gopkg", "GOROOT", "Graphviz",
    "grokai", "Groq", "hackerone", "Haddix", "hasura", "hormozi",
    "Hormozi's", "horts", "HTMLURL", "jaredmontoya", "jessevdk", "Jina",
    "joho", "Keploy", "Kore", "ksylvan", "Langdock", "Laomedeia",
    "ldflags", "libexec", "libnotify", "listcontexts", "listextensions",
    "listmodels", "listpatterns", "listsessions", "liststrategies",
    "listvendors", "lmstudio", "Makefiles", "markmap", "matplotlib",
    "mattn", "mbed", "metacharacters", "Miessler", "nometa", "numpy",
    "ollama", "ollamaapi", "openaiapi", "opencode", "openrouter", "Orus",
    "osascript", "otiai", "pdflatex", "pipx", "PKCE", "pkgs",
    "presencepenalty", "printcontext", "printsession", "Pulcherrima",
    "pycache", "pyperclip", "readystream", "restapi", "rmextension",
    "Sadachbia", "Sadaltager", "samber", "sashabaranov", "sdist",
    "seaborn", "semgrep", "sess", "storer", "Streamlit", "stretchr",
    "subchunk", "Sulafat", "talkpanel", "Telos", "testpattern",
    "testuser", "Thacker", "tidwall", "topp", "ttrc", "unalias",
    "unconfigured", "unmarshalling", "updatepatterns", "videoid", "webp",
    "WEBVTT", "wipecontext", "wipesession", "Worktree", "writeups",
    "xclip", "yourpatternname", "youtu"
  ],
  "cSpell.ignorePaths": ["go.mod", ".gitignore", "CHANGELOG.md"],
  "markdownlint.config": {
    "MD004": false,
    "MD011": false,
    "MD024": false,
    "MD025": false,
    "MD032": false,
    "MD033": {
      "allowed_elements": ["a", "br", "code", "div", "em", "h4", "img", "module", "p"]
    },
    "MD041": false
  }
}
```
2622 CHANGELOG.md (Normal file): file diff suppressed because it is too large.
52 README.md

````diff
@@ -3,7 +3,7 @@ Fabric is graciously supported by…
 
 [](https://warp.dev/fabric)
 
-<img src="./images/fabric-logo-gif.gif" alt="fabriclogo" width="400" height="400"/>
+<img src="./docs/images/fabric-logo-gif.gif" alt="fabriclogo" width="400" height="400"/>
 
 # `fabric`
 
@@ -29,7 +29,7 @@ Fabric is graciously supported by…
 [Helper Apps](#helper-apps) •
 [Meta](#meta)
 
 
 
 </div>
 
@@ -113,30 +113,9 @@ Keep in mind that many of these were recorded when Fabric was Python-based, so r
 
 ## Updates
 
-> [!NOTE]
->
-> July 4, 2025
->
-> - **Web Search**: Fabric now supports web search for Anthropic and OpenAI models using the `--search` and `--search-location` flags. This replaces the previous plugin-based search, so you may want to remove the old `ANTHROPIC_WEB_SEARCH_TOOL_*` variables from your `~/.config/fabric/.env` file.
-> - **Image Generation**: Fabric now has powerful image generation capabilities with OpenAI.
->   - Generate images from text prompts and save them using `--image-file`.
->   - Edit existing images by providing an input image with `--attachment`.
->   - Control image `size`, `quality`, `compression`, and `background` with the new `--image-*` flags.
->
-> June 17, 2025
->
-> - Fabric now supports Perplexity AI. Configure it by using `fabric -S` to add your Perplexity AI API Key, and then try:
->
-> ```bash
-> fabric -m sonar-pro "What is the latest world news?"
-> ```
->
-> June 11, 2025
->
-> - Fabric's YouTube transcription now needs `yt-dlp` to be installed. Make sure to install the latest version (2025.06.09 as of this note). The YouTube API key is only needed for comments (the `--comments` flag) and metadata extraction (the `--metadata` flag).
+Fabric is evolving rapidly.
+
+Stay current with the latest features by reviewing the [CHANGELOG](./CHANGELOG.md) for all recent changes.
 
 ## Philosophy
 
@@ -519,6 +498,7 @@ Application Options:
   -U, --updatepatterns          Update patterns
   -c, --copy                    Copy to clipboard
   -m, --model=                  Choose model
+  -V, --vendor=                 Specify vendor for chosen model (e.g., -V "LM Studio" -m openai/gpt-oss-20b)
       --modelContextLength=     Model context length (only affects ollama)
   -o, --output=                 Output to file
       --output-session          Output the entire session (also a temporary one) to the output file
@@ -557,7 +537,7 @@ Application Options:
       --liststrategies          List all strategies
       --listvendors             List all vendors
       --shell-complete-list     Output raw list without headers/formatting (for shell completion)
-      --search                  Enable web search tool for supported models (Anthropic, OpenAI)
+      --search                  Enable web search tool for supported models (Anthropic, OpenAI, Gemini)
       --search-location=        Set location for web search results (e.g., 'America/Los_Angeles')
       --image-file=             Save generated image to specified file path (e.g., 'output.png')
       --image-size=             Image dimensions: 1024x1024, 1536x1024, 1024x1536, auto (default: auto)
@@ -565,10 +545,20 @@ Application Options:
       --image-compression=      Compression level 0-100 for JPEG/WebP formats (default: not set)
       --image-background=       Background type: opaque, transparent (default: opaque, only for PNG/WebP)
       --suppress-think          Suppress text enclosed in thinking tags
       --think-start-tag=        Start tag for thinking sections (default: <think>)
       --think-end-tag=          End tag for thinking sections (default: </think>)
+      --disable-responses-api   Disable OpenAI Responses API (default: false)
+      --voice=                  TTS voice name for supported models (e.g., Kore, Charon, Puck) (default: Kore)
+      --list-gemini-voices      List all available Gemini TTS voices
+      --notification            Send desktop notification when command completes
+      --notification-command=   Custom command to run for notifications (overrides built-in notifications)
+      --yt-dlp-args=            Additional arguments to pass to yt-dlp (e.g. '--cookies-from-browser brave')
 
 Help Options:
   -h, --help                    Show this help message
 
 ```
 
 ## Our approach to prompting
````
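A couple of illustrative invocations of the flags shown in the help text above (the prompts and file names here are made up, not from the repository):

```bash
# Web search with a location hint (Anthropic, OpenAI, Gemini models)
fabric --search --search-location 'America/Los_Angeles' "What is the latest world news?"

# Generate an image and save it to a file
fabric --image-file output.png --image-size 1024x1024 "a woven fabric logo"
```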
```diff
@@ -628,7 +618,7 @@ Now let's look at some things you can do with Fabric.
 <br />
 <br />
 
-If you're not looking to do anything fancy, and you just want a lot of great prompts, you can navigate to the [`/patterns`](https://github.com/danielmiessler/fabric/tree/main/patterns) directory and start exploring!
+If you're not looking to do anything fancy, and you just want a lot of great prompts, you can navigate to the [`/patterns`](https://github.com/danielmiessler/fabric/tree/main/data/patterns) directory and start exploring!
 
 We hope that if you used nothing else from Fabric, the Patterns by themselves will make the project useful.
 
@@ -644,7 +634,7 @@ be used in addition to the basic patterns.
 See the [Thinking Faster by Writing Less](https://arxiv.org/pdf/2502.18600) paper and
 the [Thought Generation section of Learn Prompting](https://learnprompting.org/docs/advanced/thought_generation/introduction) for examples of prompt strategies.
 
-Each strategy is available as a small `json` file in the [`/strategies`](https://github.com/danielmiessler/fabric/tree/main/strategies) directory.
+Each strategy is available as a small `json` file in the [`/strategies`](https://github.com/danielmiessler/fabric/tree/main/data/strategies) directory.
 
 The prompt modification of the strategy is applied to the system prompt and passed on to the
 LLM in the chat session.
@@ -736,7 +726,7 @@ Make sure you have a LaTeX distribution (like TeX Live or MiKTeX) installed on y
 It generates a `json` representation of a directory of code that can be fed into an AI model
 with instructions to create a new feature or edit the code in a specified way.
 
-See [the Create Coding Feature Pattern README](./patterns/create_coding_feature/README.md) for details.
+See [the Create Coding Feature Pattern README](./data/patterns/create_coding_feature/README.md) for details.
 
 Install it first using:
```
(file name not shown)

```diff
@@ -1,3 +1,3 @@
 package main
 
-var version = "v1.4.241"
+var version = "v1.4.283"
```
151 cmd/generate_changelog/PRD.md (Normal file)

# Product Requirements Document: Changelog Generator

## Overview

The Changelog Generator is a high-performance Go tool that automatically generates comprehensive changelogs from git history and GitHub pull requests.

## Goals

1. **Performance**: Very fast. Efficient enough to be used in CI/CD as part of the release process.
2. **Completeness**: Capture ALL commits, including unreleased changes
3. **Efficiency**: Minimize API calls through caching and batch operations
4. **Reliability**: Handle errors gracefully with proper Go error handling
5. **Simplicity**: Single binary with no runtime dependencies

## Key Features

### 1. One-Pass Git History Algorithm

- Walk git history once from newest to oldest
- Start with an "Unreleased" bucket for all new commits
- Switch buckets when encountering version commits
- No need to calculate ranges between versions (a minimal sketch follows below)
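A minimal sketch of the bucketing idea, assuming a simplified commit type and the `Update version to vX.Y.Z` bump-commit pattern described in the tool's README; the real implementation walks history with go-git and uses its own types:

```go
package main

import (
	"fmt"
	"regexp"
)

// Commit is a simplified stand-in for the tool's real commit type.
type Commit struct {
	SHA     string
	Message string
}

// versionRe matches version-bump commits ("Update version to vX.Y.Z").
var versionRe = regexp.MustCompile(`Update version to (v\d+\.\d+\.\d+)`)

// bucketByVersion walks commits newest-to-oldest once, starting in the
// "Unreleased" bucket and switching buckets at each version-bump commit.
func bucketByVersion(commits []Commit) map[string][]Commit {
	buckets := make(map[string][]Commit)
	current := "Unreleased"
	for _, c := range commits {
		if m := versionRe.FindStringSubmatch(c.Message); m != nil {
			current = m[1] // everything older belongs to this version
		}
		buckets[current] = append(buckets[current], c)
	}
	return buckets
}

func main() {
	history := []Commit{ // newest first
		{SHA: "aaa", Message: "feat: add a flag"},
		{SHA: "bbb", Message: "Update version to v1.4.283"},
		{SHA: "ccc", Message: "fix: typo"},
	}
	for version, cs := range bucketByVersion(history) {
		fmt.Printf("%s: %d commit(s)\n", version, len(cs))
	}
}
```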
### 2. Native Library Integration

- **go-git**: Pure Go git implementation (no git binary required)
- **go-github**: Official GitHub Go client library
- Benefits: Type safety, better error handling, no subprocess overhead

### 3. Smart Caching System

- SQLite-based persistent cache
- Stores: versions, commits, PR details, last processed commit
- Enables incremental updates on subsequent runs
- Instant changelog regeneration from cache

### 4. Concurrent Processing

- Parallel GitHub API calls (up to 10 concurrent)
- Batch PR fetching with deduplication
- Rate limiting awareness

### 5. Enhanced Output

- "Unreleased" section for commits since last version
- Clean markdown formatting
- Configurable version limiting
- Direct commit tracking (non-PR commits)

## Technical Architecture

### Module Structure

```text
cmd/generate_changelog/
├── main.go            # CLI entry point with cobra
├── internal/
│   ├── git/           # Git operations (go-git)
│   ├── github/        # GitHub API client (go-github)
│   ├── cache/         # SQLite caching layer
│   ├── changelog/     # Core generation logic
│   └── config/        # Configuration management
└── changelog.db       # SQLite cache (generated)
```

### Data Flow

1. Git walker collects all commits in one pass
2. Commits bucketed by version (starting with "Unreleased")
3. PR numbers extracted from merge commits
4. GitHub API batch-fetches PR details
5. Cache stores everything for future runs
6. Formatter generates markdown output

### Cache Schema

- **metadata**: Last processed commit SHA
- **versions**: Version names, dates, commit SHAs
- **commits**: Full commit details with version associations
- **pull_requests**: PR details including commits
- Indexes on version and PR number for fast lookups

### Features

- **Unreleased section**: Shows all new commits
- **Better caching**: SQLite vs JSON, incremental updates
- **Smarter deduplication**: Removes consecutive duplicate commits
- **Direct commit tracking**: Shows non-PR commits

### Reliability

- **No subprocess errors**: Direct library usage
- **Type safety**: Compile-time checking
- **Better error handling**: Go's explicit error returns

### Deployment

- **Single binary**: No Python/pip/dependencies
- **Cross-platform**: Compile for any OS/architecture
- **No git CLI required**: Uses go-git library

## Configuration

### Environment Variables

- `GITHUB_TOKEN`: GitHub API authentication token

### Command Line Flags

- `--repo, -r`: Repository path (default: current directory)
- `--output, -o`: Output file (default: stdout)
- `--limit, -l`: Version limit (default: all)
- `--version, -v`: Target specific version
- `--save-data`: Export debug JSON
- `--cache`: Cache file location
- `--no-cache`: Disable caching
- `--rebuild-cache`: Force cache rebuild
- `--token`: GitHub token override

## Success Metrics

1. **Performance**: Generate full changelog in <5 seconds for the fabric repo
2. **Completeness**: 100% commit coverage, including unreleased commits
3. **Accuracy**: Correct PR associations and change extraction
4. **Reliability**: Handle network failures gracefully
5. **Usability**: Simple CLI with sensible defaults

## Future Enhancements

1. **Multiple output formats**: JSON, HTML, etc.
2. **Custom version patterns**: Configurable regex
3. **Change categorization**: feat/fix/docs auto-grouping
4. **Conventional commits**: Full support for semantic versioning
5. **GitLab/Bitbucket**: Support other platforms
6. **Web UI**: Interactive changelog browser
7. **Incremental updates**: Update existing CHANGELOG.md file
8. **Breaking change detection**: Highlight breaking changes

## Implementation Status

- ✅ Core architecture and modules
- ✅ One-pass git walking algorithm
- ✅ GitHub API integration with concurrency
- ✅ SQLite caching system
- ✅ Changelog formatting and generation
- ✅ CLI with all planned flags
- ✅ Documentation (README and PRD)

## Conclusion

This Go implementation provides a modern, efficient, and feature-rich changelog generator.
264 cmd/generate_changelog/README.md (Normal file)

# Changelog Generator

A high-performance changelog generator for Git repositories that automatically creates comprehensive, well-formatted changelogs from your git history and GitHub pull requests.

## Features

- **One-pass git history walking**: Efficiently processes entire repository history in a single pass
- **Automatic PR detection**: Extracts pull request information from merge commits
- **GitHub API integration**: Fetches detailed PR information including commits, authors, and descriptions
- **Smart caching**: SQLite-based caching for instant incremental updates
- **Unreleased changes**: Tracks all commits since the last release
- **Concurrent processing**: Parallel GitHub API calls for improved performance
- **Flexible output**: Generate complete changelogs or target specific versions
- **GraphQL optimization**: Ultra-fast PR fetching using GitHub GraphQL API (~5-10 calls vs 1000s)
- **Intelligent sync**: Automatically syncs new PRs every 24 hours or when missing PRs are detected
- **AI-powered summaries**: Optional Fabric integration for enhanced changelog summaries
- **Advanced caching**: Content-based change detection for AI summaries with hash comparison
- **Author type detection**: Distinguishes between users, bots, and organizations
- **Lightning-fast incremental updates**: SHA→PR mapping for instant git operations

## Installation

```bash
go install github.com/danielmiessler/fabric/cmd/generate_changelog@latest
```

## Usage

### Basic usage (generate complete changelog)

```bash
generate_changelog
```

### Save to file

```bash
generate_changelog -o CHANGELOG.md
```

### Generate for specific version

```bash
generate_changelog -v v1.4.244
```

### Limit to recent versions

```bash
generate_changelog -l 10
```

### Using GitHub token for private repos or higher rate limits

```bash
export GITHUB_TOKEN=your_token_here
generate_changelog

# Or pass directly
generate_changelog --token your_token_here
```

### AI-enhanced summaries

```bash
# Enable AI summaries using Fabric
generate_changelog --ai-summarize

# Use custom model for AI summaries
FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-opus-4 generate_changelog --ai-summarize
```

### Cache management

```bash
# Rebuild cache from scratch
generate_changelog --rebuild-cache

# Force a full PR sync from GitHub
generate_changelog --force-pr-sync

# Disable cache usage
generate_changelog --no-cache

# Use custom cache location
generate_changelog --cache /path/to/cache.db
```

## Command Line Options

| Flag | Short | Description | Default |
|------|-------|-------------|---------|
| `--repo` | `-r` | Repository path | `.` (current directory) |
| `--output` | `-o` | Output file | stdout |
| `--limit` | `-l` | Limit number of versions | 0 (all) |
| `--version` | `-v` | Generate for specific version | |
| `--save-data` | | Save version data to JSON | false |
| `--cache` | | Cache database file | `./cmd/generate_changelog/changelog.db` |
| `--no-cache` | | Disable cache usage | false |
| `--rebuild-cache` | | Rebuild cache from scratch | false |
| `--force-pr-sync` | | Force a full PR sync from GitHub | false |
| `--token` | | GitHub API token | `$GITHUB_TOKEN` |
| `--ai-summarize` | | Generate AI-enhanced summaries using Fabric | false |
| `--release` | | Update GitHub release description with AI summary for version | |

## Output Format

The generated changelog follows this structure:

```markdown
# Changelog

## Unreleased

### PR [#1601](url) by [author](profile): PR Title
- Change description 1
- Change description 2

### Direct commits
- Direct commit message 1
- Direct commit message 2

## v1.4.244 (2025-07-09)

### PR [#1598](url) by [author](profile): PR Title
- Change description
...
```

## How It Works

1. **Git History Walking**: The tool walks through your git history from newest to oldest commits
2. **Version Detection**: Identifies version bump commits (pattern: "Update version to vX.Y.Z")
3. **PR Extraction**: Detects merge commits and extracts PR numbers (see the sketch below)
4. **GitHub API Calls**: Fetches detailed PR information in parallel batches
5. **Change Extraction**: Extracts changes from PR commit messages or PR body
6. **Formatting**: Generates clean, organized markdown output
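As a rough illustration of step 3: GitHub's default merge button produces commit subjects like `Merge pull request #1598 from user/branch`, so the PR number can be pulled out with a regular expression. This sketch shows the general approach only, not the tool's actual code:

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// mergeRe matches GitHub's default merge-commit subject line.
var mergeRe = regexp.MustCompile(`Merge pull request #(\d+)`)

// prNumber returns the PR number referenced by a merge-commit message.
func prNumber(message string) (int, bool) {
	m := mergeRe.FindStringSubmatch(message)
	if m == nil {
		return 0, false
	}
	n, err := strconv.Atoi(m[1])
	return n, err == nil
}

func main() {
	if n, ok := prNumber("Merge pull request #1598 from user/branch"); ok {
		fmt.Println("found PR", n) // found PR 1598
	}
}
```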
## Performance

- **Native Go libraries**: Uses go-git and go-github for maximum performance
- **Concurrent API calls**: Processes up to 10 GitHub API requests in parallel
- **Smart caching**: SQLite cache eliminates redundant API calls
- **Incremental updates**: Only processes new commits on subsequent runs
- **GraphQL optimization**: Uses GitHub GraphQL API to fetch all PR data in ~5-10 calls
- **AI-powered summaries**: Optional Fabric integration with intelligent caching
- **Content-based change detection**: AI summaries only regenerated when content changes
- **Lightning-fast git operations**: SHA→PR mapping stored in database for instant lookups

### Major Optimization: GraphQL + Advanced Caching

The tool has been optimized to drastically reduce GitHub API calls and improve performance:

**Previous approach**: Individual API calls for each PR (2 API calls per PR)

- For a repo with 500 PRs: 1,000 API calls

**Current approach**: GraphQL batch fetching with intelligent caching

- For a repo with 500 PRs: ~5-10 GraphQL calls (initial fetch) + 0 calls (subsequent runs with cache)
- **99%+ reduction in API calls after initial run!**

The optimization includes:

1. **GraphQL Batch Fetch**: Uses GitHub's GraphQL API to fetch all merged PRs with commits in minimal calls
2. **Smart Caching**: Stores complete PR data, commits, and SHA mappings in SQLite
3. **Incremental Sync**: Only fetches PRs merged after the last sync timestamp
4. **Automatic Refresh**: PRs are synced every 24 hours or when missing PRs are detected
5. **AI Summary Caching**: Content-based change detection prevents unnecessary AI regeneration
6. **Fallback Support**: If GraphQL fails, falls back to REST API batch fetching
7. **Lightning Git Operations**: Pre-computed SHA→PR mappings for instant commit association

## Requirements

- Go 1.24+ (for installation from source)
- Git repository
- GitHub token (optional, for private repos or higher rate limits)
- Fabric CLI (optional, for AI-enhanced summaries)

## Authentication

The tool supports GitHub authentication via:

1. Environment variable: `export GITHUB_TOKEN=your_token`
2. Command line flag: `--token your_token`
3. `.env` file in the same directory as the binary

### Environment File Support

Create a `.env` file next to the `generate_changelog` binary:

```bash
GITHUB_TOKEN=your_github_token_here
FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-sonnet-4-20250514
```

The tool automatically loads `.env` files for convenient configuration management.

Without authentication, the tool is limited to 60 GitHub API requests per hour.

## Caching

The SQLite cache stores:

- Version information and commit associations
- Pull request details (title, body, commits, authors)
- Last processed commit SHA for incremental updates
- Last PR sync timestamp for intelligent refresh
- AI summaries with content-based change detection
- SHA→PR mappings for lightning-fast git operations

Cache benefits:

- Instant changelog regeneration
- Drastically reduced GitHub API usage (99%+ reduction after initial run)
- Offline changelog generation (after initial cache build)
- Automatic PR data refresh every 24 hours
- Batch database transactions for better performance
- Content-aware AI summary regeneration

## AI-Enhanced Summaries

The tool can generate AI-powered summaries using Fabric for more polished, professional changelogs:

```bash
# Enable AI summarization
generate_changelog --ai-summarize

# Custom model (default: claude-sonnet-4-20250514)
FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-opus-4 generate_changelog --ai-summarize
```

### AI Summary Features

- **Content-based change detection**: AI summaries are only regenerated when version content changes
- **Intelligent caching**: Preserves existing summaries and only processes changed versions
- **Content hash comparison**: Uses SHA256 hashing to detect when "Unreleased" content changes (see the sketch below)
- **Automatic fallback**: Falls back to raw content if AI processing fails
- **Error detection**: Identifies and handles AI processing errors gracefully
- **Minimum content filtering**: Skips AI processing for very brief content (< 256 characters)
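A minimal sketch of the content-hash check, assuming the hashed content is the rendered "Unreleased" section; the cache layer stores the resulting value via `GetUnreleasedContentHash`/`SetUnreleasedContentHash` (see `internal/cache/cache.go` below), but the helper here is illustrative:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// contentHash returns the SHA-256 hex digest used to decide whether
// the "Unreleased" section changed since the last run.
func contentHash(content string) string {
	sum := sha256.Sum256([]byte(content))
	return hex.EncodeToString(sum[:])
}

func main() {
	prev := contentHash("## Unreleased\n- fix: typo")
	curr := contentHash("## Unreleased\n- fix: typo\n- feat: add a flag")
	// Regenerate the AI summary only when the hash differs.
	fmt.Println("regenerate:", prev != curr)
}
```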
### AI Model Configuration

Set the model via environment variable:

```bash
export FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-opus-4
# or
export FABRIC_CHANGELOG_SUMMARIZE_MODEL=gpt-4
```

AI summaries are cached and only regenerated when:

- Version content changes (detected via hash comparison)
- No existing AI summary exists for the version
- Force rebuild is requested

## Contributing

This tool is part of the Fabric project. Contributions are welcome!

## License

The MIT License. Same as the Fabric project.
BIN cmd/generate_changelog/changelog.db (Normal file): binary file not shown.
476
cmd/generate_changelog/internal/cache/cache.go
vendored
Normal file
476
cmd/generate_changelog/internal/cache/cache.go
vendored
Normal file
@@ -0,0 +1,476 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
)
|
||||
|
||||
type Cache struct {
|
||||
db *sql.DB
|
||||
}
|
||||
|
||||
func New(dbPath string) (*Cache, error) {
|
||||
db, err := sql.Open("sqlite3", dbPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open database: %w", err)
|
||||
}
|
||||
|
||||
cache := &Cache{db: db}
|
||||
if err := cache.createTables(); err != nil {
|
||||
return nil, fmt.Errorf("failed to create tables: %w", err)
|
||||
}
|
||||
|
||||
return cache, nil
|
||||
}
|
||||
|
||||
func (c *Cache) Close() error {
|
||||
return c.db.Close()
|
||||
}
|
||||
|
||||
func (c *Cache) createTables() error {
|
||||
queries := []string{
|
||||
`CREATE TABLE IF NOT EXISTS metadata (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS versions (
|
||||
name TEXT PRIMARY KEY,
|
||||
date DATETIME,
|
||||
commit_sha TEXT,
|
||||
pr_numbers TEXT,
|
||||
ai_summary TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS commits (
|
||||
sha TEXT PRIMARY KEY,
|
||||
version TEXT NOT NULL,
|
||||
message TEXT,
|
||||
author TEXT,
|
||||
email TEXT,
|
||||
date DATETIME,
|
||||
is_merge BOOLEAN,
|
||||
pr_number INTEGER,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (version) REFERENCES versions(name)
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS pull_requests (
|
||||
number INTEGER PRIMARY KEY,
|
||||
title TEXT,
|
||||
body TEXT,
|
||||
author TEXT,
|
||||
author_url TEXT,
|
||||
author_type TEXT DEFAULT 'user',
|
||||
url TEXT,
|
||||
merged_at DATETIME,
|
||||
merge_commit TEXT,
|
||||
commits TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_commits_version ON commits(version)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_commits_pr_number ON commits(pr_number)`,
|
||||
`CREATE TABLE IF NOT EXISTS commit_pr_mapping (
|
||||
commit_sha TEXT PRIMARY KEY,
|
||||
pr_number INTEGER NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (pr_number) REFERENCES pull_requests(number)
|
||||
)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_commit_pr_mapping_sha ON commit_pr_mapping(commit_sha)`,
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
if _, err := c.db.Exec(query); err != nil {
|
||||
return fmt.Errorf("failed to execute query: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Cache) GetLastProcessedTag() (string, error) {
|
||||
var tag string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = 'last_processed_tag'").Scan(&tag)
|
||||
if err == sql.ErrNoRows {
|
||||
return "", nil
|
||||
}
|
||||
return tag, err
|
||||
}
|
||||
|
||||
func (c *Cache) SetLastProcessedTag(tag string) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES ('last_processed_tag', ?, CURRENT_TIMESTAMP)
|
||||
`, tag)
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) SaveVersion(v *git.Version) error {
|
||||
prNumbers, _ := json.Marshal(v.PRNumbers)
|
||||
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO versions (name, date, commit_sha, pr_numbers, ai_summary)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
`, v.Name, v.Date, v.CommitSHA, string(prNumbers), v.AISummary)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// UpdateVersionAISummary updates only the AI summary for a specific version
|
||||
func (c *Cache) UpdateVersionAISummary(versionName, aiSummary string) error {
|
||||
_, err := c.db.Exec(`
|
||||
UPDATE versions SET ai_summary = ? WHERE name = ?
|
||||
`, aiSummary, versionName)
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) SaveCommit(commit *git.Commit, version string) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO commits
|
||||
(sha, version, message, author, email, date, is_merge, pr_number)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`, commit.SHA, version, commit.Message, commit.Author, commit.Email,
|
||||
commit.Date, commit.IsMerge, commit.PRNumber)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) SavePR(pr *github.PR) error {
|
||||
commits, _ := json.Marshal(pr.Commits)
|
||||
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO pull_requests
|
||||
(number, title, body, author, author_url, author_type, url, merged_at, merge_commit, commits)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`, pr.Number, pr.Title, pr.Body, pr.Author, pr.AuthorURL, pr.AuthorType,
|
||||
pr.URL, pr.MergedAt, pr.MergeCommit, string(commits))
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) GetPR(number int) (*github.PR, error) {
|
||||
var pr github.PR
|
||||
var commitsJSON string
|
||||
|
||||
err := c.db.QueryRow(`
|
||||
SELECT number, title, body, author, author_url, COALESCE(author_type, 'user'), url, merged_at, merge_commit, commits
|
||||
FROM pull_requests WHERE number = ?
|
||||
`, number).Scan(
|
||||
&pr.Number, &pr.Title, &pr.Body, &pr.Author, &pr.AuthorURL, &pr.AuthorType,
|
||||
&pr.URL, &pr.MergedAt, &pr.MergeCommit, &commitsJSON,
|
||||
)
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := json.Unmarshal([]byte(commitsJSON), &pr.Commits); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal commits: %w", err)
|
||||
}
|
||||
|
||||
return &pr, nil
|
||||
}
|
||||
|
||||
func (c *Cache) GetVersions() (map[string]*git.Version, error) {
|
||||
rows, err := c.db.Query(`
|
||||
SELECT name, date, commit_sha, pr_numbers, ai_summary FROM versions
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
versions := make(map[string]*git.Version)
|
||||
|
||||
for rows.Next() {
|
||||
var v git.Version
|
||||
var dateStr sql.NullString
|
||||
var prNumbersJSON string
|
||||
var aiSummary sql.NullString
|
||||
|
||||
if err := rows.Scan(&v.Name, &dateStr, &v.CommitSHA, &prNumbersJSON, &aiSummary); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if dateStr.Valid {
|
||||
// Try RFC3339Nano first (for nanosecond precision), then fall back to RFC3339
|
||||
v.Date, err = time.Parse(time.RFC3339Nano, dateStr.String)
|
||||
if err != nil {
|
||||
v.Date, err = time.Parse(time.RFC3339, dateStr.String)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error parsing date '%s' for version '%s': %v. Expected format: RFC3339 or RFC3339Nano.\n", dateStr.String, v.Name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if prNumbersJSON != "" {
|
||||
json.Unmarshal([]byte(prNumbersJSON), &v.PRNumbers)
|
||||
}
|
||||
|
||||
if aiSummary.Valid {
|
||||
v.AISummary = aiSummary.String
|
||||
}
|
||||
|
||||
v.Commits, err = c.getCommitsForVersion(v.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
versions[v.Name] = &v
|
||||
}
|
||||
|
||||
return versions, rows.Err()
|
||||
}
|
||||
|
||||
func (c *Cache) getCommitsForVersion(version string) ([]*git.Commit, error) {
|
||||
rows, err := c.db.Query(`
|
||||
SELECT sha, message, author, email, date, is_merge, pr_number
|
||||
FROM commits WHERE version = ?
|
||||
ORDER BY date DESC
|
||||
`, version)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var commits []*git.Commit
|
||||
|
||||
for rows.Next() {
|
||||
var commit git.Commit
|
||||
if err := rows.Scan(
|
||||
&commit.SHA, &commit.Message, &commit.Author, &commit.Email,
|
||||
&commit.Date, &commit.IsMerge, &commit.PRNumber,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
commits = append(commits, &commit)
|
||||
}
|
||||
|
||||
return commits, rows.Err()
|
||||
}
|
||||
|
||||
func (c *Cache) Clear() error {
|
||||
tables := []string{"metadata", "versions", "commits", "pull_requests"}
|
||||
for _, table := range tables {
|
||||
if _, err := c.db.Exec("DELETE FROM " + table); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// VersionExists checks if a version already exists in the cache
|
||||
func (c *Cache) VersionExists(version string) (bool, error) {
|
||||
var count int
|
||||
err := c.db.QueryRow("SELECT COUNT(*) FROM versions WHERE name = ?", version).Scan(&count)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return count > 0, nil
|
||||
}
|
||||
|
||||
// CommitExists checks if a commit already exists in the cache
|
||||
func (c *Cache) CommitExists(hash string) (bool, error) {
|
||||
var count int
|
||||
err := c.db.QueryRow("SELECT COUNT(*) FROM commits WHERE sha = ?", hash).Scan(&count)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return count > 0, nil
|
||||
}
|
||||
|
||||
// GetLastPRSync returns the timestamp of the last PR sync
|
||||
func (c *Cache) GetLastPRSync() (time.Time, error) {
|
||||
var timestamp string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = 'last_pr_sync'").Scan(×tamp)
|
||||
if err == sql.ErrNoRows {
|
||||
return time.Time{}, nil
|
||||
}
|
||||
if err != nil {
|
||||
return time.Time{}, err
|
||||
}
|
||||
|
||||
return time.Parse(time.RFC3339, timestamp)
|
||||
}
|
||||
|
||||
// SetLastPRSync updates the timestamp of the last PR sync
|
||||
func (c *Cache) SetLastPRSync(timestamp time.Time) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES ('last_pr_sync', ?, CURRENT_TIMESTAMP)
|
||||
`, timestamp.Format(time.RFC3339))
|
||||
return err
|
||||
}
|
||||
|
||||
// SavePRBatch saves multiple PRs in a single transaction for better performance
|
||||
func (c *Cache) SavePRBatch(prs []*github.PR) error {
|
||||
tx, err := c.db.Begin()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to begin transaction: %w", err)
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
stmt, err := tx.Prepare(`
|
||||
INSERT OR REPLACE INTO pull_requests
|
||||
(number, title, body, author, author_url, author_type, url, merged_at, merge_commit, commits)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to prepare statement: %w", err)
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
for _, pr := range prs {
|
||||
commits, _ := json.Marshal(pr.Commits)
|
||||
_, err := stmt.Exec(
|
||||
pr.Number, pr.Title, pr.Body, pr.Author, pr.AuthorURL, pr.AuthorType,
|
||||
pr.URL, pr.MergedAt, pr.MergeCommit, string(commits),
|
||||
)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save PR #%d: %w", pr.Number, err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
// GetAllPRs returns all cached PRs
|
||||
func (c *Cache) GetAllPRs() (map[int]*github.PR, error) {
|
||||
rows, err := c.db.Query(`
|
||||
SELECT number, title, body, author, author_url, COALESCE(author_type, 'user'), url, merged_at, merge_commit, commits
|
||||
FROM pull_requests
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
prs := make(map[int]*github.PR)
|
||||
|
||||
for rows.Next() {
|
||||
var pr github.PR
|
||||
var commitsJSON string
|
||||
|
||||
if err := rows.Scan(
|
||||
&pr.Number, &pr.Title, &pr.Body, &pr.Author, &pr.AuthorURL, &pr.AuthorType,
|
||||
&pr.URL, &pr.MergedAt, &pr.MergeCommit, &commitsJSON,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := json.Unmarshal([]byte(commitsJSON), &pr.Commits); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal commits for PR #%d: %w", pr.Number, err)
|
||||
}
|
||||
|
||||
prs[pr.Number] = &pr
|
||||
}
|
||||
|
||||
return prs, rows.Err()
|
||||
}
|
||||
|
||||
// MarkPRAsNonExistent marks a PR number as non-existent to avoid future fetches
|
||||
func (c *Cache) MarkPRAsNonExistent(prNumber int) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES (?, 'non_existent', CURRENT_TIMESTAMP)
|
||||
`, fmt.Sprintf("pr_non_existent_%d", prNumber))
|
||||
return err
|
||||
}
|
||||
|
||||
// IsPRMarkedAsNonExistent checks if a PR is marked as non-existent
|
||||
func (c *Cache) IsPRMarkedAsNonExistent(prNumber int) bool {
|
||||
var value string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = ?",
|
||||
fmt.Sprintf("pr_non_existent_%d", prNumber)).Scan(&value)
|
||||
return err == nil && value == "non_existent"
|
||||
}
|
||||
|
||||
// SaveCommitPRMappings saves SHA→PR mappings for all commits in PRs
|
||||
func (c *Cache) SaveCommitPRMappings(prs []*github.PR) error {
|
||||
tx, err := c.db.Begin()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to begin transaction: %w", err)
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
stmt, err := tx.Prepare(`
|
||||
INSERT OR REPLACE INTO commit_pr_mapping (commit_sha, pr_number)
|
||||
VALUES (?, ?)
|
||||
`)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to prepare statement: %w", err)
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
for _, pr := range prs {
|
||||
for _, commit := range pr.Commits {
|
||||
_, err := stmt.Exec(commit.SHA, pr.Number)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save commit mapping %s→%d: %w", commit.SHA, pr.Number, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
// GetPRNumberBySHA returns the PR number for a given commit SHA
|
||||
func (c *Cache) GetPRNumberBySHA(sha string) (int, bool) {
|
||||
var prNumber int
|
||||
err := c.db.QueryRow("SELECT pr_number FROM commit_pr_mapping WHERE commit_sha = ?", sha).Scan(&prNumber)
|
||||
if err == sql.ErrNoRows {
|
||||
return 0, false
|
||||
}
|
||||
if err != nil {
|
||||
return 0, false
|
||||
}
|
||||
return prNumber, true
|
||||
}
|
||||
|
||||
// GetCommitSHAsForPR returns all commit SHAs for a given PR number
|
||||
func (c *Cache) GetCommitSHAsForPR(prNumber int) ([]string, error) {
|
||||
rows, err := c.db.Query("SELECT commit_sha FROM commit_pr_mapping WHERE pr_number = ?", prNumber)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var shas []string
|
||||
for rows.Next() {
|
||||
var sha string
|
||||
if err := rows.Scan(&sha); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
shas = append(shas, sha)
|
||||
}
|
||||
|
||||
return shas, rows.Err()
|
||||
}

// GetUnreleasedContentHash returns the cached content hash for Unreleased
func (c *Cache) GetUnreleasedContentHash() (string, error) {
	var hash string
	err := c.db.QueryRow("SELECT value FROM metadata WHERE key = 'unreleased_content_hash'").Scan(&hash)
	if err == sql.ErrNoRows {
		return "", fmt.Errorf("no content hash found")
	}
	return hash, err
}

// SetUnreleasedContentHash stores the content hash for Unreleased
func (c *Cache) SetUnreleasedContentHash(hash string) error {
	_, err := c.db.Exec(`
		INSERT OR REPLACE INTO metadata (key, value, updated_at)
		VALUES ('unreleased_content_hash', ?, CURRENT_TIMESTAMP)
	`, hash)
	return err
}

cmd/generate_changelog/internal/changelog/generator.go (new file, 805 lines)
@@ -0,0 +1,805 @@
package changelog

import (
	"crypto/sha256"
	"fmt"
	"os"
	"regexp"
	"sort"
	"strings"
	"time"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/cache"
	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
)

type Generator struct {
	cfg       *config.Config
	gitWalker *git.Walker
	ghClient  *github.Client
	cache     *cache.Cache
	versions  map[string]*git.Version
	prs       map[int]*github.PR
}

func New(cfg *config.Config) (*Generator, error) {
	gitWalker, err := git.NewWalker(cfg.RepoPath)
	if err != nil {
		return nil, fmt.Errorf("failed to create git walker: %w", err)
	}

	owner, repo, err := gitWalker.GetRepoInfo()
	if err != nil {
		return nil, fmt.Errorf("failed to get repo info: %w", err)
	}

	ghClient := github.NewClient(cfg.GitHubToken, owner, repo)

	var c *cache.Cache
	if !cfg.NoCache {
		c, err = cache.New(cfg.CacheFile)
		if err != nil {
			return nil, fmt.Errorf("failed to create cache: %w", err)
		}

		if cfg.RebuildCache {
			if err := c.Clear(); err != nil {
				return nil, fmt.Errorf("failed to clear cache: %w", err)
			}
		}
	}

	return &Generator{
		cfg:       cfg,
		gitWalker: gitWalker,
		ghClient:  ghClient,
		cache:     c,
		prs:       make(map[int]*github.PR),
	}, nil
}

func (g *Generator) Generate() (string, error) {
	if err := g.collectData(); err != nil {
		return "", fmt.Errorf("failed to collect data: %w", err)
	}

	if err := g.fetchPRs(g.cfg.ForcePRSync); err != nil {
		return "", fmt.Errorf("failed to fetch PRs: %w", err)
	}

	return g.formatChangelog(), nil
}

func (g *Generator) collectData() error {
	if g.cache != nil && !g.cfg.RebuildCache {
		cachedTag, err := g.cache.GetLastProcessedTag()
		if err != nil {
			return fmt.Errorf("failed to get last processed tag: %w", err)
		}

		if cachedTag != "" {
			// Get the current latest tag from git
			currentTag, err := g.gitWalker.GetLatestTag()
			if err == nil {
				// Load cached data - we can use it even if there are new tags
				cachedVersions, err := g.cache.GetVersions()
				if err == nil && len(cachedVersions) > 0 {
					g.versions = cachedVersions

					// Load cached PRs
					for _, version := range g.versions {
						for _, prNum := range version.PRNumbers {
							if pr, err := g.cache.GetPR(prNum); err == nil && pr != nil {
								g.prs[prNum] = pr
							}
						}
					}

					// If we have new tags since cache, process the new versions only
					if currentTag != cachedTag {
						fmt.Fprintf(os.Stderr, "Processing new versions since %s...\n", cachedTag)
						newVersions, err := g.gitWalker.WalkHistorySinceTag(cachedTag)
						if err != nil {
							fmt.Fprintf(os.Stderr, "Warning: Failed to walk history since tag %s: %v\n", cachedTag, err)
						} else {
							// Merge new versions into cached versions (only add if not already cached)
							for name, version := range newVersions {
								if name != "Unreleased" { // Handle Unreleased separately
									if existingVersion, exists := g.versions[name]; !exists {
										g.versions[name] = version
									} else {
										// Update existing version with new PR numbers if they're missing
										if len(existingVersion.PRNumbers) == 0 && len(version.PRNumbers) > 0 {
											existingVersion.PRNumbers = version.PRNumbers
										}
									}
								}
							}
						}
					}

					// Always update Unreleased section with latest commits
					unreleasedVersion, err := g.gitWalker.WalkCommitsSinceTag(currentTag)
					if err != nil {
						fmt.Fprintf(os.Stderr, "Warning: Failed to walk commits since tag %s: %v\n", currentTag, err)
					} else if unreleasedVersion != nil {
						// Preserve existing AI summary if available
						if existingUnreleased, exists := g.versions["Unreleased"]; exists {
							unreleasedVersion.AISummary = existingUnreleased.AISummary
						}
						// Replace or add the unreleased version
						g.versions["Unreleased"] = unreleasedVersion
					}

					// Save any new versions to cache (after potential AI processing)
					if currentTag != cachedTag {
						for _, version := range g.versions {
							// Skip versions that were already cached and Unreleased
							if version.Name != "Unreleased" {
								if err := g.cache.SaveVersion(version); err != nil {
									fmt.Fprintf(os.Stderr, "Warning: Failed to save version to cache: %v\n", err)
								}

								for _, commit := range version.Commits {
									if err := g.cache.SaveCommit(commit, version.Name); err != nil {
										fmt.Fprintf(os.Stderr, "Warning: Failed to save commit to cache: %v\n", err)
									}
								}
							}
						}

						// Update the last processed tag
						if err := g.cache.SetLastProcessedTag(currentTag); err != nil {
							fmt.Fprintf(os.Stderr, "Warning: Failed to update last processed tag: %v\n", err)
						}
					}

					return nil
				}
			}
		}
	}

	versions, err := g.gitWalker.WalkHistory()
	if err != nil {
		return fmt.Errorf("failed to walk history: %w", err)
	}

	g.versions = versions

	if g.cache != nil {
		for _, version := range versions {
			if err := g.cache.SaveVersion(version); err != nil {
				return fmt.Errorf("failed to save version to cache: %w", err)
			}

			for _, commit := range version.Commits {
				if err := g.cache.SaveCommit(commit, version.Name); err != nil {
					return fmt.Errorf("failed to save commit to cache: %w", err)
				}
			}
		}

		// Save the latest tag as our cache anchor point
		if latestTag, err := g.gitWalker.GetLatestTag(); err == nil && latestTag != "" {
			if err := g.cache.SetLastProcessedTag(latestTag); err != nil {
				return fmt.Errorf("failed to save last processed tag: %w", err)
			}
		}
	}

	return nil
}

func (g *Generator) fetchPRs(forcePRSync bool) error {
	// First, load all cached PRs
	if g.cache != nil {
		cachedPRs, err := g.cache.GetAllPRs()
		if err != nil {
			fmt.Fprintf(os.Stderr, "Warning: Failed to load cached PRs: %v\n", err)
		} else {
			g.prs = cachedPRs
		}
	}

	// Check if we need to fetch new PRs
	var lastSync time.Time
	if g.cache != nil {
		lastSync, _ = g.cache.GetLastPRSync()
	}

	// Check if we need to sync for missing PRs
	missingPRs := false
	for _, version := range g.versions {
		for _, prNum := range version.PRNumbers {
			if _, exists := g.prs[prNum]; !exists {
				missingPRs = true
				break
			}
		}
		if missingPRs {
			break
		}
	}

	if missingPRs {
		fmt.Fprintf(os.Stderr, "Full sync triggered due to missing PRs in cache.\n")
	}
	// If we have never synced or it's been more than 24 hours, do a full sync.
	// Also sync if we have versions with PR numbers that aren't cached.
	needsSync := lastSync.IsZero() || time.Since(lastSync) > 24*time.Hour || forcePRSync || missingPRs

	if !needsSync {
		fmt.Fprintf(os.Stderr, "Using cached PR data (last sync: %s)\n", lastSync.Format("2006-01-02 15:04:05"))
		return nil
	}

	fmt.Fprintf(os.Stderr, "Fetching merged PRs from GitHub using GraphQL...\n")

	// Use GraphQL for performance - it gets everything in roughly 5-10 calls
	prs, err := g.ghClient.FetchAllMergedPRsGraphQL(lastSync)
	if err != nil {
		fmt.Fprintf(os.Stderr, "GraphQL fetch failed, falling back to REST API: %v\n", err)
		// Fall back to REST API
		prs, err = g.ghClient.FetchAllMergedPRs(lastSync)
		if err != nil {
			return fmt.Errorf("both GraphQL and REST API failed: %w", err)
		}
	}

	// Update our PR map with new data
	for _, pr := range prs {
		g.prs[pr.Number] = pr
	}

	// Save all PRs to cache in a batch transaction
	if g.cache != nil && len(prs) > 0 {
		// Save PRs
		if err := g.cache.SavePRBatch(prs); err != nil {
			fmt.Fprintf(os.Stderr, "Warning: Failed to cache PRs: %v\n", err)
		}

		// Save SHA→PR mappings for fast git operations
		if err := g.cache.SaveCommitPRMappings(prs); err != nil {
			fmt.Fprintf(os.Stderr, "Warning: Failed to cache commit mappings: %v\n", err)
		}

		// Update last sync timestamp
		if err := g.cache.SetLastPRSync(time.Now()); err != nil {
			fmt.Fprintf(os.Stderr, "Warning: Failed to update last sync timestamp: %v\n", err)
		}
	}

	if len(prs) > 0 {
		fmt.Fprintf(os.Stderr, "Fetched %d PRs with commits (total cached: %d)\n", len(prs), len(g.prs))
	}

	return nil
}

func (g *Generator) formatChangelog() string {
	var sb strings.Builder
	sb.WriteString("# Changelog\n")

	versionList := g.getSortedVersions()

	for _, version := range versionList {
		if g.cfg.Version != "" && version.Name != g.cfg.Version {
			continue
		}

		versionText := g.formatVersion(version)
		if versionText != "" {
			sb.WriteString("\n")
			sb.WriteString(versionText)
		}
	}

	return sb.String()
}

func (g *Generator) getSortedVersions() []*git.Version {
	var versions []*git.Version
	var releasedVersions []*git.Version

	// Collect all released versions (non-"Unreleased")
	for name, version := range g.versions {
		if name != "Unreleased" {
			releasedVersions = append(releasedVersions, version)
		}
	}

	// Sort released versions by date (newest first)
	sort.Slice(releasedVersions, func(i, j int) bool {
		return releasedVersions[i].Date.After(releasedVersions[j].Date)
	})

	// Add "Unreleased" first if it exists and has commits
	if unreleased, exists := g.versions["Unreleased"]; exists && len(unreleased.Commits) > 0 {
		versions = append(versions, unreleased)
	}

	// Add sorted released versions
	versions = append(versions, releasedVersions...)

	if g.cfg.Limit > 0 && len(versions) > g.cfg.Limit {
		versions = versions[:g.cfg.Limit]
	}

	return versions
}

func (g *Generator) formatVersion(version *git.Version) string {
	var sb strings.Builder

	// Generate raw content
	rawContent := g.generateRawVersionContent(version)
	if rawContent == "" {
		return ""
	}

	header := g.formatVersionHeader(version)
	sb.WriteString("\n")
	sb.WriteString(header)

	// If AI summarization is enabled, enhance with AI
	if g.cfg.EnableAISummary {
		// For "Unreleased", check if content has changed since last AI summary
		if version.Name == "Unreleased" && version.AISummary != "" && g.cache != nil {
			// Get cached content hash
			cachedHash, err := g.cache.GetUnreleasedContentHash()
			if err == nil {
				// Calculate current content hash
				currentHash := hashContent(rawContent)
				if cachedHash == currentHash {
					// Content unchanged, use cached summary
					fmt.Fprintf(os.Stderr, "✅ %s content unchanged (skipping AI)\n", version.Name)
					sb.WriteString(version.AISummary)
					return fixMarkdown(sb.String())
				}
			}
		}

		// For released versions, if we have a cached AI summary, use it
		if version.Name != "Unreleased" && version.AISummary != "" {
			fmt.Fprintf(os.Stderr, "✅ %s already summarized (skipping)\n", version.Name)
			sb.WriteString(version.AISummary)
			return fixMarkdown(sb.String())
		}

		fmt.Fprintf(os.Stderr, "🤖 AI summarizing %s...", version.Name)

		aiSummary, err := SummarizeVersionContent(rawContent)
		if err != nil {
			fmt.Fprintf(os.Stderr, " Failed: %v\n", err)
			sb.WriteString(rawContent)
			return fixMarkdown(sb.String())
		}
		if checkForAIError(aiSummary) {
			fmt.Fprintf(os.Stderr, " AI error detected, using raw content instead\n")
			sb.WriteString(rawContent)
			fmt.Fprintf(os.Stderr, "Raw Content was: (%d bytes) %s \n", len(rawContent), rawContent)
			fmt.Fprintf(os.Stderr, "AI Summary was: (%d bytes) %s\n", len(aiSummary), aiSummary)
			return fixMarkdown(sb.String())
		}

		fmt.Fprintf(os.Stderr, " Done!\n")
		aiSummary = strings.TrimSpace(aiSummary)

		// Cache the AI summary and content hash
		version.AISummary = aiSummary
		if g.cache != nil {
			if err := g.cache.UpdateVersionAISummary(version.Name, aiSummary); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: Failed to cache AI summary: %v\n", err)
			}
			// Cache content hash for "Unreleased" to detect changes
			if version.Name == "Unreleased" {
				if err := g.cache.SetUnreleasedContentHash(hashContent(rawContent)); err != nil {
					fmt.Fprintf(os.Stderr, "Warning: Failed to cache content hash: %v\n", err)
				}
			}
		}

		sb.WriteString(aiSummary)
		return fixMarkdown(sb.String())
	}

	sb.WriteString(rawContent)
	return fixMarkdown(sb.String())
}

func checkForAIError(summary string) bool {
	// Check for common AI error patterns
	errorPatterns := []string{
		"I don't see any", "please provide",
		"content you've provided appears to be incomplete",
	}

	for _, pattern := range errorPatterns {
		if strings.Contains(summary, pattern) {
			return true
		}
	}

	return false
}

// formatVersionHeader formats just the version header (## ...)
func (g *Generator) formatVersionHeader(version *git.Version) string {
	if version.Name == "Unreleased" {
		return "## Unreleased\n\n"
	}
	return fmt.Sprintf("\n## %s (%s)\n\n", version.Name, version.Date.Format("2006-01-02"))
}

// generateRawVersionContent generates the raw content (PRs + commits) for a version
func (g *Generator) generateRawVersionContent(version *git.Version) string {
	var sb strings.Builder

	// Build a set of commit SHAs that are part of fetched PRs
	prCommitSHAs := make(map[string]bool)
	for _, prNum := range version.PRNumbers {
		if pr, exists := g.prs[prNum]; exists {
			for _, prCommit := range pr.Commits {
				prCommitSHAs[prCommit.SHA] = true
			}
		}
	}

	prCommits := make(map[int][]*git.Commit)
	directCommits := []*git.Commit{}

	for _, commit := range version.Commits {
		// Skip version bump commits from output
		if commit.IsVersion {
			continue
		}

		// If this commit is part of a fetched PR, don't include it in direct commits
		if prCommitSHAs[commit.SHA] {
			continue
		}

		if commit.PRNumber > 0 {
			prCommits[commit.PRNumber] = append(prCommits[commit.PRNumber], commit)
		} else {
			directCommits = append(directCommits, commit)
		}
	}

	// There are occasionally no PRs or direct commits other than version bumps, so we handle that gracefully
	if len(prCommits) == 0 && len(directCommits) == 0 {
		return ""
	}

	prependNewline := ""
	for _, prNum := range version.PRNumbers {
		if pr, exists := g.prs[prNum]; exists {
			sb.WriteString(prependNewline)
			sb.WriteString(g.formatPR(pr))
			prependNewline = "\n"
		}
	}

	if len(directCommits) > 0 {
		// Sort direct commits by date (newest first) for consistent ordering
		sort.Slice(directCommits, func(i, j int) bool {
			return directCommits[i].Date.After(directCommits[j].Date)
		})

		sb.WriteString(prependNewline + "### Direct commits\n\n")
		for _, commit := range directCommits {
			message := g.formatCommitMessage(strings.TrimSpace(commit.Message))
			if message != "" && !g.isDuplicateMessage(message, directCommits) {
				sb.WriteString(fmt.Sprintf("- %s\n", message))
			}
		}
	}

	return fixMarkdown(
		strings.ReplaceAll(sb.String(), "\n-\n", "\n"), // Remove empty list items
	)
}

func fixMarkdown(content string) string {

	// Fix MD032/blanks-around-lists: lists should be surrounded by blank lines
	lines := strings.Split(content, "\n")
	inList := false
	preListNewline := false
	for i := range lines {
		line := strings.TrimSpace(lines[i])
		if strings.HasPrefix(line, "- ") || strings.HasPrefix(line, "* ") {
			if !inList {
				inList = true
				// Ensure there's a blank line before the list starts
				if !preListNewline && i > 0 && lines[i-1] != "" {
					line = "\n" + line
					preListNewline = true
				}
			}
		} else {
			if inList {
				inList = false
				preListNewline = false
			}
		}
		lines[i] = strings.TrimRight(line, " \t")
	}

	fixedContent := strings.TrimSpace(strings.Join(lines, "\n"))

	return fixedContent + "\n"
}
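
A small test-style sketch (illustrative, not part of this changeset) of fixMarkdown's MD032 behavior: a list directly following a paragraph gains a separating blank line, and the result always ends with a single newline. It sits in package changelog because fixMarkdown is unexported.

package changelog

import "testing"

func TestFixMarkdownBlanksAroundLists(t *testing.T) {
	in := "Intro line\n- item one\n- item two"
	want := "Intro line\n\n- item one\n- item two\n"
	if got := fixMarkdown(in); got != want {
		t.Errorf("fixMarkdown() = %q, want %q", got, want)
	}
}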

func (g *Generator) formatPR(pr *github.PR) string {
	var sb strings.Builder

	pr.Title = strings.TrimRight(strings.TrimSpace(pr.Title), ".")

	// Add type indicator for non-users
	authorName := pr.Author
	switch pr.AuthorType {
	case "bot":
		authorName += "[bot]"
	case "organization":
		authorName += "[org]"
	}

	sb.WriteString(fmt.Sprintf("### PR [#%d](%s) by [%s](%s): %s\n\n",
		pr.Number, pr.URL, authorName, pr.AuthorURL, strings.TrimSpace(pr.Title)))

	changes := g.extractChanges(pr)
	for _, change := range changes {
		if change != "" {
			sb.WriteString(fmt.Sprintf("- %s\n", change))
		}
	}

	return sb.String()
}

func (g *Generator) extractChanges(pr *github.PR) []string {
	var changes []string
	seen := make(map[string]bool)

	for _, commit := range pr.Commits {
		message := g.formatCommitMessage(commit.Message)
		if message != "" && !seen[message] {
			seen[message] = true
			changes = append(changes, message)
		}
	}

	if len(changes) == 0 && pr.Body != "" {
		lines := strings.Split(pr.Body, "\n")
		for _, line := range lines {
			line = strings.TrimSpace(line)
			if strings.HasPrefix(line, "- ") || strings.HasPrefix(line, "* ") {
				change := strings.TrimPrefix(strings.TrimPrefix(line, "- "), "* ")
				if change != "" {
					changes = append(changes, change)
				}
			}
		}
	}

	return changes
}

func normalizeLineEndings(content string) string {
	return strings.ReplaceAll(content, "\r\n", "\n")
}

func (g *Generator) formatCommitMessage(message string) string {
	strings_to_remove := []string{
		"### CHANGES\n", "## CHANGES\n", "# CHANGES\n",
		"...\n", "---\n", "## Changes\n", "## Change",
		"Update version to v..1 and commit\n",
		"# What this Pull Request (PR) does\n",
		"# Conflicts:",
	}

	message = normalizeLineEndings(message)
	// No hard tabs
	message = strings.ReplaceAll(message, "\t", " ")

	if len(message) > 0 {
		message = strings.ToUpper(message[:1]) + message[1:]
	}

	for _, str := range strings_to_remove {
		if strings.Contains(message, str) {
			message = strings.ReplaceAll(message, str, "")
		}
	}

	message = fixFormatting(message)

	return message
}

func fixFormatting(message string) string {
	// Turn "*" lists into "-" lists
	message = strings.ReplaceAll(message, "* ", "- ")
	// Remove extra spaces after dashes
	// (reconstructed: the multi-space patterns were collapsed in extraction)
	message = strings.ReplaceAll(message, "-   ", "- ")
	message = strings.ReplaceAll(message, "-  ", "- ")
	// Turn a bare URL into <URL>
	if strings.Contains(message, "http://") || strings.Contains(message, "https://") {
		// Use regex to wrap bare URLs with angle brackets
		urlRegex := regexp.MustCompile(`\b(https?://[^\s<>]+)`)
		message = urlRegex.ReplaceAllString(message, "<$1>")
	}

	// Replace "## LINKS\n" with "- "
	message = strings.ReplaceAll(message, "## LINKS\n", "- ")
	// Dependabot messages: "- [Commits]" should become "\n- [Commits]"
	message = strings.TrimSpace(message)
	// Turn multiple newlines into a single newline
	message = strings.TrimSpace(strings.ReplaceAll(message, "\n\n", "\n"))
	// Fix inline trailing spaces
	message = strings.ReplaceAll(message, " \n", "\n")
	// Fix weird indent before a list
	message = strings.ReplaceAll(message, "\n - ", "\n- ")

	// blanks-around-lists MD032 fix:
	// use regex to ensure a blank line before list items that don't already have one
	listRegex := regexp.MustCompile(`(?m)([^\n-].*[^:\n])\n([-*] .*)`)
	message = listRegex.ReplaceAllString(message, "$1\n\n$2")

	// Change random first-level "#" to 4th level "####".
	// This is a hack to fix spurious first-level headings that are not actual headings
	// but rather just comments or notes in the commit message.
	message = strings.ReplaceAll(message, "# ", "\n#### ")
	message = strings.ReplaceAll(message, "\n\n\n", "\n\n")

	// Wrap any non-wrapped emails with angle brackets
	emailRegex := regexp.MustCompile(`([a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,})`)
	message = emailRegex.ReplaceAllString(message, "<$1>")

	// Wrap any non-wrapped URLs with angle brackets
	urlRegex := regexp.MustCompile(`(https?://[^\s<]+)`)
	message = urlRegex.ReplaceAllString(message, "<$1>")

	message = strings.ReplaceAll(message, "<<", "<")
	message = strings.ReplaceAll(message, ">>", ">")

	// Fix some spurious Issue/PR links at the beginning of a commit message line
	prOrIssueLinkRegex := regexp.MustCompile("\n" + `(#\d+)`)
	message = prOrIssueLinkRegex.ReplaceAllString(message, " $1")

	// Remove leading/trailing whitespace
	message = strings.TrimSpace(message)
	return message
}
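
A test-style sketch (illustrative, not part of this changeset) of fixFormatting's two most visible behaviors: "*" bullets become "-" bullets, and a bare URL ends up wrapped in angle brackets exactly once, thanks to the "<<"/">>" cleanup passes.

package changelog

import "testing"

func TestFixFormattingBulletsAndURLs(t *testing.T) {
	in := "* added docs at https://example.com"
	want := "- added docs at <https://example.com>"
	if got := fixFormatting(in); got != want {
		t.Errorf("fixFormatting() = %q, want %q", got, want)
	}
}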

func (g *Generator) isDuplicateMessage(message string, commits []*git.Commit) bool {
	if message == "." || strings.ToLower(message) == "fix" {
		count := 0
		for _, commit := range commits {
			formatted := g.formatCommitMessage(commit.Message)
			if formatted == message {
				count++
				if count > 1 {
					return true
				}
			}
		}
	}
	return false
}

// hashContent generates a SHA256 hash of the content for change detection
func hashContent(content string) string {
	hash := sha256.Sum256([]byte(content))
	return fmt.Sprintf("%x", hash)
}
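
A sanity sketch (illustrative, not part of this changeset) for hashContent: the digest is a stable 64-character hex string, which is what lets formatVersion compare the cached Unreleased hash against a freshly computed one.

package changelog

import "testing"

func TestHashContentStableHex(t *testing.T) {
	a := hashContent("## Unreleased\n- some change")
	b := hashContent("## Unreleased\n- some change")
	if a != b {
		t.Errorf("expected identical hashes for identical content, got %q and %q", a, b)
	}
	if len(a) != 64 { // sha256 → 32 bytes → 64 hex characters
		t.Errorf("expected a 64-character hex digest, got %d characters", len(a))
	}
}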

// SyncDatabase performs a comprehensive database synchronization and validation
func (g *Generator) SyncDatabase() error {
	if g.cache == nil {
		return fmt.Errorf("cache is disabled, cannot sync database")
	}

	fmt.Fprintf(os.Stderr, "[SYNC] Starting database synchronization...\n")

	// Step 1: Force PR sync (pass true explicitly)
	fmt.Fprintf(os.Stderr, "[PR_SYNC] Forcing PR sync from GitHub...\n")
	if err := g.fetchPRs(true); err != nil {
		return fmt.Errorf("failed to sync PRs: %w", err)
	}

	// Step 2: Rebuild git history and verify versions/commits completeness
	fmt.Fprintf(os.Stderr, "[VERIFY] Verifying git history and version completeness...\n")
	if err := g.syncGitHistory(); err != nil {
		return fmt.Errorf("failed to sync git history: %w", err)
	}

	// Step 3: Verify commit-PR mappings
	fmt.Fprintf(os.Stderr, "[MAPPING] Verifying commit-PR mappings...\n")
	if err := g.verifyCommitPRMappings(); err != nil {
		return fmt.Errorf("failed to verify commit-PR mappings: %w", err)
	}

	fmt.Fprintf(os.Stderr, "[SUCCESS] Database synchronization completed successfully!\n")
	return nil
}

// syncGitHistory walks the complete git history and ensures all versions and commits are cached
func (g *Generator) syncGitHistory() error {
	// Walk complete git history (reuse existing logic)
	versions, err := g.gitWalker.WalkHistory()
	if err != nil {
		return fmt.Errorf("failed to walk git history: %w", err)
	}

	// Save only new versions and commits (preserve existing data)
	var newVersions, newCommits int
	for _, version := range versions {
		// Only save the version if it doesn't exist
		exists, err := g.cache.VersionExists(version.Name)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Warning: Failed to check existence of version %s: %v. This may affect the completeness of the sync operation.\n", version.Name, err)
			continue
		}
		if !exists {
			if err := g.cache.SaveVersion(version); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: Failed to save version %s: %v\n", version.Name, err)
			} else {
				newVersions++
			}
		}

		// Only save commits that don't exist
		for _, commit := range version.Commits {
			exists, err := g.cache.CommitExists(commit.SHA)
			if err != nil {
				fmt.Fprintf(os.Stderr, "Warning: Failed to check commit %s existence: %v\n", commit.SHA, err)
				continue
			}
			if !exists {
				if err := g.cache.SaveCommit(commit, version.Name); err != nil {
					fmt.Fprintf(os.Stderr, "Warning: Failed to save commit %s: %v\n", commit.SHA, err)
				} else {
					newCommits++
				}
			}
		}
	}

	// Update the last processed tag
	if latestTag, err := g.gitWalker.GetLatestTag(); err == nil && latestTag != "" {
		if err := g.cache.SetLastProcessedTag(latestTag); err != nil {
			fmt.Fprintf(os.Stderr, "Warning: Failed to update last processed tag: %v\n", err)
		}
	}

	fmt.Fprintf(os.Stderr, " Added %d new versions and %d new commits (preserved existing data)\n", newVersions, newCommits)
	return nil
}

// verifyCommitPRMappings ensures all PR commits have proper mappings
func (g *Generator) verifyCommitPRMappings() error {
	// Get all cached PRs
	allPRs, err := g.cache.GetAllPRs()
	if err != nil {
		return fmt.Errorf("failed to get cached PRs: %w", err)
	}

	// Convert to a slice for batch operations (reuse existing logic)
	var prSlice []*github.PR
	for _, pr := range allPRs {
		prSlice = append(prSlice, pr)
	}

	// Save commit-PR mappings (reuse existing logic)
	if err := g.cache.SaveCommitPRMappings(prSlice); err != nil {
		return fmt.Errorf("failed to save commit-PR mappings: %w", err)
	}

	fmt.Fprintf(os.Stderr, " Verified mappings for %d PRs\n", len(prSlice))
	return nil
}

cmd/generate_changelog/internal/changelog/generator_test.go (new file, 115 lines)
@@ -0,0 +1,115 @@
package changelog

import (
	"os"
	"path/filepath"
	"regexp"
	"testing"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
)

func TestDetectVersionFromNix(t *testing.T) {
	tempDir := t.TempDir()

	t.Run("version.nix exists", func(t *testing.T) {
		versionNixContent := `"1.2.3"`
		versionNixPath := filepath.Join(tempDir, "version.nix")
		err := os.WriteFile(versionNixPath, []byte(versionNixContent), 0644)
		if err != nil {
			t.Fatalf("Failed to write version.nix: %v", err)
		}

		data, err := os.ReadFile(versionNixPath)
		if err != nil {
			t.Fatalf("Failed to read version.nix: %v", err)
		}

		versionRegex := regexp.MustCompile(`"([^"]+)"`)
		matches := versionRegex.FindStringSubmatch(string(data))

		if len(matches) <= 1 {
			t.Fatalf("No version found in version.nix")
		}

		version := matches[1]
		if version != "1.2.3" {
			t.Errorf("Expected version 1.2.3, got %s", version)
		}
	})
}

func TestEnsureIncomingDir(t *testing.T) {
	tempDir := t.TempDir()
	incomingDir := filepath.Join(tempDir, "incoming")

	cfg := &config.Config{
		IncomingDir: incomingDir,
	}

	g := &Generator{cfg: cfg}

	err := g.ensureIncomingDir()
	if err != nil {
		t.Fatalf("ensureIncomingDir failed: %v", err)
	}

	if _, err := os.Stat(incomingDir); os.IsNotExist(err) {
		t.Errorf("Incoming directory was not created")
	}
}

func TestInsertVersionAtTop(t *testing.T) {
	tempDir := t.TempDir()
	changelogPath := filepath.Join(tempDir, "CHANGELOG.md")

	cfg := &config.Config{
		RepoPath: tempDir,
	}

	g := &Generator{cfg: cfg}

	t.Run("new changelog", func(t *testing.T) {
		entry := "## v1.0.0 (2025-01-01)\n\n- Initial release"

		err := g.insertVersionAtTop(entry)
		if err != nil {
			t.Fatalf("insertVersionAtTop failed: %v", err)
		}

		content, err := os.ReadFile(changelogPath)
		if err != nil {
			t.Fatalf("Failed to read changelog: %v", err)
		}

		expected := "# Changelog\n\n## v1.0.0 (2025-01-01)\n\n- Initial release\n"
		if string(content) != expected {
			t.Errorf("Expected:\n%s\nGot:\n%s", expected, string(content))
		}
	})

	t.Run("existing changelog", func(t *testing.T) {
		existingContent := "# Changelog\n\n## v0.9.0 (2024-12-01)\n\n- Previous release"
		err := os.WriteFile(changelogPath, []byte(existingContent), 0644)
		if err != nil {
			t.Fatalf("Failed to write existing changelog: %v", err)
		}

		entry := "## v1.0.0 (2025-01-01)\n\n- New release"

		err = g.insertVersionAtTop(entry)
		if err != nil {
			t.Fatalf("insertVersionAtTop failed: %v", err)
		}

		content, err := os.ReadFile(changelogPath)
		if err != nil {
			t.Fatalf("Failed to read changelog: %v", err)
		}

		expected := "# Changelog\n\n## v1.0.0 (2025-01-01)\n\n- New release\n## v0.9.0 (2024-12-01)\n\n- Previous release"
		if string(content) != expected {
			t.Errorf("Expected:\n%s\nGot:\n%s", expected, string(content))
		}
	})
}
@@ -0,0 +1,82 @@
package changelog

import (
	"testing"
	"time"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
)

func TestIsMergeCommit(t *testing.T) {
	tests := []struct {
		name     string
		commit   github.PRCommit
		expected bool
	}{
		{
			name: "Regular commit with single parent",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Fix bug in user authentication",
				Author:  "John Doe",
				Date:    time.Now(),
				Parents: []string{"def456"},
			},
			expected: false,
		},
		{
			name: "Merge commit with multiple parents",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Merge pull request #42 from feature/auth",
				Author:  "GitHub",
				Date:    time.Now(),
				Parents: []string{"def456", "ghi789"},
			},
			expected: true,
		},
		{
			name: "Merge commit detected by message pattern only",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Merge pull request #123 from user/feature-branch",
				Author:  "GitHub",
				Date:    time.Now(),
				Parents: []string{}, // Empty parents - fall back to message detection
			},
			expected: true,
		},
		{
			name: "Merge branch commit pattern",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Merge branch 'feature' into main",
				Author:  "Developer",
				Date:    time.Now(),
				Parents: []string{"def456"}, // Single parent but merge message pattern
			},
			expected: true,
		},
		{
			name: "Regular commit with no merge patterns",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Add new feature for user management",
				Author:  "Jane Doe",
				Date:    time.Now(),
				Parents: []string{"def456"},
			},
			expected: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := isMergeCommit(tt.commit)
			if result != tt.expected {
				t.Errorf("isMergeCommit() = %v, expected %v for commit: %s",
					result, tt.expected, tt.commit.Message)
			}
		})
	}
}

cmd/generate_changelog/internal/changelog/processing.go (new file, 521 lines)
@@ -0,0 +1,521 @@
package changelog

import (
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
)

var (
	mergePatterns     []*regexp.Regexp
	mergePatternsOnce sync.Once
)

// getMergePatterns returns the compiled merge patterns, initializing them lazily
func getMergePatterns() []*regexp.Regexp {
	mergePatternsOnce.Do(func() {
		mergePatterns = []*regexp.Regexp{
			regexp.MustCompile(`^Merge pull request #\d+`),       // "Merge pull request #123 from..."
			regexp.MustCompile(`^Merge branch '.*' into .*`),     // "Merge branch 'feature' into main"
			regexp.MustCompile(`^Merge remote-tracking branch`),  // "Merge remote-tracking branch..."
			regexp.MustCompile(`^Merge '.*' into .*`),            // "Merge 'feature' into main"
		}
	})
	return mergePatterns
}

// isMergeCommit determines if a commit is a merge commit based on its parents and message patterns.
func isMergeCommit(commit github.PRCommit) bool {
	// Primary method: Check parent count (merge commits have multiple parents)
	if len(commit.Parents) > 1 {
		return true
	}

	// Fallback method: Check commit message patterns
	mergePatterns := getMergePatterns()
	for _, pattern := range mergePatterns {
		if pattern.MatchString(commit.Message) {
			return true
		}
	}

	return false
}

// calculateVersionDate determines the version date based on the most recent commit date from the provided PRs.
//
// If no valid commit dates are found, the function falls back to the current time.
// The function iterates through the provided PRs and their associated commits, comparing commit dates
// to identify the most recent one. If a valid date is found, it is returned; otherwise, the fallback is used.
func calculateVersionDate(fetchedPRs []*github.PR) time.Time {
	versionDate := time.Now() // fallback to current time
	if len(fetchedPRs) > 0 {
		var mostRecentCommitDate time.Time
		for _, pr := range fetchedPRs {
			for _, commit := range pr.Commits {
				if commit.Date.After(mostRecentCommitDate) {
					mostRecentCommitDate = commit.Date
				}
			}
		}
		if !mostRecentCommitDate.IsZero() {
			versionDate = mostRecentCommitDate
		}
	}
	return versionDate
}
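
A test-style sketch (illustrative, not part of this changeset) pinning down the doc comment above: the newest commit date across all PRs wins. The SHAs, PR number, and dates are hypothetical.

package changelog

import (
	"testing"
	"time"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
)

func TestCalculateVersionDatePicksNewestCommit(t *testing.T) {
	older := time.Date(2025, 1, 1, 0, 0, 0, 0, time.UTC)
	newer := time.Date(2025, 2, 1, 0, 0, 0, 0, time.UTC)
	prs := []*github.PR{{
		Number: 1, // made-up PR number
		Commits: []github.PRCommit{
			{SHA: "aaa111", Date: older},
			{SHA: "bbb222", Date: newer},
		},
	}}
	if got := calculateVersionDate(prs); !got.Equal(newer) {
		t.Errorf("calculateVersionDate() = %v, want %v", got, newer)
	}
}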

// ProcessIncomingPR processes a single PR for changelog entry creation
func (g *Generator) ProcessIncomingPR(prNumber int) error {
	if err := g.validatePRState(prNumber); err != nil {
		return fmt.Errorf("PR validation failed: %w", err)
	}

	if err := g.validateGitStatus(); err != nil {
		return fmt.Errorf("git status validation failed: %w", err)
	}

	// Now fetch the full PR with commits for content generation
	pr, err := g.ghClient.GetPRWithCommits(prNumber)
	if err != nil {
		return fmt.Errorf("failed to fetch PR %d: %w", prNumber, err)
	}

	content := g.formatPR(pr)

	if g.cfg.EnableAISummary {
		aiContent, err := SummarizeVersionContent(content)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Warning: AI summarization failed: %v\n", err)
		} else if !checkForAIError(aiContent) {
			content = strings.TrimSpace(aiContent)
		}
	}

	if err := g.ensureIncomingDir(); err != nil {
		return fmt.Errorf("failed to create incoming directory: %w", err)
	}

	filename := filepath.Join(g.cfg.IncomingDir, fmt.Sprintf("%d.txt", prNumber))

	// Ensure content ends with a single newline
	content = strings.TrimSpace(content) + "\n"

	if err := os.WriteFile(filename, []byte(content), 0644); err != nil {
		return fmt.Errorf("failed to write incoming file: %w", err)
	}

	if err := g.commitAndPushIncoming(prNumber, filename); err != nil {
		return fmt.Errorf("failed to commit and push: %w", err)
	}

	fmt.Printf("Successfully created incoming changelog entry: %s\n", filename)
	return nil
}

// CreateNewChangelogEntry aggregates all incoming PR files for release and includes direct commits
func (g *Generator) CreateNewChangelogEntry(version string) error {
	files, err := filepath.Glob(filepath.Join(g.cfg.IncomingDir, "*.txt"))
	if err != nil {
		return fmt.Errorf("failed to scan incoming directory: %w", err)
	}

	var content strings.Builder
	var processingErrors []string

	// First, aggregate all incoming PR files
	for _, file := range files {
		data, err := os.ReadFile(file)
		if err != nil {
			processingErrors = append(processingErrors, fmt.Sprintf("failed to read %s: %v", file, err))
			continue // Continue to attempt processing other files
		}
		content.WriteString(string(data))
		// Note: No extra newline needed here as each incoming file already ends with a newline
	}

	if len(processingErrors) > 0 {
		return fmt.Errorf("encountered errors while processing incoming files: %s", strings.Join(processingErrors, "; "))
	}

	// Extract PR numbers and their commit SHAs from processed files to avoid including their commits as "direct"
	processedPRs := make(map[int]bool)
	processedCommitSHAs := make(map[string]bool)
	var fetchedPRs []*github.PR
	var prNumbers []int

	for _, file := range files {
		// Extract PR number from filename (e.g., "1640.txt" -> 1640)
		filename := filepath.Base(file)
		if prNumStr := strings.TrimSuffix(filename, ".txt"); prNumStr != filename {
			if prNum, err := strconv.Atoi(prNumStr); err == nil {
				processedPRs[prNum] = true
				prNumbers = append(prNumbers, prNum)

				// Fetch the PR to get its commit SHAs
				if pr, err := g.ghClient.GetPRWithCommits(prNum); err == nil {
					fetchedPRs = append(fetchedPRs, pr)
					for _, commit := range pr.Commits {
						processedCommitSHAs[commit.SHA] = true
					}
				}
			}
		}
	}

	// Now add direct commits since the last release, excluding commits from processed PRs
	directCommitsContent, err := g.getDirectCommitsSinceLastRelease(processedPRs, processedCommitSHAs)
	if err != nil {
		return fmt.Errorf("failed to get direct commits since last release: %w", err)
	}
	content.WriteString(directCommitsContent)

	// Check if we have any content at all
	if content.Len() == 0 {
		if len(files) == 0 {
			fmt.Fprintf(os.Stderr, "No incoming PR files found in %s and no direct commits since last release\n", g.cfg.IncomingDir)
		} else {
			fmt.Fprintf(os.Stderr, "No content found in incoming files and no direct commits since last release\n")
		}
		return nil
	}

	// Calculate the version date for the changelog entry as the most recent commit date from processed PRs
	versionDate := calculateVersionDate(fetchedPRs)

	entry := fmt.Sprintf("## %s (%s)\n\n%s",
		version, versionDate.Format("2006-01-02"), strings.TrimLeft(content.String(), "\n"))

	if err := g.insertVersionAtTop(entry); err != nil {
		return fmt.Errorf("failed to update CHANGELOG.md: %w", err)
	}

	if g.cache != nil {
		// Cache the fetched PRs using the same logic as normal changelog generation
		if len(fetchedPRs) > 0 {
			// Save PRs to cache
			if err := g.cache.SavePRBatch(fetchedPRs); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: Failed to save PR batch to cache: %v\n", err)
			}

			// Save SHA→PR mappings for fast git operations
			if err := g.cache.SaveCommitPRMappings(fetchedPRs); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: Failed to cache commit mappings: %v\n", err)
			}

			// Save individual commits to cache for each PR
			for _, pr := range fetchedPRs {
				for _, commit := range pr.Commits {
					// Use actual commit timestamp, with fallback to current time if invalid
					commitDate := commit.Date
					if commitDate.IsZero() {
						commitDate = time.Now()
						fmt.Fprintf(os.Stderr, "Warning: Commit %s has invalid timestamp, using current time as fallback\n", commit.SHA)
					}

					// Convert github.PRCommit to git.Commit
					gitCommit := &git.Commit{
						SHA:      commit.SHA,
						Message:  commit.Message,
						Author:   commit.Author,
						Email:    commit.Email,          // Use email from GitHub API
						Date:     commitDate,            // Use actual commit timestamp from GitHub API
						IsMerge:  isMergeCommit(commit), // Detect merge commits using parents and message patterns
						PRNumber: pr.Number,
					}
					if err := g.cache.SaveCommit(gitCommit, version); err != nil {
						fmt.Fprintf(os.Stderr, "Warning: Failed to save commit %s to cache: %v\n", commit.SHA, err)
					}
				}
			}
		}

		// Create a proper new version entry for the database
		newVersionEntry := &git.Version{
			Name:      version,
			Date:      versionDate, // Use most recent commit date instead of current time
			CommitSHA: "",          // Will be set when the release commit is made
			PRNumbers: prNumbers,   // Now we have the actual PR numbers
			AISummary: content.String(),
		}

		if err := g.cache.SaveVersion(newVersionEntry); err != nil {
			return fmt.Errorf("failed to save new version entry to database: %w", err)
		}
	}

	for _, file := range files {
		// Convert to relative path for git operations
		relativeFile, err := filepath.Rel(g.cfg.RepoPath, file)
		if err != nil {
			relativeFile = file
		}

		// Use git remove to handle both filesystem and git index
		if err := g.gitWalker.RemoveFile(relativeFile); err != nil {
			fmt.Fprintf(os.Stderr, "Warning: Failed to remove %s from git index: %v\n", relativeFile, err)
			// Fall back to filesystem-only removal
			if err := os.Remove(file); err != nil {
				fmt.Fprintf(os.Stderr, "Error: Failed to remove %s from the filesystem after failing to remove it from the git index.\n", relativeFile)
				fmt.Fprintf(os.Stderr, "Filesystem error: %v\n", err)
				fmt.Fprintf(os.Stderr, "Manual intervention required:\n")
				fmt.Fprintf(os.Stderr, "  1. Remove the file %s manually (using the OS-specific command)\n", file)
				fmt.Fprintf(os.Stderr, "  2. Remove from git index: git rm --cached %s\n", relativeFile)
				fmt.Fprintf(os.Stderr, "  3. Or reset git index: git reset HEAD %s\n", relativeFile)
			}
		}
	}

	if err := g.stageChangesForRelease(); err != nil {
		return fmt.Errorf("critical: failed to stage changes for release: %w", err)
	}

	fmt.Printf("Successfully processed %d incoming PR files for version %s\n", len(files), version)
	return nil
}
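
Taken together, the two entry points above form the release workflow. A hedged sketch of how a caller might drive it (illustrative only; the minimal config, PR number, and version string are hypothetical, and real callers set more Config fields):

package main

import (
	"log"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/changelog"
	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
)

func main() {
	g, err := changelog.New(&config.Config{RepoPath: "."}) // minimal config for the sketch
	if err != nil {
		log.Fatal(err)
	}
	// Per PR: write incoming/<pr>.txt when a PR is ready (1640 is a made-up number).
	if err := g.ProcessIncomingPR(1640); err != nil {
		log.Fatal(err)
	}
	// At release time: fold all incoming files plus direct commits into CHANGELOG.md.
	if err := g.CreateNewChangelogEntry("v1.4.100"); err != nil { // hypothetical version
		log.Fatal(err)
	}
}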

// getDirectCommitsSinceLastRelease gets all direct commits (not part of PRs) since the last release
func (g *Generator) getDirectCommitsSinceLastRelease(processedPRs map[int]bool, processedCommitSHAs map[string]bool) (string, error) {
	// Get the latest tag to determine what commits are unreleased
	latestTag, err := g.gitWalker.GetLatestTag()
	if err != nil {
		return "", fmt.Errorf("failed to get latest tag: %w", err)
	}

	// Get all commits since the latest tag
	unreleasedVersion, err := g.gitWalker.WalkCommitsSinceTag(latestTag)
	if err != nil {
		return "", fmt.Errorf("failed to walk commits since tag %s: %w", latestTag, err)
	}

	if unreleasedVersion == nil || len(unreleasedVersion.Commits) == 0 {
		return "", nil // No unreleased commits
	}

	// Filter out commits that are part of PRs (we already have those from incoming files)
	// and format the direct commits
	var directCommits []*git.Commit
	for _, commit := range unreleasedVersion.Commits {
		// Skip version bump commits
		if commit.IsVersion {
			continue
		}

		// Skip commits that belong to PRs we've already processed from incoming files (by PR number)
		if commit.PRNumber > 0 && processedPRs[commit.PRNumber] {
			continue
		}

		// Skip commits whose SHA is already included in processed PRs (this catches commits
		// that might not have been detected as part of a PR but are actually in the PR)
		if processedCommitSHAs[commit.SHA] {
			continue
		}

		// Only include commits that are NOT part of any PR (direct commits)
		if commit.PRNumber == 0 {
			directCommits = append(directCommits, commit)
		}
	}

	if len(directCommits) == 0 {
		return "", nil // No direct commits
	}

	// Format the direct commits similar to how it's done in generateRawVersionContent
	var sb strings.Builder
	sb.WriteString("### Direct commits\n\n")

	// Sort direct commits by date (newest first) for consistent ordering
	sort.Slice(directCommits, func(i, j int) bool {
		return directCommits[i].Date.After(directCommits[j].Date)
	})

	for _, commit := range directCommits {
		message := g.formatCommitMessage(strings.TrimSpace(commit.Message))
		if message != "" && !g.isDuplicateMessage(message, directCommits) {
			sb.WriteString(fmt.Sprintf("- %s\n", message))
		}
	}

	return sb.String(), nil
}

// validatePRState validates that a PR is in the correct state for processing
func (g *Generator) validatePRState(prNumber int) error {
	// Use a lightweight validation call that doesn't fetch commits
	details, err := g.ghClient.GetPRValidationDetails(prNumber)
	if err != nil {
		return fmt.Errorf("failed to fetch PR %d: %w", prNumber, err)
	}

	if details.State != "open" {
		return fmt.Errorf("PR %d is not open (current state: %s)", prNumber, details.State)
	}

	if !details.Mergeable {
		return fmt.Errorf("PR %d is not mergeable - please resolve conflicts first", prNumber)
	}

	return nil
}

// validateGitStatus ensures the working directory is clean
func (g *Generator) validateGitStatus() error {
	isClean, err := g.gitWalker.IsWorkingDirectoryClean()
	if err != nil {
		return fmt.Errorf("failed to check git status: %w", err)
	}

	if !isClean {
		// Get detailed status for a better error message
		statusDetails, statusErr := g.gitWalker.GetStatusDetails()
		if statusErr == nil && statusDetails != "" {
			return fmt.Errorf("working directory is not clean - please commit or stash changes before proceeding:\n%s", statusDetails)
		}
		return fmt.Errorf("working directory is not clean - please commit or stash changes before proceeding")
	}

	return nil
}

// ensureIncomingDir creates the incoming directory if it doesn't exist
func (g *Generator) ensureIncomingDir() error {
	if err := os.MkdirAll(g.cfg.IncomingDir, 0755); err != nil {
		return fmt.Errorf("failed to create directory %s: %w", g.cfg.IncomingDir, err)
	}
	return nil
}

// commitAndPushIncoming commits and optionally pushes the incoming changelog file
func (g *Generator) commitAndPushIncoming(prNumber int, filename string) error {
	relativeFilename, err := filepath.Rel(g.cfg.RepoPath, filename)
	if err != nil {
		relativeFilename = filename
	}

	// Add the file to the git index
	if err := g.gitWalker.AddFile(relativeFilename); err != nil {
		return fmt.Errorf("failed to add file %s: %w", relativeFilename, err)
	}

	// Commit changes
	commitMessage := fmt.Sprintf("chore: incoming %d changelog entry", prNumber)
	_, err = g.gitWalker.CommitChanges(commitMessage)
	if err != nil {
		return fmt.Errorf("failed to commit changes: %w", err)
	}

	// Push to remote if enabled
	if g.cfg.Push {
		if err := g.gitWalker.PushToRemote(); err != nil {
			return fmt.Errorf("failed to push to remote: %w", err)
		}
	} else {
		fmt.Println("Commit created successfully. Please review and push manually.")
	}

	return nil
}

// detectVersion detects the current version from version.nix or git tags
func (g *Generator) detectVersion() (string, error) {
	versionNixPath := filepath.Join(g.cfg.RepoPath, "version.nix")
	if _, err := os.Stat(versionNixPath); err == nil {
		data, err := os.ReadFile(versionNixPath)
		if err != nil {
			return "", fmt.Errorf("failed to read version.nix: %w", err)
		}

		versionRegex := regexp.MustCompile(`"([^"]+)"`)
		matches := versionRegex.FindStringSubmatch(string(data))
		if len(matches) > 1 {
			return matches[1], nil
		}
	}

	latestTag, err := g.gitWalker.GetLatestTag()
	if err != nil {
		return "", fmt.Errorf("failed to get latest tag: %w", err)
	}

	if latestTag == "" {
		return "v1.0.0", nil
	}

	return latestTag, nil
}

// insertVersionAtTop inserts a new version entry at the top of CHANGELOG.md
func (g *Generator) insertVersionAtTop(entry string) error {
	changelogPath := filepath.Join(g.cfg.RepoPath, "CHANGELOG.md")
	header := "# Changelog"
	headerRegex := regexp.MustCompile(`(?m)^# Changelog\s*`)

	existingContent, err := os.ReadFile(changelogPath)
	if err != nil {
		if !os.IsNotExist(err) {
			return fmt.Errorf("failed to read existing CHANGELOG.md: %w", err)
		}
		// File doesn't exist, create it.
		newContent := fmt.Sprintf("%s\n\n%s\n", header, entry)
		return os.WriteFile(changelogPath, []byte(newContent), 0644)
	}

	contentStr := string(existingContent)
	var newContent string

	if loc := headerRegex.FindStringIndex(contentStr); loc != nil {
		// Found the header, insert after it.
		insertionPoint := loc[1]
		// Skip any existing newlines after the header to avoid double spacing
		for insertionPoint < len(contentStr) && (contentStr[insertionPoint] == '\n' || contentStr[insertionPoint] == '\r') {
			insertionPoint++
		}
		// Insert with proper spacing: single newline after header, then entry, then newline before existing content
		newContent = contentStr[:loc[1]] + entry + "\n" + contentStr[insertionPoint:]
	} else {
		// Header not found, prepend everything.
		newContent = fmt.Sprintf("%s\n\n%s\n\n%s", header, entry, contentStr)
	}

	return os.WriteFile(changelogPath, []byte(newContent), 0644)
}

// stageChangesForRelease stages the modified files for the release commit
func (g *Generator) stageChangesForRelease() error {
	changelogPath := filepath.Join(g.cfg.RepoPath, "CHANGELOG.md")
	relativeChangelog, err := filepath.Rel(g.cfg.RepoPath, changelogPath)
	if err != nil {
		relativeChangelog = "CHANGELOG.md"
	}

	relativeCacheFile, err := filepath.Rel(g.cfg.RepoPath, g.cfg.CacheFile)
	if err != nil {
		relativeCacheFile = g.cfg.CacheFile
	}

	// Add CHANGELOG.md to the git index
	if err := g.gitWalker.AddFile(relativeChangelog); err != nil {
		return fmt.Errorf("failed to add %s: %w", relativeChangelog, err)
	}

	// Add the cache file to the git index
	if err := g.gitWalker.AddFile(relativeCacheFile); err != nil {
		return fmt.Errorf("failed to add %s: %w", relativeCacheFile, err)
	}

	// Note: Individual incoming files are now removed during the main processing loop.
	// No need to remove the entire directory here.

	return nil
}

cmd/generate_changelog/internal/changelog/processing_test.go (new file, 262 lines)
@@ -0,0 +1,262 @@
package changelog

import (
    "os"
    "path/filepath"
    "strings"
    "testing"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
)

func TestDetectVersion(t *testing.T) {
    tempDir := t.TempDir()

    tests := []struct {
        name              string
        versionNixContent string
        expectedVersion   string
        shouldError       bool
    }{
        {
            name:              "valid version.nix",
            versionNixContent: `"1.2.3"`,
            expectedVersion:   "1.2.3",
            shouldError:       false,
        },
        {
            name:              "version with extra whitespace",
            versionNixContent: `"1.2.3" `,
            expectedVersion:   "1.2.3",
            shouldError:       false,
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            // Create the version.nix file
            versionNixPath := filepath.Join(tempDir, "version.nix")
            if err := os.WriteFile(versionNixPath, []byte(tt.versionNixContent), 0644); err != nil {
                t.Fatalf("Failed to create version.nix: %v", err)
            }

            cfg := &config.Config{
                RepoPath: tempDir,
            }

            g := &Generator{cfg: cfg}

            version, err := g.detectVersion()
            if tt.shouldError && err == nil {
                t.Errorf("Expected error but got none")
            }
            if !tt.shouldError && err != nil {
                t.Errorf("Unexpected error: %v", err)
            }
            if version != tt.expectedVersion {
                t.Errorf("Expected version '%s', got '%s'", tt.expectedVersion, version)
            }

            // Clean up
            os.Remove(versionNixPath)
        })
    }
}

func TestInsertVersionAtTop_ImprovedRobustness(t *testing.T) {
    tempDir := t.TempDir()
    changelogPath := filepath.Join(tempDir, "CHANGELOG.md")

    cfg := &config.Config{
        RepoPath: tempDir,
    }

    g := &Generator{cfg: cfg}

    tests := []struct {
        name            string
        existingContent string
        entry           string
        expectedContent string
    }{
        {
            name:            "header with trailing spaces",
            existingContent: "# Changelog \n\n## v1.0.0\n- Old content",
            entry:           "## v2.0.0\n- New content",
            expectedContent: "# Changelog \n\n## v2.0.0\n- New content\n## v1.0.0\n- Old content",
        },
        {
            name:            "header with different line endings",
            existingContent: "# Changelog\r\n\r\n## v1.0.0\r\n- Old content",
            entry:           "## v2.0.0\n- New content",
            expectedContent: "# Changelog\r\n\r\n## v2.0.0\n- New content\n## v1.0.0\r\n- Old content",
        },
        {
            name:            "no existing header",
            existingContent: "Some existing content without header",
            entry:           "## v1.0.0\n- New content",
            expectedContent: "# Changelog\n\n## v1.0.0\n- New content\n\nSome existing content without header",
        },
        {
            name:            "new file creation",
            existingContent: "",
            entry:           "## v1.0.0\n- Initial release",
            expectedContent: "# Changelog\n\n## v1.0.0\n- Initial release\n",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            // Write existing content (or create an empty state)
            if tt.existingContent != "" {
                if err := os.WriteFile(changelogPath, []byte(tt.existingContent), 0644); err != nil {
                    t.Fatalf("Failed to write existing content: %v", err)
                }
            } else {
                // Remove the file if it exists to test new file creation
                os.Remove(changelogPath)
            }

            // Insert the new version
            if err := g.insertVersionAtTop(tt.entry); err != nil {
                t.Fatalf("insertVersionAtTop failed: %v", err)
            }

            // Read the result
            result, err := os.ReadFile(changelogPath)
            if err != nil {
                t.Fatalf("Failed to read result: %v", err)
            }

            if string(result) != tt.expectedContent {
                t.Errorf("Expected:\n%q\nGot:\n%q", tt.expectedContent, string(result))
            }
        })
    }
}

func TestProcessIncomingPRs_FileAggregation(t *testing.T) {
    tempDir := t.TempDir()
    incomingDir := filepath.Join(tempDir, "incoming")

    // Create the incoming directory and files
    if err := os.MkdirAll(incomingDir, 0755); err != nil {
        t.Fatalf("Failed to create incoming dir: %v", err)
    }

    // Create test incoming files
    file1Content := "## PR #1\n- Feature A"
    file2Content := "## PR #2\n- Feature B"

    if err := os.WriteFile(filepath.Join(incomingDir, "1.txt"), []byte(file1Content), 0644); err != nil {
        t.Fatalf("Failed to create test file: %v", err)
    }
    if err := os.WriteFile(filepath.Join(incomingDir, "2.txt"), []byte(file2Content), 0644); err != nil {
        t.Fatalf("Failed to create test file: %v", err)
    }

    // Test the file aggregation logic by calling the internal functions
    files, err := filepath.Glob(filepath.Join(incomingDir, "*.txt"))
    if err != nil {
        t.Fatalf("Failed to glob files: %v", err)
    }

    if len(files) != 2 {
        t.Fatalf("Expected 2 files, got %d", len(files))
    }

    // Test content aggregation
    var content strings.Builder
    var processingErrors []string
    for _, file := range files {
        data, err := os.ReadFile(file)
        if err != nil {
            processingErrors = append(processingErrors, err.Error())
            continue
        }
        content.WriteString(string(data))
        content.WriteString("\n")
    }

    if len(processingErrors) > 0 {
        t.Fatalf("Unexpected processing errors: %v", processingErrors)
    }

    aggregatedContent := content.String()
    if !strings.Contains(aggregatedContent, "Feature A") {
        t.Errorf("Aggregated content should contain 'Feature A'")
    }
    if !strings.Contains(aggregatedContent, "Feature B") {
        t.Errorf("Aggregated content should contain 'Feature B'")
    }
}

func TestFileProcessing_ErrorHandling(t *testing.T) {
    tempDir := t.TempDir()
    incomingDir := filepath.Join(tempDir, "incoming")

    // Create an incoming directory with one good file and one unreadable file
    if err := os.MkdirAll(incomingDir, 0755); err != nil {
        t.Fatalf("Failed to create incoming dir: %v", err)
    }

    // Create a good file
    if err := os.WriteFile(filepath.Join(incomingDir, "1.txt"), []byte("content"), 0644); err != nil {
        t.Fatalf("Failed to create test file: %v", err)
    }

    // Create an unreadable file (simulate a permission error)
    unreadableFile := filepath.Join(incomingDir, "2.txt")
    if err := os.WriteFile(unreadableFile, []byte("content"), 0000); err != nil {
        t.Fatalf("Failed to create unreadable file: %v", err)
    }
    defer os.Chmod(unreadableFile, 0644) // Clean up

    // Test the error aggregation logic
    files, err := filepath.Glob(filepath.Join(incomingDir, "*.txt"))
    if err != nil {
        t.Fatalf("Failed to glob files: %v", err)
    }

    var content strings.Builder
    var processingErrors []string
    for _, file := range files {
        data, err := os.ReadFile(file)
        if err != nil {
            processingErrors = append(processingErrors, err.Error())
            continue
        }
        content.WriteString(string(data))
        content.WriteString("\n")
    }

    if len(processingErrors) == 0 {
        t.Errorf("Expected processing errors due to unreadable file")
    }

    // Verify the error message format
    errorMsg := strings.Join(processingErrors, "; ")
    if !strings.Contains(errorMsg, "2.txt") {
        t.Errorf("Error message should mention the problematic file")
    }
}

func TestEnsureIncomingDirCreation(t *testing.T) {
    tempDir := t.TempDir()
    incomingDir := filepath.Join(tempDir, "incoming")

    cfg := &config.Config{
        IncomingDir: incomingDir,
    }

    g := &Generator{cfg: cfg}

    err := g.ensureIncomingDir()
    if err != nil {
        t.Fatalf("ensureIncomingDir failed: %v", err)
    }

    if _, err := os.Stat(incomingDir); os.IsNotExist(err) {
        t.Errorf("Incoming directory was not created")
    }
}
79  cmd/generate_changelog/internal/changelog/summarize.go  Normal file
@@ -0,0 +1,79 @@
package changelog

import (
    "fmt"
    "os"
    "os/exec"
    "strings"
)

const DefaultSummarizeModel = "claude-sonnet-4-20250514"
const MinContentLength = 256 // Minimum content length to consider for summarization

const prompt = `# ROLE
You are an expert Technical Writer specializing in creating clear, concise,
and professional release notes from raw Git commit logs.

# TASK
Your goal is to transform a provided block of Git commit logs into a clean,
human-readable changelog summary. You will identify the most important changes,
format them as a bulleted list, and preserve the associated Pull Request (PR)
information.

# INSTRUCTIONS:
Follow these steps in order:
1. Deeply analyze the input. You will be given a block of text containing PR
information and commit log messages. Carefully read through the logs
to identify individual commits and their descriptions.
2. Identify Key Changes: Focus on commits that represent significant changes,
such as new features ("feat"), bug fixes ("fix"), performance improvements ("perf"),
or breaking changes ("BREAKING CHANGE").
3. Select the Top 5: From the identified key changes, select a maximum of the five
(5) most impactful ones to include in the summary.
If there are five or fewer total changes, include all of them.
4. Format the Output:
- Where you see a PR header, include the PR header verbatim. NO CHANGES.
**This is a critical rule: Do not modify the PR header, as it contains
important links.** What follows the PR header are the related changes.
- Do not add any additional text or preamble. Begin directly with the output.
- Use bullet points for each key change, starting each point with a hyphen ("-").
- Ensure that the summary is concise and focused on the main changes.
- The summary should be in American English (en-US), using proper grammar and punctuation.
5. If the content is too brief or you do not see any PR headers, return the content as is.
`

// getSummarizeModel returns the model to use for AI summarization.
// The FABRIC_CHANGELOG_SUMMARIZE_MODEL environment variable overrides the default.
func getSummarizeModel() string {
    if model := os.Getenv("FABRIC_CHANGELOG_SUMMARIZE_MODEL"); model != "" {
        return model
    }
    return DefaultSummarizeModel
}

// SummarizeVersionContent takes raw version content and returns an AI-enhanced summary.
func SummarizeVersionContent(content string) (string, error) {
    if strings.TrimSpace(content) == "" {
        return "", fmt.Errorf("no content to summarize")
    }
    if len(content) < MinContentLength {
        // If the content is too brief, return it as is.
        return content, nil
    }

    model := getSummarizeModel()

    cmd := exec.Command("fabric", "-m", model, prompt)
    cmd.Stdin = strings.NewReader(content)

    output, err := cmd.Output()
    if err != nil {
        return "", fmt.Errorf("fabric command failed: %w", err)
    }

    summary := strings.TrimSpace(string(output))
    if summary == "" {
        return "", fmt.Errorf("fabric returned empty summary")
    }

    return summary, nil
}
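A quick usage sketch for the summarizer (not part of the PR): the package is internal, so this only compiles inside the fabric module, and it assumes a fabric binary on PATH; the model name is a placeholder.

```go
package main

import (
    "fmt"
    "log"
    "os"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/changelog"
)

func main() {
    // Optional per-run override; "some-model" is a placeholder, not a real model name.
    os.Setenv("FABRIC_CHANGELOG_SUMMARIZE_MODEL", "some-model")

    raw := "## PR #123: example\n- feat: add X\n- fix: correct Y"
    // Content shorter than MinContentLength is returned unchanged;
    // longer content is piped through `fabric -m <model> <prompt>`.
    summary, err := changelog.SummarizeVersionContent(raw)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(summary)
}
```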
21  cmd/generate_changelog/internal/config/config.go  Normal file
@@ -0,0 +1,21 @@
package config

type Config struct {
    RepoPath          string
    OutputFile        string
    Limit             int
    Version           string
    SaveData          bool
    CacheFile         string
    NoCache           bool
    RebuildCache      bool
    GitHubToken       string
    ForcePRSync       bool
    EnableAISummary   bool
    IncomingPR        int
    ProcessPRsVersion string
    IncomingDir       string
    Push              bool
    SyncDB            bool
    Release           string
}
26  cmd/generate_changelog/internal/git/types.go  Normal file
@@ -0,0 +1,26 @@
package git

import (
    "time"
)

type Commit struct {
    SHA       string
    Message   string
    Author    string
    Email     string
    Date      time.Time
    IsMerge   bool
    PRNumber  int
    IsVersion bool
    Version   string
}

type Version struct {
    Name      string
    Date      time.Time
    CommitSHA string
    Commits   []*Commit
    PRNumbers []int
    AISummary string
}
574  cmd/generate_changelog/internal/git/walker.go  Normal file
@@ -0,0 +1,574 @@
package git

import (
    "fmt"
    "regexp"
    "strconv"
    "strings"
    "time"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/util"
    "github.com/go-git/go-git/v5"
    "github.com/go-git/go-git/v5/plumbing"
    "github.com/go-git/go-git/v5/plumbing/object"
    "github.com/go-git/go-git/v5/plumbing/storer"
    "github.com/go-git/go-git/v5/plumbing/transport/http"
)

var (
    // versionPattern matches version commit messages with or without the optional "chore(release): " prefix.
    // The pattern is unanchored, so it matches anywhere in the message.
    // Examples of matching commit messages:
    //   - "chore(release): Update version to v1.2.3"
    //   - "Update version to v1.2.3"
    // Examples of non-matching commit messages:
    //   - "Bump version to v1.2.3" (missing "Update version to")
    //   - "chore(release): Update version to 1.2.3" (missing "v" prefix in version)
    //   - "Update version to v1.2" (incomplete version number)
    versionPattern = regexp.MustCompile(`(?:chore\(release\): )?Update version to (v\d+\.\d+\.\d+)`)
    prPattern      = regexp.MustCompile(`Merge pull request #(\d+)`)
)

type Walker struct {
    repo *git.Repository
}

func NewWalker(repoPath string) (*Walker, error) {
    repo, err := git.PlainOpen(repoPath)
    if err != nil {
        return nil, fmt.Errorf("failed to open repository: %w", err)
    }

    return &Walker{repo: repo}, nil
}

// GetLatestTag returns the name of the most recent tag by committer date.
func (w *Walker) GetLatestTag() (string, error) {
    tagRefs, err := w.repo.Tags()
    if err != nil {
        return "", err
    }

    var latestTagCommit *object.Commit
    var latestTagName string

    err = tagRefs.ForEach(func(tagRef *plumbing.Reference) error {
        revision := plumbing.Revision(tagRef.Name().String())
        tagCommitHash, err := w.repo.ResolveRevision(revision)
        if err != nil {
            return err
        }

        commit, err := w.repo.CommitObject(*tagCommitHash)
        if err != nil {
            return err
        }

        if latestTagCommit == nil {
            latestTagCommit = commit
            latestTagName = tagRef.Name().Short() // Short name like "v1.4.245"
        }

        if commit.Committer.When.After(latestTagCommit.Committer.When) {
            latestTagCommit = commit
            latestTagName = tagRef.Name().Short()
        }

        return nil
    })
    if err != nil {
        return "", err
    }

    return latestTagName, nil
}

// WalkCommitsSinceTag walks commits from the specified tag to HEAD and returns a single "Unreleased" version.
func (w *Walker) WalkCommitsSinceTag(tagName string) (*Version, error) {
    // Get the tag reference
    tagRef, err := w.repo.Tag(tagName)
    if err != nil {
        return nil, fmt.Errorf("failed to find tag %s: %w", tagName, err)
    }

    // Get the commit that the tag points to
    tagCommit, err := w.repo.CommitObject(tagRef.Hash())
    if err != nil {
        return nil, fmt.Errorf("failed to get tag commit: %w", err)
    }

    // Get HEAD
    headRef, err := w.repo.Head()
    if err != nil {
        return nil, fmt.Errorf("failed to get HEAD: %w", err)
    }

    // Walk from HEAD back to the tag commit (exclusive)
    commitIter, err := w.repo.Log(&git.LogOptions{
        From:  headRef.Hash(),
        Order: git.LogOrderCommitterTime,
    })
    if err != nil {
        return nil, fmt.Errorf("failed to get commit log: %w", err)
    }

    version := &Version{
        Name:    "Unreleased",
        Commits: []*Commit{},
    }

    prNumbers := []int{}

    err = commitIter.ForEach(func(c *object.Commit) error {
        // Stop when we reach the tag commit (don't include it)
        if c.Hash == tagCommit.Hash {
            return fmt.Errorf("reached tag commit") // Sentinel error to break out of the iteration
        }

        commit := &Commit{
            SHA:     c.Hash.String(),
            Message: strings.TrimSpace(c.Message),
            Date:    c.Committer.When,
        }

        // Check for version patterns
        if versionMatch := versionPattern.FindStringSubmatch(commit.Message); versionMatch != nil {
            commit.IsVersion = true
        }

        // Check for PR merge patterns
        if prMatch := prPattern.FindStringSubmatch(commit.Message); prMatch != nil {
            if prNumber, err := strconv.Atoi(prMatch[1]); err == nil {
                commit.PRNumber = prNumber
                prNumbers = append(prNumbers, prNumber)
            }
        }

        version.Commits = append(version.Commits, commit)
        return nil
    })

    // Ignore the "reached tag commit" sentinel error - it's expected
    if err != nil && !strings.Contains(err.Error(), "reached tag commit") {
        return nil, fmt.Errorf("failed to walk commits: %w", err)
    }

    // Remove duplicates from prNumbers and set them
    prNumbersMap := make(map[int]bool)
    for _, prNum := range prNumbers {
        prNumbersMap[prNum] = true
    }

    version.PRNumbers = make([]int, 0, len(prNumbersMap))
    for prNum := range prNumbersMap {
        version.PRNumbers = append(version.PRNumbers, prNum)
    }

    return version, nil
}

func (w *Walker) WalkHistory() (map[string]*Version, error) {
    ref, err := w.repo.Head()
    if err != nil {
        return nil, fmt.Errorf("failed to get HEAD: %w", err)
    }

    commitIter, err := w.repo.Log(&git.LogOptions{
        From:  ref.Hash(),
        Order: git.LogOrderCommitterTime,
    })
    if err != nil {
        return nil, fmt.Errorf("failed to get commit log: %w", err)
    }

    versions := make(map[string]*Version)
    currentVersion := "Unreleased"
    versions[currentVersion] = &Version{
        Name:    currentVersion,
        Commits: []*Commit{},
    }

    prNumbers := make(map[string][]int)

    err = commitIter.ForEach(func(c *object.Commit) error {
        // c.Message = Summarize(c.Message)
        commit := &Commit{
            SHA:     c.Hash.String(),
            Message: strings.TrimSpace(c.Message),
            Author:  c.Author.Name,
            Email:   c.Author.Email,
            Date:    c.Author.When,
            IsMerge: len(c.ParentHashes) > 1,
        }

        if matches := versionPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
            commit.IsVersion = true
            commit.Version = matches[1]
            currentVersion = commit.Version

            if _, exists := versions[currentVersion]; !exists {
                versions[currentVersion] = &Version{
                    Name:      currentVersion,
                    Date:      commit.Date,
                    CommitSHA: commit.SHA,
                    Commits:   []*Commit{},
                }
            }
            return nil
        }

        if matches := prPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
            prNumber := 0
            fmt.Sscanf(matches[1], "%d", &prNumber)
            commit.PRNumber = prNumber

            prNumbers[currentVersion] = append(prNumbers[currentVersion], prNumber)
        }

        versions[currentVersion].Commits = append(versions[currentVersion].Commits, commit)

        return nil
    })

    if err != nil {
        return nil, fmt.Errorf("failed to walk commits: %w", err)
    }

    for version, prs := range prNumbers {
        versions[version].PRNumbers = dedupInts(prs)
    }

    return versions, nil
}

func (w *Walker) GetRepoInfo() (owner string, name string, err error) {
    remotes, err := w.repo.Remotes()
    if err != nil {
        return "", "", fmt.Errorf("failed to get remotes: %w", err)
    }

    // First try upstream (preferred for forks)
    for _, remote := range remotes {
        if remote.Config().Name == "upstream" {
            urls := remote.Config().URLs
            if len(urls) > 0 {
                owner, name = parseGitHubURL(urls[0])
                if owner != "" && name != "" {
                    return owner, name, nil
                }
            }
        }
    }

    // Then try origin
    for _, remote := range remotes {
        if remote.Config().Name == "origin" {
            urls := remote.Config().URLs
            if len(urls) > 0 {
                owner, name = parseGitHubURL(urls[0])
                if owner != "" && name != "" {
                    return owner, name, nil
                }
            }
        }
    }

    return "danielmiessler", "fabric", nil
}

func parseGitHubURL(url string) (owner, repo string) {
    patterns := []string{
        `github\.com[:/]([^/]+)/([^/.]+)`,
        `github\.com[:/]([^/]+)/([^/]+)\.git$`,
    }

    for _, pattern := range patterns {
        re := regexp.MustCompile(pattern)
        matches := re.FindStringSubmatch(url)
        if len(matches) > 2 {
            return matches[1], matches[2]
        }
    }

    return "", ""
}

// WalkHistorySinceTag walks git history from HEAD down to (but not including) the specified tag
// and returns any version commits found along the way.
func (w *Walker) WalkHistorySinceTag(sinceTag string) (map[string]*Version, error) {
    // Get the commit SHA for the sinceTag
    tagRef, err := w.repo.Tag(sinceTag)
    if err != nil {
        return nil, fmt.Errorf("failed to get tag %s: %w", sinceTag, err)
    }

    tagCommit, err := w.repo.CommitObject(tagRef.Hash())
    if err != nil {
        return nil, fmt.Errorf("failed to get commit for tag %s: %w", sinceTag, err)
    }

    // Get the HEAD reference
    ref, err := w.repo.Head()
    if err != nil {
        return nil, fmt.Errorf("failed to get HEAD: %w", err)
    }

    // Walk from HEAD down to the tag commit (excluding it)
    commitIter, err := w.repo.Log(&git.LogOptions{
        From:  ref.Hash(),
        Order: git.LogOrderCommitterTime,
    })
    if err != nil {
        return nil, fmt.Errorf("failed to create commit iterator: %w", err)
    }
    defer commitIter.Close()

    versions := make(map[string]*Version)
    currentVersion := "Unreleased"
    prNumbers := make(map[string][]int)

    err = commitIter.ForEach(func(c *object.Commit) error {
        // Stop iteration when the current commit matches the sinceTag commit
        if c.Hash == tagCommit.Hash {
            return storer.ErrStop
        }

        commit := &Commit{
            SHA:     c.Hash.String(),
            Message: strings.TrimSpace(c.Message),
            Author:  c.Author.Name,
            Email:   c.Author.Email,
            Date:    c.Author.When,
            IsMerge: len(c.ParentHashes) > 1,
        }

        // Check for the version pattern
        if matches := versionPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
            commit.IsVersion = true
            commit.Version = matches[1]
            currentVersion = commit.Version

            if _, exists := versions[currentVersion]; !exists {
                versions[currentVersion] = &Version{
                    Name:      currentVersion,
                    Date:      commit.Date,
                    CommitSHA: commit.SHA,
                    Commits:   []*Commit{},
                }
            }
            return nil
        }

        // Check for the PR merge pattern
        if matches := prPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
            prNumber, err := strconv.Atoi(matches[1])
            if err != nil {
                // Handle the parsing error (e.g., log it or skip processing)
                return fmt.Errorf("failed to parse PR number: %v", err)
            }
            commit.PRNumber = prNumber

            prNumbers[currentVersion] = append(prNumbers[currentVersion], prNumber)
        }

        // Add the commit to the current version
        if _, exists := versions[currentVersion]; !exists {
            versions[currentVersion] = &Version{
                Name:      currentVersion,
                Date:      time.Time{}, // Zero value, will be set by a version commit
                CommitSHA: "",
                Commits:   []*Commit{},
            }
        }

        versions[currentVersion].Commits = append(versions[currentVersion].Commits, commit)
        return nil
    })

    // Handle the stop condition - storer.ErrStop is expected
    if err == storer.ErrStop {
        err = nil
    }

    // Assign the collected PR numbers to each version
    for version, prs := range prNumbers {
        versions[version].PRNumbers = dedupInts(prs)
    }

    return versions, err
}

func dedupInts(ints []int) []int {
    seen := make(map[int]bool)
    result := []int{}

    for _, i := range ints {
        if !seen[i] {
            seen[i] = true
            result = append(result, i)
        }
    }

    return result
}

// Worktree returns the git worktree for performing git operations.
func (w *Walker) Worktree() (*git.Worktree, error) {
    return w.repo.Worktree()
}

// Repository returns the underlying git repository.
func (w *Walker) Repository() *git.Repository {
    return w.repo
}

// IsWorkingDirectoryClean checks whether the working directory has any uncommitted changes.
func (w *Walker) IsWorkingDirectoryClean() (bool, error) {
    worktree, err := w.repo.Worktree()
    if err != nil {
        return false, fmt.Errorf("failed to get worktree: %w", err)
    }

    status, err := worktree.Status()
    if err != nil {
        return false, fmt.Errorf("failed to get git status: %w", err)
    }

    return status.IsClean(), nil
}

// GetStatusDetails returns a detailed status of the working directory.
func (w *Walker) GetStatusDetails() (string, error) {
    worktree, err := w.repo.Worktree()
    if err != nil {
        return "", fmt.Errorf("failed to get worktree: %w", err)
    }

    status, err := worktree.Status()
    if err != nil {
        return "", fmt.Errorf("failed to get git status: %w", err)
    }

    if status.IsClean() {
        return "", nil
    }

    var details strings.Builder
    for file, fileStatus := range status {
        details.WriteString(fmt.Sprintf("  %c%c %s\n", fileStatus.Staging, fileStatus.Worktree, file))
    }

    return details.String(), nil
}

// AddFile adds a file to the git index.
func (w *Walker) AddFile(filename string) error {
    worktree, err := w.repo.Worktree()
    if err != nil {
        return fmt.Errorf("failed to get worktree: %w", err)
    }

    _, err = worktree.Add(filename)
    if err != nil {
        return fmt.Errorf("failed to add file %s: %w", filename, err)
    }

    return nil
}

// CommitChanges creates a commit with the given message.
func (w *Walker) CommitChanges(message string) (plumbing.Hash, error) {
    worktree, err := w.repo.Worktree()
    if err != nil {
        return plumbing.ZeroHash, fmt.Errorf("failed to get worktree: %w", err)
    }

    // Get the git config for author information
    cfg, err := w.repo.Config()
    if err != nil {
        return plumbing.ZeroHash, fmt.Errorf("failed to get git config: %w", err)
    }

    var authorName, authorEmail string
    if cfg.User.Name != "" {
        authorName = cfg.User.Name
    } else {
        authorName = "Changelog Bot"
    }
    if cfg.User.Email != "" {
        authorEmail = cfg.User.Email
    } else {
        authorEmail = "bot@changelog.local"
    }

    commit, err := worktree.Commit(message, &git.CommitOptions{
        Author: &object.Signature{
            Name:  authorName,
            Email: authorEmail,
            When:  time.Now(),
        },
    })
    if err != nil {
        return plumbing.ZeroHash, fmt.Errorf("failed to commit: %w", err)
    }

    return commit, nil
}

// PushToRemote pushes the current branch to the remote repository.
// It automatically detects GitHub repositories and uses token authentication when available.
func (w *Walker) PushToRemote() error {
    pushOptions := &git.PushOptions{}

    // Check whether we have a GitHub token for authentication
    if githubToken := util.GetTokenFromEnv(""); githubToken != "" {
        // Get the remote URL to check whether it's a GitHub repository
        remotes, err := w.repo.Remotes()
        if err == nil && len(remotes) > 0 {
            // Get the origin remote (or the first remote if origin doesn't exist)
            var remote *git.Remote
            for _, r := range remotes {
                if r.Config().Name == "origin" {
                    remote = r
                    break
                }
            }
            if remote == nil {
                remote = remotes[0]
            }

            // Check whether this is a GitHub repository
            urls := remote.Config().URLs
            if len(urls) > 0 {
                url := urls[0]
                if strings.Contains(url, "github.com") {
                    // Use token authentication for GitHub repositories
                    pushOptions.Auth = &http.BasicAuth{
                        Username: "token", // GitHub requires a non-empty username; the token carries the credentials
                        Password: githubToken,
                    }
                }
            }
        }
    }

    err := w.repo.Push(pushOptions)
    if err != nil {
        return fmt.Errorf("failed to push: %w", err)
    }
    return nil
}

// RemoveFile removes a file from both the working directory and the git index.
func (w *Walker) RemoveFile(filename string) error {
    worktree, err := w.repo.Worktree()
    if err != nil {
        return fmt.Errorf("failed to get worktree: %w", err)
    }

    _, err = worktree.Remove(filename)
    if err != nil {
        return fmt.Errorf("failed to remove file %s: %w", filename, err)
    }

    return nil
}
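A minimal sketch of the typical walker flow, assuming it runs inside this module (the package is internal) against a local clone:

```go
package main

import (
    "fmt"
    "log"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
)

func main() {
    w, err := git.NewWalker(".") // path to a local clone
    if err != nil {
        log.Fatal(err)
    }

    tag, err := w.GetLatestTag()
    if err != nil {
        log.Fatal(err)
    }

    unreleased, err := w.WalkCommitsSinceTag(tag)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("%d commits since %s (PRs: %v)\n",
        len(unreleased.Commits), tag, unreleased.PRNumbers)
}
```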
431  cmd/generate_changelog/internal/github/client.go  Normal file
@@ -0,0 +1,431 @@
package github

import (
    "context"
    "fmt"
    "net/http"
    "os"
    "strings"
    "sync"
    "time"

    "github.com/google/go-github/v66/github"
    "github.com/hasura/go-graphql-client"
    "golang.org/x/oauth2"
)

type Client struct {
    client        *github.Client
    graphqlClient *graphql.Client
    owner         string
    repo          string
    token         string
}

func NewClient(token, owner, repo string) *Client {
    var githubClient *github.Client
    var httpClient *http.Client
    var gqlClient *graphql.Client

    if token != "" {
        ts := oauth2.StaticTokenSource(
            &oauth2.Token{AccessToken: token},
        )
        httpClient = oauth2.NewClient(context.Background(), ts)
        githubClient = github.NewClient(httpClient)
        gqlClient = graphql.NewClient("https://api.github.com/graphql", httpClient)
    } else {
        httpClient = http.DefaultClient
        githubClient = github.NewClient(nil)
        gqlClient = graphql.NewClient("https://api.github.com/graphql", httpClient)
    }

    return &Client{
        client:        githubClient,
        graphqlClient: gqlClient,
        owner:         owner,
        repo:          repo,
        token:         token,
    }
}

func (c *Client) FetchPRs(prNumbers []int) ([]*PR, error) {
    if len(prNumbers) == 0 {
        return []*PR{}, nil
    }

    ctx := context.Background()
    prs := make([]*PR, 0, len(prNumbers))
    prsChan := make(chan *PR, len(prNumbers))
    errChan := make(chan error, len(prNumbers))

    var wg sync.WaitGroup
    semaphore := make(chan struct{}, 10) // Limit concurrent requests

    for _, prNumber := range prNumbers {
        wg.Add(1)
        go func(num int) {
            defer wg.Done()

            semaphore <- struct{}{}
            defer func() { <-semaphore }()

            pr, err := c.fetchSinglePR(ctx, num)
            if err != nil {
                errChan <- fmt.Errorf("failed to fetch PR #%d: %w", num, err)
                return
            }
            prsChan <- pr
        }(prNumber)
    }

    go func() {
        wg.Wait()
        close(prsChan)
        close(errChan)
    }()

    var errors []error
    for pr := range prsChan {
        prs = append(prs, pr)
    }
    for err := range errChan {
        errors = append(errors, err)
    }

    if len(errors) > 0 {
        return prs, fmt.Errorf("some PRs failed to fetch: %v", errors)
    }

    return prs, nil
}

// GetPRValidationDetails fetches only the data needed for validation (lightweight).
func (c *Client) GetPRValidationDetails(prNumber int) (*PRDetails, error) {
    ctx := context.Background()
    ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
    if err != nil {
        return nil, fmt.Errorf("failed to get PR %d: %w", prNumber, err)
    }

    // Only return validation data; no commits are fetched
    details := &PRDetails{
        PR:        nil, // Will be populated later if needed
        State:     getString(ghPR.State),
        Mergeable: ghPR.Mergeable != nil && *ghPR.Mergeable,
    }

    return details, nil
}

// GetPRWithCommits fetches the full PR and its commits.
func (c *Client) GetPRWithCommits(prNumber int) (*PR, error) {
    ctx := context.Background()
    ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
    if err != nil {
        return nil, fmt.Errorf("failed to get PR %d: %w", prNumber, err)
    }

    return c.buildPRWithCommits(ctx, ghPR)
}

// GetPRDetails fetches a comprehensive set of details for a single PR.
// Deprecated: Use GetPRValidationDetails + GetPRWithCommits for better performance.
func (c *Client) GetPRDetails(prNumber int) (*PRDetails, error) {
    ctx := context.Background()
    ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
    if err != nil {
        return nil, fmt.Errorf("failed to get PR %d: %w", prNumber, err)
    }

    // Reuse the existing logic to build the base PR object
    pr, err := c.buildPRWithCommits(ctx, ghPR)
    if err != nil {
        return nil, fmt.Errorf("failed to build PR details for %d: %w", prNumber, err)
    }

    details := &PRDetails{
        PR:        pr,
        State:     getString(ghPR.State),
        Mergeable: ghPR.Mergeable != nil && *ghPR.Mergeable,
    }

    return details, nil
}

// buildPRWithCommits fetches commits and constructs a PR object from a GitHub API response.
func (c *Client) buildPRWithCommits(ctx context.Context, ghPR *github.PullRequest) (*PR, error) {
    commits, _, err := c.client.PullRequests.ListCommits(ctx, c.owner, c.repo, *ghPR.Number, nil)
    if err != nil {
        return nil, fmt.Errorf("failed to fetch commits for PR %d: %w", *ghPR.Number, err)
    }

    return c.convertGitHubPR(ghPR, commits), nil
}

// convertGitHubPR transforms GitHub API data into our internal PR struct (pure function).
func (c *Client) convertGitHubPR(ghPR *github.PullRequest, commits []*github.RepositoryCommit) *PR {
    result := &PR{
        Number:  *ghPR.Number,
        Title:   getString(ghPR.Title),
        Body:    getString(ghPR.Body),
        URL:     getString(ghPR.HTMLURL),
        Commits: make([]PRCommit, 0, len(commits)),
    }

    if ghPR.MergedAt != nil {
        result.MergedAt = ghPR.MergedAt.Time
    }

    if ghPR.User != nil {
        result.Author = getString(ghPR.User.Login)
        result.AuthorURL = getString(ghPR.User.HTMLURL)
        userType := getString(ghPR.User.Type)

        switch userType {
        case "User":
            result.AuthorType = "user"
        case "Organization":
            result.AuthorType = "organization"
        case "Bot":
            result.AuthorType = "bot"
        default:
            result.AuthorType = "user"
        }
    }

    if ghPR.MergeCommitSHA != nil {
        result.MergeCommit = *ghPR.MergeCommitSHA
    }

    for _, commit := range commits {
        if commit.Commit != nil {
            prCommit := PRCommit{
                SHA:     getString(commit.SHA),
                Message: strings.TrimSpace(getString(commit.Commit.Message)),
            }
            if commit.Commit.Author != nil {
                prCommit.Author = getString(commit.Commit.Author.Name)
                prCommit.Email = getString(commit.Commit.Author.Email) // Author email from the GitHub API response
                // Capture the actual commit timestamp from the GitHub API
                if commit.Commit.Author.Date != nil {
                    prCommit.Date = commit.Commit.Author.Date.Time
                }
            }
            // Capture parent commit SHAs for merge detection
            if commit.Parents != nil {
                for _, parent := range commit.Parents {
                    if parent.SHA != nil {
                        prCommit.Parents = append(prCommit.Parents, *parent.SHA)
                    }
                }
            }
            result.Commits = append(result.Commits, prCommit)
        }
    }

    return result
}

func (c *Client) fetchSinglePR(ctx context.Context, prNumber int) (*PR, error) {
    ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
    if err != nil {
        return nil, err
    }

    return c.buildPRWithCommits(ctx, ghPR)
}

func getString(s *string) string {
    if s == nil {
        return ""
    }
    return *s
}

// FetchAllMergedPRs fetches all merged PRs using GitHub's search API.
// This is much more efficient than fetching PRs individually.
func (c *Client) FetchAllMergedPRs(since time.Time) ([]*PR, error) {
    ctx := context.Background()
    var allPRs []*PR

    // Build a search query for merged PRs
    query := fmt.Sprintf("repo:%s/%s is:pr is:merged", c.owner, c.repo)
    if !since.IsZero() {
        query += fmt.Sprintf(" merged:>=%s", since.Format("2006-01-02"))
    }

    opts := &github.SearchOptions{
        Sort:  "created",
        Order: "desc",
        ListOptions: github.ListOptions{
            PerPage: 100, // Maximum allowed
        },
    }

    for {
        result, resp, err := c.client.Search.Issues(ctx, query, opts)
        if err != nil {
            return allPRs, fmt.Errorf("failed to search PRs: %w", err)
        }

        // Process PRs in parallel
        prsChan := make(chan *PR, len(result.Issues))
        errChan := make(chan error, len(result.Issues))
        var wg sync.WaitGroup
        semaphore := make(chan struct{}, 10) // Limit concurrent requests

        for _, issue := range result.Issues {
            if issue.PullRequestLinks == nil {
                continue // Not a PR
            }

            wg.Add(1)
            go func(prNumber int) {
                defer wg.Done()

                semaphore <- struct{}{}
                defer func() { <-semaphore }()

                pr, err := c.fetchSinglePR(ctx, prNumber)
                if err != nil {
                    errChan <- fmt.Errorf("failed to fetch PR #%d: %w", prNumber, err)
                    return
                }
                prsChan <- pr
            }(*issue.Number)
        }

        go func() {
            wg.Wait()
            close(prsChan)
            close(errChan)
        }()

        // Collect results
        for pr := range prsChan {
            allPRs = append(allPRs, pr)
        }

        // Check for errors
        for err := range errChan {
            // Log the error but continue processing
            fmt.Fprintf(os.Stderr, "Warning: %v\n", err)
        }

        if resp.NextPage == 0 {
            break
        }
        opts.Page = resp.NextPage
    }

    return allPRs, nil
}

// FetchAllMergedPRsGraphQL fetches all merged PRs with their commits using GraphQL.
// This is the ultimate optimization: it gets everything in roughly 5-10 API calls.
func (c *Client) FetchAllMergedPRsGraphQL(since time.Time) ([]*PR, error) {
    ctx := context.Background()
    var allPRs []*PR
    var after *string
    totalFetched := 0

    for {
        // Prepare variables
        variables := map[string]interface{}{
            "owner": graphql.String(c.owner),
            "repo":  graphql.String(c.repo),
            "after": (*graphql.String)(after),
        }

        // Execute the GraphQL query
        var query PullRequestsQuery
        err := c.graphqlClient.Query(ctx, &query, variables)
        if err != nil {
            return allPRs, fmt.Errorf("GraphQL query failed: %w", err)
        }

        prs := query.Repository.PullRequests.Nodes
        fmt.Fprintf(os.Stderr, "Fetched %d PRs via GraphQL (page %d)\n", len(prs), (totalFetched/100)+1)

        // Convert GraphQL PRs to our PR struct
        for _, gqlPR := range prs {
            // If we have a since filter, stop when we reach older PRs
            if !since.IsZero() && gqlPR.MergedAt.Before(since) {
                fmt.Fprintf(os.Stderr, "Reached PRs older than %s, stopping\n", since.Format("2006-01-02"))
                return allPRs, nil
            }

            pr := &PR{
                Number:   gqlPR.Number,
                Title:    gqlPR.Title,
                Body:     gqlPR.Body,
                URL:      gqlPR.URL,
                MergedAt: gqlPR.MergedAt,
                Commits:  make([]PRCommit, 0, len(gqlPR.Commits.Nodes)),
            }

            // Handle the author - check for nil first
            if gqlPR.Author != nil {
                pr.Author = gqlPR.Author.Login
                pr.AuthorURL = gqlPR.Author.URL

                switch gqlPR.Author.Typename {
                case "Bot":
                    pr.AuthorType = "bot"
                case "Organization":
                    pr.AuthorType = "organization"
                case "User":
                    pr.AuthorType = "user"
                default:
                    pr.AuthorType = "user" // fallback
                    if gqlPR.Author.Typename != "" {
                        fmt.Fprintf(os.Stderr, "PR #%d: Unknown author typename '%s'\n", gqlPR.Number, gqlPR.Author.Typename)
                    }
                }
            } else {
                // Author is nil - try to fetch from the REST API as a fallback
                fmt.Fprintf(os.Stderr, "PR #%d: Author is nil in GraphQL response, fetching from REST API\n", gqlPR.Number)

                // Fetch this specific PR from the REST API
                restPR, err := c.fetchSinglePR(ctx, gqlPR.Number)
                if err == nil && restPR != nil && restPR.Author != "" {
                    pr.Author = restPR.Author
                    pr.AuthorURL = restPR.AuthorURL
                    pr.AuthorType = restPR.AuthorType
                } else {
                    // Fallback if the REST API also fails
                    pr.Author = "[unknown]"
                    pr.AuthorURL = ""
                    pr.AuthorType = "user"
                }
            }

            // Convert commits
            for _, commitNode := range gqlPR.Commits.Nodes {
                commit := PRCommit{
                    SHA:     commitNode.Commit.OID,
                    Message: strings.TrimSpace(commitNode.Commit.Message),
                    Author:  commitNode.Commit.Author.Name,
                    Date:    commitNode.Commit.AuthoredDate, // Use the actual commit timestamp
                }
                pr.Commits = append(pr.Commits, commit)
            }

            allPRs = append(allPRs, pr)
        }

        totalFetched += len(prs)

        // Check whether we need to fetch more pages
        if !query.Repository.PullRequests.PageInfo.HasNextPage {
            break
        }

        after = &query.Repository.PullRequests.PageInfo.EndCursor
    }

    fmt.Fprintf(os.Stderr, "Total PRs fetched via GraphQL: %d\n", len(allPRs))
    return allPRs, nil
}
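A minimal sketch of the GraphQL fetch path, assuming a token in GITHUB_TOKEN and, as above, compilation inside this module (the package is internal):

```go
package main

import (
    "fmt"
    "log"
    "os"
    "time"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
)

func main() {
    client := github.NewClient(os.Getenv("GITHUB_TOKEN"), "danielmiessler", "fabric")

    since := time.Now().AddDate(0, -1, 0) // only PRs merged in the last month
    prs, err := client.FetchAllMergedPRsGraphQL(since)
    if err != nil {
        log.Fatal(err)
    }
    for _, pr := range prs {
        fmt.Printf("#%d %s (%d commits)\n", pr.Number, pr.Title, len(pr.Commits))
    }
}
```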
59  cmd/generate_changelog/internal/github/email_test.go  Normal file
@@ -0,0 +1,59 @@
package github

import (
    "testing"
    "time"
)

func TestPRCommitEmailHandling(t *testing.T) {
    tests := []struct {
        name     string
        commit   PRCommit
        expected string
    }{
        {
            name: "Valid email field",
            commit: PRCommit{
                SHA:     "abc123",
                Message: "Fix bug in authentication",
                Author:  "John Doe",
                Email:   "john.doe@example.com",
                Date:    time.Now(),
                Parents: []string{"def456"},
            },
            expected: "john.doe@example.com",
        },
        {
            name: "Empty email field",
            commit: PRCommit{
                SHA:     "abc123",
                Message: "Fix bug in authentication",
                Author:  "John Doe",
                Email:   "",
                Date:    time.Now(),
                Parents: []string{"def456"},
            },
            expected: "",
        },
        {
            name: "Email field with proper initialization",
            commit: PRCommit{
                SHA:     "def789",
                Message: "Add new feature",
                Author:  "Jane Smith",
                Email:   "jane.smith@company.org",
                Date:    time.Now(),
                Parents: []string{"ghi012"},
            },
            expected: "jane.smith@company.org",
        },
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            if tt.commit.Email != tt.expected {
                t.Errorf("Expected email %q, got %q", tt.expected, tt.commit.Email)
            }
        })
    }
}
68  cmd/generate_changelog/internal/github/types.go  Normal file
@@ -0,0 +1,68 @@
package github

import "time"

type PR struct {
    Number      int
    Title       string
    Body        string
    Author      string
    AuthorURL   string
    AuthorType  string // "user", "organization", or "bot"
    URL         string
    MergedAt    time.Time
    Commits     []PRCommit
    MergeCommit string
}

// PRDetails encapsulates all relevant information about a Pull Request.
type PRDetails struct {
    *PR
    State     string
    Mergeable bool
}

type PRCommit struct {
    SHA     string
    Message string
    Author  string
    Email   string    // Author email from the GitHub API, empty if not public
    Date    time.Time // Commit timestamp
    Parents []string  // Parent commit SHAs (for merge detection)
}

// GraphQL query structures for the hasura go-graphql-client
type PullRequestsQuery struct {
    Repository struct {
        PullRequests struct {
            PageInfo struct {
                HasNextPage bool
                EndCursor   string
            }
            Nodes []struct {
                Number   int
                Title    string
                Body     string
                URL      string
                MergedAt time.Time
                Author   *struct {
                    Typename string `graphql:"__typename"`
                    Login    string `graphql:"login"`
                    URL      string `graphql:"url"`
                }
                Commits struct {
                    Nodes []struct {
                        Commit struct {
                            OID          string `graphql:"oid"`
                            Message      string
                            AuthoredDate time.Time `graphql:"authoredDate"`
                            Author       struct {
                                Name string
                            }
                        }
                    }
                } `graphql:"commits(first: 250)"`
            }
        } `graphql:"pullRequests(first: 100, after: $after, states: MERGED, orderBy: {field: UPDATED_AT, direction: DESC})"`
    } `graphql:"repository(owner: $owner, name: $repo)"`
}
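The hasura client derives the query text from the graphql struct tags above; for reference, it should look roughly like the sketch below (reconstructed from the tags, not taken from the PR):

```go
// Approximate query generated from PullRequestsQuery (sketch only).
const pullRequestsQueryText = `
query ($owner: String!, $repo: String!, $after: String) {
  repository(owner: $owner, name: $repo) {
    pullRequests(first: 100, after: $after, states: MERGED,
                 orderBy: {field: UPDATED_AT, direction: DESC}) {
      pageInfo { hasNextPage endCursor }
      nodes {
        number title body url mergedAt
        author { __typename login url }
        commits(first: 250) {
          nodes { commit { oid message authoredDate author { name } } }
        }
      }
    }
  }
}`
```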
81  cmd/generate_changelog/internal/release.go  Normal file
@@ -0,0 +1,81 @@
package internal

import (
    "context"
    "fmt"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/cache"
    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
    "github.com/google/go-github/v66/github"
    "golang.org/x/oauth2"
)

type ReleaseManager struct {
    cache       *cache.Cache
    githubToken string
    owner       string
    repo        string
}

func NewReleaseManager(cfg *config.Config) (*ReleaseManager, error) {
    cache, err := cache.New(cfg.CacheFile)
    if err != nil {
        return nil, fmt.Errorf("failed to create cache: %w", err)
    }

    return &ReleaseManager{
        cache:       cache,
        githubToken: cfg.GitHubToken,
        owner:       "danielmiessler",
        repo:        "fabric",
    }, nil
}

func (rm *ReleaseManager) Close() error {
    return rm.cache.Close()
}

func (rm *ReleaseManager) UpdateReleaseDescription(version string) error {
    versions, err := rm.cache.GetVersions()
    if err != nil {
        return fmt.Errorf("failed to get versions from cache: %w", err)
    }

    versionData, exists := versions[version]
    if !exists {
        return fmt.Errorf("version %s not found in versions table", version)
    }

    if versionData.AISummary == "" {
        return fmt.Errorf("ai_summary is empty for version %s", version)
    }

    releaseBody := fmt.Sprintf("## Changes\n\n%s", versionData.AISummary)

    ctx := context.Background()
    var client *github.Client

    if rm.githubToken != "" {
        ts := oauth2.StaticTokenSource(
            &oauth2.Token{AccessToken: rm.githubToken},
        )
        tc := oauth2.NewClient(ctx, ts)
        client = github.NewClient(tc)
    } else {
        client = github.NewClient(nil)
    }

    release, _, err := client.Repositories.GetReleaseByTag(ctx, rm.owner, rm.repo, version)
    if err != nil {
        return fmt.Errorf("failed to get release for version %s: %w", version, err)
    }

    release.Body = &releaseBody
    _, _, err = client.Repositories.EditRelease(ctx, rm.owner, rm.repo, *release.ID, release)
    if err != nil {
        return fmt.Errorf("failed to update release description for version %s: %w", version, err)
    }

    fmt.Printf("Successfully updated release description for %s\n", version)
    return nil
}
117  cmd/generate_changelog/main.go  Normal file
@@ -0,0 +1,117 @@
package main

import (
    "fmt"
    "os"
    "path/filepath"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal"
    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/changelog"
    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
    "github.com/danielmiessler/fabric/cmd/generate_changelog/util"
    "github.com/joho/godotenv"
    "github.com/spf13/cobra"
)

var (
    cfg = &config.Config{}
)

var rootCmd = &cobra.Command{
    Use:   "generate_changelog",
    Short: "Generate changelog from git history and GitHub PRs",
    Long: `A high-performance changelog generator that walks git history,
collects version information and pull requests, and generates a
comprehensive changelog in markdown format.`,
    RunE:         run,
    SilenceUsage: true, // Don't show usage on runtime errors, only on flag errors
}

func init() {
    rootCmd.Flags().StringVarP(&cfg.RepoPath, "repo", "r", ".", "Repository path")
    rootCmd.Flags().StringVarP(&cfg.OutputFile, "output", "o", "", "Output file (default: stdout)")
    rootCmd.Flags().IntVarP(&cfg.Limit, "limit", "l", 0, "Limit number of versions (0 = all)")
    rootCmd.Flags().StringVarP(&cfg.Version, "version", "v", "", "Generate changelog for specific version")
    rootCmd.Flags().BoolVar(&cfg.SaveData, "save-data", false, "Save version data to JSON for debugging")
    rootCmd.Flags().StringVar(&cfg.CacheFile, "cache", "./cmd/generate_changelog/changelog.db", "Cache database file")
    rootCmd.Flags().BoolVar(&cfg.NoCache, "no-cache", false, "Disable cache usage")
    rootCmd.Flags().BoolVar(&cfg.RebuildCache, "rebuild-cache", false, "Rebuild cache from scratch")
    rootCmd.Flags().StringVar(&cfg.GitHubToken, "token", "", "GitHub API token (or set GITHUB_TOKEN env var)")
    rootCmd.Flags().BoolVar(&cfg.ForcePRSync, "force-pr-sync", false, "Force a full PR sync from GitHub (ignores cache age)")
    rootCmd.Flags().BoolVar(&cfg.EnableAISummary, "ai-summarize", false, "Generate AI-enhanced summaries using Fabric")
    rootCmd.Flags().IntVar(&cfg.IncomingPR, "incoming-pr", 0, "Pre-process PR for changelog (provide PR number)")
    rootCmd.Flags().StringVar(&cfg.ProcessPRsVersion, "process-prs", "", "Process all incoming PR files for release (provide version like v1.4.262)")
    rootCmd.Flags().StringVar(&cfg.IncomingDir, "incoming-dir", "./cmd/generate_changelog/incoming", "Directory for incoming PR files")
    rootCmd.Flags().BoolVar(&cfg.Push, "push", false, "Enable automatic git push after creating an incoming entry")
    rootCmd.Flags().BoolVar(&cfg.SyncDB, "sync-db", false, "Synchronize and validate database integrity with git history and GitHub PRs")
    rootCmd.Flags().StringVar(&cfg.Release, "release", "", "Update GitHub release description with AI summary for version (e.g., v1.2.3)")
}

func run(cmd *cobra.Command, args []string) error {
    if cfg.IncomingPR > 0 && cfg.ProcessPRsVersion != "" {
        return fmt.Errorf("--incoming-pr and --process-prs are mutually exclusive flags")
    }

    if cfg.Release != "" && (cfg.IncomingPR > 0 || cfg.ProcessPRsVersion != "" || cfg.SyncDB) {
        return fmt.Errorf("--release cannot be used with other processing flags")
    }

    cfg.GitHubToken = util.GetTokenFromEnv(cfg.GitHubToken)

    generator, err := changelog.New(cfg)
    if err != nil {
        return fmt.Errorf("failed to create changelog generator: %w", err)
    }

    if cfg.IncomingPR > 0 {
        return generator.ProcessIncomingPR(cfg.IncomingPR)
    }

    if cfg.ProcessPRsVersion != "" {
        return generator.CreateNewChangelogEntry(cfg.ProcessPRsVersion)
    }

    if cfg.SyncDB {
        return generator.SyncDatabase()
    }

    if cfg.Release != "" {
        releaseManager, err := internal.NewReleaseManager(cfg)
        if err != nil {
            return fmt.Errorf("failed to create release manager: %w", err)
        }
        defer releaseManager.Close()
        return releaseManager.UpdateReleaseDescription(cfg.Release)
    }

    output, err := generator.Generate()
    if err != nil {
        return fmt.Errorf("failed to generate changelog: %w", err)
    }

    if cfg.OutputFile != "" {
        if err := os.WriteFile(cfg.OutputFile, []byte(output), 0644); err != nil {
            return fmt.Errorf("failed to write output file: %w", err)
        }
        fmt.Printf("Changelog written to %s\n", cfg.OutputFile)
    } else {
        fmt.Print(output)
    }

    return nil
}

func main() {
    // Load a .env file from the same directory as the binary, if present
    if exePath, err := os.Executable(); err == nil {
        envPath := filepath.Join(filepath.Dir(exePath), ".env")
        if _, err := os.Stat(envPath); err == nil {
            // .env file exists, load it
            if err := godotenv.Load(envPath); err != nil {
                fmt.Fprintf(os.Stderr, "Warning: Failed to load .env file: %v\n", err)
            }
        }
    }

    // Exit non-zero on failure; cobra has already printed the error.
    if err := rootCmd.Execute(); err != nil {
        os.Exit(1)
    }
}
31  cmd/generate_changelog/util/token.go  Normal file
@@ -0,0 +1,31 @@
package util

import (
    "os"
)

// GetTokenFromEnv returns a GitHub token based on the following precedence order:
//  1. If tokenValue is non-empty, it is returned.
//  2. Otherwise, if the GITHUB_TOKEN environment variable is set, its value is returned.
//  3. Otherwise, if the GH_TOKEN environment variable is set, its value is returned.
//  4. If none of the above are set, an empty string is returned.
//
// Example:
//
//	os.Setenv("GITHUB_TOKEN", "abc")
//	os.Setenv("GH_TOKEN", "def")
//	GetTokenFromEnv("xyz") // returns "xyz"
//	GetTokenFromEnv("")    // returns "abc"
//	os.Unsetenv("GITHUB_TOKEN")
//	GetTokenFromEnv("")    // returns "def"
//	os.Unsetenv("GH_TOKEN")
//	GetTokenFromEnv("")    // returns ""
func GetTokenFromEnv(tokenValue string) string {
    if tokenValue == "" {
        tokenValue = os.Getenv("GITHUB_TOKEN")
        if tokenValue == "" {
            tokenValue = os.Getenv("GH_TOKEN")
        }
    }
    return tokenValue
}
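The precedence order documented above can be pinned down with a small test; a sketch (hypothetical test, not part of the PR) using t.Setenv:

```go
package util

import "testing"

func TestGetTokenFromEnvPrecedence(t *testing.T) {
    t.Setenv("GITHUB_TOKEN", "abc")
    t.Setenv("GH_TOKEN", "def")

    if got := GetTokenFromEnv("xyz"); got != "xyz" {
        t.Errorf("explicit value should win, got %q", got)
    }
    if got := GetTokenFromEnv(""); got != "abc" {
        t.Errorf("GITHUB_TOKEN should take precedence over GH_TOKEN, got %q", got)
    }
}
```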
|
||||
@@ -1,47 +1,62 @@
#compdef fabric
#compdef fabric fabric-ai

# Zsh completion for fabric CLI
# Place this file in a directory in your $fpath (e.g. /usr/local/share/zsh/site-functions)

_fabric_patterns() {
    local -a patterns
    patterns=(${(f)"$(fabric --listpatterns --shell-complete-list 2>/dev/null)"})
    local cmd=${words[1]}
    patterns=(${(f)"$($cmd --listpatterns --shell-complete-list 2>/dev/null)"})
    compadd -X "Patterns:" ${patterns}
}

_fabric_models() {
    local -a models
    models=(${(f)"$(fabric --listmodels --shell-complete-list 2>/dev/null)"})
    local cmd=${words[1]}
    models=(${(f)"$($cmd --listmodels --shell-complete-list 2>/dev/null)"})
    compadd -X "Models:" ${models}
}

_fabric_vendors() {
    local -a vendors
    local cmd=${words[1]}
    vendors=(${(f)"$($cmd --listvendors --shell-complete-list 2>/dev/null)"})
    compadd -X "Vendors:" ${vendors}
}

_fabric_contexts() {
    local -a contexts
    contexts=(${(f)"$(fabric --listcontexts --shell-complete-list 2>/dev/null)"})
    local cmd=${words[1]}
    contexts=(${(f)"$($cmd --listcontexts --shell-complete-list 2>/dev/null)"})
    compadd -X "Contexts:" ${contexts}
}

_fabric_sessions() {
    local -a sessions
    sessions=(${(f)"$(fabric --listsessions --shell-complete-list 2>/dev/null)"})
    local cmd=${words[1]}
    sessions=(${(f)"$($cmd --listsessions --shell-complete-list 2>/dev/null)"})
    compadd -X "Sessions:" ${sessions}
}

_fabric_strategies() {
    local -a strategies
    strategies=(${(f)"$(fabric --liststrategies --shell-complete-list 2>/dev/null)"})
    local cmd=${words[1]}
    strategies=(${(f)"$($cmd --liststrategies --shell-complete-list 2>/dev/null)"})
    compadd -X "Strategies:" ${strategies}
}

_fabric_extensions() {
    local -a extensions
    extensions=(${(f)"$(fabric --listextensions --shell-complete-list 2>/dev/null)"})
    local cmd=${words[1]}
    extensions=(${(f)"$($cmd --listextensions --shell-complete-list 2>/dev/null)"})
    compadd -X "Extensions:" ${extensions}
    '(-L --listmodels)'{-L,--listmodels}'[List all available models]:list models:_fabric_models' \
    '(-x --listcontexts)'{-x,--listcontexts}'[List all contexts]:list contexts:_fabric_contexts' \
    '(-X --listsessions)'{-X,--listsessions}'[List all sessions]:list sessions:_fabric_sessions' \
    '(--listextensions)--listextensions[List all registered extensions]' \
    '(--liststrategies)--liststrategies[List all strategies]:list strategies:_fabric_strategies' \
    '(--listvendors)--listvendors[List all vendors]' \
    vendors=(${(f)"$(fabric --listvendors 2>/dev/null)"})
    compadd -X "Vendors:" ${vendors}
}

_fabric_gemini_voices() {
    local -a voices
    local cmd=${words[1]}
    voices=(${(f)"$($cmd --list-gemini-voices --shell-complete-list 2>/dev/null)"})
    compadd -X "Gemini TTS Voices:" ${voices}
}

_fabric() {
@@ -68,6 +83,7 @@ _fabric() {
    '(-U --updatepatterns)'{-U,--updatepatterns}'[Update patterns]' \
    '(-c --copy)'{-c,--copy}'[Copy to clipboard]' \
    '(-m --model)'{-m,--model}'[Choose model]:model:_fabric_models' \
    '(-V --vendor)'{-V,--vendor}'[Specify vendor for chosen model (e.g., -V "LM Studio" -m openai/gpt-oss-20b)]:vendor:_fabric_vendors' \
    '(--modelContextLength)--modelContextLength[Model context length (only affects ollama)]:length:' \
    '(-o --output)'{-o,--output}'[Output to file]:file:_files' \
    '(--output-session)--output-session[Output the entire session to the output file]' \
@@ -79,6 +95,7 @@ _fabric() {
    '(--transcript-with-timestamps)--transcript-with-timestamps[Grab transcript from YouTube video with timestamps]' \
    '(--comments)--comments[Grab comments from YouTube video and send to chat]' \
    '(--metadata)--metadata[Output video metadata]' \
    '(--yt-dlp-args)--yt-dlp-args[Additional arguments to pass to yt-dlp]:yt-dlp args:' \
    '(-g --language)'{-g,--language}'[Specify the Language Code for the chat, e.g. -g=en -g=zh]:language:' \
    '(-u --scrape_url)'{-u,--scrape_url}'[Scrape website URL to markdown using Jina AI]:url:' \
    '(-q --scrape_question)'{-q,--scrape_question}'[Search question using Jina AI]:question:' \
@@ -96,7 +113,7 @@ _fabric() {
    '(--api-key)--api-key[API key used to secure server routes]:api-key:' \
    '(--config)--config[Path to YAML config file]:config file:_files -g "*.yaml *.yml"' \
    '(--version)--version[Print current version]' \
    '(--search)--search[Enable web search tool for supported models (Anthropic, OpenAI)]' \
    '(--search)--search[Enable web search tool for supported models (Anthropic, OpenAI, Gemini)]' \
    '(--search-location)--search-location[Set location for web search results]:location:' \
    '(--image-file)--image-file[Save generated image to specified file path]:image file:_files -g "*.png *.webp *.jpeg *.jpg"' \
    '(--image-size)--image-size[Image dimensions]:size:(1024x1024 1536x1024 1024x1536 auto)' \
@@ -109,10 +126,17 @@ _fabric() {
    '(--strategy)--strategy[Choose a strategy from the available strategies]:strategy:_fabric_strategies' \
    '(--liststrategies)--liststrategies[List all strategies]' \
    '(--listvendors)--listvendors[List all vendors]' \
    '(--voice)--voice[TTS voice name for supported models]:voice:_fabric_gemini_voices' \
    '(--list-gemini-voices)--list-gemini-voices[List all available Gemini TTS voices]' \
    '(--shell-complete-list)--shell-complete-list[Output raw list without headers/formatting (for shell completion)]' \
    '(--suppress-think)--suppress-think[Suppress text enclosed in thinking tags]' \
    '(--think-start-tag)--think-start-tag[Start tag for thinking sections (default: <think>)]:start tag:' \
    '(--think-end-tag)--think-end-tag[End tag for thinking sections (default: </think>)]:end tag:' \
    '(--disable-responses-api)--disable-responses-api[Disable OpenAI Responses API (default: false)]' \
    '(--notification)--notification[Send desktop notification when command completes]' \
    '(--notification-command)--notification-command[Custom command to run for notifications]:notification command:' \
    '(-h --help)'{-h,--help}'[Show this help message]' \
    '*:arguments:'
}

_fabric "$@"
@@ -13,11 +13,11 @@ _fabric() {
    _get_comp_words_by_ref -n : cur prev words cword

    # Define all possible options/flags
    local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --language -g --scrape_url -u --scrape_question -q --seed -e --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"
    local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --vendor -V --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --yt-dlp-args --language -g --scrape_url -u --scrape_question -q --seed -e --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --suppress-think --think-start-tag --think-end-tag --disable-responses-api --voice --list-gemini-voices --notification --notification-command --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"

    # Helper function for dynamic completions
    _fabric_get_list() {
        fabric "$1" --shell-complete-list 2>/dev/null
        "${COMP_WORDS[0]}" "$1" --shell-complete-list 2>/dev/null
    }

    # Handle completions based on the previous word
@@ -38,6 +38,10 @@ _fabric() {
        COMPREPLY=($(compgen -W "$(_fabric_get_list --listmodels)" -- "${cur}"))
        return 0
        ;;
    -V | --vendor)
        COMPREPLY=($(compgen -W "$(_fabric_get_list --listvendors)" -- "${cur}"))
        return 0
        ;;
    -w | --wipecontext)
        COMPREPLY=($(compgen -W "$(_fabric_get_list --listcontexts)" -- "${cur}"))
        return 0
@@ -62,6 +66,10 @@ _fabric() {
        COMPREPLY=($(compgen -W "$(_fabric_get_list --liststrategies)" -- "${cur}"))
        return 0
        ;;
    --voice)
        COMPREPLY=($(compgen -W "$(_fabric_get_list --list-gemini-voices)" -- "${cur}"))
        return 0
        ;;
    # Options requiring file/directory paths
    -a | --attachment | -o | --output | --config | --addextension | --image-file)
        _filedir
@@ -81,7 +89,7 @@ _fabric() {
        return 0
        ;;
    # Options requiring simple arguments (no specific completion logic here)
    -v | --variable | -t | --temperature | -T | --topp | -P | --presencepenalty | -F | --frequencypenalty | --modelContextLength | -n | --latest | -y | --youtube | -g | --language | -u | --scrape_url | -q | --scrape_question | -e | --seed | --address | --api-key | --search-location | --image-compression)
    -v | --variable | -t | --temperature | -T | --topp | -P | --presencepenalty | -F | --frequencypenalty | --modelContextLength | -n | --latest | -y | --youtube | --yt-dlp-args | -g | --language | -u | --scrape_url | -q | --scrape_question | -e | --seed | --address | --api-key | --search-location | --image-compression | --think-start-tag | --think-end-tag | --notification-command)
        # No specific completion suggestions, user types the value
        return 0
        ;;
@@ -100,4 +108,4 @@ _fabric() {

}

complete -F _fabric fabric
complete -F _fabric fabric fabric-ai
@@ -8,94 +8,126 @@

# Helper functions for dynamic completions
function __fabric_get_patterns
    fabric --listpatterns --shell-complete-list 2>/dev/null
    set cmd (commandline -opc)[1]
    $cmd --listpatterns --shell-complete-list 2>/dev/null
end

function __fabric_get_models
    fabric --listmodels --shell-complete-list 2>/dev/null
    set cmd (commandline -opc)[1]
    $cmd --listmodels --shell-complete-list 2>/dev/null
end

function __fabric_get_vendors
    set cmd (commandline -opc)[1]
    $cmd --listvendors --shell-complete-list 2>/dev/null
end

function __fabric_get_contexts
    fabric --listcontexts --shell-complete-list 2>/dev/null
    set cmd (commandline -opc)[1]
    $cmd --listcontexts --shell-complete-list 2>/dev/null
end

function __fabric_get_sessions
    fabric --listsessions --shell-complete-list 2>/dev/null
    set cmd (commandline -opc)[1]
    $cmd --listsessions --shell-complete-list 2>/dev/null
end

function __fabric_get_strategies
    fabric --liststrategies --shell-complete-list 2>/dev/null
    set cmd (commandline -opc)[1]
    $cmd --liststrategies --shell-complete-list 2>/dev/null
end

function __fabric_get_extensions
    fabric --listextensions --shell-complete-list 2>/dev/null
    set cmd (commandline -opc)[1]
    $cmd --listextensions --shell-complete-list 2>/dev/null
end

function __fabric_get_gemini_voices
    set cmd (commandline -opc)[1]
    $cmd --list-gemini-voices --shell-complete-list 2>/dev/null
end

# Main completion function
complete -c fabric -f
function __fabric_register_completions
    set cmd $argv[1]
    complete -c $cmd -f

# Flag completions with arguments
complete -c fabric -s p -l pattern -d "Choose a pattern from the available patterns" -a "(__fabric_get_patterns)"
complete -c fabric -s v -l variable -d "Values for pattern variables, e.g. -v=#role:expert -v=#points:30"
complete -c fabric -s C -l context -d "Choose a context from the available contexts" -a "(__fabric_get_contexts)"
complete -c fabric -l session -d "Choose a session from the available sessions" -a "(__fabric_get_sessions)"
complete -c fabric -s a -l attachment -d "Attachment path or URL (e.g. for OpenAI image recognition messages)" -r
complete -c fabric -s t -l temperature -d "Set temperature (default: 0.7)"
complete -c fabric -s T -l topp -d "Set top P (default: 0.9)"
complete -c fabric -s P -l presencepenalty -d "Set presence penalty (default: 0.0)"
complete -c fabric -s F -l frequencypenalty -d "Set frequency penalty (default: 0.0)"
complete -c fabric -s m -l model -d "Choose model" -a "(__fabric_get_models)"
complete -c fabric -l modelContextLength -d "Model context length (only affects ollama)"
complete -c fabric -s o -l output -d "Output to file" -r
complete -c fabric -s n -l latest -d "Number of latest patterns to list (default: 0)"
complete -c fabric -s y -l youtube -d "YouTube video or play list URL to grab transcript, comments from it"
complete -c fabric -s g -l language -d "Specify the Language Code for the chat, e.g. -g=en -g=zh"
complete -c fabric -s u -l scrape_url -d "Scrape website URL to markdown using Jina AI"
complete -c fabric -s q -l scrape_question -d "Search question using Jina AI"
complete -c fabric -s e -l seed -d "Seed to be used for LMM generation"
complete -c fabric -s w -l wipecontext -d "Wipe context" -a "(__fabric_get_contexts)"
complete -c fabric -s W -l wipesession -d "Wipe session" -a "(__fabric_get_sessions)"
complete -c fabric -l printcontext -d "Print context" -a "(__fabric_get_contexts)"
complete -c fabric -l printsession -d "Print session" -a "(__fabric_get_sessions)"
complete -c fabric -l address -d "The address to bind the REST API (default: :8080)"
complete -c fabric -l api-key -d "API key used to secure server routes"
complete -c fabric -l config -d "Path to YAML config file" -r -a "*.yaml *.yml"
complete -c fabric -l search-location -d "Set location for web search results (e.g., 'America/Los_Angeles')"
complete -c fabric -l image-file -d "Save generated image to specified file path (e.g., 'output.png')" -r -a "*.png *.webp *.jpeg *.jpg"
complete -c fabric -l image-size -d "Image dimensions: 1024x1024, 1536x1024, 1024x1536, auto (default: auto)" -a "1024x1024 1536x1024 1024x1536 auto"
complete -c fabric -l image-quality -d "Image quality: low, medium, high, auto (default: auto)" -a "low medium high auto"
complete -c fabric -l image-compression -d "Compression level 0-100 for JPEG/WebP formats (default: not set)" -r
complete -c fabric -l image-background -d "Background type: opaque, transparent (default: opaque, only for PNG/WebP)" -a "opaque transparent"
complete -c fabric -l addextension -d "Register a new extension from config file path" -r -a "*.yaml *.yml"
complete -c fabric -l rmextension -d "Remove a registered extension by name" -a "(__fabric_get_extensions)"
complete -c fabric -l strategy -d "Choose a strategy from the available strategies" -a "(__fabric_get_strategies)"
    # Flag completions with arguments
    complete -c $cmd -s p -l pattern -d "Choose a pattern from the available patterns" -a "(__fabric_get_patterns)"
    complete -c $cmd -s v -l variable -d "Values for pattern variables, e.g. -v=#role:expert -v=#points:30"
    complete -c $cmd -s C -l context -d "Choose a context from the available contexts" -a "(__fabric_get_contexts)"
    complete -c $cmd -l session -d "Choose a session from the available sessions" -a "(__fabric_get_sessions)"
    complete -c $cmd -s a -l attachment -d "Attachment path or URL (e.g. for OpenAI image recognition messages)" -r
    complete -c $cmd -s t -l temperature -d "Set temperature (default: 0.7)"
    complete -c $cmd -s T -l topp -d "Set top P (default: 0.9)"
    complete -c $cmd -s P -l presencepenalty -d "Set presence penalty (default: 0.0)"
    complete -c $cmd -s F -l frequencypenalty -d "Set frequency penalty (default: 0.0)"
    complete -c $cmd -s m -l model -d "Choose model" -a "(__fabric_get_models)"
    complete -c $cmd -s V -l vendor -d "Specify vendor for chosen model (e.g., -V \"LM Studio\" -m openai/gpt-oss-20b)" -a "(__fabric_get_vendors)"
    complete -c $cmd -l modelContextLength -d "Model context length (only affects ollama)"
    complete -c $cmd -s o -l output -d "Output to file" -r
    complete -c $cmd -s n -l latest -d "Number of latest patterns to list (default: 0)"
    complete -c $cmd -s y -l youtube -d "YouTube video or play list URL to grab transcript, comments from it"
    complete -c $cmd -s g -l language -d "Specify the Language Code for the chat, e.g. -g=en -g=zh"
    complete -c $cmd -s u -l scrape_url -d "Scrape website URL to markdown using Jina AI"
    complete -c $cmd -s q -l scrape_question -d "Search question using Jina AI"
    complete -c $cmd -s e -l seed -d "Seed to be used for LMM generation"
    complete -c $cmd -s w -l wipecontext -d "Wipe context" -a "(__fabric_get_contexts)"
    complete -c $cmd -s W -l wipesession -d "Wipe session" -a "(__fabric_get_sessions)"
    complete -c $cmd -l printcontext -d "Print context" -a "(__fabric_get_contexts)"
    complete -c $cmd -l printsession -d "Print session" -a "(__fabric_get_sessions)"
    complete -c $cmd -l address -d "The address to bind the REST API (default: :8080)"
    complete -c $cmd -l api-key -d "API key used to secure server routes"
    complete -c $cmd -l config -d "Path to YAML config file" -r -a "*.yaml *.yml"
    complete -c $cmd -l search-location -d "Set location for web search results (e.g., 'America/Los_Angeles')"
    complete -c $cmd -l image-file -d "Save generated image to specified file path (e.g., 'output.png')" -r -a "*.png *.webp *.jpeg *.jpg"
    complete -c $cmd -l image-size -d "Image dimensions: 1024x1024, 1536x1024, 1024x1536, auto (default: auto)" -a "1024x1024 1536x1024 1024x1536 auto"
    complete -c $cmd -l image-quality -d "Image quality: low, medium, high, auto (default: auto)" -a "low medium high auto"
    complete -c $cmd -l image-compression -d "Compression level 0-100 for JPEG/WebP formats (default: not set)" -r
    complete -c $cmd -l image-background -d "Background type: opaque, transparent (default: opaque, only for PNG/WebP)" -a "opaque transparent"
    complete -c $cmd -l addextension -d "Register a new extension from config file path" -r -a "*.yaml *.yml"
    complete -c $cmd -l rmextension -d "Remove a registered extension by name" -a "(__fabric_get_extensions)"
    complete -c $cmd -l strategy -d "Choose a strategy from the available strategies" -a "(__fabric_get_strategies)"
    complete -c $cmd -l think-start-tag -d "Start tag for thinking sections (default: <think>)"
    complete -c $cmd -l think-end-tag -d "End tag for thinking sections (default: </think>)"
    complete -c $cmd -l voice -d "TTS voice name for supported models (e.g., Kore, Charon, Puck)" -a "(__fabric_get_gemini_voices)"
    complete -c $cmd -l notification-command -d "Custom command to run for notifications (overrides built-in notifications)"

# Boolean flags (no arguments)
complete -c fabric -s S -l setup -d "Run setup for all reconfigurable parts of fabric"
complete -c fabric -s s -l stream -d "Stream"
complete -c fabric -s r -l raw -d "Use the defaults of the model without sending chat options"
complete -c fabric -s l -l listpatterns -d "List all patterns"
complete -c fabric -s L -l listmodels -d "List all available models"
complete -c fabric -s x -l listcontexts -d "List all contexts"
complete -c fabric -s X -l listsessions -d "List all sessions"
complete -c fabric -s U -l updatepatterns -d "Update patterns"
complete -c fabric -s c -l copy -d "Copy to clipboard"
complete -c fabric -l output-session -d "Output the entire session to the output file"
complete -c fabric -s d -l changeDefaultModel -d "Change default model"
complete -c fabric -l playlist -d "Prefer playlist over video if both ids are present in the URL"
complete -c fabric -l transcript -d "Grab transcript from YouTube video and send to chat"
complete -c fabric -l transcript-with-timestamps -d "Grab transcript from YouTube video with timestamps"
complete -c fabric -l comments -d "Grab comments from YouTube video and send to chat"
complete -c fabric -l metadata -d "Output video metadata"
complete -c fabric -l readability -d "Convert HTML input into a clean, readable view"
complete -c fabric -l input-has-vars -d "Apply variables to user input"
complete -c fabric -l dry-run -d "Show what would be sent to the model without actually sending it"
complete -c fabric -l search -d "Enable web search tool for supported models (Anthropic, OpenAI)"
complete -c fabric -l serve -d "Serve the Fabric Rest API"
complete -c fabric -l serveOllama -d "Serve the Fabric Rest API with ollama endpoints"
complete -c fabric -l version -d "Print current version"
complete -c fabric -l listextensions -d "List all registered extensions"
complete -c fabric -l liststrategies -d "List all strategies"
complete -c fabric -l listvendors -d "List all vendors"
complete -c fabric -l shell-complete-list -d "Output raw list without headers/formatting (for shell completion)"
complete -c fabric -s h -l help -d "Show this help message"
    # Boolean flags (no arguments)
    complete -c $cmd -s S -l setup -d "Run setup for all reconfigurable parts of fabric"
    complete -c $cmd -s s -l stream -d "Stream"
    complete -c $cmd -s r -l raw -d "Use the defaults of the model without sending chat options"
    complete -c $cmd -s l -l listpatterns -d "List all patterns"
    complete -c $cmd -s L -l listmodels -d "List all available models"
    complete -c $cmd -s x -l listcontexts -d "List all contexts"
    complete -c $cmd -s X -l listsessions -d "List all sessions"
    complete -c $cmd -s U -l updatepatterns -d "Update patterns"
    complete -c $cmd -s c -l copy -d "Copy to clipboard"
    complete -c $cmd -l output-session -d "Output the entire session to the output file"
    complete -c $cmd -s d -l changeDefaultModel -d "Change default model"
    complete -c $cmd -l playlist -d "Prefer playlist over video if both ids are present in the URL"
    complete -c $cmd -l transcript -d "Grab transcript from YouTube video and send to chat"
    complete -c $cmd -l transcript-with-timestamps -d "Grab transcript from YouTube video with timestamps"
    complete -c $cmd -l comments -d "Grab comments from YouTube video and send to chat"
    complete -c $cmd -l metadata -d "Output video metadata"
    complete -c $cmd -l yt-dlp-args -d "Additional arguments to pass to yt-dlp (e.g. '--cookies-from-browser brave')"
    complete -c $cmd -l readability -d "Convert HTML input into a clean, readable view"
    complete -c $cmd -l input-has-vars -d "Apply variables to user input"
    complete -c $cmd -l dry-run -d "Show what would be sent to the model without actually sending it"
    complete -c $cmd -l search -d "Enable web search tool for supported models (Anthropic, OpenAI, Gemini)"
    complete -c $cmd -l serve -d "Serve the Fabric Rest API"
    complete -c $cmd -l serveOllama -d "Serve the Fabric Rest API with ollama endpoints"
    complete -c $cmd -l version -d "Print current version"
    complete -c $cmd -l listextensions -d "List all registered extensions"
    complete -c $cmd -l liststrategies -d "List all strategies"
    complete -c $cmd -l listvendors -d "List all vendors"
    complete -c $cmd -l list-gemini-voices -d "List all available Gemini TTS voices"
    complete -c $cmd -l shell-complete-list -d "Output raw list without headers/formatting (for shell completion)"
    complete -c $cmd -l suppress-think -d "Suppress text enclosed in thinking tags"
    complete -c $cmd -l disable-responses-api -d "Disable OpenAI Responses API (default: false)"
    complete -c $cmd -l notification -d "Send desktop notification when command completes"
    complete -c $cmd -s h -l help -d "Show this help message"
end

__fabric_register_completions fabric
__fabric_register_completions fabric-ai
completions/setup-completions.sh (Executable file, 387 lines)
@@ -0,0 +1,387 @@
#!/bin/sh

# Fabric Shell Completions Setup Script
# This script automatically installs shell completions for the fabric CLI
# based on your current shell and the installed fabric command name.

set -e

# Global variables
DRY_RUN=false

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m' # No Color

# Function to print colored output
print_info() {
    printf "${BLUE}[INFO]${NC} %s\n" "$1"
}

print_success() {
    printf "${GREEN}[SUCCESS]${NC} %s\n" "$1"
}

print_warning() {
    printf "${YELLOW}[WARNING]${NC} %s\n" "$1"
}

print_error() {
    printf "${RED}[ERROR]${NC} %s\n" "$1"
}

print_dry_run() {
    printf "${CYAN}[DRY-RUN]${NC} %s\n" "$1"
}

# Function to execute commands with dry-run support
execute_command() {
    cmd="$1"

    if [ "$DRY_RUN" = true ]; then
        print_dry_run "Would run: $cmd"
        return 0
    else
        eval "$cmd" 2>/dev/null
    fi
}

# Ensure directory exists, try sudo on permission failure
ensure_dir() {
    dir="$1"
    # Expand ~ if present
    case "$dir" in
    ~/*)
        dir="$HOME${dir#~}"
        ;;
    esac

    if [ -d "$dir" ]; then
        return 0
    fi

    if [ "$DRY_RUN" = true ]; then
        print_dry_run "Would run: mkdir -p \"$dir\""
        print_dry_run "If permission denied, would run: sudo mkdir -p \"$dir\""
        return 0
    fi

    if mkdir -p "$dir" 2>/dev/null; then
        return 0
    fi
    if command -v sudo >/dev/null 2>&1 && sudo mkdir -p "$dir" 2>/dev/null; then
        return 0
    fi
    print_error "Failed to create directory: $dir"
    return 1
}

# Copy file with sudo fallback on permission failure
install_file() {
    src="$1"
    dest="$2"

    if [ "$DRY_RUN" = true ]; then
        print_dry_run "Would run: cp \"$src\" \"$dest\""
        print_dry_run "If permission denied, would run: sudo cp \"$src\" \"$dest\""
        return 0
    fi

    if cp "$src" "$dest" 2>/dev/null; then
        return 0
    fi
    if command -v sudo >/dev/null 2>&1 && sudo cp "$src" "$dest" 2>/dev/null; then
        return 0
    fi
    print_error "Failed to install file to: $dest"
    return 1
}

# Function to detect fabric command name
detect_fabric_command() {
    if command -v fabric >/dev/null 2>&1; then
        echo "fabric"
    elif command -v fabric-ai >/dev/null 2>&1; then
        echo "fabric-ai"
    else
        print_error "Neither 'fabric' nor 'fabric-ai' command found in PATH"
        exit 1
    fi
}

# Function to detect shell
detect_shell() {
    if [ -n "$SHELL" ]; then
        basename "$SHELL"
    else
        print_warning "SHELL environment variable not set, defaulting to sh"
        echo "sh"
    fi
}

# Function to get script directory
get_script_dir() {
    # Get the directory where this script is located
    script_path="$(readlink -f "$0" 2>/dev/null || realpath "$0" 2>/dev/null || echo "$0")"
    dirname "$script_path"
}

# Function to setup Zsh completions
setup_zsh_completions() {
    fabric_cmd="$1"
    script_dir="$2"
    completion_file="_${fabric_cmd}"

    print_info "Setting up Zsh completions for '$fabric_cmd'..."

    # Try to use existing $fpath first, then fall back to default directories
    zsh_dirs=""

    # Check if user's shell is zsh and try to get fpath from it
    if [ "$(basename "$SHELL")" = "zsh" ] && command -v zsh >/dev/null 2>&1; then
        # Get fpath from zsh by sourcing user's .zshrc first
        fpath_output=$(zsh -c "source \$HOME/.zshrc 2>/dev/null && print -l \$fpath" 2>/dev/null | head -5 | tr '\n' ' ')
        if [ -n "$fpath_output" ] && [ "$fpath_output" != "" ]; then
            print_info "Using directories from zsh \$fpath"
            zsh_dirs="$fpath_output"
        fi
    fi

    # If we couldn't get fpath or it's empty, use default directories
    if [ -z "$zsh_dirs" ] || [ "$zsh_dirs" = "" ]; then
        print_info "Using default zsh completion directories"
        zsh_dirs="/usr/local/share/zsh/site-functions /opt/homebrew/share/zsh/site-functions /usr/share/zsh/site-functions ~/.local/share/zsh/site-functions"
    fi

    installed=false

    for dir in $zsh_dirs; do
        # Create directory (with sudo fallback if needed)
        if ensure_dir "$dir"; then
            if install_file "$script_dir/_fabric" "$dir/$completion_file"; then
                if [ "$DRY_RUN" = true ]; then
                    print_success "Would install Zsh completion to: $dir/$completion_file"
                else
                    print_success "Installed Zsh completion to: $dir/$completion_file"
                fi
                installed=true
                break
            fi
        fi
    done

    if [ "$installed" = false ]; then
        if [ "$DRY_RUN" = true ]; then
            print_warning "Would attempt to install Zsh completions but no writable directory found."
        else
            print_error "Failed to install Zsh completions. Try running with sudo or check permissions."
            return 1
        fi
    fi

    if [ "$DRY_RUN" = true ]; then
        print_info "Would suggest: Restart your shell or run 'autoload -U compinit && compinit' to enable completions."
    else
        print_info "Restart your shell or run 'autoload -U compinit && compinit' to enable completions."
    fi
}

# Function to setup Bash completions
setup_bash_completions() {
    fabric_cmd="$1"
    script_dir="$2"
    completion_file="${fabric_cmd}.bash"

    print_info "Setting up Bash completions for '$fabric_cmd'..."

    # Try different completion directories
    bash_dirs="/etc/bash_completion.d /usr/local/etc/bash_completion.d /opt/homebrew/etc/bash_completion.d ~/.local/share/bash-completion/completions"
    installed=false

    for dir in $bash_dirs; do
        if ensure_dir "$dir"; then
            if install_file "$script_dir/fabric.bash" "$dir/$completion_file"; then
                if [ "$DRY_RUN" = true ]; then
                    print_success "Would install Bash completion to: $dir/$completion_file"
                else
                    print_success "Installed Bash completion to: $dir/$completion_file"
                fi
                installed=true
                break
            fi
        fi
    done

    if [ "$installed" = false ]; then
        if [ "$DRY_RUN" = true ]; then
            print_warning "Would attempt to install Bash completions but no writable directory found."
        else
            print_error "Failed to install Bash completions. Try running with sudo or check permissions."
            return 1
        fi
    fi

    if [ "$DRY_RUN" = true ]; then
        print_info "Would suggest: Restart your shell or run 'source ~/.bashrc' to enable completions."
    else
        print_info "Restart your shell or run 'source ~/.bashrc' to enable completions."
    fi
}

# Function to setup Fish completions
setup_fish_completions() {
    fabric_cmd="$1"
    script_dir="$2"
    completion_file="${fabric_cmd}.fish"

    print_info "Setting up Fish completions for '$fabric_cmd'..."

    # Fish completion directory
    fish_dir="$HOME/.config/fish/completions"

    if [ "$DRY_RUN" = true ]; then
        print_dry_run "Would run: mkdir -p \"$fish_dir\""
        print_dry_run "Would run: cp \"$script_dir/fabric.fish\" \"$fish_dir/$completion_file\""
        print_success "Would install Fish completion to: $fish_dir/$completion_file"
        print_info "Fish will automatically load the completions (no restart needed)."
    elif mkdir -p "$fish_dir" 2>/dev/null; then
        if cp "$script_dir/fabric.fish" "$fish_dir/$completion_file"; then
            print_success "Installed Fish completion to: $fish_dir/$completion_file"
            print_info "Fish will automatically load the completions (no restart needed)."
        else
            print_error "Failed to copy Fish completion file."
            return 1
        fi
    else
        print_error "Failed to create Fish completions directory: $fish_dir"
        return 1
    fi
}

# Function to setup completions for other shells
setup_other_shell_completions() {
    fabric_cmd="$1"
    shell_name="$2"

    print_warning "Shell '$shell_name' is not directly supported."
    print_info "You can manually source the completion files:"
    print_info "  Bash-compatible: source $script_dir/fabric.bash"
    print_info "  Zsh-compatible: source $script_dir/_fabric"
}

# Function to show help
show_help() {
    cat << EOF
Fabric Shell Completions Setup Script

USAGE:
    setup-completions.sh [OPTIONS]

OPTIONS:
    --dry-run    Show what commands would be run without executing them
    --help       Show this help message

DESCRIPTION:
    This script automatically installs shell completions for the fabric CLI
    based on your current shell and the installed fabric command name.

    The script looks for completion files in the same directory as the script,
    so it can be run from anywhere.

    Supports: zsh, bash, fish

    The script will:
    1. Detect whether 'fabric' or 'fabric-ai' is installed
    2. Detect your current shell from the SHELL environment variable
    3. Install the appropriate completion file with the correct name
    4. Try multiple standard completion directories

EXAMPLES:
    ./setup-completions.sh           # Install completions
    ./setup-completions.sh --dry-run # Show what would be done
    ./setup-completions.sh --help    # Show this help

EOF
}

# Main function
main() {
    # Parse command line arguments
    while [ $# -gt 0 ]; do
        case "$1" in
        --dry-run)
            DRY_RUN=true
            shift
            ;;
        --help|-h)
            show_help
            exit 0
            ;;
        *)
            print_error "Unknown option: $1"
            print_info "Use --help for usage information."
            exit 1
            ;;
        esac
    done

    print_info "Fabric Shell Completions Setup"
    print_info "==============================="

    if [ "$DRY_RUN" = true ]; then
        print_info "DRY RUN MODE - Commands will be shown but not executed"
        print_info ""
    fi

    # Get script directory
    script_dir="$(get_script_dir)"

    # Check if completion files exist
    if [ ! -f "$script_dir/_fabric" ] || [ ! -f "$script_dir/fabric.bash" ] || [ ! -f "$script_dir/fabric.fish" ]; then
        print_error "Completion files not found. Make sure you're running this script from the fabric completions directory."
        print_error "Expected files:"
        print_error "  $script_dir/_fabric"
        print_error "  $script_dir/fabric.bash"
        print_error "  $script_dir/fabric.fish"
        exit 1
    fi

    # Detect fabric command
    fabric_cmd="$(detect_fabric_command)"
    print_info "Detected fabric command: $fabric_cmd"

    # Detect shell
    shell_name="$(detect_shell)"
    print_info "Detected shell: $shell_name"

    # Setup completions based on shell
    case "$shell_name" in
    zsh)
        setup_zsh_completions "$fabric_cmd" "$script_dir"
        ;;
    bash)
        setup_bash_completions "$fabric_cmd" "$script_dir"
        ;;
    fish)
        setup_fish_completions "$fabric_cmd" "$script_dir"
        ;;
    *)
        setup_other_shell_completions "$fabric_cmd" "$shell_name"
        ;;
    esac

    if [ "$DRY_RUN" = true ]; then
        print_success "Dry-run completed! The above commands would set up shell completions."
        print_info "Run without --dry-run to actually install the completions."
    else
        print_success "Shell completion setup completed!"
        print_info "You can now use tab completion with the '$fabric_cmd' command."
    fi
}

# Run main function
main "$@"
@@ -7,7 +7,7 @@ Generate code changes to an existing coding project using AI.
After installing the `code_helper` binary:

```bash
go install github.com/danielmiessler/fabric/plugins/tools/code_helper@latest
go install github.com/danielmiessler/fabric/cmd/code_helper@latest
```

## Usage
data/patterns/generate_code_rules/system.md (Normal file, 8 lines)
@@ -0,0 +1,8 @@
# IDENTITY AND PURPOSE

You are a senior developer and expert prompt engineer. Think ultra hard to distill the following transcription or tutorial into as small a set of unique rules as possible, intended as best-practices guidance for AI-assisted coding tools. Each rule must be a single sentence phrased as a direct instruction; avoid explanations and cosmetic language. Output in Markdown; I prefer dash (-) bullets.

---

# TRANSCRIPT
@@ -167,6 +167,8 @@ us the results in
                        Select the model to use. NOTE: Will not work if you
                        have set a default model. please use --clear to clear
                        persistence before using this flag
  --vendor VENDOR, -V VENDOR
                        Specify vendor for the selected model (e.g., -V "LM Studio" -m openai/gpt-oss-20b)
  --listmodels          List all available models
  --remoteOllamaServer REMOTEOLLAMASERVER
                        The URL of the remote ollamaserver to use. ONLY USE
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 8 16-word bullets describing how well or poorly I'm addressing my challenges. Call me out if I'm not putting work into them, and/or if you can see evidence of them affecting me in my journal or elsewhere.

# OUTPUT INSTRUCTIONS

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Check this person's Metrics or KPIs (M's or K's) to see their current state and if they've been improved recently.

# OUTPUT INSTRUCTIONS

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Analyze everything in my TELOS file and think about what I could and should do after my legacy corporate / technical skills are automated away. What can I contribute that's based on human-to-human interaction and exchanges of value?

# OUTPUT INSTRUCTIONS

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 4 32-word bullets describing who I am and what I do in a non-douchey way. Use the who I am, the problem I see in the world, and what I'm doing about it as the template. Something like:
a. I'm a programmer by trade, and one thing that really bothers me is kids being so stuck inside of tech and games. So I started a school where I teach kids to build things with their hands.

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 5 16-word bullets describing this person's life outlook.

# OUTPUT INSTRUCTIONS

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 5 16-word bullets describing who this person is, what they do, and what they're working on. The goal is to concisely and confidently project who they are while being humble and grounded.

# OUTPUT INSTRUCTIONS

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 5 48-word bullet points, each including a 3-5 word panel title, that would be wonderful panels for this person to participate on.
5. Write them so that they'd be good panels for others to participate in as well, not just me.

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 8 16-word bullets describing possible blindspots in my thinking, i.e., flaws in my frames or models that might leave me exposed to error or risk.

# OUTPUT INSTRUCTIONS

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 4 16-word bullets identifying negative thinking either in my main document or in my journal.
5. Add some tough love encouragement (not fluff) to help get me out of that mindset.

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 5 16-word bullets describing which of their goals and/or projects don't seem to have been worked on recently.

# OUTPUT INSTRUCTIONS

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 8 16-word bullets looking at what I'm trying to do, and any progress I've made, and give some encouragement on the positive aspects and recommendations to continue the work.

# OUTPUT INSTRUCTIONS

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 4 16-word bullets red-teaming my thinking, models, frames, etc, especially as evidenced throughout my journal.
5. Give a set of recommendations on how to fix the issues identified in the red-teaming.

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 8 16-word bullets threat modeling my life plan and what could go wrong.
5. Provide recommendations on how to address the threats and improve the life plan.

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Create an ASCII art diagram of the relationship my missions, goals, and projects.

# OUTPUT INSTRUCTIONS

@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th

1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
2. Deeply study the input instruction or question.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
4. Write 8 16-word bullets describing what you accomplished this year.
5. End with an ASCII art visualization of what you worked on and accomplished vs. what you didn't work on or finish.

@@ -45,7 +45,7 @@ Follow the following structure:

- Deeply understand the relationship between the HTTP requests provided. Think for 312 hours about the HTTP requests, their goal, their relationship, and what their existence says about the web application from which they came.

- Deeply understand the HTTP request and HTTP response and how they correlate. Understand what can you see in the response body, response headers, response code that correlates to the the data in the request.
- Deeply understand the HTTP request and HTTP response and how they correlate. Understand what can you see in the response body, response headers, response code that correlates to the data in the request.

- Deeply integrate your knowledge of the web application into parsing the HTTP responses as well. Integrate all knowledge consumed at this point together.

docs/Automated-ChangeLog.md (Normal file, 373 lines)
@@ -0,0 +1,373 @@
|
||||
# Automated CHANGELOG Entry System for CI/CD
|
||||
|
||||
## Overview
|
||||
|
||||
This document outlines a comprehensive system for automatically generating and maintaining CHANGELOG.md entries during the CI/CD process. The system builds upon the existing `generate_changelog` tool and integrates seamlessly with GitHub's pull request workflow.

## Current State Analysis

### Existing Infrastructure

The `generate_changelog` tool already provides:

- **High-performance Git history walking** with a one-pass algorithm
- **GitHub API integration** with GraphQL optimization and smart caching
- **SQLite-based caching** for instant incremental updates
- **AI-powered summaries** using Fabric integration
- **Concurrent processing** for optimal performance
- **Version detection** from git tags and commit patterns

### Key Components

- **Main entry point**: `cmd/generate_changelog/main.go`
- **Core generation logic**: `internal/changelog/generator.go`
- **AI summarization**: `internal/changelog/summarize.go`
- **Caching system**: `internal/cache/cache.go`
- **GitHub integration**: `internal/github/client.go`
- **Git operations**: `internal/git/walker.go`

## Proposed Automated System

### Developer Workflow

```mermaid
graph TD
    A[Developer creates feature branch] --> B[Codes feature]
    B --> C[Creates Pull Request]
    C --> D[PR is open and ready]
    D --> E[Developer runs: generate_changelog --incoming-pr XXXX]
    E --> F[Tool validates PR is open/mergeable]
    F --> G[Tool creates incoming/XXXX.txt with AI summary]
    G --> H[Auto-commit and push to branch]
    H --> I[PR includes pre-processed changelog entry]
    I --> J[PR gets reviewed and merged]
```

### CI/CD Integration

```mermaid
graph TD
    A[PR merged to main] --> B[Version bump workflow triggered]
    B --> C[generate_changelog --process-prs]
    C --> D[Scan incoming/ directory]
    D --> E[Concatenate all incoming/*.txt files]
    E --> F[Insert new version at top of CHANGELOG.md]
    F --> G[Store entry in versions table]
    G --> H[git rm incoming/*.txt files]
    H --> I[git add CHANGELOG.md and changelog.db, done by the tool]
    I --> J[Increment version number]
    J --> K[Commit and tag release]
```

## Implementation Details

### Phase 1: Pre-Processing PRs

#### New Command: `--incoming-pr`

**Usage**: `generate_changelog --incoming-pr 1672`

**Functionality**:

1. **Validation**:
   - Verify the PR exists and is open
   - Check the PR is mergeable
   - Ensure the branch is up-to-date
   - Verify that the current git repo is clean (everything committed); do not continue otherwise.

2. **Content Generation**:
   - Extract PR metadata (title, author, description)
   - Collect all commit messages from the PR
   - Use the existing `SummarizeVersionContent` function for AI enhancement
   - Format as a standard changelog entry

3. **File Creation**:
   - Generate `./cmd/generate_changelog/incoming/{PR#}.txt`
   - Include PR header: `### PR [#1672](url) by [author](profile): Title` (as is done currently in the code)
   - Consider extracting the existing PR header code into a helper function for reuse (see the sketch after this list).
   - Include the AI-summarized changes (generated when all the commit messages were run through `SummarizeVersionContent`)

4. **Auto-commit**:
   - Commit the file with message: `chore: incoming 1672 changelog entry`
   - Optionally push to the current branch (use the `--push` flag)

(The PR is now ready to merge, with its changelog entry already integrated.)
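A minimal sketch of the extracted header helper suggested in the File Creation step; the `formatPRHeader` name and the field names on the PR struct are assumptions, not the tool's actual API:

```go
// formatPRHeader is a hypothetical extraction of the existing PR header
// formatting code; the pr field names are assumed for illustration.
func formatPRHeader(pr *github.PR) string {
	return fmt.Sprintf("### PR [#%d](%s) by [%s](%s): %s",
		pr.Number, pr.URL, pr.AuthorLogin, pr.AuthorURL, pr.Title)
}
```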

#### File Format Example

```markdown
### PR [#1672](https://github.com/danielmiessler/Fabric/pull/1672) by [ksylvan](https://github.com/ksylvan): Changelog Generator Enhancement

- Added automated CI/CD integration for changelog generation
- Implemented pre-processing of PR entries during development
- Enhanced caching system for better performance
- Added validation for mergeable PR states
```

### Phase 2: Release Processing

#### New Command: `--process-prs`

**Usage**: `generate_changelog --process-prs {new_version_string}`

**Integration Point**: `.github/workflows/update-version-and-create-tag.yml`

(We can run this after the "Update gomod2nix.toml file" step in the workflow, where the next version has already been generated in the "version.nix" file.)

**Functionality**:

1. **Discovery**: Scan the `./cmd/generate_changelog/incoming/` directory
2. **Aggregation**: Read and concatenate all `*.txt` files
3. **Version Creation**: Generate a new version header with the current date
4. **CHANGELOG Update**: Insert the new version at the top of the existing CHANGELOG.md
5. **Database Update**: Store the complete entry in the `versions` table as `ai_summary` (sketched below)
6. **Cleanup**: Remove all processed incoming files
7. **Stage Changes**: Add the modified files to the git staging area
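A minimal sketch of what `SaveVersionEntry` could look like, assuming a `versions` table with `version`, `date`, and `ai_summary` columns (the schema is an assumption, not the tool's actual one):

```go
// SaveVersionEntry stores the aggregated release notes for a version.
// The (version, date, ai_summary) schema is assumed for illustration.
func (c *Cache) SaveVersionEntry(version, summary string) error {
	_, err := c.db.Exec(
		`INSERT OR REPLACE INTO versions (version, date, ai_summary)
		 VALUES (?, date('now'), ?)`,
		version, summary,
	)
	return err
}
```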

#### Example Output in CHANGELOG.md

```markdown
# Changelog

## v1.4.259 (2025-07-18)

### PR [#1672](https://github.com/danielmiessler/Fabric/pull/1672) by [ksylvan](https://github.com/ksylvan): Changelog Generator Enhancement

- Added automated CI/CD integration for changelog generation
- Implemented pre-processing of PR entries during development
- Enhanced caching system for better performance

### PR [#1671](https://github.com/danielmiessler/Fabric/pull/1671) by [contributor](https://github.com/contributor): Bug Fix

- Fixed memory leak in caching system
- Improved error handling for GitHub API failures

## v1.4.258 (2025-07-14)

[... rest of file ...]
```

## Technical Implementation

### Configuration Extensions

Add to `internal/config/config.go`:

```go
type Config struct {
	// ... existing fields
	IncomingPR        int    // PR number for --incoming-pr
	ProcessPRsVersion string // Flag for --process-prs (new version string)
	IncomingDir       string // Directory for incoming files (default: ./cmd/generate_changelog/incoming/)
}
```

### New Command Line Flags

```go
rootCmd.Flags().IntVar(&cfg.IncomingPR, "incoming-pr", 0, "Pre-process PR for changelog (provide PR number)")
rootCmd.Flags().StringVar(&cfg.ProcessPRsVersion, "process-prs", "", "Process all incoming PR files for release (provide version like v1.4.262)")
rootCmd.Flags().StringVar(&cfg.IncomingDir, "incoming-dir", "./cmd/generate_changelog/incoming", "Directory for incoming PR files")
```
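The two modes are mutually exclusive (as the usage guide later in this document states), so flag parsing should reject combined use; a minimal sketch of that check (where exactly it lives in the real tool is an assumption):

```go
// Reject --incoming-pr combined with --process-prs.
if cfg.IncomingPR != 0 && cfg.ProcessPRsVersion != "" {
	return fmt.Errorf("--incoming-pr and --process-prs are mutually exclusive")
}
```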

### Core Logic Extensions

#### PR Pre-processing

```go
func (g *Generator) ProcessIncomingPR(prNumber int) error {
	// 1. Validate PR state via GitHub API
	pr, err := g.ghClient.GetPR(prNumber)
	if err != nil || pr.State != "open" || !pr.Mergeable {
		return fmt.Errorf("PR %d is not in a valid state for processing", prNumber)
	}

	// 2. Generate changelog content using existing logic
	content := g.formatPR(pr)

	// 3. Apply AI summarization if enabled; fail loudly rather than
	// silently writing empty content when summarization errors out
	if g.cfg.EnableAISummary {
		summary, err := SummarizeVersionContent(content)
		if err != nil {
			return fmt.Errorf("AI summarization failed: %w", err)
		}
		content = summary
	}

	// 4. Write to incoming file
	filename := filepath.Join(g.cfg.IncomingDir, fmt.Sprintf("%d.txt", prNumber))
	if err := os.WriteFile(filename, []byte(content), 0644); err != nil {
		return fmt.Errorf("failed to write incoming file: %w", err)
	}

	// 5. Auto-commit and push
	return g.commitAndPushIncoming(prNumber, filename)
}
```
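The `commitAndPushIncoming` helper referenced above is not shown in this document; a minimal sketch that shells out to the git CLI (the `Push` config field backing the `--push` flag is an assumption, and the real tool could use go-git instead):

```go
// commitAndPushIncoming commits the generated incoming file and,
// optionally, pushes the current branch.
func (g *Generator) commitAndPushIncoming(prNumber int, filename string) error {
	msg := fmt.Sprintf("chore: incoming %d changelog entry", prNumber)
	steps := [][]string{
		{"git", "add", filename},
		{"git", "commit", "-m", msg},
	}
	if g.cfg.Push { // assumed field backing --push
		steps = append(steps, []string{"git", "push"})
	}
	for _, args := range steps {
		cmd := exec.Command(args[0], args[1:]...)
		if out, err := cmd.CombinedOutput(); err != nil {
			return fmt.Errorf("%v failed: %w\n%s", args, err, out)
		}
	}
	return nil
}
```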

#### Release Processing

```go
func (g *Generator) ProcessIncomingPRs(version string) error {
	// 1. Scan incoming directory
	files, err := filepath.Glob(filepath.Join(g.cfg.IncomingDir, "*.txt"))
	if err != nil {
		return fmt.Errorf("failed to scan incoming directory: %w", err)
	}
	if len(files) == 0 {
		return fmt.Errorf("no incoming PR files found")
	}

	// 2. Read and concatenate all files; an unreadable file is an error,
	// not something to silently skip (that would drop a PR's entry)
	var content strings.Builder
	for _, file := range files {
		data, err := os.ReadFile(file)
		if err != nil {
			return fmt.Errorf("failed to read %s: %w", file, err)
		}
		content.Write(data)
		content.WriteString("\n")
	}

	// 3. Generate version entry
	entry := fmt.Sprintf("\n## %s (%s)\n\n%s",
		version, time.Now().Format("2006-01-02"), content.String())

	// 4. Update CHANGELOG.md
	if err := g.insertVersionAtTop(entry); err != nil {
		return fmt.Errorf("failed to update CHANGELOG.md: %w", err)
	}

	// 5. Update database
	if err := g.cache.SaveVersionEntry(version, content.String()); err != nil {
		return fmt.Errorf("failed to save to database: %w", err)
	}

	// 6. Cleanup incoming files
	for _, file := range files {
		if err := os.Remove(file); err != nil {
			return fmt.Errorf("failed to remove %s: %w", file, err)
		}
	}

	return nil
}
```
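`insertVersionAtTop` is likewise referenced but not shown; a minimal sketch, assuming CHANGELOG.md sits at the repository root and begins with a `# Changelog` heading (both assumptions):

```go
// insertVersionAtTop splices the new version entry directly after the
// top-level "# Changelog" heading.
func (g *Generator) insertVersionAtTop(entry string) error {
	const path = "CHANGELOG.md" // assumed location
	const header = "# Changelog\n"

	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	content := string(data)
	if idx := strings.Index(content, header); idx >= 0 {
		at := idx + len(header)
		content = content[:at] + entry + "\n" + content[at:]
	} else {
		content = header + entry + "\n" + content
	}
	return os.WriteFile(path, []byte(content), 0644)
}
```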

## Workflow Integration

### GitHub Actions Modification

Update `.github/workflows/update-version-and-create-tag.yml`:

```yaml
- name: Generate Changelog Entry
  run: |
    # Process all incoming PR entries
    ./cmd/generate_changelog/generate_changelog --process-prs

    # The tool makes the needed changes to CHANGELOG.md and changelog.db,
    # and removes the PR#.txt file(s). In effect, it does the following:
    # 1. Generate the new CHANGELOG (and store the entry in the changelog.db)
    # 2. git add CHANGELOG.md
    # 3. git add ./cmd/generate_changelog/changelog.db
    # 4. git rm -rf ./cmd/generate_changelog/incoming/
```

### Developer Instructions

1. **During Development**:

   ```bash
   # After the PR is ready for review (commit locally only)
   generate_changelog --incoming-pr 1672 --ai-summarize

   # Or to automatically push to the remote
   generate_changelog --incoming-pr 1672 --ai-summarize --push
   ```

2. **Validation**:
   - Check that `incoming/1672.txt` was created
   - Verify the auto-commit occurred
   - Confirm the file is included in the PR
   - Review the file and adjust the auto-generated summary as needed

## Benefits

### For Developers

- **Automated changelog entries** - no manual CHANGELOG.md editing
- **AI-enhanced summaries** - professional, consistent formatting
- **Early visibility** - changelog content visible during PR review
- **Reduced merge conflicts** - multiple PRs no longer edit CHANGELOG.md

### For Project Maintainers

- **Consistent formatting** - all entries follow the same structure
- **Complete coverage** - no missed changelog entries
- **Automated releases** - seamless integration with version bumps
- **Historical accuracy** - each PR's contribution properly documented

### For CI/CD

- **Deterministic process** - reliable, repeatable changelog generation
- **Performance optimized** - leverages existing caching and AI systems
- **Error resilience** - validates PR states before processing
- **Clean integration** - minimal changes to existing workflows

## Implementation Strategy

### Phase 1: Implement Developer Tooling

- [x] Add new command line flags and configuration
- [x] Implement `--incoming-pr` functionality
- [x] Add validation for PR states and git status
- [x] Create auto-commit logic

### Phase 2: Integration (CI/CD) Readiness

- [x] Implement `--process-prs` functionality
- [x] Add CHANGELOG.md insertion logic
- [x] Update database storage for version entries

### Phase 3: Deployment

- [x] Update GitHub Actions workflow
- [x] Create developer documentation in the ./docs/ directory
- [x] Test the full end-to-end workflow (the PR that includes these modifications can be its first production test)

### Phase 4: Adoption

- [ ] Train the development team - consider a full tutorial blog post/page that walks developers through the process
- [ ] Monitor the first few releases
- [ ] Gather feedback and iterate
- [ ] Document lessons learned

## Error Handling

### PR Validation Failures

- **Closed/Merged PR**: Error with a suggestion to check the PR status
- **Non-mergeable PR**: Error with an instruction to resolve conflicts
- **Missing PR**: Error prompting the user to verify the PR number

### File System Issues

- **Permission errors**: Clear error with directory permission requirements
- **Disk space**: Graceful handling with cleanup suggestions
- **Network failures**: Retry logic with exponential backoff (sketched below)
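A minimal sketch of the retry shape intended here; the attempt count and delays are assumptions, and `doRequest` is a stand-in for any GitHub API call, not an actual function in the tool:

```go
// Retry a flaky network call with exponential backoff.
delay := time.Second
var err error
for attempt := 0; attempt < 5; attempt++ {
	if err = doRequest(); err == nil { // doRequest is hypothetical
		break
	}
	time.Sleep(delay)
	delay *= 2
}
```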

### Git Operations

- **Commit failures**: Check for a dirty working directory
- **Push failures**: Handle authentication and remote issues
- **Merge conflicts**: Clear instructions for manual resolution

## Future Enhancements

### Advanced Features

- **Custom categorization** - group changes by type (feat/fix/docs)
- **Breaking change detection** - special handling for BREAKING CHANGE commits
- **Release notes generation** - enhanced formatting for GitHub releases (our release pages are pretty bare)

## Conclusion

This automated changelog system builds upon the robust foundation of the existing `generate_changelog` tool while providing a seamless developer experience and reliable CI/CD integration. By pre-processing PR entries during development and aggregating them during releases, we achieve both accuracy and automation without sacrificing quality or developer productivity.

The phased approach ensures smooth adoption, while the extensive error handling and validation provide confidence in production deployment. The system's design leverages existing infrastructure and patterns, making it a natural evolution of the current changelog generation capabilities.

195
docs/Automated-Changelog-Usage.md
Normal file
@@ -0,0 +1,195 @@

# Automated Changelog System - Developer Guide

This guide explains how to use the new automated changelog system for the Fabric project.

## Overview

The automated changelog system allows developers to pre-process their PR changelog entries during development, which are then automatically aggregated during the release process. This eliminates manual CHANGELOG.md editing and reduces merge conflicts.

## Developer Workflow

### Step 1: Create Your Feature Branch and PR

Work on your feature as usual and create a pull request.

### Step 2: Generate Changelog Entry

Once your PR is ready for review, generate a changelog entry:

```bash
cd cmd/generate_changelog
go build -o generate_changelog .
./generate_changelog --incoming-pr YOUR_PR_NUMBER
```

For example, if your PR number is 1672:

```bash
./generate_changelog --incoming-pr 1672
```

### Step 3: Validation

The tool will validate:

- ✅ PR exists and is open
- ✅ PR is mergeable (no conflicts)
- ✅ Your working directory is clean

If any validation fails, fix the issues and try again.

### Step 4: Review Generated Entry

The tool will:

1. Create `./cmd/generate_changelog/incoming/1672.txt`
2. Generate an AI-enhanced summary (if `--ai-summarize` is enabled)
3. Auto-commit the file to your branch (use `--push` to also push to the remote)

Review the generated file and edit if needed:

```bash
cat ./cmd/generate_changelog/incoming/1672.txt
```

### Step 5: Include in PR

The incoming changelog entry is now part of your PR and will be reviewed along with your code changes.

## Example Generated Entry

```markdown
### PR [#1672](https://github.com/danielmiessler/fabric/pull/1672) by [ksylvan](https://github.com/ksylvan): Changelog Generator Enhancement

- Added automated CI/CD integration for changelog generation
- Implemented pre-processing of PR entries during development
- Enhanced caching system for better performance
- Added validation for mergeable PR states
```

## Command Options

### `--incoming-pr`

Pre-process a specific PR for changelog generation.

**Usage**: `./generate_changelog --incoming-pr PR_NUMBER`

**Requirements**:

- PR must be open
- PR must be mergeable (no conflicts)
- Working directory must be clean (no uncommitted changes)
- GitHub token must be available (`GITHUB_TOKEN` env var or `--token` flag)

**Mutual Exclusivity**: Cannot be used with the `--process-prs` flag

### `--incoming-dir`

Specify a custom directory for incoming PR files (default: `./cmd/generate_changelog/incoming`).

**Usage**: `./generate_changelog --incoming-pr 1672 --incoming-dir ./custom/path`

### `--process-prs`

Process all incoming PR files for release aggregation. Used by CI/CD during release creation.

**Usage**: `./generate_changelog --process-prs {new_version_string}`

**Mutual Exclusivity**: Cannot be used with the `--incoming-pr` flag

### `--ai-summarize`

Enable AI-enhanced summaries using Fabric integration.

**Usage**: `./generate_changelog --incoming-pr 1672 --ai-summarize`

### `--push`

Enable automatic git push after creating an incoming entry. By default, the commit is created locally but not pushed to the remote repository.

**Usage**: `./generate_changelog --incoming-pr 1672 --push`

**Note**: When using `--push`, ensure you have proper authentication configured (SSH keys or the GITHUB_TOKEN environment variable).

## Troubleshooting

### "PR is not open"

Your PR has been closed or merged. Only open PRs can be processed.

### "PR is not mergeable"

Your PR has merge conflicts or other issues preventing it from being merged. Resolve conflicts and ensure the PR is in a mergeable state.

### "Working directory is not clean"

You have uncommitted changes. Commit or stash them before running the tool.

### "Failed to fetch PR"

Check your GitHub token and network connection. Ensure the PR number exists.

## CI/CD Integration

The system automatically processes all incoming PR files during the release workflow. No manual intervention is required.

When a release is created:

1. All `incoming/*.txt` files are aggregated using `--process-prs`
2. The version is detected from `version.nix` or the latest git tag
3. A new version entry is created in CHANGELOG.md
4. Incoming files are cleaned up (removed)
5. Changes are staged for the release commit (CHANGELOG.md and the cache file)

## Best Practices

1. **Run early**: Generate your changelog entry as soon as your PR is ready for review
2. **Review content**: Always review the generated entry and edit if necessary
3. **Keep it updated**: If you make significant changes to your PR, regenerate the entry
4. **Use AI summaries**: Enable `--ai-summarize` for more professional, consistent formatting

## Advanced Usage

### Custom GitHub Token

```bash
./generate_changelog --incoming-pr 1672 --token YOUR_GITHUB_TOKEN
```

### Custom Repository Path

```bash
./generate_changelog --incoming-pr 1672 --repo /path/to/repo
```

### Disable Caching

```bash
./generate_changelog --incoming-pr 1672 --no-cache
```

### Enable Auto-Push

```bash
./generate_changelog --incoming-pr 1672 --push
```

This creates the commit locally and pushes it to the remote repository. By default, commits are only created locally, allowing you to review changes before pushing manually.

**Authentication**: The tool automatically detects GitHub repositories and uses the GITHUB_TOKEN environment variable for authentication when pushing. For SSH repositories, ensure your SSH keys are properly configured.

## Integration with Existing Workflow

This system is fully backward compatible. The existing changelog generation continues to work unchanged. The new features are opt-in and only activated when using the new flags.

## Support

If you encounter issues:

1. Check this documentation
2. Verify your GitHub token has appropriate permissions
3. Ensure your PR meets the validation requirements
4. Check the tool's help: `./generate_changelog --help`

For bugs or feature requests, please create an issue in the repository.

183
docs/Desktop-Notifications.md
Normal file
@@ -0,0 +1,183 @@

# Desktop Notifications

Fabric supports desktop notifications to alert you when commands complete, which is especially useful for long-running tasks or when you're multitasking.

## Quick Start

Enable notifications with the `--notification` flag:

```bash
fabric --pattern summarize --notification < article.txt
```

## Configuration

### Command Line Options

- `--notification`: Enable desktop notifications when a command completes
- `--notification-command`: Use a custom notification command instead of built-in notifications

### YAML Configuration

Add notification settings to your `~/.config/fabric/config.yaml`:

```yaml
# Enable notifications by default
notification: true

# Optional: Custom notification command
notificationCommand: 'notify-send --urgency=normal "$1" "$2"'
```

## Platform Support

### macOS

- **Default**: Uses `osascript` (built into macOS)
- **Enhanced**: Install `terminal-notifier` for better notifications:

  ```bash
  brew install terminal-notifier
  ```

### Linux

- **Requirement**: Install `notify-send`:

  ```bash
  # Ubuntu/Debian
  sudo apt install libnotify-bin

  # Fedora
  sudo dnf install libnotify
  ```

### Windows

- **Default**: Uses PowerShell message boxes (built-in)

## Custom Notification Commands

The `--notification-command` flag allows you to use custom notification scripts or commands. The command receives the title as `$1` and the message as `$2`, as shell positional arguments.

**Security Note**: The title and message content are properly escaped to prevent command injection attacks from AI-generated output containing shell metacharacters.
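A minimal sketch of how such an invocation can pass the values safely, assuming the tool hands them to `sh` as positional parameters rather than splicing them into the command string (the exact mechanism inside Fabric is an assumption):

```go
// userCommand is the --notification-command value, e.g.
//   notify-send --urgency=normal "$1" "$2"
// With sh -c, the arguments after the script become $0, $1, $2, so the
// title and message never pass through shell parsing (no injection).
cmd := exec.Command("sh", "-c", userCommand, "fabric-notify", title, message)
err := cmd.Run()
```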

### Examples

**macOS with custom sound:**

```bash
fabric --pattern analyze_claims --notification-command 'osascript -e "display notification \"$2\" with title \"$1\" sound name \"Ping\""' < document.txt
```

**Linux with urgency levels:**

```bash
fabric --pattern extract_wisdom --notification-command 'notify-send --urgency=critical "$1" "$2"' < video-transcript.txt
```

**Custom script:**

```bash
fabric --pattern summarize --notification-command '/path/to/my-notification-script.sh "$1" "$2"' < report.pdf
```

**Testing your custom command:**

```bash
# Test that $1 and $2 are passed correctly
fabric --pattern raw_query --notification-command 'echo "Title: $1, Message: $2"' "test input"
```

## Notification Content

Notifications include:

- **Title**: "Fabric Command Complete" or "Fabric: [pattern] Complete"
- **Message**: A brief summary of the output (first 100 characters)

For long outputs, the message is truncated with "..." to fit notification display limits.
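A sketch of the truncation rule described above; the 100-character cut and the "..." suffix come from this document, while byte-based slicing is an assumption:

```go
// truncateMessage shortens long output for notification display.
func truncateMessage(s string) string {
	const limit = 100 // per the rule described above
	if len(s) <= limit {
		return s
	}
	return s[:limit] + "..."
}
```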

## Use Cases

### Long-Running Tasks

```bash
# Process large document with notifications
fabric --pattern analyze_paper --notification < research-paper.pdf

# Extract wisdom from long video with alerts
fabric -y "https://youtube.com/watch?v=..." --pattern extract_wisdom --notification
```

### Background Processing

```bash
# Process multiple files and get notified when each completes
for file in *.txt; do
  fabric --pattern summarize --notification < "$file" &
done
```

### Integration with Other Tools

```bash
# Combine with other commands
curl -s "https://api.example.com/data" | \
  fabric --pattern analyze_data --notification --output results.md
```

## Troubleshooting

### No Notifications Appearing

1. **Check system notifications are enabled** for Terminal/your shell
2. **Verify notification tools are installed**:
   - macOS: `which osascript` (should exist)
   - Linux: `which notify-send`
   - Windows: `where.exe powershell`

3. **Test with a simple command**:

   ```bash
   echo "test" | fabric --pattern raw_query --notification --dry-run
   ```

### Notification Permission Issues

On some systems, you may need to grant notification permissions to your terminal application:

- **macOS**: System Preferences → Security & Privacy → Privacy → Notifications → Enable for Terminal
- **Linux**: Depends on the desktop environment; usually automatic
- **Windows**: Usually works by default

### Custom Commands Not Working

- Ensure your custom notification command is executable
- Test the command manually with sample arguments
- Check that all required dependencies are installed

## Advanced Configuration

### Environment-Specific Settings

Create different configuration files for different environments:

```bash
# Work computer (quieter notifications)
fabric --config ~/.config/fabric/work-config.yaml --notification

# Personal computer (with sound)
fabric --config ~/.config/fabric/personal-config.yaml --notification
```

### Integration with Task Management

```yaml
# Custom script that also logs to a task management system
notificationCommand: '/usr/local/bin/fabric-notify-and-log.sh "$1" "$2"'
```

## Examples

See `docs/notification-config.yaml` for a complete configuration example with various notification command options.

155
docs/Gemini-TTS.md
Normal file
@@ -0,0 +1,155 @@

# Gemini Text-to-Speech (TTS) Guide

Fabric supports Google Gemini's text-to-speech (TTS) capabilities, allowing you to convert text into high-quality audio using various AI-generated voices.

## Overview

The Gemini TTS feature in Fabric allows you to:

- Convert text input into audio using Google's Gemini TTS models
- Choose from 30+ different AI voices with varying characteristics
- Generate high-quality WAV audio files
- Integrate TTS generation into your existing Fabric workflows

## Usage

### Basic TTS Generation

To generate audio from text using TTS:

```bash
# Basic TTS with default voice (Kore)
echo "Hello, this is a test of Gemini TTS" | fabric -m gemini-2.5-flash-preview-tts -o output.wav

# Using a specific voice
echo "Hello, this is a test with the Charon voice" | fabric -m gemini-2.5-flash-preview-tts --voice Charon -o output.wav

# Using TTS with a pattern
fabric -p summarize --voice Puck -m gemini-2.5-flash-preview-tts -o summary.wav < document.txt
```

### Voice Selection

Use the `--voice` flag to specify which voice to use for TTS generation:

```bash
fabric -m gemini-2.5-flash-preview-tts --voice Zephyr -o output.wav "Your text here"
```

If no voice is specified, the default voice "Kore" will be used.

## Available Voices

Gemini TTS supports 30+ different voices, each with unique characteristics:

### Popular Voices

- **Kore** - Firm and confident (default)
- **Charon** - Informative and clear
- **Puck** - Upbeat and energetic
- **Zephyr** - Bright and cheerful
- **Leda** - Youthful and energetic
- **Aoede** - Breezy and natural

### Complete Voice List

- Kore, Charon, Puck, Fenrir, Aoede, Leda, Orus, Zephyr
- Autonoe, Callirhoe, Despina, Erinome, Gacrux, Laomedeia
- Pulcherrima, Sulafat, Vindemiatrix, Achernar, Achird
- Algenib, Algieba, Alnilam, Enceladus, Iapetus, Rasalgethi
- Sadachbia, Zubenelgenubi, Vega, Capella, Lyra

### Listing Available Voices

To see all available voices with descriptions:

```bash
# List all voices with characteristics
fabric --list-gemini-voices

# List voice names only (for shell completion)
fabric --list-gemini-voices --shell-complete-list
```

## Rate Limits

Google Gemini TTS has usage quotas that vary by plan:

### Free Tier

- **15 requests per day** per project per TTS model
- Quota resets daily
- Applies to all TTS models (e.g., `gemini-2.5-flash-preview-tts`)

### Rate Limit Errors

If you exceed your quota, you'll see an error like:

```text
Error 429: You exceeded your current quota, please check your plan and billing details
```

**Solutions:**

- Wait for the daily quota reset (typically at midnight UTC)
- Upgrade to a paid plan for higher limits
- Use TTS generation strategically for important content

For current rate limits and pricing, visit: <https://ai.google.dev/gemini-api/docs/rate-limits>

## Configuration

### Command Line Options

- `--voice <voice_name>` - Specify the TTS voice to use
- `-o <filename.wav>` - Output audio file (required for TTS models)
- `-m <tts_model>` - Specify a TTS-capable model (e.g., `gemini-2.5-flash-preview-tts`)

### YAML Configuration

You can also set a default voice in your Fabric configuration file (`~/.config/fabric/config.yaml`):

```yaml
voice: "Charon" # Set your preferred default voice
```

## Requirements

- Valid Google Gemini API key configured in Fabric
- TTS-capable Gemini model (models containing "tts" in the name)
- Audio output must be specified with `-o filename.wav`

## Troubleshooting

### Common Issues

#### Error: "TTS model requires audio output"

- Solution: Always specify an output file with `-o filename.wav` when using TTS models

#### Error: "Invalid voice 'X'"

- Solution: Check that the voice name is spelled correctly and matches one of the supported voices listed above

#### Error: "TTS generation failed"

- Solution: Verify your Gemini API key is valid and you have sufficient quota

### Getting Help

For additional help with TTS features:

```bash
fabric --help
```

## Technical Details

- **Audio Format**: WAV files with 24kHz sample rate, 16-bit depth, mono channel
- **Language Support**: Automatic language detection for 24+ languages
- **Model Requirements**: Models must contain "tts", "preview-tts", or "text-to-speech" in the name (see the sketch below)
- **Voice Selection**: Uses Google's PrebuiltVoiceConfig system for consistent voice quality
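A sketch of the name check implied by the model requirements above (a lowercase substring match is an assumption about the implementation):

```go
// isTTSModel reports whether a model name looks TTS-capable; note that
// "preview-tts" already contains "tts", so two substrings suffice.
func isTTSModel(model string) bool {
	m := strings.ToLower(model)
	return strings.Contains(m, "tts") || strings.Contains(m, "text-to-speech")
}
```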

---

For more information about Fabric, visit the [main documentation](../README.md).

124
docs/Shell-Completions.md
Normal file
@@ -0,0 +1,124 @@

# Shell Completions for Fabric

Fabric comes with shell completion support for Zsh, Bash, and Fish shells. These completions provide intelligent tab-completion for commands, flags, patterns, models, contexts, and more.

## Quick Setup (Automated)

For a quick automated installation, use the setup script:

```bash
# Run the automated setup script
./completions/setup-completions.sh

# Or see what it would do first
./completions/setup-completions.sh --dry-run
```

The script will:

- Detect whether you have `fabric` or `fabric-ai` installed
- Detect your current shell (zsh, bash, or fish)
- Use your existing `$fpath` directories (for zsh) or standard completion directories
- Install the completion file with the correct name
- Provide instructions for enabling the completions

For manual installation or troubleshooting, see the detailed instructions below.

## Manual Installation

### Zsh

1. Copy the completion file to a directory in your `$fpath`:

   ```bash
   sudo cp completions/_fabric /usr/local/share/zsh/site-functions/
   ```

2. **Important**: If you installed fabric as `fabric-ai`, create a symlink so completions work:

   ```bash
   sudo ln -s /usr/local/share/zsh/site-functions/_fabric /usr/local/share/zsh/site-functions/_fabric-ai
   ```

3. Restart your shell or reload completions:

   ```bash
   autoload -U compinit && compinit
   ```

### Bash

1. Copy the completion file to a standard completion directory:

   ```bash
   # System-wide installation
   sudo cp completions/fabric.bash /etc/bash_completion.d/

   # Or user-specific installation
   mkdir -p ~/.local/share/bash-completion/completions/
   cp completions/fabric.bash ~/.local/share/bash-completion/completions/fabric
   ```

2. **Important**: If you installed fabric as `fabric-ai`, create a symlink:

   ```bash
   # For system-wide installation
   sudo ln -s /etc/bash_completion.d/fabric.bash /etc/bash_completion.d/fabric-ai.bash

   # Or for user-specific installation
   ln -s ~/.local/share/bash-completion/completions/fabric ~/.local/share/bash-completion/completions/fabric-ai
   ```

3. Restart your shell or source the completion:

   ```bash
   source ~/.bashrc
   ```

### Fish

1. Copy the completion file to Fish's completion directory:

   ```bash
   mkdir -p ~/.config/fish/completions
   cp completions/fabric.fish ~/.config/fish/completions/
   ```

2. **Important**: If you installed fabric as `fabric-ai`, create a symlink:

   ```bash
   ln -s ~/.config/fish/completions/fabric.fish ~/.config/fish/completions/fabric-ai.fish
   ```

3. Fish will automatically load the completions (no restart needed).

## Features

The completions provide intelligent suggestions for:

- **Patterns**: Tab-complete available patterns with `-p` or `--pattern`
- **Models**: Tab-complete available models with `-m` or `--model`
- **Contexts**: Tab-complete contexts for context-related flags
- **Sessions**: Tab-complete sessions for session-related flags
- **Strategies**: Tab-complete available strategies
- **Extensions**: Tab-complete registered extensions
- **Gemini Voices**: Tab-complete TTS voices for `--voice`
- **File paths**: Smart file completion for attachment, output, and config options
- **Flag completion**: All available command-line flags and options

## Alternative Installation Method

You can also source the completion files directly in your shell's configuration file:

- **Zsh**: Add to `~/.zshrc`: `source /path/to/fabric/completions/_fabric`
- **Bash**: Add to `~/.bashrc`: `source /path/to/fabric/completions/fabric.bash`
- **Fish**: The file-based installation method above is preferred for Fish

## Troubleshooting

- If completions don't work, ensure the completion files have proper permissions
- For Zsh, verify that the completion directory is in your `$fpath`
- If you renamed the fabric binary, make sure to create the appropriate symlinks as described above
- Restart your shell after installation to ensure completions are loaded

The completion system dynamically queries the fabric command for current patterns, models, and other resources, so your completions will always be up-to-date with your fabric installation.

298
docs/YouTube-Processing.md
Normal file
@@ -0,0 +1,298 @@

# YouTube Processing with Fabric

Fabric provides powerful YouTube video processing capabilities that allow you to extract transcripts, comments, and metadata from YouTube videos and playlists. This guide covers all the available options and common use cases.

## Prerequisites

- **yt-dlp**: Required for transcript extraction. Install on macOS with:

  ```bash
  brew install yt-dlp
  ```

  Or use the package manager of your choice for your operating system.

  See the [yt-dlp wiki page](https://github.com/yt-dlp/yt-dlp/wiki/Installation) for your specific installation instructions.

- **YouTube API Key** (optional): Only needed for comments and metadata extraction. Configure with:

  ```bash
  fabric --setup
  ```

## Basic Usage

### Extract Transcript

Extract a video transcript and process it with a pattern:

```bash
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern summarize
```

### Extract Transcript with Timestamps

Get a transcript with timestamps preserved:

```bash
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --transcript-with-timestamps --pattern extract_wisdom
```

### Extract Comments

Get video comments (requires a YouTube API key):

```bash
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --comments --pattern analyze_claims
```

### Extract Metadata

Get video metadata as JSON:

```bash
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --metadata
```

## Advanced Options

### Custom yt-dlp Arguments

Pass additional arguments to yt-dlp for advanced functionality. **User-provided arguments take precedence** over built-in fabric arguments, giving you full control:

```bash
# Use browser cookies for age-restricted or private videos
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--cookies-from-browser brave"

# Override language selection (takes precedence over -g flag)
fabric -g en -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--sub-langs es,fr"

# Use a specific format
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--format best"

# Handle rate limiting (slow down requests)
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--sleep-requests 1"

# Multiple arguments (use quotes)
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--cookies-from-browser firefox --write-info-json"

# Combine rate limiting with authentication
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--cookies-from-browser brave --sleep-requests 1"

# Override subtitle format (takes precedence over built-in --sub-format vtt)
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --yt-dlp-args="--sub-format srt"
```

#### Argument Precedence

Fabric constructs the yt-dlp command in this order:

1. **Built-in base arguments** (`--write-auto-subs`, `--skip-download`, etc.)
2. **Language selection** (from the `-g` flag): `--sub-langs LANGUAGE`
3. **User arguments** (from `--yt-dlp-args`): **These override any conflicting built-in arguments**
4. **Video URL**

This means you can override any built-in behavior by specifying it in `--yt-dlp-args`.
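A minimal sketch of this assembly order; the variable names (`language`, `userArgs`, `videoURL`) are illustrative, and the override behavior relies on yt-dlp honoring later flags over earlier ones, as this guide describes:

```go
// Assemble the yt-dlp invocation in the precedence order listed above.
args := []string{"--write-auto-subs", "--skip-download", "--sub-format", "vtt"}
if language != "" { // from the -g flag
	args = append(args, "--sub-langs", language)
}
args = append(args, userArgs...) // from --yt-dlp-args; overrides built-ins
args = append(args, videoURL)
cmd := exec.Command("yt-dlp", args...)
```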

### Playlist Processing

Process entire playlists:

```bash
# Process all videos in a playlist
fabric -y "https://www.youtube.com/playlist?list=PLAYLIST_ID" --playlist --pattern summarize

# Save playlist videos to CSV
fabric -y "https://www.youtube.com/playlist?list=PLAYLIST_ID" --playlist -o playlist.csv
```

### Language Support

Specify the transcript language:

```bash
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" -g es --pattern translate
```

## Combining Options

You can combine multiple YouTube processing options:

```bash
# Get transcript, comments, and metadata
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" \
  --transcript \
  --comments \
  --metadata \
  --pattern comprehensive_analysis
```

## Output Options

### Save to File

```bash
# Save output to a file
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern summarize -o summary.md

# Save the entire session including input
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern summarize --output-session -o full_session.md
```

### Stream Output

Get real-time streaming output:

```bash
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern summarize --stream
```

## Common Use Cases

### Content Analysis

```bash
# Analyze video content for key insights
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern extract_wisdom

# Check claims made in the video
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern analyze_claims
```

### Educational Content

```bash
# Create study notes from educational videos
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern create_study_notes

# Extract key concepts and definitions
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern extract_concepts
```

### Meeting/Conference Processing

```bash
# Summarize conference talks with timestamps
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" \
  --transcript-with-timestamps \
  --pattern meeting_summary

# Extract action items from recorded meetings
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern extract_action_items
```

### Content Creation

```bash
# Create social media posts from video content
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern create_social_posts

# Generate a blog post from a video transcript
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" --pattern write_blog_post
```

## Troubleshooting

### Common Issues

1. **"yt-dlp not found"**: Install yt-dlp using pip or your package manager
2. **Age-restricted videos**: Use `--yt-dlp-args="--cookies-from-browser BROWSER"`
3. **No subtitles available**: Some videos don't have auto-generated subtitles
4. **API rate limits**: The YouTube API has daily quotas for comments/metadata
5. **HTTP 429 errors**: YouTube is rate limiting subtitle requests

### Error Messages

- **"YouTube is not configured"**: Run `fabric --setup` to configure the YouTube API
- **"yt-dlp failed"**: Check the video URL and try `--yt-dlp-args` for authentication
- **"No transcript content found"**: The video may not have subtitles available
- **"HTTP Error 429: Too Many Requests"**: YouTube rate limit exceeded. This is increasingly common. Solutions:
  - **Wait 10-30 minutes and try again** (most effective)
  - Use a longer sleep: `--yt-dlp-args="--sleep-requests 5"`
  - Try with browser cookies: `--yt-dlp-args="--cookies-from-browser brave --sleep-requests 5"`
  - **Try a different video** - some videos are less restricted
  - **Use a VPN** - a different IP address may help
  - **Try without language specification** - let yt-dlp choose any available language
  - **Try English instead** - `fabric -g en` (English subtitles may be less rate-limited)

### Language Fallback Behavior

When you specify a language (e.g., `-g es` for Spanish) but that language isn't available or fails to download:

1. **Automatic fallback**: Fabric automatically retries without a language specification (sketched below)
2. **Smart file detection**: If the fallback downloads a different language (e.g., English), Fabric will automatically detect and use it
3. **No manual intervention needed**: The process is transparent to the user
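A sketch of the fallback control flow; the `runYtDlp` helper (taking a URL plus optional extra arguments) is a stand-in, not Fabric's actual function:

```go
// Try the requested language first, then retry without --sub-langs and
// use whatever subtitle file yt-dlp produces. runYtDlp is hypothetical.
subs, err := runYtDlp(url, "--sub-langs", language)
if err != nil {
	subs, err = runYtDlp(url) // fallback: let yt-dlp pick any language
}
```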

```bash
# Even if Spanish isn't available, this will work with whatever language yt-dlp finds
fabric -g es -y "https://youtube.com/watch?v=VIDEO_ID" --pattern summarize
```

## Configuration

### YAML Configuration

You can set default yt-dlp arguments in your config file (`~/.config/fabric/config.yaml`):

```yaml
ytDlpArgs: "--cookies-from-browser brave --write-info-json"
```

### Environment Variables

Set up your YouTube API key:

```bash
export FABRIC_YOUTUBE_API_KEY="your_api_key_here"
```

## Tips and Best Practices

1. **Use specific patterns**: Choose patterns that match your use case for better results
2. **Combine with other tools**: Pipe output to other commands or save to files for further processing
3. **Batch processing**: Use playlists to process multiple videos efficiently
4. **Authentication**: Use browser cookies for accessing private or age-restricted content
5. **Language support**: Specify language codes for better transcript accuracy
6. **Rate limiting**: If you encounter 429 errors, use `--sleep-requests 1` to slow down requests
7. **Persistent settings**: Set common yt-dlp args in your config file to avoid repeating them
8. **Argument precedence**: Use `--yt-dlp-args` to override any built-in behavior when needed
9. **Testing**: Use `yt-dlp --list-subs URL` to see available subtitle languages before processing

## Examples

### Quick Video Summary

```bash
fabric -y "https://www.youtube.com/watch?v=dQw4w9WgXcQ" --pattern summarize --stream
```

### Detailed Analysis with Authentication

```bash
fabric -y "https://www.youtube.com/watch?v=VIDEO_ID" \
  --yt-dlp-args="--cookies-from-browser chrome" \
  --transcript-with-timestamps \
  --comments \
  --pattern comprehensive_analysis \
  -o analysis.md
```

### Playlist Processing

```bash
fabric -y "https://www.youtube.com/playlist?list=PLrAXtmRdnEQy6nuLvVUxpDnx4C0823vBN" \
  --playlist \
  --pattern extract_wisdom \
  -o playlist_wisdom.md
```

### Override Built-in Language Selection

```bash
# Built-in language selection (-g es) is overridden by user args
fabric -g es -y "https://www.youtube.com/watch?v=VIDEO_ID" \
  --yt-dlp-args="--sub-langs fr,de,en" \
  --pattern translate
```

For more patterns and advanced usage, see the main [Fabric documentation](../README.md).

BIN
docs/images/svelte-preview.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.1 MiB

21
docs/notification-config.yaml
Normal file
@@ -0,0 +1,21 @@

# Example Fabric configuration with notification support
# Save this to ~/.config/fabric/config.yaml to use as defaults

# Enable notifications by default for all commands
notification: true

# Optional: Use a custom notification command
# Examples:
# macOS with custom sound:
# notificationCommand: 'osascript -e "display notification \"$2\" with title \"$1\" sound name \"Ping\""'
#
# Linux with custom urgency:
# notificationCommand: 'notify-send --urgency=normal "$1" "$2"'
#
# Custom script:
# notificationCommand: '/path/to/custom-notification-script.sh "$1" "$2"'

# Other common settings
model: "gpt-4o"
temperature: 0.7
stream: true

36
docs/voices/README.md
Normal file
@@ -0,0 +1,36 @@

# Voice Samples

This directory contains sample audio files demonstrating different Gemini TTS voices.

## Sample Files

Each voice sample says "The quick brown fox jumped over the lazy dog" to demonstrate the voice characteristics:

- **Kore.wav** - Firm and confident (default voice)
- **Charon.wav** - Informative and clear
- **Vega.wav** - Smooth and pleasant
- **Capella.wav** - Warm and welcoming
- **Achird.wav** - Friendly and approachable
- **Lyra.wav** - Melodic and expressive

## Generating Samples

To generate these samples, use the following commands:

```bash
# Generate each voice sample
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Kore -o docs/voices/Kore.wav
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Charon -o docs/voices/Charon.wav
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Vega -o docs/voices/Vega.wav
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Capella -o docs/voices/Capella.wav
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Achird -o docs/voices/Achird.wav
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Lyra -o docs/voices/Lyra.wav
```

## Audio Format

- **Format**: WAV (uncompressed)
- **Sample Rate**: 24kHz
- **Bit Depth**: 16-bit
- **Channels**: Mono
- **Approximate Size**: ~500KB per sample

@@ -71,6 +71,7 @@
        default = self.packages.${system}.fabric;
        fabric = pkgs.callPackage ./nix/pkgs/fabric {
          go = goVersion;
          inherit self;
          inherit (gomod2nix.legacyPackages.${system}) buildGoApplication;
        };
        inherit (gomod2nix.legacyPackages.${system}) gomod2nix;

31
go.mod
@@ -5,7 +5,7 @@ go 1.24.0
toolchain go1.24.2

require (
	github.com/anthropics/anthropic-sdk-go v1.4.0
	github.com/anthropics/anthropic-sdk-go v1.7.0
	github.com/atotto/clipboard v0.1.4
	github.com/aws/aws-sdk-go-v2 v1.36.4
	github.com/aws/aws-sdk-go-v2/config v1.27.27
@@ -15,29 +15,36 @@ require (
	github.com/gin-gonic/gin v1.10.1
	github.com/go-git/go-git/v5 v5.16.2
	github.com/go-shiori/go-readability v0.0.0-20250217085726-9f5bf5ca7612
	github.com/google/generative-ai-go v0.20.1
	github.com/google/go-github/v66 v66.0.0
	github.com/hasura/go-graphql-client v0.14.4
	github.com/jessevdk/go-flags v1.6.1
	github.com/joho/godotenv v1.5.1
	github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51
	github.com/mattn/go-sqlite3 v1.14.28
	github.com/ollama/ollama v0.9.0
	github.com/openai/openai-go v1.8.2
	github.com/otiai10/copy v1.14.1
	github.com/pkg/errors v0.9.1
	github.com/samber/lo v1.50.0
	github.com/sgaunet/perplexity-go/v2 v2.8.0
	github.com/spf13/cobra v1.9.1
	github.com/stretchr/testify v1.10.0
	golang.org/x/oauth2 v0.30.0
	golang.org/x/text v0.26.0
	golang.org/x/text v0.27.0
	google.golang.org/api v0.236.0
	gopkg.in/yaml.v3 v3.0.1
)

require (
	github.com/google/go-cmp v0.7.0 // indirect
	github.com/gorilla/websocket v1.5.3 // indirect
)

require (
	cloud.google.com/go v0.121.2 // indirect
	cloud.google.com/go/ai v0.12.1 // indirect
	cloud.google.com/go/auth v0.16.2 // indirect
	cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
	cloud.google.com/go/compute/metadata v0.7.0 // indirect
	cloud.google.com/go/longrunning v0.6.7 // indirect
	dario.cat/mergo v1.0.2 // indirect
	github.com/Microsoft/go-winio v0.6.2 // indirect
	github.com/ProtonMail/go-crypto v1.3.0 // indirect
@@ -59,6 +66,7 @@ require (
	github.com/bytedance/sonic/loader v0.2.4 // indirect
	github.com/cloudflare/circl v1.6.1 // indirect
	github.com/cloudwego/base64x v0.1.5 // indirect
	github.com/coder/websocket v1.8.13 // indirect
	github.com/cyphar/filepath-securejoin v0.4.1 // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/emirpasic/gods v1.18.1 // indirect
@@ -75,10 +83,12 @@ require (
	github.com/goccy/go-json v0.10.5 // indirect
	github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f // indirect
	github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
	github.com/google/go-querystring v1.1.0 // indirect
	github.com/google/s2a-go v0.1.9 // indirect
	github.com/google/uuid v1.6.0 // indirect
	github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
	github.com/googleapis/gax-go/v2 v2.14.2 // indirect
	github.com/inconshreveable/mousetrap v1.1.0 // indirect
	github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
	github.com/json-iterator/go v1.1.12 // indirect
	github.com/kevinburke/ssh_config v1.2.0 // indirect
@@ -89,10 +99,11 @@ require (
	github.com/modern-go/reflect2 v1.0.2 // indirect
	github.com/otiai10/mint v1.6.3 // indirect
	github.com/pelletier/go-toml/v2 v2.2.4 // indirect
	github.com/pjbgf/sha1cd v0.3.2 // indirect
	github.com/pjbgf/sha1cd v0.4.0 // indirect
	github.com/pmezard/go-difflib v1.0.0 // indirect
	github.com/sergi/go-diff v1.4.0 // indirect
	github.com/skeema/knownhosts v1.3.1 // indirect
	github.com/spf13/pflag v1.0.6 // indirect
	github.com/tidwall/gjson v1.18.0 // indirect
	github.com/tidwall/match v1.1.1 // indirect
	github.com/tidwall/pretty v1.2.1 // indirect
@@ -101,17 +112,17 @@ require (
	github.com/ugorji/go/codec v1.2.14 // indirect
	github.com/xanzy/ssh-agent v0.3.3 // indirect
	go.opentelemetry.io/auto/sdk v1.1.0 // indirect
	go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
	go.opentelemetry.io/otel v1.36.0 // indirect
	go.opentelemetry.io/otel/metric v1.36.0 // indirect
	go.opentelemetry.io/otel/trace v1.36.0 // indirect
	golang.org/x/arch v0.18.0 // indirect
	golang.org/x/crypto v0.39.0 // indirect
	golang.org/x/exp v0.0.0-20250531010427-b6e5de432a8b // indirect
	golang.org/x/net v0.41.0 // indirect
	golang.org/x/sync v0.15.0 // indirect
	golang.org/x/sys v0.33.0 // indirect
	golang.org/x/time v0.12.0 // indirect
	golang.org/x/sync v0.16.0 // indirect
	golang.org/x/sys v0.34.0 // indirect
	google.golang.org/genai v1.17.0
	google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
	google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
	google.golang.org/grpc v1.73.0 // indirect

60
go.sum
@@ -1,15 +1,11 @@
cloud.google.com/go v0.121.2 h1:v2qQpN6Dx9x2NmwrqlesOt3Ys4ol5/lFZ6Mg1B7OJCg=
cloud.google.com/go v0.121.2/go.mod h1:nRFlrHq39MNVWu+zESP2PosMWA0ryJw8KUBZ2iZpxbw=
cloud.google.com/go/ai v0.12.1 h1:m1n/VjUuHS+pEO/2R4/VbuuEIkgk0w67fDQvFaMngM0=
cloud.google.com/go/ai v0.12.1/go.mod h1:5vIPNe1ZQsVZqCliXIPL4QnhObQQY4d9hAGHdVc4iw4=
cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4=
cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA=
cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc=
cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU=
cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo=
cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE=
cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY=
dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8=
dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA=
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
@@ -21,8 +17,8 @@ github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kk
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
github.com/anthropics/anthropic-sdk-go v1.4.0 h1:fU1jKxYbQdQDiEXCxeW5XZRIOwKevn/PMg8Ay1nnUx0=
github.com/anthropics/anthropic-sdk-go v1.4.0/go.mod h1:AapDW22irxK2PSumZiQXYUFvsdQgkwIWlpESweWZI/c=
github.com/anthropics/anthropic-sdk-go v1.7.0 h1:5iVf5fG/2gqVsOce8mq02r/WdgqpokM/8DXg2Ue6C9Y=
github.com/anthropics/anthropic-sdk-go v1.7.0/go.mod h1:3qSNQ5NrAmjC8A2ykuruSQttfqfdEYNZY5o8c0XSHB8=
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA=
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw=
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
@@ -71,6 +67,9 @@ github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZ
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE=
github.com/coder/websocket v1.8.13/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -123,11 +122,14 @@ github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8J
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/generative-ai-go v0.20.1 h1:6dEIujpgN2V0PgLhr6c/M1ynRdc7ARtiIDPFzj45uNQ=
github.com/google/generative-ai-go v0.20.1/go.mod h1:TjOnZJmZKzarWbjUJgy+r3Ee7HGBRVLhOIgupnwR4Bg=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-github/v66 v66.0.0 h1:ADJsaXj9UotwdgK8/iFZtv7MLc8E8WBl62WLd/D/9+M=
github.com/google/go-github/v66 v66.0.0/go.mod h1:+4SO9Zkuyf8ytMj0csN1NR/5OTR+MfqPp8P8dVlcvY4=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
@@ -137,6 +139,12 @@ github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU
github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0=
github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hasura/go-graphql-client v0.14.4 h1:bYU7/+V50T2YBGdNQXt6l4f2cMZPECPUd8cyCR+ixtw=
github.com/hasura/go-graphql-client v0.14.4/go.mod h1:jfSZtBER3or+88Q9vFhWHiFMPppfYILRyl+0zsgPIIw=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4=
@@ -145,6 +153,8 @@ github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
@@ -163,6 +173,8 @@ github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjS
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEum7A=
github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -180,8 +192,8 @@ github.com/otiai10/mint v1.6.3 h1:87qsV/aw1F5as1eH1zS/yqHY85ANKVMgkDrf9rcxbQs=
github.com/otiai10/mint v1.6.3/go.mod h1:MJm72SBthJjz8qhefc4z1PYEieWmy8Bku7CjcAqyUSM=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
github.com/pjbgf/sha1cd v0.4.0 h1:NXzbL1RvjTUi6kgYZCX3fPwwl27Q1LJndxtUDVfJGRY=
github.com/pjbgf/sha1cd v0.4.0/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
@@ -189,6 +201,7 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/samber/lo v1.50.0 h1:XrG0xOeHs+4FQ8gJR97zDz5uOFMW7OwFWiFVzqopKgY=
github.com/samber/lo v1.50.0/go.mod h1:RjZyNk6WSnUFRKK6EyOhsRJMqft3G+pg7dCWHQCWvsc=
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
@@ -199,6 +212,10 @@ github.com/sgaunet/perplexity-go/v2 v2.8.0/go.mod h1:MSks4RNuivCi0GqJyylhFdgSJFV
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
@@ -230,8 +247,6 @@ github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg=
@@ -255,8 +270,8 @@ golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM=
golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U=
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa h1:t2QcU6V556bFjYgu4L6C+6VrCPyJZ+eyRsABUPs1mz4=
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
golang.org/x/exp v0.0.0-20250531010427-b6e5de432a8b h1:QoALfVG9rhQ/M7vYDScfPdWjGL9dlsVVM5VGh7aKoAA=
golang.org/x/exp v0.0.0-20250531010427-b6e5de432a8b/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
@@ -283,8 +298,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -301,8 +316,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -324,10 +339,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4=
golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
@@ -335,8 +348,11 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.236.0 h1:CAiEiDVtO4D/Qja2IA9VzlFrgPnK3XVMmRoJZlSWbc0=
google.golang.org/api v0.236.0/go.mod h1:X1WF9CU2oTc+Jml1tiIxGmWFK/UZezdqEu09gcxZAj4=
google.golang.org/genai v1.17.0 h1:lXYSnWShPYjxTouxRj0zF8RsNmSF+SKo7SQ7dM35NlI=
google.golang.org/genai v1.17.0/go.mod h1:QPj5NGJw+3wEOHg+PrsWwJKvG6UC84ex5FR7qAYsN/M=
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78=
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk=
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY=

185 internal/cli/chat.go Normal file
@@ -0,0 +1,185 @@
package cli

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"strings"

	"github.com/danielmiessler/fabric/internal/core"
	"github.com/danielmiessler/fabric/internal/domain"
	"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
	"github.com/danielmiessler/fabric/internal/tools/notifications"
)

// handleChatProcessing handles the main chat processing logic
func handleChatProcessing(currentFlags *Flags, registry *core.PluginRegistry, messageTools string) (err error) {
	if messageTools != "" {
		currentFlags.AppendMessage(messageTools)
	}

	var chatter *core.Chatter
	if chatter, err = registry.GetChatter(currentFlags.Model, currentFlags.ModelContextLength,
		currentFlags.Vendor, currentFlags.Strategy, currentFlags.Stream, currentFlags.DryRun); err != nil {
		return
	}

	var session *fsdb.Session
	var chatReq *domain.ChatRequest
	if chatReq, err = currentFlags.BuildChatRequest(strings.Join(os.Args[1:], " ")); err != nil {
		return
	}

	if chatReq.Language == "" {
		chatReq.Language = registry.Language.DefaultLanguage.Value
	}
	var chatOptions *domain.ChatOptions
	if chatOptions, err = currentFlags.BuildChatOptions(); err != nil {
		return
	}

	// Check if user is requesting audio output or using a TTS model
	isAudioOutput := currentFlags.Output != "" && IsAudioFormat(currentFlags.Output)
	isTTSModel := isTTSModel(currentFlags.Model)

	if isTTSModel && !isAudioOutput {
		err = fmt.Errorf("TTS model '%s' requires audio output. Please specify an audio output file with -o flag (e.g., -o output.wav)", currentFlags.Model)
		return
	}

	if isAudioOutput && !isTTSModel {
		err = fmt.Errorf("audio output file '%s' specified but model '%s' is not a TTS model. Please use a TTS model like gemini-2.5-flash-preview-tts", currentFlags.Output, currentFlags.Model)
		return
	}

	// For TTS models, check if output file already exists BEFORE processing
	if isTTSModel && isAudioOutput {
		outputFile := currentFlags.Output
		// Add .wav extension if not provided
		if filepath.Ext(outputFile) == "" {
			outputFile += ".wav"
		}
		if _, err = os.Stat(outputFile); err == nil {
			err = fmt.Errorf("file %s already exists. Please choose a different filename or remove the existing file", outputFile)
			return
		}
	}

	// Set audio options in chat config
	chatOptions.AudioOutput = isAudioOutput
	if isAudioOutput {
		chatOptions.AudioFormat = "wav" // Default to WAV format
	}

	if session, err = chatter.Send(chatReq, chatOptions); err != nil {
		return
	}

	result := session.GetLastMessage().Content

	if !currentFlags.Stream || currentFlags.SuppressThink {
		// For TTS models with audio output, show a user-friendly message instead of raw data
		if isTTSModel && isAudioOutput && strings.HasPrefix(result, "FABRIC_AUDIO_DATA:") {
			fmt.Printf("TTS audio generated successfully and saved to: %s\n", currentFlags.Output)
		} else {
			// print the result if it was not streamed already or suppress-think disabled streaming output
			fmt.Println(result)
		}
	}

	// if the copy flag is set, copy the message to the clipboard
	if currentFlags.Copy {
		if err = CopyToClipboard(result); err != nil {
			return
		}
	}

	// if the output flag is set, create an output file
	if currentFlags.Output != "" {
		if currentFlags.OutputSession {
			sessionAsString := session.String()
			err = CreateOutputFile(sessionAsString, currentFlags.Output)
		} else {
			// For TTS models, we need to handle audio output differently
			if isTTSModel && isAudioOutput {
				// Check if result contains actual audio data
				if strings.HasPrefix(result, "FABRIC_AUDIO_DATA:") {
					// Extract the binary audio data
					audioData := result[len("FABRIC_AUDIO_DATA:"):]
					err = CreateAudioOutputFile([]byte(audioData), currentFlags.Output)
				} else {
					// Fallback for any error messages or unexpected responses
					err = CreateOutputFile(result, currentFlags.Output)
				}
			} else {
				err = CreateOutputFile(result, currentFlags.Output)
			}
		}
	}

	// Send notification if requested
	if chatOptions.Notification {
		if err = sendNotification(chatOptions, chatReq.PatternName, result); err != nil {
			// Log notification error but don't fail the main command
			fmt.Fprintf(os.Stderr, "Failed to send notification: %v\n", err)
		}
	}

	return
}

// sendNotification sends a desktop notification about command completion.
//
// When truncating the result for notification display, this function counts Unicode code points,
// not grapheme clusters. As a result, complex emoji or accented characters with multiple combining
// characters may be truncated improperly. This is a limitation of the current implementation.
func sendNotification(options *domain.ChatOptions, patternName, result string) error {
	title := "Fabric Command Complete"
	if patternName != "" {
		title = fmt.Sprintf("Fabric: %s Complete", patternName)
	}

	// Limit message length for notification display (counts Unicode code points)
	message := "Command completed successfully"
	if result != "" {
		maxLength := 100
		runes := []rune(result)
		if len(runes) > maxLength {
			message = fmt.Sprintf("Output: %s...", string(runes[:maxLength]))
		} else {
			message = fmt.Sprintf("Output: %s", result)
		}
		// Clean up newlines for notification display
		message = strings.ReplaceAll(message, "\n", " ")
	}

	// Use custom notification command if provided
	if options.NotificationCommand != "" {
		// SECURITY: Pass title and message as proper shell positional arguments $1 and $2
		// This matches the documented interface where custom commands receive title and message as shell variables
		cmd := exec.Command("sh", "-c", options.NotificationCommand+" \"$1\" \"$2\"", "--", title, message)

		// For debugging: capture and display output from custom commands
		cmd.Stdout = os.Stdout
		cmd.Stderr = os.Stderr

		return cmd.Run()
	}

	// Use built-in notification system
	notificationManager := notifications.NewNotificationManager()
	if !notificationManager.IsAvailable() {
		return fmt.Errorf("no notification system available")
	}

	return notificationManager.Send(title, message)
}

// isTTSModel checks if the model is a text-to-speech model
func isTTSModel(modelName string) bool {
	lowerModel := strings.ToLower(modelName)
	return strings.Contains(lowerModel, "tts") ||
		strings.Contains(lowerModel, "preview-tts") ||
		strings.Contains(lowerModel, "text-to-speech")
}
}
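
The custom-notification path above avoids shell injection by handing the title and message to `sh` as positional parameters instead of interpolating them into the command string. A minimal standalone sketch of that same pattern follows; the `echo` stand-in and the sample strings are illustrative placeholders, not part of Fabric:

package main

import (
	"os"
	"os/exec"
)

func main() {
	title := "Fabric: summarize Complete"
	// These metacharacters stay inert because they arrive as data, never re-parsed by the shell.
	message := "Output: `whoami`; $(date)"

	userCommand := "echo" // stand-in for options.NotificationCommand

	// "--" fills $0, so title and message land in $1 and $2; the appended
	// "$1" "$2" passes them to the user command without any manual quoting.
	cmd := exec.Command("sh", "-c", userCommand+" \"$1\" \"$2\"", "--", title, message)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		os.Exit(1)
	}
}

Running this prints the title and message verbatim; the backtick and $( ) substitutions are never executed, which is exactly what the SECURITY comment in sendNotification relies on.
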

166 internal/cli/chat_test.go Normal file
@@ -0,0 +1,166 @@
package cli

import (
	"strings"
	"testing"

	"github.com/danielmiessler/fabric/internal/domain"
)

func TestSendNotification_SecurityEscaping(t *testing.T) {
	tests := []struct {
		name        string
		title       string
		message     string
		command     string
		expectError bool
		description string
	}{
		{
			name:        "Normal content",
			title:       "Test Title",
			message:     "Test message content",
			command:     `echo "Title: $1, Message: $2"`,
			expectError: false,
			description: "Normal content should work fine",
		},
		{
			name:        "Content with backticks",
			title:       "Test Title",
			message:     "Test `whoami` injection",
			command:     `echo "Title: $1, Message: $2"`,
			expectError: false,
			description: "Backticks should be escaped and not executed",
		},
		{
			name:        "Content with semicolon injection",
			title:       "Test Title",
			message:     "Test; echo INJECTED; echo end",
			command:     `echo "Title: $1, Message: $2"`,
			expectError: false,
			description: "Semicolon injection should be prevented",
		},
		{
			name:        "Content with command substitution",
			title:       "Test Title",
			message:     "Test $(whoami) injection",
			command:     `echo "Title: $1, Message: $2"`,
			expectError: false,
			description: "Command substitution should be escaped",
		},
		{
			name:        "Content with quote injection",
			title:       "Test Title",
			message:     "Test ' || echo INJECTED || echo ' end",
			command:     `echo "Title: $1, Message: $2"`,
			expectError: false,
			description: "Quote injection should be prevented",
		},
		{
			name:        "Content with newlines",
			title:       "Test Title",
			message:     "Line 1\nLine 2\nLine 3",
			command:     `echo "Title: $1, Message: $2"`,
			expectError: false,
			description: "Newlines should be handled safely",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			options := &domain.ChatOptions{
				NotificationCommand: tt.command,
				Notification:        true,
			}

			// This test mainly verifies that the function doesn't panic
			// and properly escapes dangerous content. The actual command
			// execution is tested separately in integration tests.
			err := sendNotification(options, "test_pattern", tt.message)

			if tt.expectError && err == nil {
				t.Errorf("Expected error for %s, but got none", tt.description)
			}

			if !tt.expectError && err != nil {
				t.Errorf("Unexpected error for %s: %v", tt.description, err)
			}
		})
	}
}

func TestSendNotification_TitleGeneration(t *testing.T) {
	tests := []struct {
		name        string
		patternName string
		expected    string
	}{
		{
			name:        "No pattern name",
			patternName: "",
			expected:    "Fabric Command Complete",
		},
		{
			name:        "With pattern name",
			patternName: "summarize",
			expected:    "Fabric: summarize Complete",
		},
		{
			name:        "Pattern with special characters",
			patternName: "test_pattern-v2",
			expected:    "Fabric: test_pattern-v2 Complete",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			options := &domain.ChatOptions{
				NotificationCommand: `echo "Title: $1"`,
				Notification:        true,
			}

			// We're testing the title generation logic
			// The actual notification command would echo the title
			err := sendNotification(options, tt.patternName, "test message")

			// The function should not error for valid inputs
			if err != nil {
				t.Errorf("Unexpected error: %v", err)
			}
		})
	}
}

func TestSendNotification_MessageTruncation(t *testing.T) {
	longMessage := strings.Repeat("A", 150) // 150 characters
	shortMessage := "Short message"

	tests := []struct {
		name     string
		message  string
		expected string
	}{
		{
			name:    "Short message",
			message: shortMessage,
		},
		{
			name:    "Long message truncation",
			message: longMessage,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			options := &domain.ChatOptions{
				NotificationCommand: `echo "Message: $2"`,
				Notification:        true,
			}

			err := sendNotification(options, "test", tt.message)
			if err != nil {
				t.Errorf("Unexpected error: %v", err)
			}
		})
	}
}
}
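
The truncation tested above counts runes, and the sendNotification doc comment flags the consequence: a grapheme cluster built from several code points can be cut in the middle. A toy illustration of that caveat (the sample string and cut length are arbitrary):

package main

import "fmt"

func main() {
	// The family emoji is one grapheme cluster but several runes
	// (person emoji joined by zero-width joiners), so a rune-based
	// cut can split it and leave a broken glyph in the notification.
	result := "👨‍👩‍👧 run finished"
	runes := []rune(result)
	maxLength := 2
	fmt.Printf("Output: %s...\n", string(runes[:maxLength])) // first emoji plus a dangling joiner
}
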

@@ -4,18 +4,12 @@ import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"strconv"
	"strings"

	"github.com/danielmiessler/fabric/internal/tools/youtube"

	"github.com/danielmiessler/fabric/internal/core"
	"github.com/danielmiessler/fabric/internal/domain"
	"github.com/danielmiessler/fabric/internal/plugins/ai"
	"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
	restapi "github.com/danielmiessler/fabric/internal/server"
	"github.com/danielmiessler/fabric/internal/plugins/ai/openai"
	"github.com/danielmiessler/fabric/internal/tools/converter"
	"github.com/danielmiessler/fabric/internal/tools/youtube"
)

// Cli Controls the cli. It takes in the flags and runs the appropriate functions
@@ -25,282 +19,83 @@ func Cli(version string) (err error) {
		return
	}

	if currentFlags.Setup {
		if err = ensureEnvFile(); err != nil {
			return
		}
	}

	if currentFlags.Version {
		fmt.Println(version)
		return
	}

	var homedir string
	if homedir, err = os.UserHomeDir(); err != nil {
		return
	}

	fabricDb := fsdb.NewDb(filepath.Join(homedir, ".config/fabric"))

	if err = fabricDb.Configure(); err != nil {
	// Initialize database and registry
	var registry, err2 = initializeFabric()
	if err2 != nil {
		if !currentFlags.Setup {
			println(err.Error())
			fmt.Fprintln(os.Stderr, err2.Error())
			currentFlags.Setup = true
		}
	}

	var registry *core.PluginRegistry
	if registry, err = core.NewPluginRegistry(fabricDb); err != nil {
		return
	}

	// if the setup flag is set, run the setup function
	if currentFlags.Setup {
		err = registry.Setup()
		return
	}

	if currentFlags.Serve {
		registry.ConfigureVendors()
		err = restapi.Serve(registry, currentFlags.ServeAddress, currentFlags.ServeAPIKey)
		return
	}

	if currentFlags.ServeOllama {
		registry.ConfigureVendors()
		err = restapi.ServeOllama(registry, currentFlags.ServeAddress, version)
		return
	}

	if currentFlags.UpdatePatterns {
		err = registry.PatternsLoader.PopulateDB()
		return
	}

	if currentFlags.ChangeDefaultModel {
		if err = registry.Defaults.Setup(); err != nil {
			return
	// Return early if registry is nil to prevent panics in subsequent handlers
	if registry == nil {
		return err2
	}
		err = registry.SaveEnvFile()
	}

	// Configure OpenAI Responses API setting based on CLI flag
	if registry != nil {
		configureOpenAIResponsesAPI(registry, currentFlags.DisableResponsesAPI)
	}

	// Handle setup and server commands
	var handled bool
	if handled, err = handleSetupAndServerCommands(currentFlags, registry, version); err != nil || handled {
		return
	}

	if currentFlags.LatestPatterns != "0" {
		var parsedToInt int
		if parsedToInt, err = strconv.Atoi(currentFlags.LatestPatterns); err != nil {
			return
		}

		if err = fabricDb.Patterns.PrintLatestPatterns(parsedToInt); err != nil {
			return
		}
	// Handle configuration commands
	if handled, err = handleConfigurationCommands(currentFlags, registry); err != nil || handled {
		return
	}

	if currentFlags.ListPatterns {
		err = fabricDb.Patterns.ListNames(currentFlags.ShellCompleteOutput)
	// Handle listing commands
	if handled, err = handleListingCommands(currentFlags, registry.Db, registry); err != nil || handled {
		return
	}

	if currentFlags.ListAllModels {
		var models *ai.VendorsModels
		if models, err = registry.VendorManager.GetModels(); err != nil {
			return
		}
		models.Print(currentFlags.ShellCompleteOutput)
	// Handle management commands
	if handled, err = handleManagementCommands(currentFlags, registry.Db); err != nil || handled {
		return
	}

	if currentFlags.ListAllContexts {
		err = fabricDb.Contexts.ListNames(currentFlags.ShellCompleteOutput)
		return
	}

	if currentFlags.ListAllSessions {
		err = fabricDb.Sessions.ListNames(currentFlags.ShellCompleteOutput)
		return
	}

	if currentFlags.WipeContext != "" {
		err = fabricDb.Contexts.Delete(currentFlags.WipeContext)
		return
	}

	if currentFlags.WipeSession != "" {
		err = fabricDb.Sessions.Delete(currentFlags.WipeSession)
		return
	}

	if currentFlags.PrintSession != "" {
		err = fabricDb.Sessions.PrintSession(currentFlags.PrintSession)
		return
	}

	if currentFlags.PrintContext != "" {
		err = fabricDb.Contexts.PrintContext(currentFlags.PrintContext)
	// Handle extension commands
	if handled, err = handleExtensionCommands(currentFlags, registry); err != nil || handled {
		return
	}

	// Process HTML readability if needed
	if currentFlags.HtmlReadability {
		if msg, cleanErr := converter.HtmlReadability(currentFlags.Message); cleanErr != nil {
			fmt.Println("use original input, because can't apply html readability", err)
			fmt.Println("use original input, because can't apply html readability", cleanErr)
		} else {
			currentFlags.Message = msg
		}
	}

	if currentFlags.ListExtensions {
		err = registry.TemplateExtensions.ListExtensions()
		return
	}

	if currentFlags.AddExtension != "" {
		err = registry.TemplateExtensions.RegisterExtension(currentFlags.AddExtension)
		return
	}

	if currentFlags.RemoveExtension != "" {
		err = registry.TemplateExtensions.RemoveExtension(currentFlags.RemoveExtension)
		return
	}

	if currentFlags.ListStrategies {
		err = registry.Strategies.ListStrategies(currentFlags.ShellCompleteOutput)
		return
	}

	if currentFlags.ListVendors {
		err = registry.ListVendors(os.Stdout)
		return
	}

	// if the interactive flag is set, run the interactive function
	// if currentFlags.Interactive {
	// 	interactive.Interactive()
	// }

	// if none of the above currentFlags are set, run the initiate chat function

	// Handle tool-based message processing
	var messageTools string

	if currentFlags.YouTube != "" {
		if !registry.YouTube.IsConfigured() {
			err = fmt.Errorf("YouTube is not configured, please run the setup procedure")
			return
		}

		var videoId string
		var playlistId string
		if videoId, playlistId, err = registry.YouTube.GetVideoOrPlaylistId(currentFlags.YouTube); err != nil {
			return
		} else if (videoId == "" || currentFlags.YouTubePlaylist) && playlistId != "" {
			if currentFlags.Output != "" {
				err = registry.YouTube.FetchAndSavePlaylist(playlistId, currentFlags.Output)
			} else {
				var videos []*youtube.VideoMeta
				if videos, err = registry.YouTube.FetchPlaylistVideos(playlistId); err != nil {
					err = fmt.Errorf("error fetching playlist videos: %v", err)
					return
				}

				for _, video := range videos {
					var message string
					if message, err = processYoutubeVideo(currentFlags, registry, video.Id); err != nil {
						return
					}

					if !currentFlags.IsChatRequest() {
						if err = WriteOutput(message, fmt.Sprintf("%v.md", video.TitleNormalized)); err != nil {
							return
						}
					} else {
						messageTools = AppendMessage(messageTools, message)
					}
				}
			}
			return
		}

		if messageTools, err = processYoutubeVideo(currentFlags, registry, videoId); err != nil {
			return
		}
		if !currentFlags.IsChatRequest() {
			err = currentFlags.WriteOutput(messageTools)
			return
		}
	}

	if (currentFlags.ScrapeURL != "" || currentFlags.ScrapeQuestion != "") && registry.Jina.IsConfigured() {
		// Check if the scrape_url flag is set and call ScrapeURL
		if currentFlags.ScrapeURL != "" {
			var website string
			if website, err = registry.Jina.ScrapeURL(currentFlags.ScrapeURL); err != nil {
				return
			}
			messageTools = AppendMessage(messageTools, website)
		}

		// Check if the scrape_question flag is set and call ScrapeQuestion
		if currentFlags.ScrapeQuestion != "" {
			var website string
			if website, err = registry.Jina.ScrapeQuestion(currentFlags.ScrapeQuestion); err != nil {
				return
			}

			messageTools = AppendMessage(messageTools, website)
		}

		if !currentFlags.IsChatRequest() {
			err = currentFlags.WriteOutput(messageTools)
			return
		}
	}

	if messageTools != "" {
		currentFlags.AppendMessage(messageTools)
	}

	var chatter *core.Chatter
	if chatter, err = registry.GetChatter(currentFlags.Model, currentFlags.ModelContextLength,
		currentFlags.Strategy, currentFlags.Stream, currentFlags.DryRun); err != nil {
	if messageTools, err = handleToolProcessing(currentFlags, registry); err != nil {
		return
	}

	var session *fsdb.Session
	var chatReq *domain.ChatRequest
	if chatReq, err = currentFlags.BuildChatRequest(strings.Join(os.Args[1:], " ")); err != nil {
		return
	// Return early for non-chat tool operations
	if messageTools != "" && !currentFlags.IsChatRequest() {
		return nil
	}

	if chatReq.Language == "" {
		chatReq.Language = registry.Language.DefaultLanguage.Value
	}
	var chatOptions *domain.ChatOptions
	if chatOptions, err = currentFlags.BuildChatOptions(); err != nil {
		return
	}
	if session, err = chatter.Send(chatReq, chatOptions); err != nil {
		return
	}

	result := session.GetLastMessage().Content

	if !currentFlags.Stream {
		// print the result if it was not streamed already
		fmt.Println(result)
	}

	// if the copy flag is set, copy the message to the clipboard
	if currentFlags.Copy {
		if err = CopyToClipboard(result); err != nil {
			return
		}
	}

	// if the output flag is set, create an output file
	if currentFlags.Output != "" {
		if currentFlags.OutputSession {
			sessionAsString := session.String()
			err = CreateOutputFile(sessionAsString, currentFlags.Output)
		} else {
			err = CreateOutputFile(result, currentFlags.Output)
		}
	}
	// Handle chat processing
	err = handleChatProcessing(currentFlags, registry, messageTools)
	return
}

@@ -318,11 +113,11 @@ func processYoutubeVideo(
		}
	}
	if flags.YouTubeTranscriptWithTimestamps {
		if transcript, err = registry.YouTube.GrabTranscriptWithTimestamps(videoId, language); err != nil {
		if transcript, err = registry.YouTube.GrabTranscriptWithTimestampsWithArgs(videoId, language, flags.YtDlpArgs); err != nil {
			return
		}
	} else {
		if transcript, err = registry.YouTube.GrabTranscript(videoId, language); err != nil {
		if transcript, err = registry.YouTube.GrabTranscriptWithArgs(videoId, language, flags.YtDlpArgs); err != nil {
			return
		}
	}
@@ -359,3 +154,21 @@ func WriteOutput(message string, outputFile string) (err error) {
	}
	return
}

// configureOpenAIResponsesAPI configures the OpenAI client's Responses API setting based on the CLI flag
func configureOpenAIResponsesAPI(registry *core.PluginRegistry, disableResponsesAPI bool) {
	// Find the OpenAI vendor in the registry
	if registry != nil && registry.VendorsAll != nil {
		for _, vendor := range registry.VendorsAll.Vendors {
			if vendor.GetName() == "OpenAI" {
				// Type assertion to access the OpenAI-specific method
				if openaiClient, ok := vendor.(*openai.Client); ok {
					// Invert the disable flag to get the enable flag
					enableResponsesAPI := !disableResponsesAPI
					openaiClient.SetResponsesAPIEnabled(enableResponsesAPI)
				}
				break
			}
		}
	}
}
}
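
configureOpenAIResponsesAPI above combines a name check with a type assertion to reach a setter that only the concrete OpenAI client exposes. A self-contained sketch of that pattern; the Vendor interface and both client types here are simplified stand-ins for Fabric's real ones:

package main

import "fmt"

// Simplified stand-ins for the registry's vendor types.
type Vendor interface{ GetName() string }

type OpenAIClient struct{ responsesAPI bool }

func (c *OpenAIClient) GetName() string               { return "OpenAI" }
func (c *OpenAIClient) SetResponsesAPIEnabled(v bool) { c.responsesAPI = v }

type OtherVendor struct{ name string }

func (v OtherVendor) GetName() string { return v.name }

func main() {
	vendors := []Vendor{OtherVendor{"Anthropic"}, &OpenAIClient{responsesAPI: true}}
	disableResponsesAPI := true // e.g. set by --disable-responses-api

	for _, v := range vendors {
		if v.GetName() == "OpenAI" {
			// The type assertion is what grants access to the concrete-only setter.
			if c, ok := v.(*OpenAIClient); ok {
				c.SetResponsesAPIEnabled(!disableResponsesAPI)
			}
			break
		}
	}
	fmt.Println("vendors configured")
}

The break after the match keeps the loop from scanning the remaining vendors once the one OpenAI client has been configured, mirroring the real function.
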

28 internal/cli/configuration.go Normal file
@@ -0,0 +1,28 @@
package cli

import (
	"github.com/danielmiessler/fabric/internal/core"
)

// handleConfigurationCommands handles configuration-related commands
// Returns (handled, error) where handled indicates if a command was processed and should exit
func handleConfigurationCommands(currentFlags *Flags, registry *core.PluginRegistry) (handled bool, err error) {
	if currentFlags.UpdatePatterns {
		if err = registry.PatternsLoader.PopulateDB(); err != nil {
			return true, err
		}
		// Save configuration in case any paths were migrated during pattern loading
		err = registry.SaveEnvFile()
		return true, err
	}

	if currentFlags.ChangeDefaultModel {
		if err = registry.Defaults.Setup(); err != nil {
			return true, err
		}
		err = registry.SaveEnvFile()
		return true, err
	}

	return false, nil
}

@@ -18,4 +18,13 @@ temperature: 0.88
seed: 42

stream: true
raw: false
raw: false

# suppress vendor thinking output
suppressThink: false
thinkStartTag: "<think>"
thinkEndTag: "</think>"

# OpenAI Responses API settings
# (use this for llama-server or other OpenAI-compatible local servers)
disableResponsesAPI: true

26 internal/cli/extensions.go Normal file
@@ -0,0 +1,26 @@
package cli

import (
	"github.com/danielmiessler/fabric/internal/core"
)

// handleExtensionCommands handles extension-related commands
// Returns (handled, error) where handled indicates if a command was processed and should exit
func handleExtensionCommands(currentFlags *Flags, registry *core.PluginRegistry) (handled bool, err error) {
	if currentFlags.ListExtensions {
		err = registry.TemplateExtensions.ListExtensions()
		return true, err
	}

	if currentFlags.AddExtension != "" {
		err = registry.TemplateExtensions.RegisterExtension(currentFlags.AddExtension)
		return true, err
	}

	if currentFlags.RemoveExtension != "" {
		err = registry.TemplateExtensions.RemoveExtension(currentFlags.RemoveExtension)
		return true, err
	}

	return false, nil
}

@@ -20,6 +20,8 @@ import (
)

// Flags create flags struct. the users flags go into this, this will be passed to the chat struct in cli
// Chat parameter defaults set in the struct tags must match domain.Default* constants

type Flags struct {
	Pattern          string            `short:"p" long:"pattern" yaml:"pattern" description:"Choose a pattern from the available patterns" default:""`
	PatternVariables map[string]string `short:"v" long:"variable" description:"Values for pattern variables, e.g. -v=#role:expert -v=#points:30"`
@@ -41,6 +43,7 @@ type Flags struct {
	Message            string `hidden:"true" description:"Messages to send to chat"`
	Copy               bool   `short:"c" long:"copy" description:"Copy to clipboard"`
	Model              string `short:"m" long:"model" yaml:"model" description:"Choose model"`
	Vendor             string `short:"V" long:"vendor" yaml:"vendor" description:"Specify vendor for the selected model (e.g., -V \"LM Studio\" -m openai/gpt-oss-20b)"`
	ModelContextLength int    `long:"modelContextLength" yaml:"modelContextLength" description:"Model context length (only affects ollama)"`
	Output             string `short:"o" long:"output" description:"Output to file" default:""`
	OutputSession      bool   `long:"output-session" description:"Output the entire session (also a temporary one) to the output file"`
@@ -52,6 +55,7 @@ type Flags struct {
	YouTubeTranscriptWithTimestamps bool   `long:"transcript-with-timestamps" description:"Grab transcript from YouTube video with timestamps and send to chat"`
	YouTubeComments                 bool   `long:"comments" description:"Grab comments from YouTube video and send to chat"`
	YouTubeMetadata                 bool   `long:"metadata" description:"Output video metadata"`
	YtDlpArgs                       string `long:"yt-dlp-args" yaml:"ytDlpArgs" description:"Additional arguments to pass to yt-dlp (e.g. '--cookies-from-browser brave')"`
	Language                        string `short:"g" long:"language" description:"Specify the Language Code for the chat, e.g. -g=en -g=zh" default:""`
	ScrapeURL                       string `short:"u" long:"scrape_url" description:"Scrape website URL to markdown using Jina AI"`
	ScrapeQuestion                  string `short:"q" long:"scrape_question" description:"Search question using Jina AI"`
@@ -76,13 +80,21 @@ type Flags struct {
	ListStrategies      bool   `long:"liststrategies" description:"List all strategies"`
	ListVendors         bool   `long:"listvendors" description:"List all vendors"`
	ShellCompleteOutput bool   `long:"shell-complete-list" description:"Output raw list without headers/formatting (for shell completion)"`
	Search              bool   `long:"search" description:"Enable web search tool for supported models (Anthropic, OpenAI)"`
	Search              bool   `long:"search" description:"Enable web search tool for supported models (Anthropic, OpenAI, Gemini)"`
	SearchLocation      string `long:"search-location" description:"Set location for web search results (e.g., 'America/Los_Angeles')"`
	ImageFile           string `long:"image-file" description:"Save generated image to specified file path (e.g., 'output.png')"`
	ImageSize           string `long:"image-size" description:"Image dimensions: 1024x1024, 1536x1024, 1024x1536, auto (default: auto)"`
	ImageQuality        string `long:"image-quality" description:"Image quality: low, medium, high, auto (default: auto)"`
	ImageCompression    int    `long:"image-compression" description:"Compression level 0-100 for JPEG/WebP formats (default: not set)"`
	ImageBackground     string `long:"image-background" description:"Background type: opaque, transparent (default: opaque, only for PNG/WebP)"`
	SuppressThink       bool   `long:"suppress-think" yaml:"suppressThink" description:"Suppress text enclosed in thinking tags"`
	ThinkStartTag       string `long:"think-start-tag" yaml:"thinkStartTag" description:"Start tag for thinking sections" default:"<think>"`
	ThinkEndTag         string `long:"think-end-tag" yaml:"thinkEndTag" description:"End tag for thinking sections" default:"</think>"`
	DisableResponsesAPI bool   `long:"disable-responses-api" yaml:"disableResponsesAPI" description:"Disable OpenAI Responses API (default: false)"`
	Voice               string `long:"voice" yaml:"voice" description:"TTS voice name for supported models (e.g., Kore, Charon, Puck)" default:"Kore"`
	ListGeminiVoices    bool   `long:"list-gemini-voices" description:"List all available Gemini TTS voices"`
	Notification        bool   `long:"notification" yaml:"notification" description:"Send desktop notification when command completes"`
	NotificationCommand string `long:"notification-command" yaml:"notificationCommand" description:"Custom command to run for notifications (overrides built-in notifications)"`
}

var debug = false
@@ -99,26 +111,34 @@ func Init() (ret *Flags, err error) {
	usedFlags := make(map[string]bool)
	yamlArgsScan := os.Args[1:]

	// Get list of fields that have yaml tags, could be in yaml config
	yamlFields := make(map[string]bool)
	// Create mapping from flag names (both short and long) to yaml tag names
	flagToYamlTag := make(map[string]string)
	t := reflect.TypeOf(Flags{})
	for i := 0; i < t.NumField(); i++ {
		if yamlTag := t.Field(i).Tag.Get("yaml"); yamlTag != "" {
			yamlFields[yamlTag] = true
			//Debugf("Found yaml-configured field: %s\n", yamlTag)
		field := t.Field(i)
		yamlTag := field.Tag.Get("yaml")
		if yamlTag != "" {
			longTag := field.Tag.Get("long")
			shortTag := field.Tag.Get("short")
			if longTag != "" {
				flagToYamlTag[longTag] = yamlTag
				Debugf("Mapped long flag %s to yaml tag %s\n", longTag, yamlTag)
			}
			if shortTag != "" {
				flagToYamlTag[shortTag] = yamlTag
				Debugf("Mapped short flag %s to yaml tag %s\n", shortTag, yamlTag)
			}
		}
	}

	// Scan args for that are provided by cli and might be in yaml
	for _, arg := range yamlArgsScan {
		if strings.HasPrefix(arg, "--") {
			flag := strings.TrimPrefix(arg, "--")
			if i := strings.Index(flag, "="); i > 0 {
				flag = flag[:i]
			}
			if yamlFields[flag] {
				usedFlags[flag] = true
				Debugf("CLI flag used: %s\n", flag)
			flag := extractFlag(arg)

			if flag != "" {
				if yamlTag, exists := flagToYamlTag[flag]; exists {
					usedFlags[yamlTag] = true
					Debugf("CLI flag used: %s (yaml: %s)\n", flag, yamlTag)
				}
			}
		}
@@ -131,6 +151,16 @@ func Init() (ret *Flags, err error) {
		return
	}

	// Check to see if a ~/.config/fabric/config.yaml config file exists (only when user didn't specify a config)
	if ret.Config == "" {
		// Default to ~/.config/fabric/config.yaml if no config specified
		if defaultConfigPath, err := util.GetDefaultConfigPath(); err == nil && defaultConfigPath != "" {
			ret.Config = defaultConfigPath
		} else if err != nil {
			Debugf("Could not determine default config path: %v\n", err)
		}
	}

	// If config specified, load and apply YAML for unused flags
	if ret.Config != "" {
		var yamlFlags *Flags
@@ -165,7 +195,6 @@ func Init() (ret *Flags, err error) {
		}
	}

	// Handle stdin and messages
	// Handle stdin and messages
	info, _ := os.Stdin.Stat()
	pipedToStdin := (info.Mode() & os.ModeCharDevice) == 0
@@ -185,6 +214,22 @@ func Init() (ret *Flags, err error) {
	return
}

func extractFlag(arg string) string {
	var flag string
	if strings.HasPrefix(arg, "--") {
		flag = strings.TrimPrefix(arg, "--")
		if i := strings.Index(flag, "="); i > 0 {
			flag = flag[:i]
		}
	} else if strings.HasPrefix(arg, "-") && len(arg) > 1 {
		flag = strings.TrimPrefix(arg, "-")
		if i := strings.Index(flag, "="); i > 0 {
			flag = flag[:i]
		}
	}
	return flag
}

func assignWithConversion(targetField, sourceField reflect.Value) error {
	// Handle string source values
	if sourceField.Kind() == reflect.String {
@@ -376,22 +421,37 @@ func (o *Flags) BuildChatOptions() (ret *domain.ChatOptions, err error) {
		return nil, err
	}

	startTag := o.ThinkStartTag
	if startTag == "" {
		startTag = "<think>"
	}
	endTag := o.ThinkEndTag
	if endTag == "" {
		endTag = "</think>"
	}

	ret = &domain.ChatOptions{
		Model: o.Model,
		Temperature: o.Temperature,
		TopP: o.TopP,
		PresencePenalty: o.PresencePenalty,
		FrequencyPenalty: o.FrequencyPenalty,
		Raw: o.Raw,
		Seed: o.Seed,
		ModelContextLength: o.ModelContextLength,
		Search: o.Search,
		SearchLocation: o.SearchLocation,
		ImageFile: o.ImageFile,
		ImageSize: o.ImageSize,
		ImageQuality: o.ImageQuality,
		ImageCompression: o.ImageCompression,
		ImageBackground: o.ImageBackground,
		Model:               o.Model,
		Temperature:         o.Temperature,
		TopP:                o.TopP,
		PresencePenalty:     o.PresencePenalty,
		FrequencyPenalty:    o.FrequencyPenalty,
		Raw:                 o.Raw,
		Seed:                o.Seed,
		ModelContextLength:  o.ModelContextLength,
		Search:              o.Search,
		SearchLocation:      o.SearchLocation,
		ImageFile:           o.ImageFile,
		ImageSize:           o.ImageSize,
		ImageQuality:        o.ImageQuality,
		ImageCompression:    o.ImageCompression,
		ImageBackground:     o.ImageBackground,
		SuppressThink:       o.SuppressThink,
		ThinkStartTag:       startTag,
		ThinkEndTag:         endTag,
		Voice:               o.Voice,
		Notification:        o.Notification || o.NotificationCommand != "",
		NotificationCommand: o.NotificationCommand,
	}
	return
}
}
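
The Init changes above build a flag-name-to-yaml-tag map via reflection so that a value supplied on the command line, under either its long or short spelling, is never overridden by the YAML config. A reduced sketch of that mechanism; the two-field struct and the simplified extractFlag are stand-ins for the full Flags type and helper:

package main

import (
	"fmt"
	"reflect"
	"strings"
)

type flags struct {
	Model  string `short:"m" long:"model" yaml:"model"`
	Stream bool   `short:"s" long:"stream" yaml:"stream"`
}

// extractFlag strips leading dashes and any "=value" suffix.
func extractFlag(arg string) string {
	flag := strings.TrimLeft(arg, "-")
	if i := strings.Index(flag, "="); i > 0 {
		flag = flag[:i]
	}
	return flag
}

func main() {
	// Map both spellings of each flag to its yaml key.
	flagToYamlTag := map[string]string{}
	t := reflect.TypeOf(flags{})
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		if yamlTag := f.Tag.Get("yaml"); yamlTag != "" {
			for _, tag := range []string{f.Tag.Get("long"), f.Tag.Get("short")} {
				if tag != "" {
					flagToYamlTag[tag] = yamlTag
				}
			}
		}
	}

	// Any yaml key seen on the CLI is marked used, so YAML values for it are skipped later.
	usedFlags := map[string]bool{}
	for _, arg := range []string{"-m=gpt-4o", "--stream"} {
		if strings.HasPrefix(arg, "-") {
			if yamlTag, ok := flagToYamlTag[extractFlag(arg)]; ok {
				usedFlags[yamlTag] = true
			}
		}
	}
	fmt.Println(usedFlags) // map[model:true stream:true]
}
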

@@ -64,6 +64,9 @@ func TestBuildChatOptions(t *testing.T) {
		FrequencyPenalty: 0.2,
		Raw:              false,
		Seed:             1,
		SuppressThink:    false,
		ThinkStartTag:    "<think>",
		ThinkEndTag:      "</think>",
	}
	options, err := flags.BuildChatOptions()
	assert.NoError(t, err)
@@ -85,12 +88,29 @@ func TestBuildChatOptionsDefaultSeed(t *testing.T) {
		FrequencyPenalty: 0.2,
		Raw:              false,
		Seed:             0,
		SuppressThink:    false,
		ThinkStartTag:    "<think>",
		ThinkEndTag:      "</think>",
	}
	options, err := flags.BuildChatOptions()
	assert.NoError(t, err)
	assert.Equal(t, expectedOptions, options)
}

func TestBuildChatOptionsSuppressThink(t *testing.T) {
	flags := &Flags{
		SuppressThink: true,
		ThinkStartTag: "[[t]]",
		ThinkEndTag:   "[[/t]]",
	}

	options, err := flags.BuildChatOptions()
	assert.NoError(t, err)
	assert.True(t, options.SuppressThink)
	assert.Equal(t, "[[t]]", options.ThinkStartTag)
	assert.Equal(t, "[[/t]]", options.ThinkEndTag)
}

func TestInitWithYAMLConfig(t *testing.T) {
	// Create a temporary YAML config file
	configContent := `

56 internal/cli/initialization.go Normal file
@@ -0,0 +1,56 @@
package cli

import (
	"fmt"
	"os"
	"path/filepath"

	"github.com/danielmiessler/fabric/internal/core"
	"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
)

const ConfigDirPerms os.FileMode = 0755
const EnvFilePerms os.FileMode = 0644

// initializeFabric initializes the fabric database and plugin registry
func initializeFabric() (registry *core.PluginRegistry, err error) {
	var homedir string
	if homedir, err = os.UserHomeDir(); err != nil {
		return
	}

	fabricDb := fsdb.NewDb(filepath.Join(homedir, ".config/fabric"))
	if err = fabricDb.Configure(); err != nil {
		return
	}

	if registry, err = core.NewPluginRegistry(fabricDb); err != nil {
		return
	}

	return
}

// ensureEnvFile checks for the default ~/.config/fabric/.env file and creates it
// along with the parent directory if it does not exist.
func ensureEnvFile() (err error) {
	var homedir string
	if homedir, err = os.UserHomeDir(); err != nil {
		return fmt.Errorf("could not determine user home directory: %w", err)
	}
	configDir := filepath.Join(homedir, ".config", "fabric")
	envPath := filepath.Join(configDir, ".env")

	if _, statErr := os.Stat(envPath); statErr != nil {
		if !os.IsNotExist(statErr) {
			return fmt.Errorf("could not stat .env file: %w", statErr)
		}
		if err = os.MkdirAll(configDir, ConfigDirPerms); err != nil {
			return fmt.Errorf("could not create config directory: %w", err)
		}
		if err = os.WriteFile(envPath, []byte{}, EnvFilePerms); err != nil {
			return fmt.Errorf("could not create .env file: %w", err)
		}
	}
	return
}
|
||||
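A note on the pattern above: ensureEnvFile is safe to call on every startup, since a missing file triggers MkdirAll plus an empty WriteFile while any other stat failure is surfaced. A minimal, self-contained sketch of the same idempotent guard (the directory and file name here are illustrative, not Fabric's):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// ensureFile creates dir and an empty file inside it if the file is absent.
// Calling it a second time is a no-op.
func ensureFile(dir, name string) error {
	path := filepath.Join(dir, name)
	if _, err := os.Stat(path); err != nil {
		if !os.IsNotExist(err) {
			return fmt.Errorf("could not stat %s: %w", path, err)
		}
		if err := os.MkdirAll(dir, 0755); err != nil {
			return err
		}
		return os.WriteFile(path, []byte{}, 0644)
	}
	return nil // already present; leave contents alone
}

func main() {
	if err := ensureFile(filepath.Join(os.TempDir(), "fabric-demo"), ".env"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}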
internal/cli/listing.go (new file, 74 lines)
@@ -0,0 +1,74 @@
package cli

import (
	"fmt"
	"os"
	"strconv"

	"github.com/danielmiessler/fabric/internal/core"
	"github.com/danielmiessler/fabric/internal/plugins/ai"
	"github.com/danielmiessler/fabric/internal/plugins/ai/gemini"
	"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
)

// handleListingCommands handles listing-related commands
// Returns (handled, error) where handled indicates if a command was processed and should exit
func handleListingCommands(currentFlags *Flags, fabricDb *fsdb.Db, registry *core.PluginRegistry) (handled bool, err error) {
	if currentFlags.LatestPatterns != "0" {
		var parsedToInt int
		if parsedToInt, err = strconv.Atoi(currentFlags.LatestPatterns); err != nil {
			return true, err
		}

		if err = fabricDb.Patterns.PrintLatestPatterns(parsedToInt); err != nil {
			return true, err
		}
		return true, nil
	}

	if currentFlags.ListPatterns {
		err = fabricDb.Patterns.ListNames(currentFlags.ShellCompleteOutput)
		return true, err
	}

	if currentFlags.ListAllModels {
		var models *ai.VendorsModels
		if models, err = registry.VendorManager.GetModels(); err != nil {
			return true, err
		}
		if currentFlags.ShellCompleteOutput {
			models.Print(true)
		} else {
			models.PrintWithVendor(false)
		}
		return true, nil
	}

	if currentFlags.ListAllContexts {
		err = fabricDb.Contexts.ListNames(currentFlags.ShellCompleteOutput)
		return true, err
	}

	if currentFlags.ListAllSessions {
		err = fabricDb.Sessions.ListNames(currentFlags.ShellCompleteOutput)
		return true, err
	}

	if currentFlags.ListStrategies {
		err = registry.Strategies.ListStrategies(currentFlags.ShellCompleteOutput)
		return true, err
	}

	if currentFlags.ListVendors {
		err = registry.ListVendors(os.Stdout)
		return true, err
	}

	if currentFlags.ListGeminiVoices {
		voicesList := gemini.ListGeminiVoices(currentFlags.ShellCompleteOutput)
		fmt.Print(voicesList)
		return true, nil
	}

	return false, nil
}
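The (handled, error) convention above lets the caller try each command group in order and exit after the first match. A self-contained sketch of that dispatch shape (the handlers here are stand-ins, not Fabric's actual command groups):

package main

import (
	"fmt"
	"os"
)

// handler mirrors the (handled, error) convention used by the CLI:
// handled reports whether the command was consumed and the program should exit.
type handler func() (handled bool, err error)

func main() {
	handlers := []handler{
		func() (bool, error) { return false, nil },                         // e.g. listing commands: nothing matched
		func() (bool, error) { fmt.Println("managed"); return true, nil }, // e.g. management commands: matched
	}
	for _, h := range handlers {
		handled, err := h()
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		if handled {
			return // first matching command wins
		}
	}
	fmt.Println("no command matched; fall through to chat")
}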
internal/cli/management.go (new file, 31 lines)
@@ -0,0 +1,31 @@
package cli

import (
	"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
)

// handleManagementCommands handles management-related commands (delete, print, etc.)
// Returns (handled, error) where handled indicates if a command was processed and should exit
func handleManagementCommands(currentFlags *Flags, fabricDb *fsdb.Db) (handled bool, err error) {
	if currentFlags.WipeContext != "" {
		err = fabricDb.Contexts.Delete(currentFlags.WipeContext)
		return true, err
	}

	if currentFlags.WipeSession != "" {
		err = fabricDb.Sessions.Delete(currentFlags.WipeSession)
		return true, err
	}

	if currentFlags.PrintSession != "" {
		err = fabricDb.Sessions.PrintSession(currentFlags.PrintSession)
		return true, err
	}

	if currentFlags.PrintContext != "" {
		err = fabricDb.Contexts.PrintContext(currentFlags.PrintContext)
		return true, err
	}

	return false, nil
}
@@ -3,6 +3,8 @@ package cli

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/atotto/clipboard"
)

@@ -15,6 +17,10 @@ func CopyToClipboard(message string) (err error) {
}

func CreateOutputFile(message string, fileName string) (err error) {
	if _, err = os.Stat(fileName); err == nil {
		err = fmt.Errorf("file %s already exists, not overwriting. Rename the existing file or choose a different name", fileName)
		return
	}
	var file *os.File
	if file, err = os.Create(fileName); err != nil {
		err = fmt.Errorf("error creating file: %v", err)

@@ -24,7 +30,41 @@ func CreateOutputFile(message string, fileName string) (err error) {
	if _, err = file.WriteString(message); err != nil {
		err = fmt.Errorf("error writing to file: %v", err)
	} else {
		fmt.Printf("\n\n... written to %s\n", fileName)
		fmt.Fprintf(os.Stderr, "\n\n[Output also written to %s]\n", fileName)
	}
	return
}

// CreateAudioOutputFile creates a binary file for audio data
func CreateAudioOutputFile(audioData []byte, fileName string) (err error) {
	// If no extension is provided, default to .wav
	if filepath.Ext(fileName) == "" {
		fileName += ".wav"
	}

	// File existence check is now done in the CLI layer before TTS generation
	var file *os.File
	if file, err = os.Create(fileName); err != nil {
		err = fmt.Errorf("error creating audio file: %v", err)
		return
	}
	defer file.Close()

	if _, err = file.Write(audioData); err != nil {
		err = fmt.Errorf("error writing audio data to file: %v", err)
	}
	// No redundant output message here - the CLI layer handles success messaging
	return
}

// IsAudioFormat checks if the filename suggests an audio format
func IsAudioFormat(fileName string) bool {
	ext := strings.ToLower(filepath.Ext(fileName))
	audioExts := []string{".wav", ".mp3", ".m4a", ".aac", ".ogg", ".flac"}
	for _, audioExt := range audioExts {
		if ext == audioExt {
			return true
		}
	}
	return false
}
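CreateOutputFile refuses to overwrite an existing file by treating a successful os.Stat as a conflict. A standalone sketch of that guard under the same assumption (the file name is made up):

package main

import (
	"fmt"
	"os"
)

// createNew writes data to path only if the file does not already exist.
func createNew(path string, data []byte) error {
	if _, err := os.Stat(path); err == nil {
		return fmt.Errorf("file %s already exists, not overwriting", path)
	} else if !os.IsNotExist(err) {
		return err // stat failed for some other reason (permissions, etc.)
	}
	return os.WriteFile(path, data, 0644)
}

func main() {
	if err := createNew("demo-output.txt", []byte("hello\n")); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}

An os.OpenFile call with O_CREATE|O_EXCL would close the stat-then-create race atomically; the stat approach trades that for a friendlier error message.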
internal/cli/setup_server.go (new file, 30 lines)
@@ -0,0 +1,30 @@
package cli

import (
	"github.com/danielmiessler/fabric/internal/core"
	restapi "github.com/danielmiessler/fabric/internal/server"
)

// handleSetupAndServerCommands handles setup and server-related commands
// Returns (handled, error) where handled indicates if a command was processed and should exit
func handleSetupAndServerCommands(currentFlags *Flags, registry *core.PluginRegistry, version string) (handled bool, err error) {
	// if the setup flag is set, run the setup function
	if currentFlags.Setup {
		err = registry.Setup()
		return true, err
	}

	if currentFlags.Serve {
		registry.ConfigureVendors()
		err = restapi.Serve(registry, currentFlags.ServeAddress, currentFlags.ServeAPIKey)
		return true, err
	}

	if currentFlags.ServeOllama {
		registry.ConfigureVendors()
		err = restapi.ServeOllama(registry, currentFlags.ServeAddress, version)
		return true, err
	}

	return false, nil
}
internal/cli/tools.go (new file, 90 lines)
@@ -0,0 +1,90 @@
package cli

import (
	"fmt"

	"github.com/danielmiessler/fabric/internal/core"
	"github.com/danielmiessler/fabric/internal/tools/youtube"
)

// handleToolProcessing handles YouTube and web scraping tool processing
func handleToolProcessing(currentFlags *Flags, registry *core.PluginRegistry) (messageTools string, err error) {
	if currentFlags.YouTube != "" {
		if !registry.YouTube.IsConfigured() {
			err = fmt.Errorf("YouTube is not configured, please run the setup procedure")
			return
		}

		var videoId string
		var playlistId string
		if videoId, playlistId, err = registry.YouTube.GetVideoOrPlaylistId(currentFlags.YouTube); err != nil {
			return
		} else if (videoId == "" || currentFlags.YouTubePlaylist) && playlistId != "" {
			if currentFlags.Output != "" {
				err = registry.YouTube.FetchAndSavePlaylist(playlistId, currentFlags.Output)
			} else {
				var videos []*youtube.VideoMeta
				if videos, err = registry.YouTube.FetchPlaylistVideos(playlistId); err != nil {
					err = fmt.Errorf("error fetching playlist videos: %w", err)
					return
				}

				for _, video := range videos {
					var message string
					if message, err = processYoutubeVideo(currentFlags, registry, video.Id); err != nil {
						return
					}

					if !currentFlags.IsChatRequest() {
						if err = WriteOutput(message, fmt.Sprintf("%v.md", video.TitleNormalized)); err != nil {
							return
						}
					} else {
						messageTools = AppendMessage(messageTools, message)
					}
				}
			}
			return
		}

		if messageTools, err = processYoutubeVideo(currentFlags, registry, videoId); err != nil {
			return
		}
		if !currentFlags.IsChatRequest() {
			err = currentFlags.WriteOutput(messageTools)
			return
		}
	}

	if currentFlags.ScrapeURL != "" || currentFlags.ScrapeQuestion != "" {
		if !registry.Jina.IsConfigured() {
			err = fmt.Errorf("scraping functionality is not configured. Please set up Jina to enable scraping")
			return
		}
		// Check if the scrape_url flag is set and call ScrapeURL
		if currentFlags.ScrapeURL != "" {
			var website string
			if website, err = registry.Jina.ScrapeURL(currentFlags.ScrapeURL); err != nil {
				return
			}
			messageTools = AppendMessage(messageTools, website)
		}

		// Check if the scrape_question flag is set and call ScrapeQuestion
		if currentFlags.ScrapeQuestion != "" {
			var website string
			if website, err = registry.Jina.ScrapeQuestion(currentFlags.ScrapeQuestion); err != nil {
				return
			}

			messageTools = AppendMessage(messageTools, website)
		}

		if !currentFlags.IsChatRequest() {
			err = currentFlags.WriteOutput(messageTools)
			return
		}
	}

	return
}
@@ -79,7 +79,9 @@ func (o *Chatter) Send(request *domain.ChatRequest, opts *domain.ChatOptions) (s

	for response := range responseChan {
		message += response
		fmt.Print(response)
		if !opts.SuppressThink {
			fmt.Print(response)
		}
	}

	// Wait for goroutine to finish

@@ -101,6 +103,10 @@ func (o *Chatter) Send(request *domain.ChatRequest, opts *domain.ChatOptions) (s
		}
	}

	if opts.SuppressThink && !o.DryRun {
		message = domain.StripThinkBlocks(message, opts.ThinkStartTag, opts.ThinkEndTag)
	}

	if message == "" {
		session = nil
		err = fmt.Errorf("empty response")

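The change above stops echoing raw stream chunks when SuppressThink is on, then strips the think block from the accumulated message once the stream completes. A minimal sketch of that accumulate-then-strip flow (the channel contents and tags are illustrative):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	chunks := make(chan string, 3)
	chunks <- "<think>planning"
	chunks <- "...</think> "
	chunks <- "final answer"
	close(chunks)

	suppress := true
	var message string
	for c := range chunks {
		message += c
		if !suppress {
			fmt.Print(c) // only echo live when thinking is visible
		}
	}
	// Tags can span chunk boundaries, so stripping must run on the
	// accumulated message, not chunk by chunk.
	re := regexp.MustCompile(`(?s)` + regexp.QuoteMeta("<think>") + `.*?` + regexp.QuoteMeta("</think>") + `\s*`)
	fmt.Println(re.ReplaceAllString(message, "")) // prints: final answer
}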
@@ -15,6 +15,7 @@ import (
type mockVendor struct {
	sendStreamError error
	streamChunks    []string
	sendFunc        func(context.Context, []*chat.ChatCompletionMessage, *domain.ChatOptions) (string, error)
}

func (m *mockVendor) GetName() string {

@@ -57,6 +58,9 @@ func (m *mockVendor) SendStream(messages []*chat.ChatCompletionMessage, opts *do
}

func (m *mockVendor) Send(ctx context.Context, messages []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (string, error) {
	if m.sendFunc != nil {
		return m.sendFunc(ctx, messages, opts)
	}
	return "test response", nil
}

@@ -64,6 +68,51 @@ func (m *mockVendor) NeedsRawMode(modelName string) bool {
	return false
}

func TestChatter_Send_SuppressThink(t *testing.T) {
	tempDir := t.TempDir()
	db := fsdb.NewDb(tempDir)

	mockVendor := &mockVendor{}

	chatter := &Chatter{
		db:     db,
		Stream: false,
		vendor: mockVendor,
		model:  "test-model",
	}

	request := &domain.ChatRequest{
		Message: &chat.ChatCompletionMessage{
			Role:    chat.ChatMessageRoleUser,
			Content: "test",
		},
	}

	opts := &domain.ChatOptions{
		Model:         "test-model",
		SuppressThink: true,
		ThinkStartTag: "<think>",
		ThinkEndTag:   "</think>",
	}

	// custom send function returning a message with think tags
	mockVendor.sendFunc = func(ctx context.Context, msgs []*chat.ChatCompletionMessage, o *domain.ChatOptions) (string, error) {
		return "<think>hidden</think> visible", nil
	}

	session, err := chatter.Send(request, opts)
	if err != nil {
		t.Fatalf("Send returned error: %v", err)
	}
	if session == nil {
		t.Fatal("expected session")
	}
	last := session.GetLastMessage()
	if last.Content != "visible" {
		t.Errorf("expected filtered content 'visible', got %q", last.Content)
	}
}

func TestChatter_Send_StreamingErrorPropagation(t *testing.T) {
	// Create a temporary database for testing
	tempDir := t.TempDir()

@@ -37,12 +37,16 @@ import (
	"github.com/danielmiessler/fabric/internal/util"
)

// hasAWSCredentials checks if any AWS credentials are present either in the
// environment variables or in the default/shared credentials file. It doesn't
// attempt to verify the validity of the credentials, but simply ensures that a
// potential authentication source exists so we can safely initialize the
// Bedrock client without causing the AWS SDK to search for credentials.
// hasAWSCredentials checks if Bedrock is properly configured by ensuring both
// AWS credentials and BEDROCK_AWS_REGION are present. This prevents the Bedrock
// client from being initialized when AWS credentials exist for other purposes.
func hasAWSCredentials() bool {
	// First check if BEDROCK_AWS_REGION is set - this is required for Bedrock
	if os.Getenv("BEDROCK_AWS_REGION") == "" {
		return false
	}

	// Then check if AWS credentials are available
	if os.Getenv("AWS_PROFILE") != "" ||
		os.Getenv("AWS_ROLE_SESSION_NAME") != "" ||
		(os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "") {

@@ -259,8 +263,24 @@ func (o *PluginRegistry) GetModels() (ret *ai.VendorsModels, err error) {
func (o *PluginRegistry) Configure() (err error) {
	o.ConfigureVendors()
	_ = o.Defaults.Configure()
	if err := o.CustomPatterns.Configure(); err != nil {
		return fmt.Errorf("error configuring CustomPatterns: %w", err)
	}
	_ = o.PatternsLoader.Configure()

	// Refresh the database custom patterns directory after custom patterns plugin is configured
	customPatternsDir := os.Getenv("CUSTOM_PATTERNS_DIRECTORY")
	if customPatternsDir != "" {
		// Expand home directory if needed
		if strings.HasPrefix(customPatternsDir, "~/") {
			if homeDir, err := os.UserHomeDir(); err == nil {
				customPatternsDir = filepath.Join(homeDir, customPatternsDir[2:])
			}
		}
		o.Db.Patterns.CustomPatternsDir = customPatternsDir
		o.PatternsLoader.Patterns.CustomPatternsDir = customPatternsDir
	}

	// YouTube and Jina are not mandatory, so ignore not configured error
	_ = o.YouTube.Configure()
	_ = o.Jina.Configure()

@@ -268,7 +288,7 @@ func (o *PluginRegistry) Configure() (err error) {
	return
}

func (o *PluginRegistry) GetChatter(model string, modelContextLength int, strategy string, stream bool, dryRun bool) (ret *Chatter, err error) {
func (o *PluginRegistry) GetChatter(model string, modelContextLength int, vendorName string, strategy string, stream bool, dryRun bool) (ret *Chatter, err error) {
	ret = &Chatter{
		db:     o.Db,
		Stream: stream,

@@ -297,14 +317,32 @@ func (o *PluginRegistry) GetChatter(model string, modelContextLength int, strate
		ret.model = defaultModel
	}
	} else if model == "" {
		ret.vendor = vendorManager.FindByName(defaultVendor)
		if vendorName != "" {
			ret.vendor = vendorManager.FindByName(vendorName)
		} else {
			ret.vendor = vendorManager.FindByName(defaultVendor)
		}
		ret.model = defaultModel
	} else {
		var models *ai.VendorsModels
		if models, err = vendorManager.GetModels(); err != nil {
			return
		}
		ret.vendor = vendorManager.FindByName(models.FindGroupsByItemFirst(model))
		if vendorName != "" {
			// ensure vendor exists and provides model
			ret.vendor = vendorManager.FindByName(vendorName)
			availableVendors := models.FindGroupsByItem(model)
			if ret.vendor == nil || !lo.Contains(availableVendors, vendorName) {
				err = fmt.Errorf("model %s not available for vendor %s", model, vendorName)
				return
			}
		} else {
			availableVendors := models.FindGroupsByItem(model)
			if len(availableVendors) > 1 {
				fmt.Fprintf(os.Stderr, "Warning: multiple vendors provide model %s: %s. Using %s. Specify --vendor to select a vendor.\n", model, strings.Join(availableVendors, ", "), availableVendors[0])
			}
			ret.vendor = vendorManager.FindByName(models.FindGroupsByItemFirst(model))
		}
		ret.model = model
	}

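GetChatter now warns when more than one configured vendor serves the requested model and falls back to the first match unless --vendor disambiguates. A self-contained sketch of that resolution rule (the vendor names and map are made up for illustration):

package main

import (
	"fmt"
	"os"
)

// resolveVendor picks a vendor for model, honoring an explicit choice and
// warning on stderr when the model name is ambiguous.
func resolveVendor(model, explicit string, byModel map[string][]string) (string, error) {
	vendors := byModel[model]
	if len(vendors) == 0 {
		return "", fmt.Errorf("no vendor provides model %s", model)
	}
	if explicit != "" {
		for _, v := range vendors {
			if v == explicit {
				return v, nil
			}
		}
		return "", fmt.Errorf("model %s not available for vendor %s", model, explicit)
	}
	if len(vendors) > 1 {
		fmt.Fprintf(os.Stderr, "Warning: multiple vendors provide %s; using %s\n", model, vendors[0])
	}
	return vendors[0], nil
}

func main() {
	byModel := map[string][]string{"shared-model": {"VendorA", "VendorB"}}
	v, _ := resolveVendor("shared-model", "", byModel)
	fmt.Println("picked:", v) // picked: VendorA (after a warning on stderr)
}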
@@ -1,10 +1,19 @@
package core

import (
	"bytes"
	"context"
	"io"
	"os"
	"strings"
	"testing"

	"github.com/danielmiessler/fabric/internal/chat"
	"github.com/danielmiessler/fabric/internal/domain"
	"github.com/danielmiessler/fabric/internal/plugins"
	"github.com/danielmiessler/fabric/internal/plugins/ai"
	"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
	"github.com/danielmiessler/fabric/internal/tools"
)

func TestSaveEnvFile(t *testing.T) {

@@ -19,3 +28,63 @@ func TestSaveEnvFile(t *testing.T) {
		t.Fatalf("SaveEnvFile() error = %v", err)
	}
}

// testVendor implements ai.Vendor for testing purposes
type testVendor struct {
	name   string
	models []string
}

func (m *testVendor) GetName() string                       { return m.name }
func (m *testVendor) GetSetupDescription() string           { return m.name }
func (m *testVendor) IsConfigured() bool                    { return true }
func (m *testVendor) Configure() error                      { return nil }
func (m *testVendor) Setup() error                          { return nil }
func (m *testVendor) SetupFillEnvFileContent(*bytes.Buffer) {}
func (m *testVendor) ListModels() ([]string, error)         { return m.models, nil }
func (m *testVendor) SendStream([]*chat.ChatCompletionMessage, *domain.ChatOptions, chan string) error {
	return nil
}
func (m *testVendor) Send(context.Context, []*chat.ChatCompletionMessage, *domain.ChatOptions) (string, error) {
	return "", nil
}
func (m *testVendor) NeedsRawMode(string) bool { return false }

func TestGetChatter_WarnsOnAmbiguousModel(t *testing.T) {
	tempDir := t.TempDir()
	db := fsdb.NewDb(tempDir)

	vendorA := &testVendor{name: "VendorA", models: []string{"shared-model"}}
	vendorB := &testVendor{name: "VendorB", models: []string{"shared-model"}}

	vm := ai.NewVendorsManager()
	vm.AddVendors(vendorA, vendorB)

	defaults := &tools.Defaults{
		PluginBase:         &plugins.PluginBase{},
		Vendor:             &plugins.Setting{Value: "VendorA"},
		Model:              &plugins.SetupQuestion{Setting: &plugins.Setting{Value: "shared-model"}},
		ModelContextLength: &plugins.SetupQuestion{Setting: &plugins.Setting{Value: "0"}},
	}

	registry := &PluginRegistry{Db: db, VendorManager: vm, Defaults: defaults}

	r, w, _ := os.Pipe()
	oldStderr := os.Stderr
	os.Stderr = w
	defer func() { os.Stderr = oldStderr }()

	chatter, err := registry.GetChatter("shared-model", 0, "", "", false, false)
	w.Close()
	warning, _ := io.ReadAll(r)

	if err != nil {
		t.Fatalf("GetChatter() error = %v", err)
	}
	if chatter.vendor.GetName() != "VendorA" {
		t.Fatalf("expected vendor VendorA, got %s", chatter.vendor.GetName())
	}
	if !strings.Contains(string(warning), "multiple vendors provide model shared-model") {
		t.Fatalf("expected warning about multiple vendors, got %q", string(warning))
	}
}

@@ -4,6 +4,14 @@ import "github.com/danielmiessler/fabric/internal/chat"

const ChatMessageRoleMeta = "meta"

// Default values for chat options (must match cli/flags.go defaults)
const (
	DefaultTemperature      = 0.7
	DefaultTopP             = 0.9
	DefaultPresencePenalty  = 0.0
	DefaultFrequencyPenalty = 0.0
)

type ChatRequest struct {
	ContextName string
	SessionName string

@@ -17,22 +25,30 @@ type ChatRequest struct {
}

type ChatOptions struct {
	Model              string
	Temperature        float64
	TopP               float64
	PresencePenalty    float64
	FrequencyPenalty   float64
	Raw                bool
	Seed               int
	ModelContextLength int
	MaxTokens          int
	Search             bool
	SearchLocation     string
	ImageFile          string
	ImageSize          string
	ImageQuality       string
	ImageCompression   int
	ImageBackground    string
	Model               string
	Temperature         float64
	TopP                float64
	PresencePenalty     float64
	FrequencyPenalty    float64
	Raw                 bool
	Seed                int
	ModelContextLength  int
	MaxTokens           int
	Search              bool
	SearchLocation      string
	ImageFile           string
	ImageSize           string
	ImageQuality        string
	ImageCompression    int
	ImageBackground     string
	SuppressThink       bool
	ThinkStartTag       string
	ThinkEndTag         string
	AudioOutput         bool
	AudioFormat         string
	Voice               string
	Notification        bool
	NotificationCommand string
}

// NormalizeMessages removes empty messages and ensures message order user-assistant-user

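The new Default* constants let vendor clients detect whether a caller changed a knob by comparing against the CLI default, as the Anthropic client does for TopP further down. The comparison is a sentinel check, so an explicit value equal to the default is indistinguishable from "unset"; a small sketch of the trade-off (function and constant names here are illustrative):

package main

import "fmt"

const defaultTopP = 0.9 // mirrors domain.DefaultTopP

// pickSampling returns which sampling knob to send, given that some models
// reject requests that set both temperature and top_p.
func pickSampling(temperature, topP float64) string {
	if topP != defaultTopP {
		// Caller moved TopP off the default, so prefer it...
		return fmt.Sprintf("top_p=%.2f", topP)
	}
	// ...otherwise always send temperature so the vendor default (often 1.0)
	// never silently applies.
	return fmt.Sprintf("temperature=%.2f", temperature)
}

func main() {
	fmt.Println(pickSampling(0.7, 0.9)) // temperature=0.70
	fmt.Println(pickSampling(0.7, 0.5)) // top_p=0.50
	fmt.Println(pickSampling(0.7, 0.9)) // an explicit 0.9 still reads as "unset"
}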
internal/domain/think.go (new file, 32 lines)
@@ -0,0 +1,32 @@
package domain

import (
	"regexp"
	"sync"
)

// StripThinkBlocks removes any content between the provided start and end tags
// from the input string. Whitespace following the end tag is also removed so
// output resumes at the next non-empty line.
var (
	regexCache = make(map[string]*regexp.Regexp)
	cacheMutex sync.Mutex
)

func StripThinkBlocks(input, startTag, endTag string) string {
	if startTag == "" || endTag == "" {
		return input
	}

	cacheKey := startTag + "|" + endTag
	cacheMutex.Lock()
	re, exists := regexCache[cacheKey]
	if !exists {
		pattern := "(?s)" + regexp.QuoteMeta(startTag) + ".*?" + regexp.QuoteMeta(endTag) + "\\s*"
		re = regexp.MustCompile(pattern)
		regexCache[cacheKey] = re
	}
	cacheMutex.Unlock()

	return re.ReplaceAllString(input, "")
}
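In the pattern above, (?s) makes . match newlines and .*? keeps the match lazy, so each think block is removed on its own even when blocks span lines or appear more than once. A quick standalone illustration of those two regex choices:

package main

import (
	"fmt"
	"regexp"
)

// strip mirrors the pattern used by StripThinkBlocks, without the cache.
func strip(input, start, end string) string {
	pattern := "(?s)" + regexp.QuoteMeta(start) + ".*?" + regexp.QuoteMeta(end) + `\s*`
	return regexp.MustCompile(pattern).ReplaceAllString(input, "")
}

func main() {
	in := "<think>step 1\nstep 2</think>\n\nanswer A\n<think>more</think>\nanswer B"
	fmt.Println(strip(in, "<think>", "</think>"))
	// Output:
	// answer A
	// answer B
	// (a greedy .* would instead swallow everything from the first <think>
	// to the last </think>, including "answer A")
}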
internal/domain/think_test.go (new file, 19 lines)
@@ -0,0 +1,19 @@
package domain

import "testing"

func TestStripThinkBlocks(t *testing.T) {
	input := "<think>internal</think>\n\nresult"
	got := StripThinkBlocks(input, "<think>", "</think>")
	if got != "result" {
		t.Errorf("expected %q, got %q", "result", got)
	}
}

func TestStripThinkBlocksCustomTags(t *testing.T) {
	input := "[[t]]hidden[[/t]] visible"
	got := StripThinkBlocks(input, "[[t]]", "[[/t]]")
	if got != "visible" {
		t.Errorf("expected %q, got %q", "visible", got)
	}
}
@@ -46,6 +46,7 @@ func NewClient() (ret *Client) {
		string(anthropic.ModelClaude_3_5_Sonnet_20240620), string(anthropic.ModelClaude3OpusLatest),
		string(anthropic.ModelClaude_3_Opus_20240229), string(anthropic.ModelClaude_3_Haiku_20240307),
		string(anthropic.ModelClaudeOpus4_20250514), string(anthropic.ModelClaudeSonnet4_20250514),
		string(anthropic.ModelClaudeOpus4_1_20250805),
	}

	return

@@ -181,11 +182,19 @@ func (an *Client) buildMessageParams(msgs []anthropic.MessageParam, opts *domain
	params anthropic.MessageNewParams) {

	params = anthropic.MessageNewParams{
		Model:       anthropic.Model(opts.Model),
		MaxTokens:   int64(an.maxTokens),
		TopP:        anthropic.Opt(opts.TopP),
		Temperature: anthropic.Opt(opts.Temperature),
		Messages:    msgs,
		Model:     anthropic.Model(opts.Model),
		MaxTokens: int64(an.maxTokens),
		Messages:  msgs,
	}

	// Only set one of Temperature or TopP as some models don't allow both
	// Always set temperature to ensure consistent behavior (Anthropic default is 1.0, Fabric default is 0.7)
	if opts.TopP != domain.DefaultTopP {
		// User explicitly set TopP, so use that instead of temperature
		params.TopP = anthropic.Opt(opts.TopP)
	} else {
		// Use temperature (always set to ensure Fabric's default of 0.7, not Anthropic's 1.0)
		params.Temperature = anthropic.Opt(opts.Temperature)
	}

	// Add Claude Code spoofing system message for OAuth authentication

@@ -72,7 +72,8 @@ func TestBuildMessageParams_WithoutSearch(t *testing.T) {
	client := NewClient()
	opts := &domain.ChatOptions{
		Model:       "claude-3-5-sonnet-latest",
		Temperature: 0.7,
		Temperature: 0.8,                // Use non-default value to ensure it gets set
		TopP:        domain.DefaultTopP, // Use default TopP so temperature takes precedence
		Search:      false,
	}

@@ -90,6 +91,7 @@ func TestBuildMessageParams_WithoutSearch(t *testing.T) {
		t.Errorf("Expected model %s, got %s", opts.Model, params.Model)
	}

	// When using non-default temperature, it should be set in params
	if params.Temperature.Value != opts.Temperature {
		t.Errorf("Expected temperature %f, got %f", opts.Temperature, params.Temperature.Value)
	}

@@ -99,7 +101,8 @@ func TestBuildMessageParams_WithSearch(t *testing.T) {
	client := NewClient()
	opts := &domain.ChatOptions{
		Model:       "claude-3-5-sonnet-latest",
		Temperature: 0.7,
		Temperature: 0.8,                // Use non-default value
		TopP:        domain.DefaultTopP, // Use default TopP so temperature takes precedence
		Search:      true,
	}

@@ -135,7 +138,8 @@ func TestBuildMessageParams_WithSearchAndLocation(t *testing.T) {
	client := NewClient()
	opts := &domain.ChatOptions{
		Model:          "claude-3-5-sonnet-latest",
		Temperature:    0.7,
		Temperature:    0.8,                // Use non-default value
		TopP:           domain.DefaultTopP, // Use default TopP so temperature takes precedence
		Search:         true,
		SearchLocation: "America/Los_Angeles",
	}

@@ -256,3 +260,59 @@ func TestCitationFormatting(t *testing.T) {
		t.Errorf("Expected 2 unique citations, got %d", citationCount)
	}
}

func TestBuildMessageParams_DefaultValues(t *testing.T) {
	client := NewClient()

	// Test with default temperature - should always set temperature unless TopP is explicitly set
	opts := &domain.ChatOptions{
		Model:       "claude-3-5-sonnet-latest",
		Temperature: domain.DefaultTemperature, // 0.7 - should be set to override Anthropic's 1.0 default
		TopP:        domain.DefaultTopP,        // 0.9 - default, so temperature takes precedence
		Search:      false,
	}

	messages := []anthropic.MessageParam{
		anthropic.NewUserMessage(anthropic.NewTextBlock("Hello")),
	}

	params := client.buildMessageParams(messages, opts)

	// Temperature should be set when using default value to override Anthropic's 1.0 default
	if params.Temperature.Value != opts.Temperature {
		t.Errorf("Expected temperature %f, got %f", opts.Temperature, params.Temperature.Value)
	}

	// TopP should not be set when using default value (temperature takes precedence)
	if params.TopP.Value != 0 {
		t.Errorf("Expected TopP to not be set (0), but got %f", params.TopP.Value)
	}
}

func TestBuildMessageParams_ExplicitTopP(t *testing.T) {
	client := NewClient()

	// Test with explicit TopP - should set TopP instead of temperature
	opts := &domain.ChatOptions{
		Model:       "claude-3-5-sonnet-latest",
		Temperature: domain.DefaultTemperature, // 0.7 - ignored when TopP is explicitly set
		TopP:        0.5,                       // Non-default - should be set
		Search:      false,
	}

	messages := []anthropic.MessageParam{
		anthropic.NewUserMessage(anthropic.NewTextBlock("Hello")),
	}

	params := client.buildMessageParams(messages, opts)

	// Temperature should not be set when TopP is explicitly set
	if params.Temperature.Value != 0 {
		t.Errorf("Expected temperature to not be set (0), but got %f", params.Temperature.Value)
	}

	// TopP should be set when using non-default value
	if params.TopP.Value != opts.TopP {
		t.Errorf("Expected TopP %f, got %f", opts.TopP, params.TopP.Value)
	}
}

@@ -9,6 +9,7 @@ import (
	"fmt"
	"io"
	"net/http"
	"os"
	"os/exec"
	"strings"
	"time"

@@ -71,7 +72,7 @@ func (t *OAuthTransport) getValidToken(tokenIdentifier string) (string, error) {
	}
	// If no token exists, run OAuth flow
	if token == nil {
		fmt.Println("No OAuth token found, initiating authentication...")
		fmt.Fprintln(os.Stderr, "No OAuth token found, initiating authentication...")
		newAccessToken, err := RunOAuthFlow(tokenIdentifier)
		if err != nil {
			return "", fmt.Errorf("failed to authenticate: %w", err)

@@ -81,11 +82,11 @@ func (t *OAuthTransport) getValidToken(tokenIdentifier string) (string, error) {

	// Check if token needs refresh (5 minute buffer)
	if token.IsExpired(5) {
		fmt.Println("OAuth token expired, refreshing...")
		fmt.Fprintln(os.Stderr, "OAuth token expired, refreshing...")
		newAccessToken, err := RefreshToken(tokenIdentifier)
		if err != nil {
			// If refresh fails, try re-authentication
			fmt.Println("Token refresh failed, re-authenticating...")
			fmt.Fprintln(os.Stderr, "Token refresh failed, re-authenticating...")
			newAccessToken, err = RunOAuthFlow(tokenIdentifier)
			if err != nil {
				return "", fmt.Errorf("failed to refresh or re-authenticate: %w", err)

@@ -137,13 +138,13 @@ func RunOAuthFlow(tokenIdentifier string) (token string, err error) {
	if err == nil && existingToken != nil {
		// If token exists but is expired, try refreshing first
		if existingToken.IsExpired(5) {
			fmt.Println("Found expired OAuth token, attempting refresh...")
			fmt.Fprintln(os.Stderr, "Found expired OAuth token, attempting refresh...")
			refreshedToken, refreshErr := RefreshToken(tokenIdentifier)
			if refreshErr == nil {
				fmt.Println("Token refresh successful")
				fmt.Fprintln(os.Stderr, "Token refresh successful")
				return refreshedToken, nil
			}
			fmt.Printf("Token refresh failed (%v), proceeding with full OAuth flow...\n", refreshErr)
			fmt.Fprintf(os.Stderr, "Token refresh failed (%v), proceeding with full OAuth flow...\n", refreshErr)
		} else {
			// Token exists and is still valid
			return existingToken.AccessToken, nil

@@ -170,10 +171,10 @@ func RunOAuthFlow(tokenIdentifier string) (token string, err error) {
		oauth2.SetAuthURLParam("state", verifier),
	)

	fmt.Println("Open the following URL in your browser. Fabric would like to authorize:")
	fmt.Println(authURL)
	fmt.Fprintln(os.Stderr, "Open the following URL in your browser. Fabric would like to authorize:")
	fmt.Fprintln(os.Stderr, authURL)
	openBrowser(authURL)
	fmt.Print("Paste the authorization code here: ")
	fmt.Fprint(os.Stderr, "Paste the authorization code here: ")
	var code string
	fmt.Scanln(&code)
	parts := strings.SplitN(code, "#", 2)

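Moving every OAuth prompt from stdout to stderr keeps stdout reserved for model output, so pipelines like fabric ... | pbcopy do not swallow the interactive prompts. A tiny illustration of the split (not Fabric code; just the stream discipline):

package main

import (
	"bufio"
	"fmt"
	"os"
)

func main() {
	// Prompts and status go to stderr: visible in a terminal even when
	// stdout is redirected with `prog > out.txt` or piped elsewhere.
	fmt.Fprint(os.Stderr, "Paste the authorization code here: ")

	code, _ := bufio.NewReader(os.Stdin).ReadString('\n')

	// Only the actual result is written to stdout.
	fmt.Print(code)
}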
@@ -153,7 +153,7 @@ func (c *BedrockClient) ListModels() ([]string, error) {
	return models, nil
}

// SendStream sends the messages to the the Bedrock ConverseStream API
// SendStream sends the messages to the Bedrock ConverseStream API
func (c *BedrockClient) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) (err error) {
	// Ensure channel is closed on all exit paths to prevent goroutine leaks
	defer func() {

@@ -12,6 +12,8 @@ import (
	"github.com/danielmiessler/fabric/internal/plugins"
)

const DryRunResponse = "Dry run: Fake response sent by DryRun plugin\n"

type Client struct {
	*plugins.PluginBase
}

@@ -85,27 +87,37 @@ func (c *Client) formatOptions(opts *domain.ChatOptions) string {
	if opts.ImageFile != "" {
		builder.WriteString(fmt.Sprintf("ImageFile: %s\n", opts.ImageFile))
	}
	if opts.SuppressThink {
		builder.WriteString("SuppressThink: enabled\n")
		builder.WriteString(fmt.Sprintf("Thinking Start Tag: %s\n", opts.ThinkStartTag))
		builder.WriteString(fmt.Sprintf("Thinking End Tag: %s\n", opts.ThinkEndTag))
	}

	return builder.String()
}

func (c *Client) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) error {
func (c *Client) constructRequest(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) string {
	var builder strings.Builder
	builder.WriteString("Dry run: Would send the following request:\n\n")
	builder.WriteString(c.formatMessages(msgs))
	builder.WriteString(c.formatOptions(opts))

	channel <- builder.String()
	close(channel)
	return builder.String()
}

func (c *Client) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) error {
	defer close(channel)
	request := c.constructRequest(msgs, opts)
	channel <- request
	channel <- "\n"
	channel <- DryRunResponse
	return nil
}

func (c *Client) Send(_ context.Context, msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (string, error) {
	fmt.Println("Dry run: Would send the following request:")
	fmt.Print(c.formatMessages(msgs))
	fmt.Print(c.formatOptions(opts))
	request := c.constructRequest(msgs, opts)

	return "", nil
	return request + "\n" + DryRunResponse, nil
}

func (c *Client) Setup() error {
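The refactor above extracts a single constructRequest helper so the streaming and non-streaming dry-run paths render identical output, and moves channel closing into one deferred call. A compact sketch of the same shape (types simplified to strings; not the real plugin API):

package main

import "fmt"

type client struct{}

// construct renders the request once; both delivery paths reuse it.
func (c client) construct(msg string) string {
	return "Would send: " + msg + "\n"
}

func (c client) sendStream(msg string, ch chan string) {
	defer close(ch) // closing in one place covers every exit path
	ch <- c.construct(msg)
	ch <- "fake response\n"
}

func (c client) send(msg string) string {
	return c.construct(msg) + "fake response\n"
}

func main() {
	c := client{}
	ch := make(chan string, 2)
	c.sendStream("hello", ch)
	for s := range ch {
		fmt.Print(s)
	}
	fmt.Print(c.send("hello")) // same bytes as the streamed version
}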
@@ -13,6 +13,7 @@ func NewClient() (ret *Client) {
	ret = &Client{}
	ret.Client = openai.NewClientCompatibleNoSetupQuestions("Exolab", ret.configure)

	ret.ApiKey = ret.AddSetupQuestion("API Key", false)
	ret.ApiBaseURL = ret.AddSetupQuestion("API Base URL", true)
	ret.ApiBaseURL.Value = "http://localhost:52415"

@@ -1,21 +1,49 @@
package gemini

import (
	"bytes"
	"context"
	"errors"
	"encoding/binary"
	"fmt"
	"regexp"
	"strings"

	"github.com/danielmiessler/fabric/internal/chat"
	"github.com/danielmiessler/fabric/internal/plugins"

	"github.com/danielmiessler/fabric/internal/domain"
	"github.com/google/generative-ai-go/genai"
	"google.golang.org/api/iterator"
	"google.golang.org/api/option"
	"google.golang.org/genai"
)

const modelsNamePrefix = "models/"
// WAV audio constants
const (
	DefaultChannels      = 1
	DefaultSampleRate    = 24000
	DefaultBitsPerSample = 16
	WAVHeaderSize        = 44
	RIFFHeaderSize       = 36
	MaxAudioDataSize     = 100 * 1024 * 1024 // 100MB limit for security
	MinAudioDataSize     = 44                // Minimum viable audio data
	AudioDataPrefix      = "FABRIC_AUDIO_DATA:"
)

const (
	citationHeader    = "\n\n## Sources\n\n"
	citationSeparator = "\n"
	citationFormat    = "- [%s](%s)"

	errInvalidLocationFormat = "invalid search location format %q: must be timezone (e.g., 'America/Los_Angeles') or language code (e.g., 'en-US')"
	locationSeparator        = "/"
	langCodeSeparator        = "_"
	langCodeNormalizedSep    = "-"

	modelPrefix           = "models/"
	modelTypeTTS          = "tts"
	modelTypePreviewTTS   = "preview-tts"
	modelTypeTextToSpeech = "text-to-speech"
)

var langCodeRegex = regexp.MustCompile(`^[a-z]{2}(-[A-Z]{2})?$`)

func NewClient() (ret *Client) {
	vendorName := "Gemini"

@@ -39,107 +67,102 @@ type Client struct {
func (o *Client) ListModels() (ret []string, err error) {
	ctx := context.Background()
	var client *genai.Client
	if client, err = genai.NewClient(ctx, option.WithAPIKey(o.ApiKey.Value)); err != nil {
	if client, err = genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  o.ApiKey.Value,
		Backend: genai.BackendGeminiAPI,
	}); err != nil {
		return
	}
	defer client.Close()

	iter := client.ListModels(ctx)
	for {
		var resp *genai.ModelInfo
		if resp, err = iter.Next(); err != nil {
			if errors.Is(err, iterator.Done) {
				err = nil
			}
			break
		}
	// List available models using the correct API
	resp, err := client.Models.List(ctx, &genai.ListModelsConfig{})
	if err != nil {
		return nil, err
	}

		name := o.buildModelNameSimple(resp.Name)
		ret = append(ret, name)
	for _, model := range resp.Items {
		// Strip the "models/" prefix for user convenience
		modelName := strings.TrimPrefix(model.Name, "models/")
		ret = append(ret, modelName)
	}
	return
}

func (o *Client) Send(ctx context.Context, msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (ret string, err error) {
	systemInstruction, messages := toMessages(msgs)
	// Check if this is a TTS model request
	if o.isTTSModel(opts.Model) {
		if !opts.AudioOutput {
			err = fmt.Errorf("TTS model '%s' requires audio output. Please specify an audio output file with -o flag ending in .wav", opts.Model)
			return
		}

		// Handle TTS generation
		return o.generateTTSAudio(ctx, msgs, opts)
	}

	// Regular text generation
	var client *genai.Client
	if client, err = genai.NewClient(ctx, option.WithAPIKey(o.ApiKey.Value)); err != nil {
		return
	}
	defer client.Close()

	model := client.GenerativeModel(o.buildModelNameFull(opts.Model))
	model.SetTemperature(float32(opts.Temperature))
	model.SetTopP(float32(opts.TopP))
	model.SystemInstruction = systemInstruction

	var response *genai.GenerateContentResponse
	if response, err = model.GenerateContent(ctx, messages...); err != nil {
	if client, err = genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  o.ApiKey.Value,
		Backend: genai.BackendGeminiAPI,
	}); err != nil {
		return
	}

	ret = o.extractText(response)
	// Convert messages to new SDK format
	contents := o.convertMessages(msgs)

	cfg, err := o.buildGenerateContentConfig(opts)
	if err != nil {
		return "", err
	}

	// Generate content with optional tools
	response, err := client.Models.GenerateContent(ctx, o.buildModelNameFull(opts.Model), contents, cfg)
	if err != nil {
		return "", err
	}

	// Extract text from response
	ret = o.extractTextFromResponse(response)
	return
}

func (o *Client) buildModelNameSimple(fullModelName string) string {
	return strings.TrimPrefix(fullModelName, modelsNamePrefix)
}

func (o *Client) buildModelNameFull(modelName string) string {
	return fmt.Sprintf("%v%v", modelsNamePrefix, modelName)
}

func (o *Client) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) (err error) {
	ctx := context.Background()
	var client *genai.Client
	if client, err = genai.NewClient(ctx, option.WithAPIKey(o.ApiKey.Value)); err != nil {
	if client, err = genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  o.ApiKey.Value,
		Backend: genai.BackendGeminiAPI,
	}); err != nil {
		return
	}
	defer client.Close()

	systemInstruction, messages := toMessages(msgs)
	// Convert messages to new SDK format
	contents := o.convertMessages(msgs)

	model := client.GenerativeModel(o.buildModelNameFull(opts.Model))
	model.SetTemperature(float32(opts.Temperature))
	model.SetTopP(float32(opts.TopP))
	model.SystemInstruction = systemInstruction
	cfg, err := o.buildGenerateContentConfig(opts)
	if err != nil {
		return err
	}

	iter := model.GenerateContentStream(ctx, messages...)
	for {
		if resp, iterErr := iter.Next(); iterErr == nil {
			for _, candidate := range resp.Candidates {
				if candidate.Content != nil {
					for _, part := range candidate.Content.Parts {
						if text, ok := part.(genai.Text); ok {
							channel <- string(text)
						}
					}
				}
			}
		} else {
			if !errors.Is(iterErr, iterator.Done) {
				channel <- fmt.Sprintf("%v\n", iterErr)
			}
	// Generate streaming content with optional tools
	stream := client.Models.GenerateContentStream(ctx, o.buildModelNameFull(opts.Model), contents, cfg)

	for response, err := range stream {
		if err != nil {
			channel <- fmt.Sprintf("Error: %v\n", err)
			close(channel)
			break
		}
	}
	return
}

func (o *Client) extractText(response *genai.GenerateContentResponse) (ret string) {
	for _, candidate := range response.Candidates {
		if candidate.Content == nil {
			break
		}
		for _, part := range candidate.Content.Parts {
			if text, ok := part.(genai.Text); ok {
				ret += string(text)
			}
		text := o.extractTextFromResponse(response)
		if text != "" {
			channel <- text
		}
	}
	close(channel)

	return
}

@@ -147,18 +170,354 @@ func (o *Client) NeedsRawMode(modelName string) bool {
	return false
}

func toMessages(msgs []*chat.ChatCompletionMessage) (systemInstruction *genai.Content, messages []genai.Part) {
	if len(msgs) >= 2 {
		systemInstruction = &genai.Content{
			Parts: []genai.Part{
				genai.Text(msgs[0].Content),
			},
		}
		for _, msg := range msgs[1:] {
			messages = append(messages, genai.Text(msg.Content))
		}
	} else {
		messages = append(messages, genai.Text(msgs[0].Content))
// buildGenerateContentConfig constructs the generation config with optional tools.
// When search is enabled it injects the Google Search tool. The optional search
// location accepts either:
//   - A timezone in the format "Continent/City" (e.g., "America/Los_Angeles")
//   - An ISO language code "ll" or "ll-CC" (e.g., "en" or "en-US")
//
// Underscores are normalized to hyphens. Returns an error if the location is
// invalid.
func (o *Client) buildGenerateContentConfig(opts *domain.ChatOptions) (*genai.GenerateContentConfig, error) {
	temperature := float32(opts.Temperature)
	topP := float32(opts.TopP)
	cfg := &genai.GenerateContentConfig{
		Temperature:     &temperature,
		TopP:            &topP,
		MaxOutputTokens: int32(opts.ModelContextLength),
	}
	return

	if opts.Search {
		cfg.Tools = []*genai.Tool{{GoogleSearch: &genai.GoogleSearch{}}}
		if loc := opts.SearchLocation; loc != "" {
			if isValidLocationFormat(loc) {
				loc = normalizeLocation(loc)
				cfg.ToolConfig = &genai.ToolConfig{
					RetrievalConfig: &genai.RetrievalConfig{LanguageCode: loc},
				}
			} else {
				return nil, fmt.Errorf(errInvalidLocationFormat, loc)
			}
		}
	}

	return cfg, nil
}

// buildModelNameFull adds the "models/" prefix for API calls
func (o *Client) buildModelNameFull(modelName string) string {
	if strings.HasPrefix(modelName, modelPrefix) {
		return modelName
	}
	return modelPrefix + modelName
}

func isValidLocationFormat(location string) bool {
	if strings.Contains(location, locationSeparator) {
		parts := strings.Split(location, locationSeparator)
		return len(parts) == 2 && parts[0] != "" && parts[1] != ""
	}
	return isValidLanguageCode(location)
}

func normalizeLocation(location string) string {
	if strings.Contains(location, locationSeparator) {
		return location
	}
	return strings.Replace(location, langCodeSeparator, langCodeNormalizedSep, 1)
}

// isValidLanguageCode reports whether the input is an ISO 639-1 language code
// optionally followed by an ISO 3166-1 country code. Underscores are
// normalized to hyphens before validation.
func isValidLanguageCode(code string) bool {
	normalized := strings.Replace(code, langCodeSeparator, langCodeNormalizedSep, 1)
	parts := strings.Split(normalized, langCodeNormalizedSep)
	switch len(parts) {
	case 1:
		return langCodeRegex.MatchString(strings.ToLower(parts[0]))
	case 2:
		formatted := strings.ToLower(parts[0]) + langCodeNormalizedSep + strings.ToUpper(parts[1])
		return langCodeRegex.MatchString(formatted)
	default:
		return false
	}
}

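The validator accepts a bare language code, a language_REGION or language-REGION pair, or a Continent/City timezone. A runnable condensation of the same checks (inlined constants; mirrors the regex above):

package main

import (
	"fmt"
	"regexp"
	"strings"
)

var langCodeRegex = regexp.MustCompile(`^[a-z]{2}(-[A-Z]{2})?$`)

func valid(loc string) bool {
	if strings.Contains(loc, "/") { // timezone form: Continent/City
		parts := strings.Split(loc, "/")
		return len(parts) == 2 && parts[0] != "" && parts[1] != ""
	}
	normalized := strings.Replace(loc, "_", "-", 1)
	parts := strings.Split(normalized, "-")
	switch len(parts) {
	case 1:
		return langCodeRegex.MatchString(strings.ToLower(parts[0]))
	case 2:
		return langCodeRegex.MatchString(strings.ToLower(parts[0]) + "-" + strings.ToUpper(parts[1]))
	default:
		return false
	}
}

func main() {
	for _, loc := range []string{"en", "en_us", "en-US", "America/Los_Angeles", "english"} {
		fmt.Printf("%-20s -> %v\n", loc, valid(loc))
	}
	// en, en_us, en-US and America/Los_Angeles pass; "english" fails the regex.
}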
// isTTSModel checks if the model is a text-to-speech model
func (o *Client) isTTSModel(modelName string) bool {
	lowerModel := strings.ToLower(modelName)
	return strings.Contains(lowerModel, modelTypeTTS) ||
		strings.Contains(lowerModel, modelTypePreviewTTS) ||
		strings.Contains(lowerModel, modelTypeTextToSpeech)
}

// extractTextForTTS extracts text content from chat messages for TTS generation
func (o *Client) extractTextForTTS(msgs []*chat.ChatCompletionMessage) (string, error) {
	for i := len(msgs) - 1; i >= 0; i-- {
		if msgs[i].Role == chat.ChatMessageRoleUser && msgs[i].Content != "" {
			return msgs[i].Content, nil
		}
	}
	return "", fmt.Errorf("no text content found for TTS generation")
}

// createGenaiClient creates a new GenAI client for TTS operations
func (o *Client) createGenaiClient(ctx context.Context) (*genai.Client, error) {
	return genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  o.ApiKey.Value,
		Backend: genai.BackendGeminiAPI,
	})
}

// generateTTSAudio handles TTS audio generation using the new SDK
func (o *Client) generateTTSAudio(ctx context.Context, msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (ret string, err error) {
	textToSpeak, err := o.extractTextForTTS(msgs)
	if err != nil {
		return "", err
	}

	// Validate voice name before making API call
	if opts.Voice != "" && !IsValidGeminiVoice(opts.Voice) {
		validVoices := GetGeminiVoiceNames()
		return "", fmt.Errorf("invalid voice '%s'. Valid voices are: %v", opts.Voice, validVoices)
	}

	client, err := o.createGenaiClient(ctx)
	if err != nil {
		return "", err
	}

	return o.performTTSGeneration(ctx, client, textToSpeak, opts)
}

// performTTSGeneration performs the actual TTS generation and audio processing
func (o *Client) performTTSGeneration(ctx context.Context, client *genai.Client, textToSpeak string, opts *domain.ChatOptions) (string, error) {

	// Create content for TTS
	contents := []*genai.Content{{
		Parts: []*genai.Part{{Text: textToSpeak}},
	}}

	// Configure for TTS generation
	voiceName := opts.Voice
	if voiceName == "" {
		voiceName = "Kore" // Default voice if none specified
	}

	config := &genai.GenerateContentConfig{
		ResponseModalities: []string{"AUDIO"},
		SpeechConfig: &genai.SpeechConfig{
			VoiceConfig: &genai.VoiceConfig{
				PrebuiltVoiceConfig: &genai.PrebuiltVoiceConfig{
					VoiceName: voiceName,
				},
			},
		},
	}

	// Generate TTS content
	response, err := client.Models.GenerateContent(ctx, o.buildModelNameFull(opts.Model), contents, config)
	if err != nil {
		return "", fmt.Errorf("TTS generation failed: %w", err)
	}

	// Extract and process audio data
	if len(response.Candidates) > 0 && response.Candidates[0].Content != nil && len(response.Candidates[0].Content.Parts) > 0 {
		part := response.Candidates[0].Content.Parts[0]
		if part.InlineData != nil && len(part.InlineData.Data) > 0 {
			// Validate audio data format and size
			if part.InlineData.MIMEType != "" && !strings.HasPrefix(part.InlineData.MIMEType, "audio/") {
				return "", fmt.Errorf("unexpected data type: %s, expected audio data", part.InlineData.MIMEType)
			}

			pcmData := part.InlineData.Data
			if len(pcmData) < MinAudioDataSize {
				return "", fmt.Errorf("audio data too small: %d bytes, minimum required: %d", len(pcmData), MinAudioDataSize)
			}

			// Generate WAV file with proper headers and return the binary data
			wavData, err := o.generateWAVFile(pcmData)
			if err != nil {
				return "", fmt.Errorf("failed to generate WAV file: %w", err)
			}

			// Validate generated WAV data
			if len(wavData) < WAVHeaderSize {
				return "", fmt.Errorf("generated WAV data is invalid: %d bytes, minimum required: %d", len(wavData), WAVHeaderSize)
			}

			// Store the binary audio data in a special format that the CLI can detect
			// Use more efficient string concatenation
			return AudioDataPrefix + string(wavData), nil
		}
	}

	return "", fmt.Errorf("no audio data received from TTS model")
}

// generateWAVFile creates WAV data from PCM data with proper headers
func (o *Client) generateWAVFile(pcmData []byte) ([]byte, error) {
	// Validate input size to prevent potential security issues
	if len(pcmData) == 0 {
		return nil, fmt.Errorf("empty PCM data provided")
	}
	if len(pcmData) > MaxAudioDataSize {
		return nil, fmt.Errorf("PCM data too large: %d bytes, maximum allowed: %d", len(pcmData), MaxAudioDataSize)
	}

	// WAV file parameters (Gemini TTS default specs)
	channels := DefaultChannels
	sampleRate := DefaultSampleRate
	bitsPerSample := DefaultBitsPerSample

	// Calculate required values
	byteRate := sampleRate * channels * bitsPerSample / 8
	blockAlign := channels * bitsPerSample / 8
	dataLen := uint32(len(pcmData))
	riffSize := RIFFHeaderSize + dataLen

	// Pre-allocate buffer with known size for better performance
	totalSize := int(riffSize + 8) // +8 for RIFF header
	buf := bytes.NewBuffer(make([]byte, 0, totalSize))

	// RIFF header
	buf.WriteString("RIFF")
	binary.Write(buf, binary.LittleEndian, riffSize)
	buf.WriteString("WAVE")

	// fmt chunk
	buf.WriteString("fmt ")
	binary.Write(buf, binary.LittleEndian, uint32(16))            // subchunk1Size
	binary.Write(buf, binary.LittleEndian, uint16(1))             // audioFormat = PCM
	binary.Write(buf, binary.LittleEndian, uint16(channels))      // numChannels
	binary.Write(buf, binary.LittleEndian, uint32(sampleRate))    // sampleRate
	binary.Write(buf, binary.LittleEndian, uint32(byteRate))      // byteRate
	binary.Write(buf, binary.LittleEndian, uint16(blockAlign))    // blockAlign
	binary.Write(buf, binary.LittleEndian, uint16(bitsPerSample)) // bitsPerSample

	// data chunk
	buf.WriteString("data")
	binary.Write(buf, binary.LittleEndian, dataLen)

	// Write PCM data to buffer
	buf.Write(pcmData)

	// Validate generated WAV data
	result := buf.Bytes()
	if len(result) < WAVHeaderSize {
		return nil, fmt.Errorf("generated WAV data is invalid: %d bytes, minimum required: %d", len(result), WAVHeaderSize)
	}

	return result, nil
}

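The canonical 44-byte WAV header is fixed-layout: "RIFF" plus a chunk size, "WAVE", a 16-byte "fmt " chunk, then "data" plus the payload length. A standalone check that the header math used above yields exactly 44 bytes before the PCM payload (constants inlined from the WAV audio constants block):

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

func wavHeader(dataLen uint32) []byte {
	const (
		channels      = 1
		sampleRate    = 24000
		bitsPerSample = 16
	)
	byteRate := uint32(sampleRate * channels * bitsPerSample / 8)
	blockAlign := uint16(channels * bitsPerSample / 8)

	buf := &bytes.Buffer{}
	buf.WriteString("RIFF")
	binary.Write(buf, binary.LittleEndian, 36+dataLen) // RIFF chunk size = header remainder + data
	buf.WriteString("WAVE")
	buf.WriteString("fmt ")
	binary.Write(buf, binary.LittleEndian, uint32(16)) // fmt chunk size
	binary.Write(buf, binary.LittleEndian, uint16(1))  // PCM
	binary.Write(buf, binary.LittleEndian, uint16(channels))
	binary.Write(buf, binary.LittleEndian, uint32(sampleRate))
	binary.Write(buf, binary.LittleEndian, byteRate)
	binary.Write(buf, binary.LittleEndian, blockAlign)
	binary.Write(buf, binary.LittleEndian, uint16(bitsPerSample))
	buf.WriteString("data")
	binary.Write(buf, binary.LittleEndian, dataLen)
	return buf.Bytes()
}

func main() {
	h := wavHeader(48000)
	fmt.Println(len(h)) // 44 — matches WAVHeaderSize
}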
// convertMessages converts fabric chat messages to genai Content format
|
||||
func (o *Client) convertMessages(msgs []*chat.ChatCompletionMessage) []*genai.Content {
|
||||
var contents []*genai.Content
|
||||
|
||||
for _, msg := range msgs {
|
||||
content := &genai.Content{Parts: []*genai.Part{}}
|
||||
|
||||
switch msg.Role {
|
||||
case chat.ChatMessageRoleAssistant:
|
||||
content.Role = "model"
|
||||
case chat.ChatMessageRoleUser:
|
||||
content.Role = "user"
|
||||
case chat.ChatMessageRoleSystem, chat.ChatMessageRoleDeveloper, chat.ChatMessageRoleFunction, chat.ChatMessageRoleTool:
|
||||
// Gemini's API only accepts "user" and "model" roles.
|
||||
// Map all other roles to "user" to preserve instruction context.
|
||||
content.Role = "user"
|
||||
default:
|
||||
content.Role = "user"
|
||||
}
|
||||
|
||||
if msg.Content != "" {
|
||||
content.Parts = append(content.Parts, &genai.Part{Text: msg.Content})
|
||||
}
|
||||
|
||||
// Handle multi-content messages (images, etc.)
|
||||
for _, part := range msg.MultiContent {
|
||||
switch part.Type {
|
||||
case chat.ChatMessagePartTypeText:
|
||||
content.Parts = append(content.Parts, &genai.Part{Text: part.Text})
|
||||
case chat.ChatMessagePartTypeImageURL:
|
||||
// TODO: Handle image URLs if needed
|
||||
// This would require downloading and converting to inline data
|
||||
}
|
||||
}
|
||||
|
||||
contents = append(contents, content)
|
||||
}
|
||||
|
||||
return contents
|
||||
}

// extractTextFromResponse extracts text content from the response and appends
// any web citations in a standardized format.
func (o *Client) extractTextFromResponse(response *genai.GenerateContentResponse) string {
	if response == nil {
		return ""
	}

	text := o.extractTextParts(response)
	citations := o.extractCitations(response)
	if len(citations) > 0 {
		return text + citationHeader + strings.Join(citations, citationSeparator)
	}
	return text
}

func (o *Client) extractTextParts(response *genai.GenerateContentResponse) string {
	var builder strings.Builder
	for _, candidate := range response.Candidates {
		if candidate == nil || candidate.Content == nil {
			continue
		}
		for _, part := range candidate.Content.Parts {
			if part != nil && part.Text != "" {
				builder.WriteString(part.Text)
			}
		}
	}
	return builder.String()
}

func (o *Client) extractCitations(response *genai.GenerateContentResponse) []string {
	if response == nil || len(response.Candidates) == 0 {
		return nil
	}

	citationMap := make(map[string]bool)
	var citations []string
	for _, candidate := range response.Candidates {
		if candidate == nil || candidate.GroundingMetadata == nil {
			continue
		}
		chunks := candidate.GroundingMetadata.GroundingChunks
		if len(chunks) == 0 {
			continue
		}
		for _, chunk := range chunks {
			if chunk == nil || chunk.Web == nil {
				continue
			}
			uri := chunk.Web.URI
			title := chunk.Web.Title
			if uri == "" || title == "" {
				continue
			}
			var keyBuilder strings.Builder
			keyBuilder.WriteString(uri)
			keyBuilder.WriteByte('|')
			keyBuilder.WriteString(title)
			key := keyBuilder.String()
			if !citationMap[key] {
				citationMap[key] = true
				citationText := fmt.Sprintf(citationFormat, title, uri)
				citations = append(citations, citationText)
			}
		}
	}
	return citations
}
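
// Hedged illustration of the dedup key built above: a chunk with
// {URI: "https://example.com/ai", Title: "AI Research"} yields the key
// "https://example.com/ai|AI Research", so a later chunk with the same URI
// and title is skipped and only one citation is emitted.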
@@ -1,34 +1,46 @@
package gemini

import (
	"strings"
	"testing"

-	"github.com/google/generative-ai-go/genai"
+	"google.golang.org/genai"

	"github.com/danielmiessler/fabric/internal/chat"
	"github.com/danielmiessler/fabric/internal/domain"
)

-// Test generated using Keploy
-func TestBuildModelNameSimple(t *testing.T) {
+// Test buildModelNameFull method
+func TestBuildModelNameFull(t *testing.T) {
	client := &Client{}
-	fullModelName := "models/chat-bison-001"
-	expected := "chat-bison-001"
-
-	result := client.buildModelNameSimple(fullModelName)
+	tests := []struct {
+		input    string
+		expected string
+	}{
+		{"chat-bison-001", "models/chat-bison-001"},
+		{"models/chat-bison-001", "models/chat-bison-001"},
+		{"gemini-2.5-flash-preview-tts", "models/gemini-2.5-flash-preview-tts"},
+	}

-	if result != expected {
-		t.Errorf("Expected %v, got %v", expected, result)
+	for _, test := range tests {
+		result := client.buildModelNameFull(test.input)
+		if result != test.expected {
+			t.Errorf("For input %v, expected %v, got %v", test.input, test.expected, result)
+		}
	}
}

-// Test generated using Keploy
-func TestExtractText(t *testing.T) {
+// Test extractTextFromResponse method
+func TestExtractTextFromResponse(t *testing.T) {
	client := &Client{}
	response := &genai.GenerateContentResponse{
		Candidates: []*genai.Candidate{
			{
				Content: &genai.Content{
-					Parts: []genai.Part{
-						genai.Text("Hello, "),
-						genai.Text("world!"),
+					Parts: []*genai.Part{
+						{Text: "Hello, "},
+						{Text: "world!"},
					},
				},
			},
@@ -36,9 +48,180 @@ func TestExtractText(t *testing.T) {
	}
	expected := "Hello, world!"

-	result := client.extractText(response)
+	result := client.extractTextFromResponse(response)

	if result != expected {
		t.Errorf("Expected %v, got %v", expected, result)
	}
}

func TestExtractTextFromResponse_Nil(t *testing.T) {
	client := &Client{}
	if got := client.extractTextFromResponse(nil); got != "" {
		t.Fatalf("expected empty string, got %q", got)
	}
}

func TestExtractTextFromResponse_EmptyGroundingChunks(t *testing.T) {
	client := &Client{}
	response := &genai.GenerateContentResponse{
		Candidates: []*genai.Candidate{
			{
				Content:           &genai.Content{Parts: []*genai.Part{{Text: "Hello"}}},
				GroundingMetadata: &genai.GroundingMetadata{GroundingChunks: nil},
			},
		},
	}
	if got := client.extractTextFromResponse(response); got != "Hello" {
		t.Fatalf("expected 'Hello', got %q", got)
	}
}

func TestBuildGenerateContentConfig_WithSearch(t *testing.T) {
	client := &Client{}
	opts := &domain.ChatOptions{Search: true}

	cfg, err := client.buildGenerateContentConfig(opts)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if cfg.Tools == nil || len(cfg.Tools) != 1 || cfg.Tools[0].GoogleSearch == nil {
		t.Errorf("expected google search tool to be included")
	}
}

func TestBuildGenerateContentConfig_WithSearchAndLocation(t *testing.T) {
	client := &Client{}
	opts := &domain.ChatOptions{Search: true, SearchLocation: "America/Los_Angeles"}

	cfg, err := client.buildGenerateContentConfig(opts)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if cfg.ToolConfig == nil || cfg.ToolConfig.RetrievalConfig == nil {
		t.Fatalf("expected retrieval config when search location provided")
	}
	if cfg.ToolConfig.RetrievalConfig.LanguageCode != opts.SearchLocation {
		t.Errorf("expected language code %s, got %s", opts.SearchLocation, cfg.ToolConfig.RetrievalConfig.LanguageCode)
	}
}

func TestBuildGenerateContentConfig_InvalidLocation(t *testing.T) {
	client := &Client{}
	opts := &domain.ChatOptions{Search: true, SearchLocation: "invalid"}

	_, err := client.buildGenerateContentConfig(opts)
	if err == nil {
		t.Fatalf("expected error for invalid location")
	}
}

func TestBuildGenerateContentConfig_LanguageCodeNormalization(t *testing.T) {
	client := &Client{}
	opts := &domain.ChatOptions{Search: true, SearchLocation: "en_US"}

	cfg, err := client.buildGenerateContentConfig(opts)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if cfg.ToolConfig == nil || cfg.ToolConfig.RetrievalConfig.LanguageCode != "en-US" {
		t.Fatalf("expected normalized language code 'en-US', got %+v", cfg.ToolConfig)
	}
}

func TestCitationFormatting(t *testing.T) {
	client := &Client{}
	response := &genai.GenerateContentResponse{
		Candidates: []*genai.Candidate{
			{
				Content: &genai.Content{Parts: []*genai.Part{{Text: "Based on recent research, AI is advancing rapidly."}}},
				GroundingMetadata: &genai.GroundingMetadata{
					GroundingChunks: []*genai.GroundingChunk{
						{Web: &genai.GroundingChunkWeb{URI: "https://example.com/ai", Title: "AI Research"}},
						{Web: &genai.GroundingChunkWeb{URI: "https://news.com/tech", Title: "Tech News"}},
						{Web: &genai.GroundingChunkWeb{URI: "https://example.com/ai", Title: "AI Research"}}, // duplicate
					},
				},
			},
		},
	}

	result := client.extractTextFromResponse(response)
	if !strings.Contains(result, "## Sources") {
		t.Fatalf("expected sources section in result: %s", result)
	}
	if strings.Count(result, "- [") != 2 {
		t.Errorf("expected 2 unique citations, got %d", strings.Count(result, "- ["))
	}
}
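
// Hedged inference from the assertions above (the actual citationHeader and
// citationFormat constants are defined elsewhere in this diff): the combined
// output is shaped roughly like
//
//	Based on recent research, AI is advancing rapidly.
//
//	## Sources:
//
//	- [AI Research](https://example.com/ai)
//	- [Tech News](https://news.com/tech)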

// Test convertMessages handles role mapping correctly
func TestConvertMessagesRoles(t *testing.T) {
	client := &Client{}
	msgs := []*chat.ChatCompletionMessage{
		{Role: chat.ChatMessageRoleUser, Content: "user"},
		{Role: chat.ChatMessageRoleAssistant, Content: "assistant"},
		{Role: chat.ChatMessageRoleSystem, Content: "system"},
	}

	contents := client.convertMessages(msgs)

	expected := []string{"user", "model", "user"}

	if len(contents) != len(expected) {
		t.Fatalf("expected %d contents, got %d", len(expected), len(contents))
	}

	for i, c := range contents {
		if c.Role != expected[i] {
			t.Errorf("content %d expected role %s, got %s", i, expected[i], c.Role)
		}
	}
}

// Test isTTSModel method
func TestIsTTSModel(t *testing.T) {
	client := &Client{}

	tests := []struct {
		modelName string
		expected  bool
	}{
		{"gemini-2.5-flash-preview-tts", true},
		{"text-to-speech-model", true},
		{"TTS-MODEL", true},
		{"gemini-pro", false},
		{"chat-bison", false},
		{"", false},
	}

	for _, test := range tests {
		result := client.isTTSModel(test.modelName)
		if result != test.expected {
			t.Errorf("For model %v, expected %v, got %v", test.modelName, test.expected, result)
		}
	}
}
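
// The cases above suggest isTTSModel does a case-insensitive substring check
// for "tts" (and apparently "text-to-speech") in the model name (hedged; the
// implementation lives elsewhere in this diff), which is why "TTS-MODEL"
// matches while "gemini-pro" and "chat-bison" do not.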

// Test generateWAVFile method (basic test)
func TestGenerateWAVFile(t *testing.T) {
	client := &Client{}

	// Test with minimal PCM data
	pcmData := []byte{0x00, 0x01, 0x02, 0x03}

	result, err := client.generateWAVFile(pcmData)
	if err != nil {
		t.Errorf("generateWAVFile failed: %v", err)
	}

	// Check that we got some data back
	if len(result) == 0 {
		t.Error("generateWAVFile returned empty data")
	}

	// Check that it starts with the RIFF header
	if len(result) >= 4 && string(result[0:4]) != "RIFF" {
		t.Error("Generated WAV data doesn't start with RIFF header")
	}
}

internal/plugins/ai/gemini/voices.go (new file, 218 lines)
@@ -0,0 +1,218 @@
package gemini

import (
	"fmt"
	"sort"
)

// GeminiVoice represents a Gemini TTS voice with its characteristics
type GeminiVoice struct {
	Name            string
	Description     string
	Characteristics []string
}

// GetGeminiVoices returns the current list of supported Gemini TTS voices.
// This list is maintained based on the official Google Gemini documentation:
// https://ai.google.dev/gemini-api/docs/speech-generation
func GetGeminiVoices() []GeminiVoice {
	return []GeminiVoice{
		// Firm voices
		{Name: "Kore", Description: "Firm and confident", Characteristics: []string{"firm", "confident", "default"}},
		{Name: "Orus", Description: "Firm and decisive", Characteristics: []string{"firm", "decisive"}},
		{Name: "Alnilam", Description: "Firm and strong", Characteristics: []string{"firm", "strong"}},

		// Upbeat voices
		{Name: "Puck", Description: "Upbeat and energetic", Characteristics: []string{"upbeat", "energetic"}},
		{Name: "Laomedeia", Description: "Upbeat and lively", Characteristics: []string{"upbeat", "lively"}},

		// Bright voices
		{Name: "Zephyr", Description: "Bright and cheerful", Characteristics: []string{"bright", "cheerful"}},
		{Name: "Autonoe", Description: "Bright and optimistic", Characteristics: []string{"bright", "optimistic"}},

		// Informative voices
		{Name: "Charon", Description: "Informative and clear", Characteristics: []string{"informative", "clear"}},
		{Name: "Rasalgethi", Description: "Informative and professional", Characteristics: []string{"informative", "professional"}},

		// Natural voices
		{Name: "Aoede", Description: "Breezy and natural", Characteristics: []string{"breezy", "natural"}},
		{Name: "Leda", Description: "Youthful and energetic", Characteristics: []string{"youthful", "energetic"}},

		// Gentle voices
		{Name: "Vindemiatrix", Description: "Gentle and kind", Characteristics: []string{"gentle", "kind"}},
		{Name: "Achernar", Description: "Soft and gentle", Characteristics: []string{"soft", "gentle"}},
		{Name: "Enceladus", Description: "Breathy and soft", Characteristics: []string{"breathy", "soft"}},

		// Warm voices
		{Name: "Sulafat", Description: "Warm and welcoming", Characteristics: []string{"warm", "welcoming"}},
		{Name: "Capella", Description: "Warm and approachable", Characteristics: []string{"warm", "approachable"}},

		// Clear voices
		{Name: "Iapetus", Description: "Clear and articulate", Characteristics: []string{"clear", "articulate"}},
		{Name: "Erinome", Description: "Clear and precise", Characteristics: []string{"clear", "precise"}},

		// Pleasant voices
		{Name: "Algieba", Description: "Smooth and pleasant", Characteristics: []string{"smooth", "pleasant"}},
		{Name: "Vega", Description: "Smooth and flowing", Characteristics: []string{"smooth", "flowing"}},

		// Textured voices
		{Name: "Algenib", Description: "Gravelly texture", Characteristics: []string{"gravelly", "textured"}},

		// Relaxed voices
		{Name: "Callirrhoe", Description: "Easy-going and relaxed", Characteristics: []string{"relaxed", "easy-going"}},
		{Name: "Despina", Description: "Calm and serene", Characteristics: []string{"calm", "serene"}},

		// Mature voices
		{Name: "Gacrux", Description: "Mature and experienced", Characteristics: []string{"mature", "experienced"}},

		// Expressive voices
		{Name: "Pulcherrima", Description: "Forward and expressive", Characteristics: []string{"forward", "expressive"}},
		{Name: "Lyra", Description: "Melodic and expressive", Characteristics: []string{"melodic", "expressive"}},

		// Dynamic voices
		{Name: "Fenrir", Description: "Excitable and dynamic", Characteristics: []string{"excitable", "dynamic"}},
		{Name: "Sadachbia", Description: "Lively and animated", Characteristics: []string{"lively", "animated"}},

		// Friendly voices
		{Name: "Achird", Description: "Friendly and approachable", Characteristics: []string{"friendly", "approachable"}},

		// Casual voices
		{Name: "Zubenelgenubi", Description: "Casual and conversational", Characteristics: []string{"casual", "conversational"}},

		// Additional voices from the latest API
		{Name: "Sadaltager", Description: "Experimental voice with a calm and neutral tone", Characteristics: []string{"experimental", "calm", "neutral"}},
		{Name: "Schedar", Description: "Experimental voice with a warm and engaging tone", Characteristics: []string{"experimental", "warm", "engaging"}},
		{Name: "Umbriel", Description: "Experimental voice with a deep and resonant tone", Characteristics: []string{"experimental", "deep", "resonant"}},
	}
}

// GetGeminiVoiceNames returns just the voice names in alphabetical order
func GetGeminiVoiceNames() []string {
	voices := GetGeminiVoices()
	names := make([]string, len(voices))
	for i, voice := range voices {
		names[i] = voice.Name
	}
	sort.Strings(names)
	return names
}

// IsValidGeminiVoice checks if a voice name is valid
func IsValidGeminiVoice(voiceName string) bool {
	if voiceName == "" {
		return true // Empty voice is valid (will use default)
	}

	for _, voice := range GetGeminiVoices() {
		if voice.Name == voiceName {
			return true
		}
	}
	return false
}
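
// Hedged usage sketch (opts.Voice is a hypothetical field, not part of this
// diff): validate a user-supplied voice before requesting synthesis; an empty
// value falls through to the default voice.
//
//	if !IsValidGeminiVoice(opts.Voice) {
//		return fmt.Errorf("unknown Gemini voice %q", opts.Voice)
//	}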

// GetGeminiVoiceByName returns a specific voice by name
func GetGeminiVoiceByName(name string) (*GeminiVoice, error) {
	for _, voice := range GetGeminiVoices() {
		if voice.Name == name {
			return &voice, nil
		}
	}
	return nil, fmt.Errorf("voice '%s' not found", name)
}
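
// Hedged usage sketch combining the helpers above:
//
//	if v, err := GetGeminiVoiceByName("Charon"); err == nil {
//		fmt.Printf("%s: %s\n", v.Name, v.Description) // Charon: Informative and clear
//	}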

// ListGeminiVoices formats the voice list for display
func ListGeminiVoices(shellCompleteMode bool) string {
	if shellCompleteMode {
		// For shell completion, just return voice names
		names := GetGeminiVoiceNames()
		result := ""
		for _, name := range names {
			result += name + "\n"
		}
		return result
	}

	// For human-readable output
	voices := GetGeminiVoices()
	result := "Available Gemini Text-to-Speech voices:\n\n"

	// Group by characteristics for better readability
	groups := map[string][]GeminiVoice{
		"Firm & Confident":     {},
		"Bright & Cheerful":    {},
		"Warm & Welcoming":     {},
		"Clear & Professional": {},
		"Natural & Expressive": {},
		"Other Voices":         {},
	}

	for _, voice := range voices {
		placed := false
		for _, char := range voice.Characteristics {
			switch char {
			case "firm", "confident", "decisive", "strong":
				if !placed {
					groups["Firm & Confident"] = append(groups["Firm & Confident"], voice)
					placed = true
				}
			case "bright", "cheerful", "upbeat", "energetic", "lively":
				if !placed {
					groups["Bright & Cheerful"] = append(groups["Bright & Cheerful"], voice)
					placed = true
				}
			case "warm", "welcoming", "friendly", "approachable":
				if !placed {
					groups["Warm & Welcoming"] = append(groups["Warm & Welcoming"], voice)
					placed = true
				}
			case "clear", "informative", "professional", "articulate":
				if !placed {
					groups["Clear & Professional"] = append(groups["Clear & Professional"], voice)
					placed = true
				}
			case "natural", "expressive", "melodic", "breezy":
				if !placed {
					groups["Natural & Expressive"] = append(groups["Natural & Expressive"], voice)
					placed = true
				}
			}
		}
		if !placed {
			groups["Other Voices"] = append(groups["Other Voices"], voice)
		}
	}

	// Output grouped voices.
	// NOTE: iterating a Go map yields a nondeterministic order, so the group
	// ordering can differ between runs.
	for groupName, groupVoices := range groups {
		if len(groupVoices) > 0 {
			result += fmt.Sprintf("%s:\n", groupName)
			for _, voice := range groupVoices {
				defaultStr := ""
				if voice.Name == "Kore" {
					defaultStr = " (default)"
				}
				result += fmt.Sprintf("  %-15s - %s%s\n", voice.Name, voice.Description, defaultStr)
			}
			result += "\n"
		}
	}

	result += "Use --voice <voice_name> to select a specific voice.\n"
	result += "Example: fabric --voice Charon -m gemini-2.5-flash-preview-tts -o output.wav \"Hello world\"\n"

	return result
}

// NOTE: This implementation maintains a curated list based on official Google documentation.
// In the future, if Google provides a dynamic voice discovery API, this can be updated
// to make API calls for real-time voice discovery.
//
// The current approach ensures:
// 1. Fast response times (no API calls needed)
// 2. Reliable voice information with descriptions
// 3. Easy maintenance when new voices are added
// 4. Offline functionality
//
// To update voices: Monitor Google's Gemini TTS documentation at:
// https://ai.google.dev/gemini-api/docs/speech-generation

@@ -1,6 +1,10 @@
package ai

import (
	"fmt"
	"sort"
	"strings"

	"github.com/danielmiessler/fabric/internal/util"
)
@@ -11,3 +15,35 @@ func NewVendorsModels() *VendorsModels {
type VendorsModels struct {
	*util.GroupsItemsSelectorString
}

// PrintWithVendor prints models including their vendor on each line.
// When shellCompleteList is true, output is suitable for shell completion.
func (o *VendorsModels) PrintWithVendor(shellCompleteList bool) {
	if !shellCompleteList {
		fmt.Printf("\n%v:\n", o.SelectionLabel)
	}

	var currentItemIndex int

	sortedGroups := make([]*util.GroupItems[string], len(o.GroupsItems))
	copy(sortedGroups, o.GroupsItems)
	sort.SliceStable(sortedGroups, func(i, j int) bool {
		return strings.ToLower(sortedGroups[i].Group) < strings.ToLower(sortedGroups[j].Group)
	})

	for _, groupItems := range sortedGroups {
		items := make([]string, len(groupItems.Items))
		copy(items, groupItems.Items)
		sort.SliceStable(items, func(i, j int) bool {
			return strings.ToLower(items[i]) < strings.ToLower(items[j])
		})
		for _, item := range items {
			currentItemIndex++
			if shellCompleteList {
				fmt.Printf("%s|%s\n", groupItems.Group, item)
			} else {
				fmt.Printf("\t[%d]\t%s|%s\n", currentItemIndex, groupItems.Group, item)
			}
		}
	}
}
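
// Hedged sketch of the two output shapes above, assuming a vendor "OpenAI"
// whose model "gpt-4o" is the third item overall:
//
//	shell completion:  OpenAI|gpt-4o
//	interactive list:  [3]	OpenAI|gpt-4o   (preceded by a tab and the 1-based index)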

@@ -5,6 +5,7 @@ import (
	"fmt"
	"net/http"
	"net/url"
	"os"
	"strings"
	"time"

@@ -61,6 +62,11 @@ func (t *transport_sec) RoundTrip(req *http.Request) (*http.Response, error) {
	return t.underlyingTransport.RoundTrip(req)
}

// IsConfigured returns true only if the OLLAMA_API_URL environment variable is explicitly set
func (o *Client) IsConfigured() bool {
	return os.Getenv("OLLAMA_API_URL") != ""
}
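
// Hedged example (not part of this diff): with a local Ollama instance on its
// default port, the plugin only reports itself configured after, e.g.:
//
//	export OLLAMA_API_URL=http://localhost:11434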

func (o *Client) configure() (err error) {
	if o.apiUrl, err = url.Parse(o.ApiUrl.Value); err != nil {
		fmt.Printf("cannot parse URL: %s: %v\n", o.ApiUrl.Value, err)

@@ -160,6 +166,7 @@ func (o *Client) NeedsRawMode(modelName string) bool {
	ollamaPrefixes := []string{
		"llama3",
		"llama2",
		"mistral",
	}
	for _, prefix := range ollamaPrefixes {
		if strings.HasPrefix(modelName, prefix) {
@@ -66,6 +66,11 @@ type Client struct {
	ImplementsResponses bool // Whether this provider supports the Responses API
}

// SetResponsesAPIEnabled configures whether to use the Responses API
func (o *Client) SetResponsesAPIEnabled(enabled bool) {
	o.ImplementsResponses = enabled
}

func (o *Client) configure() (ret error) {
	opts := []option.RequestOption{option.WithAPIKey(o.ApiKey.Value)}
	if o.ApiBaseURL.Value != "" {

@@ -110,7 +115,11 @@ func (o *Client) sendStreamResponses(
	case string(constant.ResponseOutputTextDelta("").Default()):
		channel <- event.AsResponseOutputTextDelta().Delta
	case string(constant.ResponseOutputTextDone("").Default()):
-		channel <- event.AsResponseOutputTextDone().Text
+		// The Responses API sends the full text again in the
+		// final "done" event. Since we've already streamed all
+		// delta chunks above, sending it would duplicate the
+		// output. Ignore it here to prevent doubled results.
+		continue
	}
}
if stream.Err() == nil {

@@ -159,6 +168,7 @@ func (o *Client) NeedsRawMode(modelName string) bool {
	"o1",
	"o3",
	"o4",
	"gpt-5",
}
openAIModelsNeedingRaw := []string{
	"gpt-4o-mini-search-preview",
internal/plugins/ai/openai_compatible/direct_models_call.go (new file, 105 lines)
@@ -0,0 +1,105 @@
package openai_compatible

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"
)

// Model represents a model returned by the API
type Model struct {
	ID string `json:"id"`
}

// errorResponseLimit defines the maximum length of error response bodies for truncation.
const errorResponseLimit = 1024

// DirectlyGetModels fetches models directly from the API when the standard
// OpenAI SDK method fails due to a nonstandard format. This is useful for
// providers like Together that return a direct array of models.
func (c *Client) DirectlyGetModels(ctx context.Context) ([]string, error) {
	if ctx == nil {
		ctx = context.Background()
	}
	baseURL := c.ApiBaseURL.Value
	if baseURL == "" {
		return nil, fmt.Errorf("API base URL not configured for provider %s", c.GetName())
	}

	// Build the /models endpoint URL
	fullURL, err := url.JoinPath(baseURL, "models")
	if err != nil {
		return nil, fmt.Errorf("failed to create models URL: %w", err)
	}

	req, err := http.NewRequestWithContext(ctx, "GET", fullURL, nil)
	if err != nil {
		return nil, err
	}

	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.ApiKey.Value))
	req.Header.Set("Accept", "application/json")

	// TODO: Consider reusing a single http.Client instance (e.g., as a field on Client) instead of allocating a new one for each request.
	client := &http.Client{
		Timeout: 10 * time.Second,
	}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Read the response body for debugging
		bodyBytes, _ := io.ReadAll(resp.Body)
		bodyString := string(bodyBytes)
		if len(bodyString) > errorResponseLimit { // Truncate if too large
			bodyString = bodyString[:errorResponseLimit] + "..."
		}
		return nil, fmt.Errorf("unexpected status code: %d from provider %s, response body: %s",
			resp.StatusCode, c.GetName(), bodyString)
	}

	// Read the response body once
	bodyBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	// Try to parse as an object with a data field (OpenAI format)
	var openAIFormat struct {
		Data []Model `json:"data"`
	}
	// Try to parse as a direct array (Together format)
	var directArray []Model

	if err := json.Unmarshal(bodyBytes, &openAIFormat); err == nil && len(openAIFormat.Data) > 0 {
		return extractModelIDs(openAIFormat.Data), nil
	}

	if err := json.Unmarshal(bodyBytes, &directArray); err == nil && len(directArray) > 0 {
		return extractModelIDs(directArray), nil
	}

	var truncatedBody string
	if len(bodyBytes) > errorResponseLimit {
		truncatedBody = string(bodyBytes[:errorResponseLimit]) + "..."
	} else {
		truncatedBody = string(bodyBytes)
	}
	return nil, fmt.Errorf("unable to parse models response; raw response: %s", truncatedBody)
}
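
// Hedged illustration of the two payload shapes the parser above accepts
// (model IDs are placeholders):
//
//	OpenAI-style object:  {"data": [{"id": "gpt-4o"}, {"id": "gpt-4o-mini"}]}
//	Together-style array: [{"id": "model-a"}, {"id": "model-b"}]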

func extractModelIDs(models []Model) []string {
	modelIDs := make([]string, 0, len(models))
	for _, model := range models {
		modelIDs = append(modelIDs, model.ID)
	}
	return modelIDs
}

@@ -1,6 +1,7 @@
package openai_compatible

import (
	"context"
	"os"
	"strings"

@@ -31,6 +32,19 @@ func NewClient(providerConfig ProviderConfig) *Client {
	return client
}

// ListModels overrides the default ListModels to handle different response formats
func (c *Client) ListModels() ([]string, error) {
	// First try the standard OpenAI SDK approach
	models, err := c.Client.ListModels()
	if err == nil && len(models) > 0 { // only return if the OpenAI SDK returns models
		return models, nil
	}

	// TODO: Handle context properly in Fabric by accepting and propagating a context.Context
	// instead of creating a new one here.
	return c.DirectlyGetModels(context.Background())
}

// ProviderMap is a map of provider name to ProviderConfig for O(1) lookup
var ProviderMap = map[string]ProviderConfig{
	"AIML": {

@@ -83,6 +97,11 @@ var ProviderMap = map[string]ProviderConfig{
		BaseURL:             "https://api.siliconflow.cn/v1",
		ImplementsResponses: false,
	},
	"Together": {
		Name:                "Together",
		BaseURL:             "https://api.together.xyz/v1",
		ImplementsResponses: false,
	},
}

// GetProviderByName returns the provider configuration for a given name with O(1) lookup
@@ -86,9 +86,10 @@ func (o *Session) String() (ret string) {
	ret += fmt.Sprintf("\n--- \n[%v]\n%v", message.Role, message.Content)
	if message.MultiContent != nil {
		for _, part := range message.MultiContent {
-			if part.Type == chat.ChatMessagePartTypeImageURL {
+			switch part.Type {
+			case chat.ChatMessagePartTypeImageURL:
				ret += fmt.Sprintf("\n%v: %v", part.Type, *part.ImageURL)
-			} else if part.Type == chat.ChatMessagePartTypeText {
+			case chat.ChatMessagePartTypeText:
				ret += fmt.Sprintf("\n%v: %v", part.Type, part.Text)
			}
		}
@@ -105,7 +105,7 @@ func (h *ChatHandler) HandleChat(c *gin.Context) {
		}
	}

-	chatter, err := h.registry.GetChatter(p.Model, 2048, "", false, false)
+	chatter, err := h.registry.GetChatter(p.Model, 2048, p.Vendor, "", false, false)
	if err != nil {
		log.Printf("Error creating chatter: %v", err)
		streamChan <- fmt.Sprintf("Error: %v", err)
Some files were not shown because too many files have changed in this diff.