Mirror of https://github.com/danielmiessler/Fabric.git, synced 2026-01-09 22:38:10 -05:00
Compare commits
233 Commits
The commit list for this comparison is rendered by SHA1 only (the Author and Date columns are empty in this view): 233 commits, beginning with 3584f83b30 and ending with dd5e57477f.
100  .github/workflows/release.yml  vendored

@@ -27,8 +27,39 @@ jobs:
- name: Run tests
run: go test -v ./...

get_version:
name: Get version
runs-on: ubuntu-latest
outputs:
latest_tag: ${{ steps.get_version.outputs.latest_tag }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Get version from source
id: get_version
shell: bash
run: |
if [ ! -f "nix/pkgs/fabric/version.nix" ]; then
echo "Error: version.nix file not found"
exit 1
fi
version=$(cat nix/pkgs/fabric/version.nix | tr -d '"' | tr -cd '0-9.')
if [ -z "$version" ]; then
echo "Error: version is empty"
exit 1
fi
if ! echo "$version" | grep -E '^[0-9]+\.[0-9]+\.[0-9]+' > /dev/null; then
echo "Error: Invalid version format: $version"
exit 1
fi
echo "latest_tag=v$version" >> $GITHUB_OUTPUT

build:
name: Build binaries for Windows, macOS, and Linux
needs: [test, get_version]
runs-on: ${{ matrix.os }}
permissions:
contents: write
@@ -51,25 +82,14 @@ jobs:
with:
go-version-file: ./go.mod

- name: Determine OS Name
id: os-name
run: |
if [ "${{ matrix.os }}" == "ubuntu-latest" ]; then
echo "OS=linux" >> $GITHUB_ENV
elif [ "${{ matrix.os }}" == "macos-latest" ]; then
echo "OS=darwin" >> $GITHUB_ENV
else
echo "OS=windows" >> $GITHUB_ENV
fi
shell: bash

- name: Build binary on Linux and macOS
if: matrix.os != 'windows-latest'
env:
GOOS: ${{ env.OS }}
GOOS: ${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}
GOARCH: ${{ matrix.arch }}
run: |
go build -o fabric-${OS}-${{ matrix.arch }} ./cmd/fabric
OS_NAME="${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}"
go build -o fabric-${OS_NAME}-${{ matrix.arch }} ./cmd/fabric

- name: Build binary on Windows
if: matrix.os == 'windows-latest'
@@ -83,8 +103,8 @@ jobs:
if: matrix.os != 'windows-latest'
uses: actions/upload-artifact@v4
with:
name: fabric-${OS}-${{ matrix.arch }}
path: fabric-${OS}-${{ matrix.arch }}
name: fabric-${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}-${{ matrix.arch }}
path: fabric-${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}-${{ matrix.arch }}

- name: Upload build artifact
if: matrix.os == 'windows-latest'
@@ -93,29 +113,15 @@ jobs:
name: fabric-windows-${{ matrix.arch }}.exe
path: fabric-windows-${{ matrix.arch }}.exe

- name: Get latest tag
if: matrix.os != 'windows-latest'
id: get_latest_tag
run: |
latest_tag=$(git tag --sort=-creatordate | head -n 1)
echo "latest_tag=$latest_tag" >> $GITHUB_ENV

- name: Get latest tag
if: matrix.os == 'windows-latest'
id: get_latest_tag_windows
run: |
$latest_tag = git tag --sort=-creatordate | Select-Object -First 1
Add-Content -Path $env:GITHUB_ENV -Value "latest_tag=$latest_tag"

- name: Create release if it doesn't exist
shell: bash
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
if ! gh release view ${{ env.latest_tag }} >/dev/null 2>&1; then
gh release create ${{ env.latest_tag }} --title "Release ${{ env.latest_tag }}" --notes "Automated release for ${{ env.latest_tag }}"
if ! gh release view ${{ needs.get_version.outputs.latest_tag }} >/dev/null 2>&1; then
gh release create ${{ needs.get_version.outputs.latest_tag }} --title "Release ${{ needs.get_version.outputs.latest_tag }}" --notes "Automated release for ${{ needs.get_version.outputs.latest_tag }}"
else
echo "Release ${{ env.latest_tag }} already exists."
echo "Release ${{ needs.get_version.outputs.latest_tag }} already exists."
fi

- name: Upload release artifact
@@ -123,11 +129,35 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh release upload ${{ env.latest_tag }} fabric-windows-${{ matrix.arch }}.exe
gh release upload ${{ needs.get_version.outputs.latest_tag }} fabric-windows-${{ matrix.arch }}.exe

- name: Upload release artifact
if: matrix.os != 'windows-latest'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh release upload ${{ env.latest_tag }} fabric-${OS}-${{ matrix.arch }}
OS_NAME="${{ matrix.os == 'ubuntu-latest' && 'linux' || 'darwin' }}"
gh release upload ${{ needs.get_version.outputs.latest_tag }} fabric-${OS_NAME}-${{ matrix.arch }}

update_release_notes:
needs: [build, get_version]
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Set up Go
uses: actions/setup-go@v4
with:
go-version-file: ./go.mod

- name: Update release description
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
go run ./cmd/generate_changelog --sync-db
go run ./cmd/generate_changelog --release ${{ needs.get_version.outputs.latest_tag }}
@@ -7,6 +7,11 @@ on:
paths-ignore:
- "data/patterns/**"
- "**/*.md"
- "data/strategies/**"
- "cmd/generate_changelog/*.db"
- "cmd/generate_changelog/incoming/*.txt"
- "scripts/pattern_descriptions/*.json"
- "web/static/data/pattern_descriptions.json"

permissions:
contents: write # Ensure the workflow has write permissions
@@ -79,14 +84,24 @@ jobs:
run: |
nix run .#gomod2nix -- --outdir nix/pkgs/fabric

- name: Generate Changelog Entry
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
go run ./cmd/generate_changelog --process-prs ${{ env.new_tag }}
go run ./cmd/generate_changelog --sync-db
- name: Commit changes
run: |
# These files are modified by the version bump process
git add cmd/fabric/version.go
git add nix/pkgs/fabric/version.nix
git add nix/pkgs/fabric/gomod2nix.toml
git add .

# The changelog tool is responsible for staging CHANGELOG.md, changelog.db,
# and removing the incoming/ directory.

if ! git diff --staged --quiet; then
git commit -m "Update version to ${{ env.new_tag }} and commit $commit_hash"
git commit -m "chore(release): Update version to ${{ env.new_tag }}"
else
echo "No changes to commit."
fi
10  .gitignore  vendored

@@ -131,9 +131,7 @@ celerybeat.pid
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

@@ -349,5 +347,9 @@ web/package-lock.json
.gitignore_backup
web/static/*.png

# Local VSCode project settings
.vscode/
# Local tmp directory
.tmp/
tmp/

# Ignore .claude/
.claude/
3  .vscode/extensions.json  vendored  Normal file

@@ -0,0 +1,3 @@
{
  "recommendations": ["davidanson.vscode-markdownlint"]
}
177  .vscode/settings.json  vendored  Normal file

@@ -0,0 +1,177 @@
{
  "cSpell.words": [
    "Achird", "addextension", "adduser", "AIML", "anthropics", "Aoede",
    "atotto", "Autonoe", "badfile", "Behrens", "blindspots", "Bombal",
    "Callirhoe", "Callirrhoe", "Cerebras", "compadd", "compdef", "compinit",
    "creatordate", "curcontext", "custompatterns", "danielmiessler", "davidanson", "Debugf",
    "dedup", "deepseek", "Despina", "direnv", "dryrun", "dsrp",
    "editability", "Eisler", "elif", "envrc", "Erinome", "Errorf",
    "eugeis", "Eugen", "excalidraw", "exolab", "fabriclogo", "flac",
    "fpath", "frequencypenalty", "fsdb", "gantt", "genai", "githelper",
    "gjson", "GOARCH", "godotenv", "gofmt", "goimports", "gomod",
    "gonic", "goopenai", "GOPATH", "gopkg", "GOROOT", "Graphviz",
    "grokai", "Groq", "hackerone", "Haddix", "hasura", "hormozi",
    "Hormozi's", "horts", "HTMLURL", "jaredmontoya", "jessevdk", "Jina",
    "joho", "Keploy", "Kore", "ksylvan", "Langdock", "Laomedeia",
    "ldflags", "libexec", "listcontexts", "listextensions", "listmodels", "listpatterns",
    "listsessions", "liststrategies", "listvendors", "lmstudio", "Makefiles", "markmap",
    "matplotlib", "mattn", "mbed", "Miessler", "nometa", "numpy",
    "ollama", "ollamaapi", "openaiapi", "opencode", "openrouter", "Orus",
    "otiai", "pdflatex", "pipx", "PKCE", "pkgs", "presencepenalty",
    "printcontext", "printsession", "Pulcherrima", "pycache", "pyperclip", "readystream",
    "restapi", "rmextension", "Sadachbia", "Sadaltager", "samber", "sashabaranov",
    "sdist", "seaborn", "semgrep", "sess", "storer", "Streamlit",
    "stretchr", "subchunk", "Sulafat", "talkpanel", "Telos", "testpattern",
    "testuser", "Thacker", "tidwall", "topp", "ttrc", "unalias",
    "unconfigured", "unmarshalling", "updatepatterns", "videoid", "webp", "WEBVTT",
    "wipecontext", "wipesession", "Worktree", "writeups", "xclip", "yourpatternname",
    "youtu"
  ],
  "cSpell.ignorePaths": ["go.mod", ".gitignore", "CHANGELOG.md"],
  "markdownlint.config": {
    "MD004": false,
    "MD011": false,
    "MD024": false,
    "MD025": false,
    "M032": false,
    "MD033": {
      "allowed_elements": ["a", "br", "code", "div", "em", "h4", "img", "module", "p"]
    },
    "MD041": false
  }
}
2554  CHANGELOG.md  Normal file
File diff suppressed because it is too large.
45  README.md

@@ -3,7 +3,7 @@ Fabric is graciously supported by…

[](https://warp.dev/fabric)

<img src="./images/fabric-logo-gif.gif" alt="fabriclogo" width="400" height="400"/>
<img src="./docs/images/fabric-logo-gif.gif" alt="fabriclogo" width="400" height="400"/>

# `fabric`

@@ -29,7 +29,7 @@ Fabric is graciously supported by…
[Helper Apps](#helper-apps) •
[Meta](#meta)




</div>

@@ -113,30 +113,9 @@ Keep in mind that many of these were recorded when Fabric was Python-based, so r

## Updates

> [!NOTE]
>
> July 4, 2025
>
> - **Web Search**: Fabric now supports web search for Anthropic and OpenAI models using the `--search` and `--search-location` flags. This replaces the previous plugin-based search, so you may want to remove the old `ANTHROPIC_WEB_SEARCH_TOOL_*` variables from your `~/.config/fabric/.env` file.
> - **Image Generation**: Fabric now has powerful image generation capabilities with OpenAI.
>   - Generate images from text prompts and save them using `--image-file`.
>   - Edit existing images by providing an input image with `--attachment`.
>   - Control image `size`, `quality`, `compression`, and `background` with the new `--image-*` flags.
>
> June 17, 2025
>
> - Fabric now supports Perplexity AI. Configure it by using `fabric -S` to add your Perplexity AI API Key,
>   and then try:
>
> ```bash
> fabric -m sonar-pro "What is the latest world news?"
> ```
>
> June 11, 2025
>
> - Fabric's YouTube transcription now needs `yt-dlp` to be installed. Make sure to install the latest
>   version (2025.06.09 as of this note). The YouTube API key is only needed for comments (the `--comments` flag)
>   and metadata extraction (the `--metadata` flag).
Fabric is evolving rapidly.

Stay current with the latest features by reviewing the [CHANGELOG](./CHANGELOG.md) for all recent changes.

## Philosophy

@@ -565,10 +544,16 @@ Application Options:
--image-compression=      Compression level 0-100 for JPEG/WebP formats (default: not set)
--image-background=       Background type: opaque, transparent (default: opaque, only for
                          PNG/WebP)
--suppress-think          Suppress text enclosed in thinking tags
--think-start-tag=        Start tag for thinking sections (default: <think>)
--think-end-tag=          End tag for thinking sections (default: </think>)
--disable-responses-api   Disable OpenAI Responses API (default: false)
--voice=                  TTS voice name for supported models (e.g., Kore, Charon, Puck)
                          (default: Kore)
--list-gemini-voices      List all available Gemini TTS voices

Help Options:
-h, --help                Show this help message

```

## Our approach to prompting

@@ -628,7 +613,7 @@ Now let's look at some things you can do with Fabric.
<br />
<br />

If you're not looking to do anything fancy, and you just want a lot of great prompts, you can navigate to the [`/patterns`](https://github.com/danielmiessler/fabric/tree/main/patterns) directory and start exploring!
If you're not looking to do anything fancy, and you just want a lot of great prompts, you can navigate to the [`/patterns`](https://github.com/danielmiessler/fabric/tree/main/data/patterns) directory and start exploring!

We hope that if you used nothing else from Fabric, the Patterns by themselves will make the project useful.

@@ -644,7 +629,7 @@ be used in addition to the basic patterns.
See the [Thinking Faster by Writing Less](https://arxiv.org/pdf/2502.18600) paper and
the [Thought Generation section of Learn Prompting](https://learnprompting.org/docs/advanced/thought_generation/introduction) for examples of prompt strategies.

Each strategy is available as a small `json` file in the [`/strategies`](https://github.com/danielmiessler/fabric/tree/main/strategies) directory.
Each strategy is available as a small `json` file in the [`/strategies`](https://github.com/danielmiessler/fabric/tree/main/data/strategies) directory.

The prompt modification of the strategy is applied to the system prompt and passed on to the
LLM in the chat session.
@@ -736,7 +721,7 @@ Make sure you have a LaTeX distribution (like TeX Live or MiKTeX) installed on y
It generates a `json` representation of a directory of code that can be fed into an AI model
with instructions to create a new feature or edit the code in a specified way.

See [the Create Coding Feature Pattern README](./patterns/create_coding_feature/README.md) for details.
See [the Create Coding Feature Pattern README](./data/patterns/create_coding_feature/README.md) for details.

Install it first using:


@@ -1,3 +1,3 @@
package main

var version = "v1.4.243"
var version = "v1.4.276"
151  cmd/generate_changelog/PRD.md  Normal file

@@ -0,0 +1,151 @@
# Product Requirements Document: Changelog Generator

## Overview

The Changelog Generator is a high-performance Go tool that automatically generates comprehensive changelogs from git history and GitHub pull requests.

## Goals

1. **Performance**: Fast and efficient enough to run in CI/CD as part of the release process
2. **Completeness**: Capture ALL commits, including unreleased changes
3. **Efficiency**: Minimize API calls through caching and batch operations
4. **Reliability**: Handle errors gracefully with proper Go error handling
5. **Simplicity**: Single binary with no runtime dependencies

## Key Features

### 1. One-Pass Git History Algorithm

- Walk git history once from newest to oldest
- Start with an "Unreleased" bucket for all new commits
- Switch buckets when encountering version commits
- No need to calculate ranges between versions (see the sketch after this list)

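To make the bucket switching concrete, here is a minimal sketch of the idea. It assumes a toy `Commit` type and a version-bump message of the form "Update version to vX.Y.Z"; it illustrates the algorithm described above and is not the tool's actual implementation.

```go
package main

import (
	"fmt"
	"regexp"
)

// Commit is a simplified stand-in for the commits produced by the git walker.
type Commit struct {
	SHA     string
	Message string
}

// versionBump is an assumed pattern for version-bump commits.
var versionBump = regexp.MustCompile(`Update version to (v\d+\.\d+\.\d+)`)

// bucketByVersion walks commits ordered newest to oldest and assigns each
// commit to whichever version bucket is "current" at that point in history,
// starting with "Unreleased" and switching at every version-bump commit.
func bucketByVersion(commits []Commit) map[string][]Commit {
	buckets := make(map[string][]Commit)
	current := "Unreleased"
	for _, c := range commits {
		if m := versionBump.FindStringSubmatch(c.Message); m != nil {
			current = m[1] // switch buckets; no version ranges are computed
		}
		buckets[current] = append(buckets[current], c)
	}
	return buckets
}

func main() {
	history := []Commit{ // newest first
		{SHA: "aaa", Message: "feat: add flag"},
		{SHA: "bbb", Message: "Update version to v1.4.276"},
		{SHA: "ccc", Message: "fix: handle empty input"},
	}
	fmt.Println(bucketByVersion(history)) // "aaa" lands in Unreleased, the rest in v1.4.276
}
```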
### 2. Native Library Integration

- **go-git**: Pure Go git implementation (no git binary required)
- **go-github**: Official GitHub Go client library
- Benefits: Type safety, better error handling, no subprocess overhead

### 3. Smart Caching System

- SQLite-based persistent cache
- Stores: versions, commits, PR details, last processed commit
- Enables incremental updates on subsequent runs
- Instant changelog regeneration from cache

### 4. Concurrent Processing

- Parallel GitHub API calls (up to 10 concurrent; a bounded-concurrency sketch follows this list)
- Batch PR fetching with deduplication
- Rate limiting awareness

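A common way to bound parallel API calls in Go is a semaphore channel, sketched below. `fetchPR` and the `PR` type are placeholders standing in for the real GitHub client, so this shows the concurrency pattern only, not the tool's actual code.

```go
package main

import "sync"

// PR is a placeholder for the pull-request data returned by the GitHub client.
type PR struct{ Number int }

// fetchPR stands in for a network call to the GitHub API.
func fetchPR(n int) PR { return PR{Number: n} }

// fetchAll fetches all PR numbers with at most `limit` requests in flight.
func fetchAll(numbers []int, limit int) []PR {
	sem := make(chan struct{}, limit) // semaphore: limit concurrent goroutines
	results := make([]PR, len(numbers))
	var wg sync.WaitGroup
	for i, n := range numbers {
		wg.Add(1)
		sem <- struct{}{} // acquire a slot before starting work
		go func(i, n int) {
			defer wg.Done()
			defer func() { <-sem }() // release the slot when done
			results[i] = fetchPR(n)
		}(i, n)
	}
	wg.Wait()
	return results
}
```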
### 5. Enhanced Output

- "Unreleased" section for commits since last version
- Clean markdown formatting
- Configurable version limiting
- Direct commit tracking (non-PR commits)

## Technical Architecture

### Module Structure

```text
cmd/generate_changelog/
├── main.go              # CLI entry point with cobra
├── internal/
│   ├── git/             # Git operations (go-git)
│   ├── github/          # GitHub API client (go-github)
│   ├── cache/           # SQLite caching layer
│   ├── changelog/       # Core generation logic
│   └── config/          # Configuration management
└── changelog.db         # SQLite cache (generated)
```

### Data Flow

1. Git walker collects all commits in one pass
2. Commits bucketed by version (starting with "Unreleased")
3. PR numbers extracted from merge commits
4. GitHub API batch-fetches PR details
5. Cache stores everything for future runs
6. Formatter generates markdown output

### Cache Schema

- **metadata**: Last processed commit SHA
- **versions**: Version names, dates, commit SHAs
- **commits**: Full commit details with version associations
- **pull_requests**: PR details including commits
- Indexes on version and PR number for fast lookups

### Features

- **Unreleased section**: Shows all new commits
- **Better caching**: SQLite vs JSON, incremental updates
- **Smarter deduplication**: Removes consecutive duplicate commits
- **Direct commit tracking**: Shows non-PR commits

### Reliability

- **No subprocess errors**: Direct library usage
- **Type safety**: Compile-time checking
- **Better error handling**: Go's explicit error returns

### Deployment

- **Single binary**: No Python/pip/dependencies
- **Cross-platform**: Compile for any OS/architecture
- **No git CLI required**: Uses go-git library

## Configuration

### Environment Variables

- `GITHUB_TOKEN`: GitHub API authentication token

### Command Line Flags

- `--repo, -r`: Repository path (default: current directory)
- `--output, -o`: Output file (default: stdout)
- `--limit, -l`: Version limit (default: all)
- `--version, -v`: Target specific version
- `--save-data`: Export debug JSON
- `--cache`: Cache file location
- `--no-cache`: Disable caching
- `--rebuild-cache`: Force cache rebuild
- `--token`: GitHub token override

## Success Metrics

1. **Performance**: Generate full changelog in <5 seconds for fabric repo
2. **Completeness**: 100% commit coverage including unreleased
3. **Accuracy**: Correct PR associations and change extraction
4. **Reliability**: Handle network failures gracefully
5. **Usability**: Simple CLI with sensible defaults

## Future Enhancements

1. **Multiple output formats**: JSON, HTML, etc.
2. **Custom version patterns**: Configurable regex
3. **Change categorization**: feat/fix/docs auto-grouping
4. **Conventional commits**: Full support for semantic versioning
5. **GitLab/Bitbucket**: Support other platforms
6. **Web UI**: Interactive changelog browser
7. **Incremental updates**: Update existing CHANGELOG.md file
8. **Breaking change detection**: Highlight breaking changes

## Implementation Status

- ✅ Core architecture and modules
- ✅ One-pass git walking algorithm
- ✅ GitHub API integration with concurrency
- ✅ SQLite caching system
- ✅ Changelog formatting and generation
- ✅ CLI with all planned flags
- ✅ Documentation (README and PRD)

## Conclusion

This Go implementation provides a modern, efficient, and feature-rich changelog generator.
264  cmd/generate_changelog/README.md  Normal file

@@ -0,0 +1,264 @@
# Changelog Generator

A high-performance changelog generator for Git repositories that automatically creates comprehensive, well-formatted changelogs from your git history and GitHub pull requests.

## Features

- **One-pass git history walking**: Efficiently processes the entire repository history in a single pass
- **Automatic PR detection**: Extracts pull request information from merge commits
- **GitHub API integration**: Fetches detailed PR information including commits, authors, and descriptions
- **Smart caching**: SQLite-based caching for instant incremental updates
- **Unreleased changes**: Tracks all commits since the last release
- **Concurrent processing**: Parallel GitHub API calls for improved performance
- **Flexible output**: Generate complete changelogs or target specific versions
- **GraphQL optimization**: Ultra-fast PR fetching using the GitHub GraphQL API (~5-10 calls vs 1000s)
- **Intelligent sync**: Automatically syncs new PRs every 24 hours or when missing PRs are detected
- **AI-powered summaries**: Optional Fabric integration for enhanced changelog summaries
- **Advanced caching**: Content-based change detection for AI summaries with hash comparison
- **Author type detection**: Distinguishes between users, bots, and organizations
- **Lightning-fast incremental updates**: SHA→PR mapping for instant git operations

## Installation

```bash
go install github.com/danielmiessler/fabric/cmd/generate_changelog@latest
```

## Usage

### Basic usage (generate complete changelog)

```bash
generate_changelog
```

### Save to file

```bash
generate_changelog -o CHANGELOG.md
```

### Generate for specific version

```bash
generate_changelog -v v1.4.244
```

### Limit to recent versions

```bash
generate_changelog -l 10
```

### Using GitHub token for private repos or higher rate limits

```bash
export GITHUB_TOKEN=your_token_here
generate_changelog

# Or pass directly
generate_changelog --token your_token_here
```

### AI-enhanced summaries

```bash
# Enable AI summaries using Fabric
generate_changelog --ai-summarize

# Use custom model for AI summaries
FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-opus-4 generate_changelog --ai-summarize
```

### Cache management

```bash
# Rebuild cache from scratch
generate_changelog --rebuild-cache

# Force a full PR sync from GitHub
generate_changelog --force-pr-sync

# Disable cache usage
generate_changelog --no-cache

# Use custom cache location
generate_changelog --cache /path/to/cache.db
```

## Command Line Options

| Flag | Short | Description | Default |
|------|-------|-------------|---------|
| `--repo` | `-r` | Repository path | `.` (current directory) |
| `--output` | `-o` | Output file | stdout |
| `--limit` | `-l` | Limit number of versions | 0 (all) |
| `--version` | `-v` | Generate for specific version | |
| `--save-data` | | Save version data to JSON | false |
| `--cache` | | Cache database file | `./cmd/generate_changelog/changelog.db` |
| `--no-cache` | | Disable cache usage | false |
| `--rebuild-cache` | | Rebuild cache from scratch | false |
| `--force-pr-sync` | | Force a full PR sync from GitHub | false |
| `--token` | | GitHub API token | `$GITHUB_TOKEN` |
| `--ai-summarize` | | Generate AI-enhanced summaries using Fabric | false |
| `--release` | | Update GitHub release description with AI summary for version | |

## Output Format

The generated changelog follows this structure:

```markdown
# Changelog

## Unreleased

### PR [#1601](url) by [author](profile): PR Title
- Change description 1
- Change description 2

### Direct commits
- Direct commit message 1
- Direct commit message 2

## v1.4.244 (2025-07-09)

### PR [#1598](url) by [author](profile): PR Title
- Change description
...
```

## How It Works

1. **Git History Walking**: The tool walks through your git history from newest to oldest commits
2. **Version Detection**: Identifies version bump commits (pattern: "Update version to vX.Y.Z")
3. **PR Extraction**: Detects merge commits and extracts PR numbers (see the sketch after this list)
4. **GitHub API Calls**: Fetches detailed PR information in parallel batches
5. **Change Extraction**: Extracts changes from PR commit messages or PR body
6. **Formatting**: Generates clean, organized markdown output

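Steps 2 and 3 boil down to matching commit messages. The regular expressions below are illustrative of that matching; the tool's exact patterns may differ.

```go
package main

import (
	"fmt"
	"regexp"
)

var (
	// Assumed shape of a version-bump commit message (step 2).
	versionCommit = regexp.MustCompile(`^Update version to (v\d+\.\d+\.\d+)`)
	// Assumed shape of a GitHub merge commit carrying a PR number (step 3).
	mergeCommit = regexp.MustCompile(`^Merge pull request #(\d+)`)
)

func main() {
	fmt.Println(versionCommit.FindStringSubmatch("Update version to v1.4.244"))
	fmt.Println(mergeCommit.FindStringSubmatch("Merge pull request #1598 from user/branch"))
}
```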
## Performance

- **Native Go libraries**: Uses go-git and go-github for maximum performance
- **Concurrent API calls**: Processes up to 10 GitHub API requests in parallel
- **Smart caching**: SQLite cache eliminates redundant API calls
- **Incremental updates**: Only processes new commits on subsequent runs
- **GraphQL optimization**: Uses the GitHub GraphQL API to fetch all PR data in ~5-10 calls
- **AI-powered summaries**: Optional Fabric integration with intelligent caching
- **Content-based change detection**: AI summaries only regenerated when content changes
- **Lightning-fast git operations**: SHA→PR mapping stored in the database for instant lookups

### Major Optimization: GraphQL + Advanced Caching

The tool has been optimized to drastically reduce GitHub API calls and improve performance:

**Previous approach**: Individual API calls for each PR (2 API calls per PR)

- For a repo with 500 PRs: 1,000 API calls

**Current approach**: GraphQL batch fetching with intelligent caching

- For a repo with 500 PRs: ~5-10 GraphQL calls (initial fetch) + 0 calls (subsequent runs with cache)
- **99%+ reduction in API calls after the initial run!**

The optimization includes:

1. **GraphQL Batch Fetch**: Uses GitHub's GraphQL API to fetch all merged PRs with commits in minimal calls
2. **Smart Caching**: Stores complete PR data, commits, and SHA mappings in SQLite
3. **Incremental Sync**: Only fetches PRs merged after the last sync timestamp
4. **Automatic Refresh**: PRs are synced every 24 hours or when missing PRs are detected
5. **AI Summary Caching**: Content-based change detection prevents unnecessary AI regeneration
6. **Fallback Support**: If GraphQL fails, falls back to REST API batch fetching
7. **Lightning Git Operations**: Pre-computed SHA→PR mappings for instant commit association

## Requirements

- Go 1.24+ (for installation from source)
- Git repository
- GitHub token (optional, for private repos or higher rate limits)
- Fabric CLI (optional, for AI-enhanced summaries)

## Authentication

The tool supports GitHub authentication via:

1. Environment variable: `export GITHUB_TOKEN=your_token`
2. Command line flag: `--token your_token`
3. `.env` file in the same directory as the binary

### Environment File Support

Create a `.env` file next to the `generate_changelog` binary:

```bash
GITHUB_TOKEN=your_github_token_here
FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-sonnet-4-20250514
```

The tool automatically loads `.env` files for convenient configuration management, as sketched below.

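The following sketch shows one way to load a `.env` that sits next to the binary, using `github.com/joho/godotenv` (an assumed dependency). The paths and error handling here are illustrative, not the tool's exact logic.

```go
package main

import (
	"os"
	"path/filepath"

	"github.com/joho/godotenv"
)

// loadEnvBesideBinary loads KEY=value pairs from a .env file located in the
// same directory as the running executable, if one exists.
func loadEnvBesideBinary() {
	exe, err := os.Executable()
	if err != nil {
		return
	}
	// A missing .env is not an error: environment variables are used as-is.
	_ = godotenv.Load(filepath.Join(filepath.Dir(exe), ".env"))
}

func main() {
	loadEnvBesideBinary()
	_ = os.Getenv("GITHUB_TOKEN") // now set if it was present in the .env file
}
```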
Without authentication, the tool is limited to 60 GitHub API requests per hour.

## Caching

The SQLite cache stores:

- Version information and commit associations
- Pull request details (title, body, commits, authors)
- Last processed commit SHA for incremental updates
- Last PR sync timestamp for intelligent refresh
- AI summaries with content-based change detection
- SHA→PR mappings for lightning-fast git operations

Cache benefits:

- Instant changelog regeneration
- Drastically reduced GitHub API usage (99%+ reduction after the initial run)
- Offline changelog generation (after the initial cache build)
- Automatic PR data refresh every 24 hours
- Batch database transactions for better performance
- Content-aware AI summary regeneration

## AI-Enhanced Summaries

The tool can generate AI-powered summaries using Fabric for more polished, professional changelogs:

```bash
# Enable AI summarization
generate_changelog --ai-summarize

# Custom model (default: claude-sonnet-4-20250514)
FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-opus-4 generate_changelog --ai-summarize
```

### AI Summary Features

- **Content-based change detection**: AI summaries are only regenerated when version content changes (see the sketch after this list)
- **Intelligent caching**: Preserves existing summaries and only processes changed versions
- **Content hash comparison**: Uses SHA256 hashing to detect when "Unreleased" content changes
- **Automatic fallback**: Falls back to raw content if AI processing fails
- **Error detection**: Identifies and handles AI processing errors gracefully
- **Minimum content filtering**: Skips AI processing for very brief content (< 256 characters)

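Content-based change detection can be pictured as hashing the rendered section and comparing it with the hash stored from the previous run. This is a minimal sketch of that idea, not the tool's exact code.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// contentHash returns a stable fingerprint of a changelog section's text.
func contentHash(section string) string {
	sum := sha256.Sum256([]byte(section))
	return hex.EncodeToString(sum[:])
}

// needsResummarize reports whether the section changed since the cached hash
// was recorded, meaning an AI summary should be regenerated.
func needsResummarize(section, cachedHash string) bool {
	return contentHash(section) != cachedHash
}

func main() {
	fmt.Println(needsResummarize("## Unreleased\n- fix: handle empty input", "")) // true on first run
}
```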
### AI Model Configuration

Set the model via environment variable:

```bash
export FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-opus-4
# or
export FABRIC_CHANGELOG_SUMMARIZE_MODEL=gpt-4
```

AI summaries are cached and only regenerated when:

- Version content changes (detected via hash comparison)
- No existing AI summary exists for the version
- Force rebuild is requested

## Contributing

This tool is part of the Fabric project. Contributions are welcome!

## License

The MIT License. Same as the Fabric project.
BIN  cmd/generate_changelog/changelog.db  Normal file
Binary file not shown.
476  cmd/generate_changelog/internal/cache/cache.go  vendored  Normal file
@@ -0,0 +1,476 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
)
|
||||
|
||||
type Cache struct {
|
||||
db *sql.DB
|
||||
}
|
||||
|
||||
func New(dbPath string) (*Cache, error) {
|
||||
db, err := sql.Open("sqlite3", dbPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open database: %w", err)
|
||||
}
|
||||
|
||||
cache := &Cache{db: db}
|
||||
if err := cache.createTables(); err != nil {
|
||||
return nil, fmt.Errorf("failed to create tables: %w", err)
|
||||
}
|
||||
|
||||
return cache, nil
|
||||
}
|
||||
|
||||
func (c *Cache) Close() error {
|
||||
return c.db.Close()
|
||||
}
|
||||
|
||||
func (c *Cache) createTables() error {
|
||||
queries := []string{
|
||||
`CREATE TABLE IF NOT EXISTS metadata (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS versions (
|
||||
name TEXT PRIMARY KEY,
|
||||
date DATETIME,
|
||||
commit_sha TEXT,
|
||||
pr_numbers TEXT,
|
||||
ai_summary TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS commits (
|
||||
sha TEXT PRIMARY KEY,
|
||||
version TEXT NOT NULL,
|
||||
message TEXT,
|
||||
author TEXT,
|
||||
email TEXT,
|
||||
date DATETIME,
|
||||
is_merge BOOLEAN,
|
||||
pr_number INTEGER,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (version) REFERENCES versions(name)
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS pull_requests (
|
||||
number INTEGER PRIMARY KEY,
|
||||
title TEXT,
|
||||
body TEXT,
|
||||
author TEXT,
|
||||
author_url TEXT,
|
||||
author_type TEXT DEFAULT 'user',
|
||||
url TEXT,
|
||||
merged_at DATETIME,
|
||||
merge_commit TEXT,
|
||||
commits TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_commits_version ON commits(version)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_commits_pr_number ON commits(pr_number)`,
|
||||
`CREATE TABLE IF NOT EXISTS commit_pr_mapping (
|
||||
commit_sha TEXT PRIMARY KEY,
|
||||
pr_number INTEGER NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (pr_number) REFERENCES pull_requests(number)
|
||||
)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_commit_pr_mapping_sha ON commit_pr_mapping(commit_sha)`,
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
if _, err := c.db.Exec(query); err != nil {
|
||||
return fmt.Errorf("failed to execute query: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Cache) GetLastProcessedTag() (string, error) {
|
||||
var tag string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = 'last_processed_tag'").Scan(&tag)
|
||||
if err == sql.ErrNoRows {
|
||||
return "", nil
|
||||
}
|
||||
return tag, err
|
||||
}
|
||||
|
||||
func (c *Cache) SetLastProcessedTag(tag string) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES ('last_processed_tag', ?, CURRENT_TIMESTAMP)
|
||||
`, tag)
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) SaveVersion(v *git.Version) error {
|
||||
prNumbers, _ := json.Marshal(v.PRNumbers)
|
||||
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO versions (name, date, commit_sha, pr_numbers, ai_summary)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
`, v.Name, v.Date, v.CommitSHA, string(prNumbers), v.AISummary)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// UpdateVersionAISummary updates only the AI summary for a specific version
|
||||
func (c *Cache) UpdateVersionAISummary(versionName, aiSummary string) error {
|
||||
_, err := c.db.Exec(`
|
||||
UPDATE versions SET ai_summary = ? WHERE name = ?
|
||||
`, aiSummary, versionName)
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) SaveCommit(commit *git.Commit, version string) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO commits
|
||||
(sha, version, message, author, email, date, is_merge, pr_number)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`, commit.SHA, version, commit.Message, commit.Author, commit.Email,
|
||||
commit.Date, commit.IsMerge, commit.PRNumber)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) SavePR(pr *github.PR) error {
|
||||
commits, _ := json.Marshal(pr.Commits)
|
||||
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO pull_requests
|
||||
(number, title, body, author, author_url, author_type, url, merged_at, merge_commit, commits)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`, pr.Number, pr.Title, pr.Body, pr.Author, pr.AuthorURL, pr.AuthorType,
|
||||
pr.URL, pr.MergedAt, pr.MergeCommit, string(commits))
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) GetPR(number int) (*github.PR, error) {
|
||||
var pr github.PR
|
||||
var commitsJSON string
|
||||
|
||||
err := c.db.QueryRow(`
|
||||
SELECT number, title, body, author, author_url, COALESCE(author_type, 'user'), url, merged_at, merge_commit, commits
|
||||
FROM pull_requests WHERE number = ?
|
||||
`, number).Scan(
|
||||
&pr.Number, &pr.Title, &pr.Body, &pr.Author, &pr.AuthorURL, &pr.AuthorType,
|
||||
&pr.URL, &pr.MergedAt, &pr.MergeCommit, &commitsJSON,
|
||||
)
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := json.Unmarshal([]byte(commitsJSON), &pr.Commits); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal commits: %w", err)
|
||||
}
|
||||
|
||||
return &pr, nil
|
||||
}
|
||||
|
||||
func (c *Cache) GetVersions() (map[string]*git.Version, error) {
|
||||
rows, err := c.db.Query(`
|
||||
SELECT name, date, commit_sha, pr_numbers, ai_summary FROM versions
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
versions := make(map[string]*git.Version)
|
||||
|
||||
for rows.Next() {
|
||||
var v git.Version
|
||||
var dateStr sql.NullString
|
||||
var prNumbersJSON string
|
||||
var aiSummary sql.NullString
|
||||
|
||||
if err := rows.Scan(&v.Name, &dateStr, &v.CommitSHA, &prNumbersJSON, &aiSummary); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if dateStr.Valid {
|
||||
// Try RFC3339Nano first (for nanosecond precision), then fall back to RFC3339
|
||||
v.Date, err = time.Parse(time.RFC3339Nano, dateStr.String)
|
||||
if err != nil {
|
||||
v.Date, err = time.Parse(time.RFC3339, dateStr.String)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error parsing date '%s' for version '%s': %v. Expected format: RFC3339 or RFC3339Nano.\n", dateStr.String, v.Name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if prNumbersJSON != "" {
|
||||
json.Unmarshal([]byte(prNumbersJSON), &v.PRNumbers)
|
||||
}
|
||||
|
||||
if aiSummary.Valid {
|
||||
v.AISummary = aiSummary.String
|
||||
}
|
||||
|
||||
v.Commits, err = c.getCommitsForVersion(v.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
versions[v.Name] = &v
|
||||
}
|
||||
|
||||
return versions, rows.Err()
|
||||
}
|
||||
|
||||
func (c *Cache) getCommitsForVersion(version string) ([]*git.Commit, error) {
|
||||
rows, err := c.db.Query(`
|
||||
SELECT sha, message, author, email, date, is_merge, pr_number
|
||||
FROM commits WHERE version = ?
|
||||
ORDER BY date DESC
|
||||
`, version)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var commits []*git.Commit
|
||||
|
||||
for rows.Next() {
|
||||
var commit git.Commit
|
||||
if err := rows.Scan(
|
||||
&commit.SHA, &commit.Message, &commit.Author, &commit.Email,
|
||||
&commit.Date, &commit.IsMerge, &commit.PRNumber,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
commits = append(commits, &commit)
|
||||
}
|
||||
|
||||
return commits, rows.Err()
|
||||
}
|
||||
|
||||
func (c *Cache) Clear() error {
|
||||
tables := []string{"metadata", "versions", "commits", "pull_requests"}
|
||||
for _, table := range tables {
|
||||
if _, err := c.db.Exec("DELETE FROM " + table); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// VersionExists checks if a version already exists in the cache
|
||||
func (c *Cache) VersionExists(version string) (bool, error) {
|
||||
var count int
|
||||
err := c.db.QueryRow("SELECT COUNT(*) FROM versions WHERE name = ?", version).Scan(&count)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return count > 0, nil
|
||||
}
|
||||
|
||||
// CommitExists checks if a commit already exists in the cache
|
||||
func (c *Cache) CommitExists(hash string) (bool, error) {
|
||||
var count int
|
||||
err := c.db.QueryRow("SELECT COUNT(*) FROM commits WHERE sha = ?", hash).Scan(&count)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
return count > 0, nil
|
||||
}
|
||||
|
||||
// GetLastPRSync returns the timestamp of the last PR sync
|
||||
func (c *Cache) GetLastPRSync() (time.Time, error) {
|
||||
var timestamp string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = 'last_pr_sync'").Scan(×tamp)
|
||||
if err == sql.ErrNoRows {
|
||||
return time.Time{}, nil
|
||||
}
|
||||
if err != nil {
|
||||
return time.Time{}, err
|
||||
}
|
||||
|
||||
return time.Parse(time.RFC3339, timestamp)
|
||||
}
|
||||
|
||||
// SetLastPRSync updates the timestamp of the last PR sync
|
||||
func (c *Cache) SetLastPRSync(timestamp time.Time) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES ('last_pr_sync', ?, CURRENT_TIMESTAMP)
|
||||
`, timestamp.Format(time.RFC3339))
|
||||
return err
|
||||
}
|
||||
|
||||
// SavePRBatch saves multiple PRs in a single transaction for better performance
|
||||
func (c *Cache) SavePRBatch(prs []*github.PR) error {
|
||||
tx, err := c.db.Begin()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to begin transaction: %w", err)
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
stmt, err := tx.Prepare(`
|
||||
INSERT OR REPLACE INTO pull_requests
|
||||
(number, title, body, author, author_url, author_type, url, merged_at, merge_commit, commits)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to prepare statement: %w", err)
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
for _, pr := range prs {
|
||||
commits, _ := json.Marshal(pr.Commits)
|
||||
_, err := stmt.Exec(
|
||||
pr.Number, pr.Title, pr.Body, pr.Author, pr.AuthorURL, pr.AuthorType,
|
||||
pr.URL, pr.MergedAt, pr.MergeCommit, string(commits),
|
||||
)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save PR #%d: %w", pr.Number, err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
// GetAllPRs returns all cached PRs
|
||||
func (c *Cache) GetAllPRs() (map[int]*github.PR, error) {
|
||||
rows, err := c.db.Query(`
|
||||
SELECT number, title, body, author, author_url, COALESCE(author_type, 'user'), url, merged_at, merge_commit, commits
|
||||
FROM pull_requests
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
prs := make(map[int]*github.PR)
|
||||
|
||||
for rows.Next() {
|
||||
var pr github.PR
|
||||
var commitsJSON string
|
||||
|
||||
if err := rows.Scan(
|
||||
&pr.Number, &pr.Title, &pr.Body, &pr.Author, &pr.AuthorURL, &pr.AuthorType,
|
||||
&pr.URL, &pr.MergedAt, &pr.MergeCommit, &commitsJSON,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := json.Unmarshal([]byte(commitsJSON), &pr.Commits); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal commits for PR #%d: %w", pr.Number, err)
|
||||
}
|
||||
|
||||
prs[pr.Number] = &pr
|
||||
}
|
||||
|
||||
return prs, rows.Err()
|
||||
}
|
||||
|
||||
// MarkPRAsNonExistent marks a PR number as non-existent to avoid future fetches
|
||||
func (c *Cache) MarkPRAsNonExistent(prNumber int) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES (?, 'non_existent', CURRENT_TIMESTAMP)
|
||||
`, fmt.Sprintf("pr_non_existent_%d", prNumber))
|
||||
return err
|
||||
}
|
||||
|
||||
// IsPRMarkedAsNonExistent checks if a PR is marked as non-existent
|
||||
func (c *Cache) IsPRMarkedAsNonExistent(prNumber int) bool {
|
||||
var value string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = ?",
|
||||
fmt.Sprintf("pr_non_existent_%d", prNumber)).Scan(&value)
|
||||
return err == nil && value == "non_existent"
|
||||
}
|
||||
|
||||
// SaveCommitPRMappings saves SHA→PR mappings for all commits in PRs
|
||||
func (c *Cache) SaveCommitPRMappings(prs []*github.PR) error {
|
||||
tx, err := c.db.Begin()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to begin transaction: %w", err)
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
stmt, err := tx.Prepare(`
|
||||
INSERT OR REPLACE INTO commit_pr_mapping (commit_sha, pr_number)
|
||||
VALUES (?, ?)
|
||||
`)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to prepare statement: %w", err)
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
for _, pr := range prs {
|
||||
for _, commit := range pr.Commits {
|
||||
_, err := stmt.Exec(commit.SHA, pr.Number)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save commit mapping %s→%d: %w", commit.SHA, pr.Number, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
// GetPRNumberBySHA returns the PR number for a given commit SHA
|
||||
func (c *Cache) GetPRNumberBySHA(sha string) (int, bool) {
|
||||
var prNumber int
|
||||
err := c.db.QueryRow("SELECT pr_number FROM commit_pr_mapping WHERE commit_sha = ?", sha).Scan(&prNumber)
|
||||
if err == sql.ErrNoRows {
|
||||
return 0, false
|
||||
}
|
||||
if err != nil {
|
||||
return 0, false
|
||||
}
|
||||
return prNumber, true
|
||||
}
|
||||
|
||||
// GetCommitSHAsForPR returns all commit SHAs for a given PR number
|
||||
func (c *Cache) GetCommitSHAsForPR(prNumber int) ([]string, error) {
|
||||
rows, err := c.db.Query("SELECT commit_sha FROM commit_pr_mapping WHERE pr_number = ?", prNumber)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var shas []string
|
||||
for rows.Next() {
|
||||
var sha string
|
||||
if err := rows.Scan(&sha); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
shas = append(shas, sha)
|
||||
}
|
||||
|
||||
return shas, rows.Err()
|
||||
}
|
||||
|
||||
// GetUnreleasedContentHash returns the cached content hash for Unreleased
|
||||
func (c *Cache) GetUnreleasedContentHash() (string, error) {
|
||||
var hash string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = 'unreleased_content_hash'").Scan(&hash)
|
||||
if err == sql.ErrNoRows {
|
||||
return "", fmt.Errorf("no content hash found")
|
||||
}
|
||||
return hash, err
|
||||
}
|
||||
|
||||
// SetUnreleasedContentHash stores the content hash for Unreleased
|
||||
func (c *Cache) SetUnreleasedContentHash(hash string) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES ('unreleased_content_hash', ?, CURRENT_TIMESTAMP)
|
||||
`, hash)
|
||||
return err
|
||||
}
|
||||
805  cmd/generate_changelog/internal/changelog/generator.go  Normal file
@@ -0,0 +1,805 @@
|
||||
package changelog
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/cache"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
|
||||
)
|
||||
|
||||
type Generator struct {
|
||||
cfg *config.Config
|
||||
gitWalker *git.Walker
|
||||
ghClient *github.Client
|
||||
cache *cache.Cache
|
||||
versions map[string]*git.Version
|
||||
prs map[int]*github.PR
|
||||
}
|
||||
|
||||
func New(cfg *config.Config) (*Generator, error) {
|
||||
gitWalker, err := git.NewWalker(cfg.RepoPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create git walker: %w", err)
|
||||
}
|
||||
|
||||
owner, repo, err := gitWalker.GetRepoInfo()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get repo info: %w", err)
|
||||
}
|
||||
|
||||
ghClient := github.NewClient(cfg.GitHubToken, owner, repo)
|
||||
|
||||
var c *cache.Cache
|
||||
if !cfg.NoCache {
|
||||
c, err = cache.New(cfg.CacheFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create cache: %w", err)
|
||||
}
|
||||
|
||||
if cfg.RebuildCache {
|
||||
if err := c.Clear(); err != nil {
|
||||
return nil, fmt.Errorf("failed to clear cache: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &Generator{
|
||||
cfg: cfg,
|
||||
gitWalker: gitWalker,
|
||||
ghClient: ghClient,
|
||||
cache: c,
|
||||
prs: make(map[int]*github.PR),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (g *Generator) Generate() (string, error) {
|
||||
if err := g.collectData(); err != nil {
|
||||
return "", fmt.Errorf("failed to collect data: %w", err)
|
||||
}
|
||||
|
||||
if err := g.fetchPRs(g.cfg.ForcePRSync); err != nil {
|
||||
return "", fmt.Errorf("failed to fetch PRs: %w", err)
|
||||
}
|
||||
|
||||
return g.formatChangelog(), nil
|
||||
}
|
||||
|
||||
func (g *Generator) collectData() error {
|
||||
if g.cache != nil && !g.cfg.RebuildCache {
|
||||
cachedTag, err := g.cache.GetLastProcessedTag()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get last processed tag: %w", err)
|
||||
}
|
||||
|
||||
if cachedTag != "" {
|
||||
// Get the current latest tag from git
|
||||
currentTag, err := g.gitWalker.GetLatestTag()
|
||||
if err == nil {
|
||||
// Load cached data - we can use it even if there are new tags
|
||||
cachedVersions, err := g.cache.GetVersions()
|
||||
if err == nil && len(cachedVersions) > 0 {
|
||||
g.versions = cachedVersions
|
||||
|
||||
// Load cached PRs
|
||||
for _, version := range g.versions {
|
||||
for _, prNum := range version.PRNumbers {
|
||||
if pr, err := g.cache.GetPR(prNum); err == nil && pr != nil {
|
||||
g.prs[prNum] = pr
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we have new tags since cache, process the new versions only
|
||||
if currentTag != cachedTag {
|
||||
fmt.Fprintf(os.Stderr, "Processing new versions since %s...\n", cachedTag)
|
||||
newVersions, err := g.gitWalker.WalkHistorySinceTag(cachedTag)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to walk history since tag %s: %v\n", cachedTag, err)
|
||||
} else {
|
||||
// Merge new versions into cached versions (only add if not already cached)
|
||||
for name, version := range newVersions {
|
||||
if name != "Unreleased" { // Handle Unreleased separately
|
||||
if existingVersion, exists := g.versions[name]; !exists {
|
||||
g.versions[name] = version
|
||||
} else {
|
||||
// Update existing version with new PR numbers if they're missing
|
||||
if len(existingVersion.PRNumbers) == 0 && len(version.PRNumbers) > 0 {
|
||||
existingVersion.PRNumbers = version.PRNumbers
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Always update Unreleased section with latest commits
|
||||
unreleasedVersion, err := g.gitWalker.WalkCommitsSinceTag(currentTag)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to walk commits since tag %s: %v\n", currentTag, err)
|
||||
} else if unreleasedVersion != nil {
|
||||
// Preserve existing AI summary if available
|
||||
if existingUnreleased, exists := g.versions["Unreleased"]; exists {
|
||||
unreleasedVersion.AISummary = existingUnreleased.AISummary
|
||||
}
|
||||
// Replace or add the unreleased version
|
||||
g.versions["Unreleased"] = unreleasedVersion
|
||||
}
|
||||
|
||||
// Save any new versions to cache (after potential AI processing)
|
||||
if currentTag != cachedTag {
|
||||
for _, version := range g.versions {
|
||||
// Skip versions that were already cached and Unreleased
|
||||
if version.Name != "Unreleased" {
|
||||
if err := g.cache.SaveVersion(version); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to save version to cache: %v\n", err)
|
||||
}
|
||||
|
||||
for _, commit := range version.Commits {
|
||||
if err := g.cache.SaveCommit(commit, version.Name); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to save commit to cache: %v\n", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update the last processed tag
|
||||
if err := g.cache.SetLastProcessedTag(currentTag); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to update last processed tag: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
versions, err := g.gitWalker.WalkHistory()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to walk history: %w", err)
|
||||
}
|
||||
|
||||
g.versions = versions
|
||||
|
||||
if g.cache != nil {
|
||||
for _, version := range versions {
|
||||
if err := g.cache.SaveVersion(version); err != nil {
|
||||
return fmt.Errorf("failed to save version to cache: %w", err)
|
||||
}
|
||||
|
||||
for _, commit := range version.Commits {
|
||||
if err := g.cache.SaveCommit(commit, version.Name); err != nil {
|
||||
return fmt.Errorf("failed to save commit to cache: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Save the latest tag as our cache anchor point
|
||||
if latestTag, err := g.gitWalker.GetLatestTag(); err == nil && latestTag != "" {
|
||||
if err := g.cache.SetLastProcessedTag(latestTag); err != nil {
|
||||
return fmt.Errorf("failed to save last processed tag: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
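collectData above reduces to a small decision tree: with no cached tag (or an empty cache), walk the full history and populate the cache; with a cached tag, reuse the cached versions and PRs, walk only the tags added since that anchor, and always rebuild the Unreleased section from git. A minimal sketch of that flow, using stand-in step functions rather than the real gitWalker and cache calls:

package main

import "fmt"

// Stand-in steps; in generator.go these are calls into gitWalker and cache.
func loadCachedVersions()          { fmt.Println("load cached versions and PRs") }
func walkSince(tag string)         { fmt.Println("walk history since", tag) }
func refreshUnreleased(tag string) { fmt.Println("rebuild Unreleased after", tag) }
func fullWalk()                    { fmt.Println("walk full history and fill the cache") }

// collectDataFlow mirrors the branching of Generator.collectData above (illustrative only).
func collectDataFlow(cachedTag, currentTag string) {
	if cachedTag == "" {
		fullWalk() // no cache anchor: everything is rebuilt and cached
		return
	}
	loadCachedVersions()
	if currentTag != cachedTag {
		walkSince(cachedTag) // only versions newer than the cache anchor are walked
	}
	refreshUnreleased(currentTag) // the Unreleased section is always recomputed from git
}

func main() { collectDataFlow("v1.4.0", "v1.4.2") }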
|
||||
func (g *Generator) fetchPRs(forcePRSync bool) error {
|
||||
// First, load all cached PRs
|
||||
if g.cache != nil {
|
||||
cachedPRs, err := g.cache.GetAllPRs()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to load cached PRs: %v\n", err)
|
||||
} else {
|
||||
g.prs = cachedPRs
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we need to fetch new PRs
|
||||
var lastSync time.Time
|
||||
if g.cache != nil {
|
||||
lastSync, _ = g.cache.GetLastPRSync()
|
||||
}
|
||||
|
||||
// Check if we need to sync for missing PRs
|
||||
missingPRs := false
|
||||
for _, version := range g.versions {
|
||||
for _, prNum := range version.PRNumbers {
|
||||
if _, exists := g.prs[prNum]; !exists {
|
||||
missingPRs = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if missingPRs {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if missingPRs {
|
||||
fmt.Fprintf(os.Stderr, "Full sync triggered due to missing PRs in cache.\n")
|
||||
}
|
||||
// If we have never synced or it's been more than 24 hours, do a full sync
|
||||
// Also sync if we have versions with PR numbers that aren't cached
|
||||
needsSync := lastSync.IsZero() || time.Since(lastSync) > 24*time.Hour || forcePRSync || missingPRs
|
||||
|
||||
if !needsSync {
|
||||
fmt.Fprintf(os.Stderr, "Using cached PR data (last sync: %s)\n", lastSync.Format("2006-01-02 15:04:05"))
|
||||
return nil
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "Fetching merged PRs from GitHub using GraphQL...\n")
|
||||
|
||||
// Use GraphQL for ultimate performance - gets everything in ~5-10 calls
|
||||
prs, err := g.ghClient.FetchAllMergedPRsGraphQL(lastSync)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "GraphQL fetch failed, falling back to REST API: %v\n", err)
|
||||
// Fall back to REST API
|
||||
prs, err = g.ghClient.FetchAllMergedPRs(lastSync)
|
||||
if err != nil {
|
||||
return fmt.Errorf("both GraphQL and REST API failed: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Update our PR map with new data
|
||||
for _, pr := range prs {
|
||||
g.prs[pr.Number] = pr
|
||||
}
|
||||
|
||||
// Save all PRs to cache in a batch transaction
|
||||
if g.cache != nil && len(prs) > 0 {
|
||||
// Save PRs
|
||||
if err := g.cache.SavePRBatch(prs); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to cache PRs: %v\n", err)
|
||||
}
|
||||
|
||||
// Save SHA→PR mappings for lightning-fast git operations
|
||||
if err := g.cache.SaveCommitPRMappings(prs); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to cache commit mappings: %v\n", err)
|
||||
}
|
||||
|
||||
// Update last sync timestamp
|
||||
if err := g.cache.SetLastPRSync(time.Now()); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to update last sync timestamp: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
if len(prs) > 0 {
|
||||
fmt.Fprintf(os.Stderr, "Fetched %d PRs with commits (total cached: %d)\n", len(prs), len(g.prs))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Generator) formatChangelog() string {
|
||||
var sb strings.Builder
|
||||
sb.WriteString("# Changelog\n")
|
||||
|
||||
versionList := g.getSortedVersions()
|
||||
|
||||
for _, version := range versionList {
|
||||
if g.cfg.Version != "" && version.Name != g.cfg.Version {
|
||||
continue
|
||||
}
|
||||
|
||||
versionText := g.formatVersion(version)
|
||||
if versionText != "" {
|
||||
sb.WriteString("\n")
|
||||
sb.WriteString(versionText)
|
||||
}
|
||||
}
|
||||
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (g *Generator) getSortedVersions() []*git.Version {
|
||||
var versions []*git.Version
|
||||
var releasedVersions []*git.Version
|
||||
|
||||
// Collect all released versions (non-"Unreleased")
|
||||
for name, version := range g.versions {
|
||||
if name != "Unreleased" {
|
||||
releasedVersions = append(releasedVersions, version)
|
||||
}
|
||||
}
|
||||
|
||||
// Sort released versions by date (newest first)
|
||||
sort.Slice(releasedVersions, func(i, j int) bool {
|
||||
return releasedVersions[i].Date.After(releasedVersions[j].Date)
|
||||
})
|
||||
|
||||
// Add "Unreleased" first if it exists and has commits
|
||||
if unreleased, exists := g.versions["Unreleased"]; exists && len(unreleased.Commits) > 0 {
|
||||
versions = append(versions, unreleased)
|
||||
}
|
||||
|
||||
// Add sorted released versions
|
||||
versions = append(versions, releasedVersions...)
|
||||
|
||||
if g.cfg.Limit > 0 && len(versions) > g.cfg.Limit {
|
||||
versions = versions[:g.cfg.Limit]
|
||||
}
|
||||
|
||||
return versions
|
||||
}
|
||||
|
||||
func (g *Generator) formatVersion(version *git.Version) string {
|
||||
var sb strings.Builder
|
||||
|
||||
// Generate raw content
|
||||
rawContent := g.generateRawVersionContent(version)
|
||||
if rawContent == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
header := g.formatVersionHeader(version)
|
||||
sb.WriteString("\n")
|
||||
sb.WriteString(header)
|
||||
|
||||
// If AI summarization is enabled, enhance with AI
|
||||
if g.cfg.EnableAISummary {
|
||||
// For "Unreleased", check if content has changed since last AI summary
|
||||
if version.Name == "Unreleased" && version.AISummary != "" && g.cache != nil {
|
||||
// Get cached content hash
|
||||
cachedHash, err := g.cache.GetUnreleasedContentHash()
|
||||
if err == nil {
|
||||
// Calculate current content hash
|
||||
currentHash := hashContent(rawContent)
|
||||
if cachedHash == currentHash {
|
||||
// Content unchanged, use cached summary
|
||||
fmt.Fprintf(os.Stderr, "✅ %s content unchanged (skipping AI)\n", version.Name)
|
||||
sb.WriteString(version.AISummary)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For released versions, if we have cached AI summary, use it!
|
||||
if version.Name != "Unreleased" && version.AISummary != "" {
|
||||
fmt.Fprintf(os.Stderr, "✅ %s already summarized (skipping)\n", version.Name)
|
||||
sb.WriteString(version.AISummary)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "🤖 AI summarizing %s...", version.Name)
|
||||
|
||||
aiSummary, err := SummarizeVersionContent(rawContent)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, " Failed: %v\n", err)
|
||||
sb.WriteString(rawContent)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
if checkForAIError(aiSummary) {
|
||||
fmt.Fprintf(os.Stderr, " AI error detected, using raw content instead\n")
|
||||
sb.WriteString(rawContent)
|
||||
fmt.Fprintf(os.Stderr, "Raw Content was: (%d bytes) %s \n", len(rawContent), rawContent)
|
||||
fmt.Fprintf(os.Stderr, "AI Summary was: (%d bytes) %s\n", len(aiSummary), aiSummary)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, " Done!\n")
|
||||
aiSummary = strings.TrimSpace(aiSummary)
|
||||
|
||||
// Cache the AI summary and content hash
|
||||
version.AISummary = aiSummary
|
||||
if g.cache != nil {
|
||||
if err := g.cache.UpdateVersionAISummary(version.Name, aiSummary); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to cache AI summary: %v\n", err)
|
||||
}
|
||||
// Cache content hash for "Unreleased" to detect changes
|
||||
if version.Name == "Unreleased" {
|
||||
if err := g.cache.SetUnreleasedContentHash(hashContent(rawContent)); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to cache content hash: %v\n", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sb.WriteString(aiSummary)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
|
||||
sb.WriteString(rawContent)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
|
||||
func checkForAIError(summary string) bool {
|
||||
// Check for common AI error patterns
|
||||
errorPatterns := []string{
|
||||
"I don't see any", "please provide",
|
||||
"content you've provided appears to be incomplete",
|
||||
}
|
||||
|
||||
for _, pattern := range errorPatterns {
|
||||
if strings.Contains(summary, pattern) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// formatVersionHeader formats just the version header (## ...)
|
||||
func (g *Generator) formatVersionHeader(version *git.Version) string {
|
||||
if version.Name == "Unreleased" {
|
||||
return "## Unreleased\n\n"
|
||||
}
|
||||
return fmt.Sprintf("\n## %s (%s)\n\n", version.Name, version.Date.Format("2006-01-02"))
|
||||
}
|
||||
|
||||
// generateRawVersionContent generates the raw content (PRs + commits) for a version
|
||||
func (g *Generator) generateRawVersionContent(version *git.Version) string {
|
||||
var sb strings.Builder
|
||||
|
||||
// Build a set of commit SHAs that are part of fetched PRs
|
||||
prCommitSHAs := make(map[string]bool)
|
||||
for _, prNum := range version.PRNumbers {
|
||||
if pr, exists := g.prs[prNum]; exists {
|
||||
for _, prCommit := range pr.Commits {
|
||||
prCommitSHAs[prCommit.SHA] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
prCommits := make(map[int][]*git.Commit)
|
||||
directCommits := []*git.Commit{}
|
||||
|
||||
for _, commit := range version.Commits {
|
||||
// Skip version bump commits from output
|
||||
if commit.IsVersion {
|
||||
continue
|
||||
}
|
||||
|
||||
// If this commit is part of a fetched PR, don't include it in direct commits
|
||||
if prCommitSHAs[commit.SHA] {
|
||||
continue
|
||||
}
|
||||
|
||||
if commit.PRNumber > 0 {
|
||||
prCommits[commit.PRNumber] = append(prCommits[commit.PRNumber], commit)
|
||||
} else {
|
||||
directCommits = append(directCommits, commit)
|
||||
}
|
||||
}
|
||||
|
||||
// There are occasionally no PRs or direct commits other than version bumps, so we handle that gracefully
|
||||
if len(prCommits) == 0 && len(directCommits) == 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
prependNewline := ""
|
||||
for _, prNum := range version.PRNumbers {
|
||||
if pr, exists := g.prs[prNum]; exists {
|
||||
sb.WriteString(prependNewline)
|
||||
sb.WriteString(g.formatPR(pr))
|
||||
prependNewline = "\n"
|
||||
}
|
||||
}
|
||||
|
||||
if len(directCommits) > 0 {
|
||||
// Sort direct commits by date (newest first) for consistent ordering
|
||||
sort.Slice(directCommits, func(i, j int) bool {
|
||||
return directCommits[i].Date.After(directCommits[j].Date)
|
||||
})
|
||||
|
||||
sb.WriteString(prependNewline + "### Direct commits\n\n")
|
||||
for _, commit := range directCommits {
|
||||
message := g.formatCommitMessage(strings.TrimSpace(commit.Message))
|
||||
if message != "" && !g.isDuplicateMessage(message, directCommits) {
|
||||
sb.WriteString(fmt.Sprintf("- %s\n", message))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fixMarkdown(
|
||||
strings.ReplaceAll(sb.String(), "\n-\n", "\n"), // Remove empty list items
|
||||
)
|
||||
}
|
||||
|
||||
func fixMarkdown(content string) string {
|
||||
|
||||
// Fix MD032/blank-around-lists: Lists should be surrounded by blank lines
|
||||
lines := strings.Split(content, "\n")
|
||||
inList := false
|
||||
preListNewline := false
|
||||
for i := range lines {
|
||||
line := strings.TrimSpace(lines[i])
|
||||
if strings.HasPrefix(line, "- ") || strings.HasPrefix(line, "* ") {
|
||||
if !inList {
|
||||
inList = true
|
||||
// Ensure there's a blank line before the list starts
|
||||
if !preListNewline && i > 0 && lines[i-1] != "" {
|
||||
line = "\n" + line
|
||||
preListNewline = true
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if inList {
|
||||
inList = false
|
||||
preListNewline = false
|
||||
}
|
||||
}
|
||||
lines[i] = strings.TrimRight(line, " \t")
|
||||
}
|
||||
|
||||
fixedContent := strings.TrimSpace(strings.Join(lines, "\n"))
|
||||
|
||||
return fixedContent + "\n"
|
||||
}
|
||||
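fixMarkdown's main effect is the MD032 repair: a list that directly follows a paragraph gains a blank line, each line loses trailing whitespace, and the result ends with exactly one newline. A small test-style sketch of that behavior, assuming it sits in the same changelog package as the function above:

package changelog

import "testing"

// TestFixMarkdownBlankLineBeforeList sketches fixMarkdown's MD032 behavior:
// a blank line is inserted between a paragraph and the list that follows it,
// and the output always ends with exactly one trailing newline.
func TestFixMarkdownBlankLineBeforeList(t *testing.T) {
	in := "Some intro text\n- first item\n- second item"
	want := "Some intro text\n\n- first item\n- second item\n"
	if got := fixMarkdown(in); got != want {
		t.Errorf("fixMarkdown() = %q, want %q", got, want)
	}
}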
|
||||
func (g *Generator) formatPR(pr *github.PR) string {
|
||||
var sb strings.Builder
|
||||
|
||||
pr.Title = strings.TrimRight(strings.TrimSpace(pr.Title), ".")
|
||||
|
||||
// Add type indicator for non-users
|
||||
authorName := pr.Author
|
||||
switch pr.AuthorType {
|
||||
case "bot":
|
||||
authorName += "[bot]"
|
||||
case "organization":
|
||||
authorName += "[org]"
|
||||
}
|
||||
|
||||
sb.WriteString(fmt.Sprintf("### PR [#%d](%s) by [%s](%s): %s\n\n",
|
||||
pr.Number, pr.URL, authorName, pr.AuthorURL, strings.TrimSpace(pr.Title)))
|
||||
|
||||
changes := g.extractChanges(pr)
|
||||
for _, change := range changes {
|
||||
if change != "" {
|
||||
sb.WriteString(fmt.Sprintf("- %s\n", change))
|
||||
}
|
||||
}
|
||||
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (g *Generator) extractChanges(pr *github.PR) []string {
|
||||
var changes []string
|
||||
seen := make(map[string]bool)
|
||||
|
||||
for _, commit := range pr.Commits {
|
||||
message := g.formatCommitMessage(commit.Message)
|
||||
if message != "" && !seen[message] {
|
||||
seen[message] = true
|
||||
changes = append(changes, message)
|
||||
}
|
||||
}
|
||||
|
||||
if len(changes) == 0 && pr.Body != "" {
|
||||
lines := strings.Split(pr.Body, "\n")
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if strings.HasPrefix(line, "- ") || strings.HasPrefix(line, "* ") {
|
||||
change := strings.TrimPrefix(strings.TrimPrefix(line, "- "), "* ")
|
||||
if change != "" {
|
||||
changes = append(changes, change)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return changes
|
||||
}
|
||||
|
||||
func normalizeLineEndings(content string) string {
|
||||
return strings.ReplaceAll(content, "\r\n", "\n")
|
||||
}
|
||||
|
||||
func (g *Generator) formatCommitMessage(message string) string {
|
||||
strings_to_remove := []string{
|
||||
"### CHANGES\n", "## CHANGES\n", "# CHANGES\n",
|
||||
"...\n", "---\n", "## Changes\n", "## Change",
|
||||
"Update version to v..1 and commit\n",
|
||||
"# What this Pull Request (PR) does\n",
|
||||
"# Conflicts:",
|
||||
}
|
||||
|
||||
message = normalizeLineEndings(message)
|
||||
// No hard tabs
|
||||
message = strings.ReplaceAll(message, "\t", " ")
|
||||
|
||||
if len(message) > 0 {
|
||||
message = strings.ToUpper(message[:1]) + message[1:]
|
||||
}
|
||||
|
||||
for _, str := range strings_to_remove {
|
||||
if strings.Contains(message, str) {
|
||||
message = strings.ReplaceAll(message, str, "")
|
||||
}
|
||||
}
|
||||
|
||||
message = fixFormatting(message)
|
||||
|
||||
return message
|
||||
}
|
||||
|
||||
func fixFormatting(message string) string {
|
||||
// Turn "*"" lists into "-" lists"
|
||||
message = strings.ReplaceAll(message, "* ", "- ")
|
||||
// Remove extra spaces around dashes
|
||||
message = strings.ReplaceAll(message, "- ", "- ")
|
||||
message = strings.ReplaceAll(message, "- ", "- ")
|
||||
// turn bare URL into <URL>
|
||||
if strings.Contains(message, "http://") || strings.Contains(message, "https://") {
|
||||
// Use regex to wrap bare URLs with angle brackets
|
||||
urlRegex := regexp.MustCompile(`\b(https?://[^\s<>]+)`)
|
||||
message = urlRegex.ReplaceAllString(message, "<$1>")
|
||||
}
|
||||
|
||||
// Replace "## LINKS\n" with "- "
|
||||
message = strings.ReplaceAll(message, "## LINKS\n", "- ")
|
||||
// Dependabot messages: "- [Commits]" should become "\n- [Commits]"
|
||||
message = strings.TrimSpace(message)
|
||||
// Turn multiple newlines into a single newline
|
||||
message = strings.TrimSpace(strings.ReplaceAll(message, "\n\n", "\n"))
|
||||
// Fix inline trailing spaces
|
||||
message = strings.ReplaceAll(message, " \n", "\n")
|
||||
// Fix weird indent before list items
|
||||
message = strings.ReplaceAll(message, "\n - ", "\n- ")
|
||||
|
||||
// blanks-around-lists MD032 fix
|
||||
// Use regex to ensure blank line before list items that don't already have one
|
||||
listRegex := regexp.MustCompile(`(?m)([^\n-].*[^:\n])\n([-*] .*)`)
|
||||
message = listRegex.ReplaceAllString(message, "$1\n\n$2")
|
||||
|
||||
// Change random first-level "#" to 4th level "####"
|
||||
// This is a hack to fix spurious first-level headings that are not actual headings
|
||||
// but rather just comments or notes in the commit message.
|
||||
message = strings.ReplaceAll(message, "# ", "\n#### ")
|
||||
message = strings.ReplaceAll(message, "\n\n\n", "\n\n")
|
||||
|
||||
// Wrap any non-wrapped Emails with angle brackets
|
||||
emailRegex := regexp.MustCompile(`([a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,})`)
|
||||
message = emailRegex.ReplaceAllString(message, "<$1>")
|
||||
|
||||
// Wrap any non-wrapped URLs with angle brackets
|
||||
urlRegex := regexp.MustCompile(`(https?://[^\s<]+)`)
|
||||
message = urlRegex.ReplaceAllString(message, "<$1>")
|
||||
|
||||
message = strings.ReplaceAll(message, "<<", "<")
|
||||
message = strings.ReplaceAll(message, ">>", ">")
|
||||
|
||||
// Fix some spurious Issue/PR links at the beginning of a commit message line
|
||||
prOrIssueLinkRegex := regexp.MustCompile("\n" + `(#\d+)`)
|
||||
message = prOrIssueLinkRegex.ReplaceAllString(message, " $1")
|
||||
|
||||
// Remove leading/trailing whitespace
|
||||
message = strings.TrimSpace(message)
|
||||
return message
|
||||
}
|
||||
|
||||
func (g *Generator) isDuplicateMessage(message string, commits []*git.Commit) bool {
|
||||
if message == "." || strings.ToLower(message) == "fix" {
|
||||
count := 0
|
||||
for _, commit := range commits {
|
||||
formatted := g.formatCommitMessage(commit.Message)
|
||||
if formatted == message {
|
||||
count++
|
||||
if count > 1 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// hashContent generates a SHA256 hash of the content for change detection
|
||||
func hashContent(content string) string {
|
||||
hash := sha256.Sum256([]byte(content))
|
||||
return fmt.Sprintf("%x", hash)
|
||||
}
|
||||
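hashContent is what makes the Unreleased caching in formatVersion cheap: the raw section text is hashed, compared against the value stored via SetUnreleasedContentHash on the previous run, and the cached AI summary is reused on a match. A one-line sketch of that decision, assuming it lives in the same package:

// Sketch: the Unreleased section only goes back to the AI summarizer when its
// raw content hash differs from the one cached on the previous run.
func unreleasedNeedsResummary(cachedHash, rawContent string) bool {
	return cachedHash != hashContent(rawContent)
}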
|
||||
// SyncDatabase performs a comprehensive database synchronization and validation
|
||||
func (g *Generator) SyncDatabase() error {
|
||||
if g.cache == nil {
|
||||
return fmt.Errorf("cache is disabled, cannot sync database")
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "[SYNC] Starting database synchronization...\n")
|
||||
|
||||
// Step 1: Force PR sync (pass true explicitly)
|
||||
fmt.Fprintf(os.Stderr, "[PR_SYNC] Forcing PR sync from GitHub...\n")
|
||||
if err := g.fetchPRs(true); err != nil {
|
||||
return fmt.Errorf("failed to sync PRs: %w", err)
|
||||
}
|
||||
|
||||
// Step 2: Rebuild git history and verify versions/commits completeness
|
||||
fmt.Fprintf(os.Stderr, "[VERIFY] Verifying git history and version completeness...\n")
|
||||
if err := g.syncGitHistory(); err != nil {
|
||||
return fmt.Errorf("failed to sync git history: %w", err)
|
||||
}
|
||||
|
||||
// Step 3: Verify commit-PR mappings
|
||||
fmt.Fprintf(os.Stderr, "[MAPPING] Verifying commit-PR mappings...\n")
|
||||
if err := g.verifyCommitPRMappings(); err != nil {
|
||||
return fmt.Errorf("failed to verify commit-PR mappings: %w", err)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "[SUCCESS] Database synchronization completed successfully!\n")
|
||||
return nil
|
||||
}
|
||||
|
||||
// syncGitHistory walks the complete git history and ensures all versions and commits are cached
|
||||
func (g *Generator) syncGitHistory() error {
|
||||
// Walk complete git history (reuse existing logic)
|
||||
versions, err := g.gitWalker.WalkHistory()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to walk git history: %w", err)
|
||||
}
|
||||
|
||||
// Save only new versions and commits (preserve existing data)
|
||||
var newVersions, newCommits int
|
||||
for _, version := range versions {
|
||||
// Only save version if it doesn't exist
|
||||
exists, err := g.cache.VersionExists(version.Name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to check existence of version %s: %v. This may affect the completeness of the sync operation.\n", version.Name, err)
|
||||
continue
|
||||
}
|
||||
if !exists {
|
||||
if err := g.cache.SaveVersion(version); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to save version %s: %v\n", version.Name, err)
|
||||
} else {
|
||||
newVersions++
|
||||
}
|
||||
}
|
||||
|
||||
// Only save commits that don't exist
|
||||
for _, commit := range version.Commits {
|
||||
exists, err := g.cache.CommitExists(commit.SHA)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to check commit %s existence: %v\n", commit.SHA, err)
|
||||
continue
|
||||
}
|
||||
if !exists {
|
||||
if err := g.cache.SaveCommit(commit, version.Name); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to save commit %s: %v\n", commit.SHA, err)
|
||||
} else {
|
||||
newCommits++
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update last processed tag
|
||||
if latestTag, err := g.gitWalker.GetLatestTag(); err == nil && latestTag != "" {
|
||||
if err := g.cache.SetLastProcessedTag(latestTag); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to update last processed tag: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, " Added %d new versions and %d new commits (preserved existing data)\n", newVersions, newCommits)
|
||||
return nil
|
||||
}
|
||||
|
||||
// verifyCommitPRMappings ensures all PR commits have proper mappings
|
||||
func (g *Generator) verifyCommitPRMappings() error {
|
||||
// Get all cached PRs
|
||||
allPRs, err := g.cache.GetAllPRs()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get cached PRs: %w", err)
|
||||
}
|
||||
|
||||
// Convert to slice for batch operations (reuse existing logic)
|
||||
var prSlice []*github.PR
|
||||
for _, pr := range allPRs {
|
||||
prSlice = append(prSlice, pr)
|
||||
}
|
||||
|
||||
// Save commit-PR mappings (reuse existing logic)
|
||||
if err := g.cache.SaveCommitPRMappings(prSlice); err != nil {
|
||||
return fmt.Errorf("failed to save commit-PR mappings: %w", err)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, " Verified mappings for %d PRs\n", len(prSlice))
|
||||
return nil
|
||||
}
|
||||
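Taken together, generator.go exposes a small surface: New wires up the git walker, GitHub client, and optional cache from a config.Config, Generate returns the rendered changelog text, and SyncDatabase repairs the cache. A minimal usage sketch follows; only config fields that generator.go actually reads are set, unset fields stay at their zero values (no GitHub token, no AI summaries, cache enabled), and since the package is internal such a caller would live under cmd/generate_changelog:

package main

import (
	"fmt"
	"log"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/changelog"
	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
)

func main() {
	cfg := &config.Config{
		RepoPath:  ".",
		CacheFile: "changelog.db",
		Limit:     5, // newest five versions, plus Unreleased when it has commits
	}

	gen, err := changelog.New(cfg)
	if err != nil {
		log.Fatal(err)
	}

	out, err := gen.Generate()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(out)
}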
115
cmd/generate_changelog/internal/changelog/generator_test.go
Normal file
@@ -0,0 +1,115 @@
|
||||
package changelog
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
|
||||
)
|
||||
|
||||
func TestDetectVersionFromNix(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
|
||||
t.Run("version.nix exists", func(t *testing.T) {
|
||||
versionNixContent := `"1.2.3"`
|
||||
versionNixPath := filepath.Join(tempDir, "version.nix")
|
||||
err := os.WriteFile(versionNixPath, []byte(versionNixContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write version.nix: %v", err)
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(versionNixPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read version.nix: %v", err)
|
||||
}
|
||||
|
||||
versionRegex := regexp.MustCompile(`"([^"]+)"`)
|
||||
matches := versionRegex.FindStringSubmatch(string(data))
|
||||
|
||||
if len(matches) <= 1 {
|
||||
t.Fatalf("No version found in version.nix")
|
||||
}
|
||||
|
||||
version := matches[1]
|
||||
if version != "1.2.3" {
|
||||
t.Errorf("Expected version 1.2.3, got %s", version)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestEnsureIncomingDir(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
incomingDir := filepath.Join(tempDir, "incoming")
|
||||
|
||||
cfg := &config.Config{
|
||||
IncomingDir: incomingDir,
|
||||
}
|
||||
|
||||
g := &Generator{cfg: cfg}
|
||||
|
||||
err := g.ensureIncomingDir()
|
||||
if err != nil {
|
||||
t.Fatalf("ensureIncomingDir failed: %v", err)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(incomingDir); os.IsNotExist(err) {
|
||||
t.Errorf("Incoming directory was not created")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInsertVersionAtTop(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
changelogPath := filepath.Join(tempDir, "CHANGELOG.md")
|
||||
|
||||
cfg := &config.Config{
|
||||
RepoPath: tempDir,
|
||||
}
|
||||
|
||||
g := &Generator{cfg: cfg}
|
||||
|
||||
t.Run("new changelog", func(t *testing.T) {
|
||||
entry := "## v1.0.0 (2025-01-01)\n\n- Initial release"
|
||||
|
||||
err := g.insertVersionAtTop(entry)
|
||||
if err != nil {
|
||||
t.Fatalf("insertVersionAtTop failed: %v", err)
|
||||
}
|
||||
|
||||
content, err := os.ReadFile(changelogPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read changelog: %v", err)
|
||||
}
|
||||
|
||||
expected := "# Changelog\n\n## v1.0.0 (2025-01-01)\n\n- Initial release\n"
|
||||
if string(content) != expected {
|
||||
t.Errorf("Expected:\n%s\nGot:\n%s", expected, string(content))
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("existing changelog", func(t *testing.T) {
|
||||
existingContent := "# Changelog\n\n## v0.9.0 (2024-12-01)\n\n- Previous release"
|
||||
err := os.WriteFile(changelogPath, []byte(existingContent), 0644)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to write existing changelog: %v", err)
|
||||
}
|
||||
|
||||
entry := "## v1.0.0 (2025-01-01)\n\n- New release"
|
||||
|
||||
err = g.insertVersionAtTop(entry)
|
||||
if err != nil {
|
||||
t.Fatalf("insertVersionAtTop failed: %v", err)
|
||||
}
|
||||
|
||||
content, err := os.ReadFile(changelogPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read changelog: %v", err)
|
||||
}
|
||||
|
||||
expected := "# Changelog\n\n## v1.0.0 (2025-01-01)\n\n- New release\n## v0.9.0 (2024-12-01)\n\n- Previous release"
|
||||
if string(content) != expected {
|
||||
t.Errorf("Expected:\n%s\nGot:\n%s", expected, string(content))
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -0,0 +1,82 @@
|
||||
package changelog
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
|
||||
)
|
||||
|
||||
func TestIsMergeCommit(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
commit github.PRCommit
|
||||
expected bool
|
||||
}{
|
||||
{
|
||||
name: "Regular commit with single parent",
|
||||
commit: github.PRCommit{
|
||||
SHA: "abc123",
|
||||
Message: "Fix bug in user authentication",
|
||||
Author: "John Doe",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"def456"},
|
||||
},
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "Merge commit with multiple parents",
|
||||
commit: github.PRCommit{
|
||||
SHA: "abc123",
|
||||
Message: "Merge pull request #42 from feature/auth",
|
||||
Author: "GitHub",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"def456", "ghi789"},
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "Merge commit detected by message pattern only",
|
||||
commit: github.PRCommit{
|
||||
SHA: "abc123",
|
||||
Message: "Merge pull request #123 from user/feature-branch",
|
||||
Author: "GitHub",
|
||||
Date: time.Now(),
|
||||
Parents: []string{}, // Empty parents - fallback to message detection
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "Merge branch commit pattern",
|
||||
commit: github.PRCommit{
|
||||
SHA: "abc123",
|
||||
Message: "Merge branch 'feature' into main",
|
||||
Author: "Developer",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"def456"}, // Single parent but merge pattern
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "Regular commit with no merge patterns",
|
||||
commit: github.PRCommit{
|
||||
SHA: "abc123",
|
||||
Message: "Add new feature for user management",
|
||||
Author: "Jane Doe",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"def456"},
|
||||
},
|
||||
expected: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := isMergeCommit(tt.commit)
|
||||
if result != tt.expected {
|
||||
t.Errorf("isMergeCommit() = %v, expected %v for commit: %s",
|
||||
result, tt.expected, tt.commit.Message)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
521
cmd/generate_changelog/internal/changelog/processing.go
Normal file
@@ -0,0 +1,521 @@
|
||||
package changelog
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
|
||||
)
|
||||
|
||||
var (
|
||||
mergePatterns []*regexp.Regexp
|
||||
mergePatternsOnce sync.Once
|
||||
)
|
||||
|
||||
// getMergePatterns returns the compiled merge patterns, initializing them lazily
|
||||
func getMergePatterns() []*regexp.Regexp {
|
||||
mergePatternsOnce.Do(func() {
|
||||
mergePatterns = []*regexp.Regexp{
|
||||
regexp.MustCompile(`^Merge pull request #\d+`), // "Merge pull request #123 from..."
|
||||
regexp.MustCompile(`^Merge branch '.*' into .*`), // "Merge branch 'feature' into main"
|
||||
regexp.MustCompile(`^Merge remote-tracking branch`), // "Merge remote-tracking branch..."
|
||||
regexp.MustCompile(`^Merge '.*' into .*`), // "Merge 'feature' into main"
|
||||
}
|
||||
})
|
||||
return mergePatterns
|
||||
}
|
||||
|
||||
// isMergeCommit determines if a commit is a merge commit based on its parents and message patterns.
|
||||
func isMergeCommit(commit github.PRCommit) bool {
|
||||
// Primary method: Check parent count (merge commits have multiple parents)
|
||||
if len(commit.Parents) > 1 {
|
||||
return true
|
||||
}
|
||||
|
||||
// Fallback method: Check commit message patterns
|
||||
mergePatterns := getMergePatterns()
|
||||
for _, pattern := range mergePatterns {
|
||||
if pattern.MatchString(commit.Message) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// calculateVersionDate determines the version date based on the most recent commit date from the provided PRs.
|
||||
//
|
||||
// If no valid commit dates are found, the function falls back to the current time.
|
||||
// The function iterates through the provided PRs and their associated commits, comparing commit dates
|
||||
// to identify the most recent one. If a valid date is found, it is returned; otherwise, the fallback is used.
|
||||
func calculateVersionDate(fetchedPRs []*github.PR) time.Time {
|
||||
versionDate := time.Now() // fallback to current time
|
||||
if len(fetchedPRs) > 0 {
|
||||
var mostRecentCommitDate time.Time
|
||||
for _, pr := range fetchedPRs {
|
||||
for _, commit := range pr.Commits {
|
||||
if commit.Date.After(mostRecentCommitDate) {
|
||||
mostRecentCommitDate = commit.Date
|
||||
}
|
||||
}
|
||||
}
|
||||
if !mostRecentCommitDate.IsZero() {
|
||||
versionDate = mostRecentCommitDate
|
||||
}
|
||||
}
|
||||
return versionDate
|
||||
}
|
||||
|
||||
// ProcessIncomingPR processes a single PR for changelog entry creation
|
||||
func (g *Generator) ProcessIncomingPR(prNumber int) error {
|
||||
if err := g.validatePRState(prNumber); err != nil {
|
||||
return fmt.Errorf("PR validation failed: %w", err)
|
||||
}
|
||||
|
||||
if err := g.validateGitStatus(); err != nil {
|
||||
return fmt.Errorf("git status validation failed: %w", err)
|
||||
}
|
||||
|
||||
// Now fetch the full PR with commits for content generation
|
||||
pr, err := g.ghClient.GetPRWithCommits(prNumber)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to fetch PR %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
content := g.formatPR(pr)
|
||||
|
||||
if g.cfg.EnableAISummary {
|
||||
aiContent, err := SummarizeVersionContent(content)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: AI summarization failed: %v\n", err)
|
||||
} else if !checkForAIError(aiContent) {
|
||||
content = strings.TrimSpace(aiContent)
|
||||
}
|
||||
}
|
||||
|
||||
if err := g.ensureIncomingDir(); err != nil {
|
||||
return fmt.Errorf("failed to create incoming directory: %w", err)
|
||||
}
|
||||
|
||||
filename := filepath.Join(g.cfg.IncomingDir, fmt.Sprintf("%d.txt", prNumber))
|
||||
|
||||
// Ensure content ends with a single newline
|
||||
content = strings.TrimSpace(content) + "\n"
|
||||
|
||||
if err := os.WriteFile(filename, []byte(content), 0644); err != nil {
|
||||
return fmt.Errorf("failed to write incoming file: %w", err)
|
||||
}
|
||||
|
||||
if err := g.commitAndPushIncoming(prNumber, filename); err != nil {
|
||||
return fmt.Errorf("failed to commit and push: %w", err)
|
||||
}
|
||||
|
||||
fmt.Printf("Successfully created incoming changelog entry: %s\n", filename)
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateNewChangelogEntry aggregates all incoming PR files for release and includes direct commits
|
||||
func (g *Generator) CreateNewChangelogEntry(version string) error {
|
||||
files, err := filepath.Glob(filepath.Join(g.cfg.IncomingDir, "*.txt"))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to scan incoming directory: %w", err)
|
||||
}
|
||||
|
||||
var content strings.Builder
|
||||
var processingErrors []string
|
||||
|
||||
// First, aggregate all incoming PR files
|
||||
for _, file := range files {
|
||||
data, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
processingErrors = append(processingErrors, fmt.Sprintf("failed to read %s: %v", file, err))
|
||||
continue // Continue to attempt processing other files
|
||||
}
|
||||
content.WriteString(string(data))
|
||||
// Note: No extra newline needed here as each incoming file already ends with a newline
|
||||
}
|
||||
|
||||
if len(processingErrors) > 0 {
|
||||
return fmt.Errorf("encountered errors while processing incoming files: %s", strings.Join(processingErrors, "; "))
|
||||
}
|
||||
|
||||
// Extract PR numbers and their commit SHAs from processed files to avoid including their commits as "direct"
|
||||
processedPRs := make(map[int]bool)
|
||||
processedCommitSHAs := make(map[string]bool)
|
||||
var fetchedPRs []*github.PR
|
||||
var prNumbers []int
|
||||
|
||||
for _, file := range files {
|
||||
// Extract PR number from filename (e.g., "1640.txt" -> 1640)
|
||||
filename := filepath.Base(file)
|
||||
if prNumStr := strings.TrimSuffix(filename, ".txt"); prNumStr != filename {
|
||||
if prNum, err := strconv.Atoi(prNumStr); err == nil {
|
||||
processedPRs[prNum] = true
|
||||
prNumbers = append(prNumbers, prNum)
|
||||
|
||||
// Fetch the PR to get its commit SHAs
|
||||
if pr, err := g.ghClient.GetPRWithCommits(prNum); err == nil {
|
||||
fetchedPRs = append(fetchedPRs, pr)
|
||||
for _, commit := range pr.Commits {
|
||||
processedCommitSHAs[commit.SHA] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Now add direct commits since the last release, excluding commits from processed PRs
|
||||
directCommitsContent, err := g.getDirectCommitsSinceLastRelease(processedPRs, processedCommitSHAs)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get direct commits since last release: %w", err)
|
||||
}
|
||||
content.WriteString(directCommitsContent)
|
||||
|
||||
// Check if we have any content at all
|
||||
if content.Len() == 0 {
|
||||
if len(files) == 0 {
|
||||
fmt.Fprintf(os.Stderr, "No incoming PR files found in %s and no direct commits since last release\n", g.cfg.IncomingDir)
|
||||
} else {
|
||||
fmt.Fprintf(os.Stderr, "No content found in incoming files and no direct commits since last release\n")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Calculate the version date for the changelog entry as the most recent commit date from processed PRs
|
||||
versionDate := calculateVersionDate(fetchedPRs)
|
||||
|
||||
entry := fmt.Sprintf("## %s (%s)\n\n%s",
|
||||
version, versionDate.Format("2006-01-02"), strings.TrimLeft(content.String(), "\n"))
|
||||
|
||||
if err := g.insertVersionAtTop(entry); err != nil {
|
||||
return fmt.Errorf("failed to update CHANGELOG.md: %w", err)
|
||||
}
|
||||
|
||||
if g.cache != nil {
|
||||
// Cache the fetched PRs using the same logic as normal changelog generation
|
||||
if len(fetchedPRs) > 0 {
|
||||
// Save PRs to cache
|
||||
if err := g.cache.SavePRBatch(fetchedPRs); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to save PR batch to cache: %v\n", err)
|
||||
}
|
||||
|
||||
// Save SHA→PR mappings for lightning-fast git operations
|
||||
if err := g.cache.SaveCommitPRMappings(fetchedPRs); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to cache commit mappings: %v\n", err)
|
||||
}
|
||||
|
||||
// Save individual commits to cache for each PR
|
||||
for _, pr := range fetchedPRs {
|
||||
for _, commit := range pr.Commits {
|
||||
// Use actual commit timestamp, with fallback to current time if invalid
|
||||
commitDate := commit.Date
|
||||
if commitDate.IsZero() {
|
||||
commitDate = time.Now()
|
||||
fmt.Fprintf(os.Stderr, "Warning: Commit %s has invalid timestamp, using current time as fallback\n", commit.SHA)
|
||||
}
|
||||
|
||||
// Convert github.PRCommit to git.Commit
|
||||
gitCommit := &git.Commit{
|
||||
SHA: commit.SHA,
|
||||
Message: commit.Message,
|
||||
Author: commit.Author,
|
||||
Email: commit.Email, // Use email from GitHub API
|
||||
Date: commitDate, // Use actual commit timestamp from GitHub API
|
||||
IsMerge: isMergeCommit(commit), // Detect merge commits using parents and message patterns
|
||||
PRNumber: pr.Number,
|
||||
}
|
||||
if err := g.cache.SaveCommit(gitCommit, version); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to save commit %s to cache: %v\n", commit.SHA, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create a proper new version entry for the database
|
||||
newVersionEntry := &git.Version{
|
||||
Name: version,
|
||||
Date: versionDate, // Use most recent commit date instead of current time
|
||||
CommitSHA: "", // Will be set when the release commit is made
|
||||
PRNumbers: prNumbers, // Now we have the actual PR numbers
|
||||
AISummary: content.String(),
|
||||
}
|
||||
|
||||
if err := g.cache.SaveVersion(newVersionEntry); err != nil {
|
||||
return fmt.Errorf("failed to save new version entry to database: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
// Convert to relative path for git operations
|
||||
relativeFile, err := filepath.Rel(g.cfg.RepoPath, file)
|
||||
if err != nil {
|
||||
relativeFile = file
|
||||
}
|
||||
|
||||
// Use git remove to handle both filesystem and git index
|
||||
if err := g.gitWalker.RemoveFile(relativeFile); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to remove %s from git index: %v\n", relativeFile, err)
|
||||
// Fallback to filesystem-only removal
|
||||
if err := os.Remove(file); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error: Failed to remove %s from the filesystem after failing to remove it from the git index.\n", relativeFile)
|
||||
fmt.Fprintf(os.Stderr, "Filesystem error: %v\n", err)
|
||||
fmt.Fprintf(os.Stderr, "Manual intervention required:\n")
|
||||
fmt.Fprintf(os.Stderr, " 1. Remove the file %s manually (using the OS-specific command)\n", file)
|
||||
fmt.Fprintf(os.Stderr, " 2. Remove from git index: git rm --cached %s\n", relativeFile)
|
||||
fmt.Fprintf(os.Stderr, " 3. Or reset git index: git reset HEAD %s\n", relativeFile)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := g.stageChangesForRelease(); err != nil {
|
||||
return fmt.Errorf("critical: failed to stage changes for release: %w", err)
|
||||
}
|
||||
|
||||
fmt.Printf("Successfully processed %d incoming PR files for version %s\n", len(files), version)
|
||||
return nil
|
||||
}
|
||||
|
||||
// getDirectCommitsSinceLastRelease gets all direct commits (not part of PRs) since the last release
|
||||
func (g *Generator) getDirectCommitsSinceLastRelease(processedPRs map[int]bool, processedCommitSHAs map[string]bool) (string, error) {
|
||||
// Get the latest tag to determine what commits are unreleased
|
||||
latestTag, err := g.gitWalker.GetLatestTag()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to get latest tag: %w", err)
|
||||
}
|
||||
|
||||
// Get all commits since the latest tag
|
||||
unreleasedVersion, err := g.gitWalker.WalkCommitsSinceTag(latestTag)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to walk commits since tag %s: %w", latestTag, err)
|
||||
}
|
||||
|
||||
if unreleasedVersion == nil || len(unreleasedVersion.Commits) == 0 {
|
||||
return "", nil // No unreleased commits
|
||||
}
|
||||
|
||||
// Filter out commits that are part of PRs (we already have those from incoming files)
|
||||
// and format the direct commits
|
||||
var directCommits []*git.Commit
|
||||
for _, commit := range unreleasedVersion.Commits {
|
||||
// Skip version bump commits
|
||||
if commit.IsVersion {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip commits that belong to PRs we've already processed from incoming files (by PR number)
|
||||
if commit.PRNumber > 0 && processedPRs[commit.PRNumber] {
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip commits whose SHA is already included in processed PRs (this catches commits
|
||||
// that might not have been detected as part of a PR but are actually in the PR)
|
||||
if processedCommitSHAs[commit.SHA] {
|
||||
continue
|
||||
}
|
||||
|
||||
// Only include commits that are NOT part of any PR (direct commits)
|
||||
if commit.PRNumber == 0 {
|
||||
directCommits = append(directCommits, commit)
|
||||
}
|
||||
}
|
||||
|
||||
if len(directCommits) == 0 {
|
||||
return "", nil // No direct commits
|
||||
}
|
||||
|
||||
// Format the direct commits similar to how it's done in generateRawVersionContent
|
||||
var sb strings.Builder
|
||||
sb.WriteString("### Direct commits\n\n")
|
||||
|
||||
// Sort direct commits by date (newest first) for consistent ordering
|
||||
sort.Slice(directCommits, func(i, j int) bool {
|
||||
return directCommits[i].Date.After(directCommits[j].Date)
|
||||
})
|
||||
|
||||
for _, commit := range directCommits {
|
||||
message := g.formatCommitMessage(strings.TrimSpace(commit.Message))
|
||||
if message != "" && !g.isDuplicateMessage(message, directCommits) {
|
||||
sb.WriteString(fmt.Sprintf("- %s\n", message))
|
||||
}
|
||||
}
|
||||
|
||||
return sb.String(), nil
|
||||
}
|
||||
|
||||
// validatePRState validates that a PR is in the correct state for processing
|
||||
func (g *Generator) validatePRState(prNumber int) error {
|
||||
// Use lightweight validation call that doesn't fetch commits
|
||||
details, err := g.ghClient.GetPRValidationDetails(prNumber)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to fetch PR %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
if details.State != "open" {
|
||||
return fmt.Errorf("PR %d is not open (current state: %s)", prNumber, details.State)
|
||||
}
|
||||
|
||||
if !details.Mergeable {
|
||||
return fmt.Errorf("PR %d is not mergeable - please resolve conflicts first", prNumber)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// validateGitStatus ensures the working directory is clean
|
||||
func (g *Generator) validateGitStatus() error {
|
||||
isClean, err := g.gitWalker.IsWorkingDirectoryClean()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to check git status: %w", err)
|
||||
}
|
||||
|
||||
if !isClean {
|
||||
// Get detailed status for better error message
|
||||
statusDetails, statusErr := g.gitWalker.GetStatusDetails()
|
||||
if statusErr == nil && statusDetails != "" {
|
||||
return fmt.Errorf("working directory is not clean - please commit or stash changes before proceeding:\n%s", statusDetails)
|
||||
}
|
||||
return fmt.Errorf("working directory is not clean - please commit or stash changes before proceeding")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ensureIncomingDir creates the incoming directory if it doesn't exist
|
||||
func (g *Generator) ensureIncomingDir() error {
|
||||
if err := os.MkdirAll(g.cfg.IncomingDir, 0755); err != nil {
|
||||
return fmt.Errorf("failed to create directory %s: %w", g.cfg.IncomingDir, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// commitAndPushIncoming commits and optionally pushes the incoming changelog file
|
||||
func (g *Generator) commitAndPushIncoming(prNumber int, filename string) error {
|
||||
relativeFilename, err := filepath.Rel(g.cfg.RepoPath, filename)
|
||||
if err != nil {
|
||||
relativeFilename = filename
|
||||
}
|
||||
|
||||
// Add file to git index
|
||||
if err := g.gitWalker.AddFile(relativeFilename); err != nil {
|
||||
return fmt.Errorf("failed to add file %s: %w", relativeFilename, err)
|
||||
}
|
||||
|
||||
// Commit changes
|
||||
commitMessage := fmt.Sprintf("chore: incoming %d changelog entry", prNumber)
|
||||
_, err = g.gitWalker.CommitChanges(commitMessage)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to commit changes: %w", err)
|
||||
}
|
||||
|
||||
// Push to remote if enabled
|
||||
if g.cfg.Push {
|
||||
if err := g.gitWalker.PushToRemote(); err != nil {
|
||||
return fmt.Errorf("failed to push to remote: %w", err)
|
||||
}
|
||||
} else {
|
||||
fmt.Println("Commit created successfully. Please review and push manually.")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// detectVersion detects the current version from version.nix or git tags
|
||||
func (g *Generator) detectVersion() (string, error) {
|
||||
versionNixPath := filepath.Join(g.cfg.RepoPath, "version.nix")
|
||||
if _, err := os.Stat(versionNixPath); err == nil {
|
||||
data, err := os.ReadFile(versionNixPath)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to read version.nix: %w", err)
|
||||
}
|
||||
|
||||
versionRegex := regexp.MustCompile(`"([^"]+)"`)
|
||||
matches := versionRegex.FindStringSubmatch(string(data))
|
||||
if len(matches) > 1 {
|
||||
return matches[1], nil
|
||||
}
|
||||
}
|
||||
|
||||
latestTag, err := g.gitWalker.GetLatestTag()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to get latest tag: %w", err)
|
||||
}
|
||||
|
||||
if latestTag == "" {
|
||||
return "v1.0.0", nil
|
||||
}
|
||||
|
||||
return latestTag, nil
|
||||
}
|
||||
|
||||
// insertVersionAtTop inserts a new version entry at the top of CHANGELOG.md
|
||||
func (g *Generator) insertVersionAtTop(entry string) error {
|
||||
changelogPath := filepath.Join(g.cfg.RepoPath, "CHANGELOG.md")
|
||||
header := "# Changelog"
|
||||
headerRegex := regexp.MustCompile(`(?m)^# Changelog\s*`)
|
||||
|
||||
existingContent, err := os.ReadFile(changelogPath)
|
||||
if err != nil {
|
||||
if !os.IsNotExist(err) {
|
||||
return fmt.Errorf("failed to read existing CHANGELOG.md: %w", err)
|
||||
}
|
||||
// File doesn't exist, create it.
|
||||
newContent := fmt.Sprintf("%s\n\n%s\n", header, entry)
|
||||
return os.WriteFile(changelogPath, []byte(newContent), 0644)
|
||||
}
|
||||
|
||||
contentStr := string(existingContent)
|
||||
var newContent string
|
||||
|
||||
if loc := headerRegex.FindStringIndex(contentStr); loc != nil {
|
||||
// Found the header, insert after it.
|
||||
insertionPoint := loc[1]
|
||||
// Skip any existing newlines after the header to avoid double spacing
|
||||
for insertionPoint < len(contentStr) && (contentStr[insertionPoint] == '\n' || contentStr[insertionPoint] == '\r') {
|
||||
insertionPoint++
|
||||
}
|
||||
// Insert with proper spacing: single newline after header, then entry, then newline before existing content
|
||||
newContent = contentStr[:loc[1]] + entry + "\n" + contentStr[insertionPoint:]
|
||||
} else {
|
||||
// Header not found, prepend everything.
|
||||
newContent = fmt.Sprintf("%s\n\n%s\n\n%s", header, entry, contentStr)
|
||||
}
|
||||
|
||||
return os.WriteFile(changelogPath, []byte(newContent), 0644)
|
||||
}
|
||||
|
||||
// stageChangesForRelease stages the modified files for the release commit
|
||||
func (g *Generator) stageChangesForRelease() error {
|
||||
changelogPath := filepath.Join(g.cfg.RepoPath, "CHANGELOG.md")
|
||||
relativeChangelog, err := filepath.Rel(g.cfg.RepoPath, changelogPath)
|
||||
if err != nil {
|
||||
relativeChangelog = "CHANGELOG.md"
|
||||
}
|
||||
|
||||
relativeCacheFile, err := filepath.Rel(g.cfg.RepoPath, g.cfg.CacheFile)
|
||||
if err != nil {
|
||||
relativeCacheFile = g.cfg.CacheFile
|
||||
}
|
||||
|
||||
// Add CHANGELOG.md to git index
|
||||
if err := g.gitWalker.AddFile(relativeChangelog); err != nil {
|
||||
return fmt.Errorf("failed to add %s: %w", relativeChangelog, err)
|
||||
}
|
||||
|
||||
// Add cache file to git index
|
||||
if err := g.gitWalker.AddFile(relativeCacheFile); err != nil {
|
||||
return fmt.Errorf("failed to add %s: %w", relativeCacheFile, err)
|
||||
}
|
||||
|
||||
// Note: Individual incoming files are now removed during the main processing loop
|
||||
// No need to remove the entire directory here
|
||||
|
||||
return nil
|
||||
}
|
||||
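processing.go splits release preparation into two phases: ProcessIncomingPR writes and commits one file per PR under the incoming directory, and CreateNewChangelogEntry later folds those files, plus any direct commits since the last tag, into a new section at the top of CHANGELOG.md and stages everything for the release commit. A sketch of chaining the two exported entry points; the surrounding wiring (the package name and how the Generator and PR list are obtained) is assumed, not taken from this diff:

package release

import (
	"fmt"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/changelog"
)

// runRelease sketches the two-phase flow from processing.go: one incoming file
// per PR first, then a single aggregation pass that rewrites CHANGELOG.md.
func runRelease(gen *changelog.Generator, prNumbers []int, version string) error {
	for _, n := range prNumbers {
		// Writes incoming/<n>.txt, commits it, and pushes when cfg.Push is set.
		if err := gen.ProcessIncomingPR(n); err != nil {
			return fmt.Errorf("PR %d: %w", n, err)
		}
	}
	// Folds incoming/*.txt plus direct commits since the last tag into CHANGELOG.md.
	return gen.CreateNewChangelogEntry(version)
}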
262
cmd/generate_changelog/internal/changelog/processing_test.go
Normal file
@@ -0,0 +1,262 @@
|
||||
package changelog
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
|
||||
)
|
||||
|
||||
func TestDetectVersion(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
versionNixContent string
|
||||
expectedVersion string
|
||||
shouldError bool
|
||||
}{
|
||||
{
|
||||
name: "valid version.nix",
|
||||
versionNixContent: `"1.2.3"`,
|
||||
expectedVersion: "1.2.3",
|
||||
shouldError: false,
|
||||
},
|
||||
{
|
||||
name: "version with extra whitespace",
|
||||
versionNixContent: `"1.2.3" `,
|
||||
expectedVersion: "1.2.3",
|
||||
shouldError: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Create version.nix file
|
||||
versionNixPath := filepath.Join(tempDir, "version.nix")
|
||||
if err := os.WriteFile(versionNixPath, []byte(tt.versionNixContent), 0644); err != nil {
|
||||
t.Fatalf("Failed to create version.nix: %v", err)
|
||||
}
|
||||
|
||||
cfg := &config.Config{
|
||||
RepoPath: tempDir,
|
||||
}
|
||||
|
||||
g := &Generator{cfg: cfg}
|
||||
|
||||
version, err := g.detectVersion()
|
||||
if tt.shouldError && err == nil {
|
||||
t.Errorf("Expected error but got none")
|
||||
}
|
||||
if !tt.shouldError && err != nil {
|
||||
t.Errorf("Unexpected error: %v", err)
|
||||
}
|
||||
if version != tt.expectedVersion {
|
||||
t.Errorf("Expected version '%s', got '%s'", tt.expectedVersion, version)
|
||||
}
|
||||
|
||||
// Clean up
|
||||
os.Remove(versionNixPath)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestInsertVersionAtTop_ImprovedRobustness(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
changelogPath := filepath.Join(tempDir, "CHANGELOG.md")
|
||||
|
||||
cfg := &config.Config{
|
||||
RepoPath: tempDir,
|
||||
}
|
||||
|
||||
g := &Generator{cfg: cfg}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
existingContent string
|
||||
entry string
|
||||
expectedContent string
|
||||
}{
|
||||
{
|
||||
name: "header with trailing spaces",
|
||||
existingContent: "# Changelog \n\n## v1.0.0\n- Old content",
|
||||
entry: "## v2.0.0\n- New content",
|
||||
expectedContent: "# Changelog \n\n## v2.0.0\n- New content\n## v1.0.0\n- Old content",
|
||||
},
|
||||
{
|
||||
name: "header with different line endings",
|
||||
existingContent: "# Changelog\r\n\r\n## v1.0.0\r\n- Old content",
|
||||
entry: "## v2.0.0\n- New content",
|
||||
expectedContent: "# Changelog\r\n\r\n## v2.0.0\n- New content\n## v1.0.0\r\n- Old content",
|
||||
},
|
||||
{
|
||||
name: "no existing header",
|
||||
existingContent: "Some existing content without header",
|
||||
entry: "## v1.0.0\n- New content",
|
||||
expectedContent: "# Changelog\n\n## v1.0.0\n- New content\n\nSome existing content without header",
|
||||
},
|
||||
{
|
||||
name: "new file creation",
|
||||
existingContent: "",
|
||||
entry: "## v1.0.0\n- Initial release",
|
||||
expectedContent: "# Changelog\n\n## v1.0.0\n- Initial release\n",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
// Write existing content (or create empty file)
|
||||
if tt.existingContent != "" {
|
||||
if err := os.WriteFile(changelogPath, []byte(tt.existingContent), 0644); err != nil {
|
||||
t.Fatalf("Failed to write existing content: %v", err)
|
||||
}
|
||||
} else {
|
||||
// Remove file if it exists to test new file creation
|
||||
os.Remove(changelogPath)
|
||||
}
|
||||
|
||||
// Insert new version
|
||||
if err := g.insertVersionAtTop(tt.entry); err != nil {
|
||||
t.Fatalf("insertVersionAtTop failed: %v", err)
|
||||
}
|
||||
|
||||
// Read result
|
||||
result, err := os.ReadFile(changelogPath)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to read result: %v", err)
|
||||
}
|
||||
|
||||
if string(result) != tt.expectedContent {
|
||||
t.Errorf("Expected:\n%q\nGot:\n%q", tt.expectedContent, string(result))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestProcessIncomingPRs_FileAggregation(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
incomingDir := filepath.Join(tempDir, "incoming")
|
||||
|
||||
// Create incoming directory and files
|
||||
if err := os.MkdirAll(incomingDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create incoming dir: %v", err)
|
||||
}
|
||||
|
||||
// Create test incoming files
|
||||
file1Content := "## PR #1\n- Feature A"
|
||||
file2Content := "## PR #2\n- Feature B"
|
||||
|
||||
if err := os.WriteFile(filepath.Join(incomingDir, "1.txt"), []byte(file1Content), 0644); err != nil {
|
||||
t.Fatalf("Failed to create test file: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(incomingDir, "2.txt"), []byte(file2Content), 0644); err != nil {
|
||||
t.Fatalf("Failed to create test file: %v", err)
|
||||
}
|
||||
|
||||
// Test file aggregation logic by calling the internal functions
|
||||
files, err := filepath.Glob(filepath.Join(incomingDir, "*.txt"))
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to glob files: %v", err)
|
||||
}
|
||||
|
||||
if len(files) != 2 {
|
||||
t.Fatalf("Expected 2 files, got %d", len(files))
|
||||
}
|
||||
|
||||
// Test content aggregation
|
||||
var content strings.Builder
|
||||
var processingErrors []string
|
||||
for _, file := range files {
|
||||
data, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
processingErrors = append(processingErrors, err.Error())
|
||||
continue
|
||||
}
|
||||
content.WriteString(string(data))
|
||||
content.WriteString("\n")
|
||||
}
|
||||
|
||||
if len(processingErrors) > 0 {
|
||||
t.Fatalf("Unexpected processing errors: %v", processingErrors)
|
||||
}
|
||||
|
||||
aggregatedContent := content.String()
|
||||
if !strings.Contains(aggregatedContent, "Feature A") {
|
||||
t.Errorf("Aggregated content should contain 'Feature A'")
|
||||
}
|
||||
if !strings.Contains(aggregatedContent, "Feature B") {
|
||||
t.Errorf("Aggregated content should contain 'Feature B'")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFileProcessing_ErrorHandling(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
incomingDir := filepath.Join(tempDir, "incoming")
|
||||
|
||||
// Create incoming directory with one good file and one unreadable file
|
||||
if err := os.MkdirAll(incomingDir, 0755); err != nil {
|
||||
t.Fatalf("Failed to create incoming dir: %v", err)
|
||||
}
|
||||
|
||||
// Create a good file
|
||||
if err := os.WriteFile(filepath.Join(incomingDir, "1.txt"), []byte("content"), 0644); err != nil {
|
||||
t.Fatalf("Failed to create test file: %v", err)
|
||||
}
|
||||
|
||||
// Create an unreadable file (simulate permission error)
|
||||
unreadableFile := filepath.Join(incomingDir, "2.txt")
|
||||
if err := os.WriteFile(unreadableFile, []byte("content"), 0000); err != nil {
|
||||
t.Fatalf("Failed to create unreadable file: %v", err)
|
||||
}
|
||||
defer os.Chmod(unreadableFile, 0644) // Clean up
|
||||
|
||||
// Test error aggregation logic
|
||||
files, err := filepath.Glob(filepath.Join(incomingDir, "*.txt"))
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to glob files: %v", err)
|
||||
}
|
||||
|
||||
var content strings.Builder
|
||||
var processingErrors []string
|
||||
for _, file := range files {
|
||||
data, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
processingErrors = append(processingErrors, err.Error())
|
||||
continue
|
||||
}
|
||||
content.WriteString(string(data))
|
||||
content.WriteString("\n")
|
||||
}
|
||||
|
||||
if len(processingErrors) == 0 {
|
||||
t.Errorf("Expected processing errors due to unreadable file")
|
||||
}
|
||||
|
||||
// Verify error message format
|
||||
errorMsg := strings.Join(processingErrors, "; ")
|
||||
if !strings.Contains(errorMsg, "2.txt") {
|
||||
t.Errorf("Error message should mention the problematic file")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnsureIncomingDirCreation(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
incomingDir := filepath.Join(tempDir, "incoming")
|
||||
|
||||
cfg := &config.Config{
|
||||
IncomingDir: incomingDir,
|
||||
}
|
||||
|
||||
g := &Generator{cfg: cfg}
|
||||
|
||||
err := g.ensureIncomingDir()
|
||||
if err != nil {
|
||||
t.Fatalf("ensureIncomingDir failed: %v", err)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(incomingDir); os.IsNotExist(err) {
|
||||
t.Errorf("Incoming directory was not created")
|
||||
}
|
||||
}
|
||||
cmd/generate_changelog/internal/changelog/summarize.go (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
package changelog
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const DefaultSummarizeModel = "claude-sonnet-4-20250514"
|
||||
const MinContentLength = 256 // Minimum content length to consider for summarization
|
||||
|
||||
const prompt = `# ROLE
|
||||
You are an expert Technical Writer specializing in creating clear, concise,
|
||||
and professional release notes from raw Git commit logs.
|
||||
|
||||
# TASK
|
||||
Your goal is to transform a provided block of Git commit logs into a clean,
|
||||
human-readable changelog summary. You will identify the most important changes,
|
||||
format them as a bulleted list, and preserve the associated Pull Request (PR)
|
||||
information.
|
||||
|
||||
# INSTRUCTIONS:
|
||||
Follow these steps in order:
|
||||
1. Deeply analyze the input. You will be given a block of text containing PR
|
||||
information and commit log messages. Carefully read through the logs
|
||||
to identify individual commits and their descriptions.
|
||||
2. Identify Key Changes: Focus on commits that represent significant changes,
|
||||
such as new features ("feat"), bug fixes ("fix"), performance improvements ("perf"),
|
||||
or breaking changes ("BREAKING CHANGE").
|
||||
3. Select the Top 5: From the identified key changes, select a maximum of the five
|
||||
(5) most impactful ones to include in the summary.
|
||||
If there are five or fewer total changes, include all of them.
|
||||
4. Format the Output:
|
||||
- Where you see a PR header, include the PR header verbatim. NO CHANGES.
|
||||
**This is a critical rule: Do not modify the PR header, as it contains
|
||||
important links.** What follows the PR header are the related changes.
|
||||
- Do not add any additional text or preamble. Begin directly with the output.
|
||||
- Use bullet points for each key change, starting each point with a hyphen ("-").
|
||||
- Ensure that the summary is concise and focused on the main changes.
|
||||
- The summary should be in American English (en-US), using proper grammar and punctuation.
|
||||
5. If the content is too brief or you do not see any PR headers, return the content as is.
|
||||
`
|
||||
|
||||
// getSummarizeModel returns the model to use for AI summarization
|
||||
func getSummarizeModel() string {
|
||||
if model := os.Getenv("FABRIC_CHANGELOG_SUMMARIZE_MODEL"); model != "" {
|
||||
return model
|
||||
}
|
||||
return DefaultSummarizeModel
|
||||
}
|
||||
|
||||
// SummarizeVersionContent takes raw version content and returns AI-enhanced summary
|
||||
func SummarizeVersionContent(content string) (string, error) {
|
||||
if strings.TrimSpace(content) == "" {
|
||||
return "", fmt.Errorf("no content to summarize")
|
||||
}
|
||||
if len(content) < MinContentLength {
|
||||
// If content is too brief, return it as is
|
||||
return content, nil
|
||||
}
|
||||
|
||||
model := getSummarizeModel()
|
||||
|
||||
cmd := exec.Command("fabric", "-m", model, prompt)
|
||||
cmd.Stdin = strings.NewReader(content)
|
||||
|
||||
output, err := cmd.Output()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("fabric command failed: %w", err)
|
||||
}
|
||||
|
||||
summary := strings.TrimSpace(string(output))
|
||||
if summary == "" {
|
||||
return "", fmt.Errorf("fabric returned empty summary")
|
||||
}
|
||||
|
||||
return summary, nil
|
||||
}
|
||||
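Not part of the diff above: a minimal sketch of how SummarizeVersionContent could be driven, assuming the caller lives under cmd/generate_changelog (the internal package is not importable from elsewhere) and that the fabric binary is on PATH. The error handling and sample input are illustrative.

package main

import (
	"fmt"
	"log"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/changelog"
)

func main() {
	// Sample raw version content; anything shorter than MinContentLength (256 bytes)
	// is returned unchanged, while longer content is piped to the fabric CLI.
	raw := "## PR #1234 by example-user: Add example feature\n- feat: add example feature\n- fix: handle empty input\n"

	summary, err := changelog.SummarizeVersionContent(raw)
	if err != nil {
		log.Fatalf("summarization failed: %v", err)
	}
	fmt.Println(summary)
}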
cmd/generate_changelog/internal/config/config.go (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
package config
|
||||
|
||||
type Config struct {
|
||||
RepoPath string
|
||||
OutputFile string
|
||||
Limit int
|
||||
Version string
|
||||
SaveData bool
|
||||
CacheFile string
|
||||
NoCache bool
|
||||
RebuildCache bool
|
||||
GitHubToken string
|
||||
ForcePRSync bool
|
||||
EnableAISummary bool
|
||||
IncomingPR int
|
||||
ProcessPRsVersion string
|
||||
IncomingDir string
|
||||
Push bool
|
||||
SyncDB bool
|
||||
Release string
|
||||
}
|
||||
cmd/generate_changelog/internal/git/types.go (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
package git
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
type Commit struct {
|
||||
SHA string
|
||||
Message string
|
||||
Author string
|
||||
Email string
|
||||
Date time.Time
|
||||
IsMerge bool
|
||||
PRNumber int
|
||||
IsVersion bool
|
||||
Version string
|
||||
}
|
||||
|
||||
type Version struct {
|
||||
Name string
|
||||
Date time.Time
|
||||
CommitSHA string
|
||||
Commits []*Commit
|
||||
PRNumbers []int
|
||||
AISummary string
|
||||
}
|
||||
cmd/generate_changelog/internal/git/walker.go (new file, 574 lines)
@@ -0,0 +1,574 @@
|
||||
package git
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/util"
|
||||
"github.com/go-git/go-git/v5"
|
||||
"github.com/go-git/go-git/v5/plumbing"
|
||||
"github.com/go-git/go-git/v5/plumbing/object"
|
||||
"github.com/go-git/go-git/v5/plumbing/storer"
|
||||
"github.com/go-git/go-git/v5/plumbing/transport/http"
|
||||
)
|
||||
|
||||
var (
|
||||
// The versionPattern matches version commit messages with or without the optional "chore(release): " prefix.
|
||||
// Examples of matching commit messages:
|
||||
// - "chore(release): Update version to v1.2.3"
|
||||
// - "Update version to v1.2.3"
|
||||
// Examples of non-matching commit messages:
|
||||
// - "fix: Update version to v1.2.3" (missing "chore(release): " or "Update version to")
|
||||
// - "chore(release): Update version to 1.2.3" (missing "v" prefix in version)
|
||||
// - "Update version to v1.2" (incomplete version number)
|
||||
versionPattern = regexp.MustCompile(`(?:chore\(release\): )?Update version to (v\d+\.\d+\.\d+)`)
|
||||
prPattern = regexp.MustCompile(`Merge pull request #(\d+)`)
|
||||
)
|
||||
|
||||
type Walker struct {
|
||||
repo *git.Repository
|
||||
}
|
||||
|
||||
func NewWalker(repoPath string) (*Walker, error) {
|
||||
repo, err := git.PlainOpen(repoPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open repository: %w", err)
|
||||
}
|
||||
|
||||
return &Walker{repo: repo}, nil
|
||||
}
|
||||
|
||||
// GetLatestTag returns the name of the most recent tag by committer date
|
||||
func (w *Walker) GetLatestTag() (string, error) {
|
||||
tagRefs, err := w.repo.Tags()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
var latestTagCommit *object.Commit
|
||||
var latestTagName string
|
||||
|
||||
err = tagRefs.ForEach(func(tagRef *plumbing.Reference) error {
|
||||
revision := plumbing.Revision(tagRef.Name().String())
|
||||
tagCommitHash, err := w.repo.ResolveRevision(revision)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
commit, err := w.repo.CommitObject(*tagCommitHash)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if latestTagCommit == nil {
|
||||
latestTagCommit = commit
|
||||
latestTagName = tagRef.Name().Short() // Get short name like "v1.4.245"
|
||||
}
|
||||
|
||||
if commit.Committer.When.After(latestTagCommit.Committer.When) {
|
||||
latestTagCommit = commit
|
||||
latestTagName = tagRef.Name().Short()
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return latestTagName, nil
|
||||
}
|
||||
|
||||
// WalkCommitsSinceTag walks commits from the specified tag to HEAD and returns only "Unreleased" version
|
||||
func (w *Walker) WalkCommitsSinceTag(tagName string) (*Version, error) {
|
||||
// Get the tag reference
|
||||
tagRef, err := w.repo.Tag(tagName)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to find tag %s: %w", tagName, err)
|
||||
}
|
||||
|
||||
// Get the commit that the tag points to
|
||||
tagCommit, err := w.repo.CommitObject(tagRef.Hash())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get tag commit: %w", err)
|
||||
}
|
||||
|
||||
// Get HEAD
|
||||
headRef, err := w.repo.Head()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get HEAD: %w", err)
|
||||
}
|
||||
|
||||
// Walk from HEAD back to the tag commit (exclusive)
|
||||
commitIter, err := w.repo.Log(&git.LogOptions{
|
||||
From: headRef.Hash(),
|
||||
Order: git.LogOrderCommitterTime,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get commit log: %w", err)
|
||||
}
|
||||
|
||||
version := &Version{
|
||||
Name: "Unreleased",
|
||||
Commits: []*Commit{},
|
||||
}
|
||||
|
||||
prNumbers := []int{}
|
||||
|
||||
err = commitIter.ForEach(func(c *object.Commit) error {
|
||||
// Stop when we reach the tag commit (don't include it)
|
||||
if c.Hash == tagCommit.Hash {
|
||||
return fmt.Errorf("reached tag commit") // Use error to break out of iteration
|
||||
}
|
||||
|
||||
commit := &Commit{
|
||||
SHA: c.Hash.String(),
|
||||
Message: strings.TrimSpace(c.Message),
|
||||
Date: c.Committer.When,
|
||||
}
|
||||
|
||||
// Check for version patterns
|
||||
if versionMatch := versionPattern.FindStringSubmatch(commit.Message); versionMatch != nil {
|
||||
commit.IsVersion = true
|
||||
}
|
||||
|
||||
// Check for PR merge patterns
|
||||
if prMatch := prPattern.FindStringSubmatch(commit.Message); prMatch != nil {
|
||||
if prNumber, err := strconv.Atoi(prMatch[1]); err == nil {
|
||||
commit.PRNumber = prNumber
|
||||
prNumbers = append(prNumbers, prNumber)
|
||||
}
|
||||
}
|
||||
|
||||
version.Commits = append(version.Commits, commit)
|
||||
return nil
|
||||
})
|
||||
|
||||
// Ignore the "reached tag commit" error - it's expected
|
||||
if err != nil && !strings.Contains(err.Error(), "reached tag commit") {
|
||||
return nil, fmt.Errorf("failed to walk commits: %w", err)
|
||||
}
|
||||
|
||||
// Remove duplicates from prNumbers and set them
|
||||
prNumbersMap := make(map[int]bool)
|
||||
for _, prNum := range prNumbers {
|
||||
prNumbersMap[prNum] = true
|
||||
}
|
||||
|
||||
version.PRNumbers = make([]int, 0, len(prNumbersMap))
|
||||
for prNum := range prNumbersMap {
|
||||
version.PRNumbers = append(version.PRNumbers, prNum)
|
||||
}
|
||||
|
||||
return version, nil
|
||||
}
|
||||
|
||||
func (w *Walker) WalkHistory() (map[string]*Version, error) {
|
||||
ref, err := w.repo.Head()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get HEAD: %w", err)
|
||||
}
|
||||
|
||||
commitIter, err := w.repo.Log(&git.LogOptions{
|
||||
From: ref.Hash(),
|
||||
Order: git.LogOrderCommitterTime,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get commit log: %w", err)
|
||||
}
|
||||
|
||||
versions := make(map[string]*Version)
|
||||
currentVersion := "Unreleased"
|
||||
versions[currentVersion] = &Version{
|
||||
Name: currentVersion,
|
||||
Commits: []*Commit{},
|
||||
}
|
||||
|
||||
prNumbers := make(map[string][]int)
|
||||
|
||||
err = commitIter.ForEach(func(c *object.Commit) error {
|
||||
// c.Message = Summarize(c.Message)
|
||||
commit := &Commit{
|
||||
SHA: c.Hash.String(),
|
||||
Message: strings.TrimSpace(c.Message),
|
||||
Author: c.Author.Name,
|
||||
Email: c.Author.Email,
|
||||
Date: c.Author.When,
|
||||
IsMerge: len(c.ParentHashes) > 1,
|
||||
}
|
||||
|
||||
if matches := versionPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
|
||||
commit.IsVersion = true
|
||||
commit.Version = matches[1]
|
||||
currentVersion = commit.Version
|
||||
|
||||
if _, exists := versions[currentVersion]; !exists {
|
||||
versions[currentVersion] = &Version{
|
||||
Name: currentVersion,
|
||||
Date: commit.Date,
|
||||
CommitSHA: commit.SHA,
|
||||
Commits: []*Commit{},
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if matches := prPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
|
||||
prNumber := 0
|
||||
fmt.Sscanf(matches[1], "%d", &prNumber)
|
||||
commit.PRNumber = prNumber
|
||||
|
||||
prNumbers[currentVersion] = append(prNumbers[currentVersion], prNumber)
|
||||
}
|
||||
|
||||
versions[currentVersion].Commits = append(versions[currentVersion].Commits, commit)
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to walk commits: %w", err)
|
||||
}
|
||||
|
||||
for version, prs := range prNumbers {
|
||||
versions[version].PRNumbers = dedupInts(prs)
|
||||
}
|
||||
|
||||
return versions, nil
|
||||
}
|
||||
|
||||
func (w *Walker) GetRepoInfo() (owner string, name string, err error) {
|
||||
remotes, err := w.repo.Remotes()
|
||||
if err != nil {
|
||||
return "", "", fmt.Errorf("failed to get remotes: %w", err)
|
||||
}
|
||||
|
||||
// First try upstream (preferred for forks)
|
||||
for _, remote := range remotes {
|
||||
if remote.Config().Name == "upstream" {
|
||||
urls := remote.Config().URLs
|
||||
if len(urls) > 0 {
|
||||
owner, name = parseGitHubURL(urls[0])
|
||||
if owner != "" && name != "" {
|
||||
return owner, name, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Then try origin
|
||||
for _, remote := range remotes {
|
||||
if remote.Config().Name == "origin" {
|
||||
urls := remote.Config().URLs
|
||||
if len(urls) > 0 {
|
||||
owner, name = parseGitHubURL(urls[0])
|
||||
if owner != "" && name != "" {
|
||||
return owner, name, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return "danielmiessler", "fabric", nil
|
||||
}
|
||||
|
||||
func parseGitHubURL(url string) (owner, repo string) {
|
||||
patterns := []string{
|
||||
`github\.com[:/]([^/]+)/([^/.]+)`,
|
||||
`github\.com[:/]([^/]+)/([^/]+)\.git$`,
|
||||
}
|
||||
|
||||
for _, pattern := range patterns {
|
||||
re := regexp.MustCompile(pattern)
|
||||
matches := re.FindStringSubmatch(url)
|
||||
if len(matches) > 2 {
|
||||
return matches[1], matches[2]
|
||||
}
|
||||
}
|
||||
|
||||
return "", ""
|
||||
}
|
||||
|
||||
// WalkHistorySinceTag walks git history from HEAD down to (but not including) the specified tag
|
||||
// and returns any version commits found along the way
|
||||
func (w *Walker) WalkHistorySinceTag(sinceTag string) (map[string]*Version, error) {
|
||||
// Get the commit SHA for the sinceTag
|
||||
tagRef, err := w.repo.Tag(sinceTag)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get tag %s: %w", sinceTag, err)
|
||||
}
|
||||
|
||||
tagCommit, err := w.repo.CommitObject(tagRef.Hash())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get commit for tag %s: %w", sinceTag, err)
|
||||
}
|
||||
|
||||
// Get HEAD reference
|
||||
ref, err := w.repo.Head()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get HEAD: %w", err)
|
||||
}
|
||||
|
||||
// Walk from HEAD down to the tag commit (excluding it)
|
||||
commitIter, err := w.repo.Log(&git.LogOptions{
|
||||
From: ref.Hash(),
|
||||
Order: git.LogOrderCommitterTime,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create commit iterator: %w", err)
|
||||
}
|
||||
defer commitIter.Close()
|
||||
|
||||
versions := make(map[string]*Version)
|
||||
currentVersion := "Unreleased"
|
||||
prNumbers := make(map[string][]int)
|
||||
|
||||
err = commitIter.ForEach(func(c *object.Commit) error {
|
||||
// Stop iteration when the hash of the current commit matches the hash of the specified sinceTag commit
|
||||
if c.Hash == tagCommit.Hash {
|
||||
return storer.ErrStop
|
||||
}
|
||||
|
||||
commit := &Commit{
|
||||
SHA: c.Hash.String(),
|
||||
Message: strings.TrimSpace(c.Message),
|
||||
Author: c.Author.Name,
|
||||
Email: c.Author.Email,
|
||||
Date: c.Author.When,
|
||||
IsMerge: len(c.ParentHashes) > 1,
|
||||
}
|
||||
|
||||
// Check for version pattern
|
||||
if matches := versionPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
|
||||
commit.IsVersion = true
|
||||
commit.Version = matches[1]
|
||||
currentVersion = commit.Version
|
||||
|
||||
if _, exists := versions[currentVersion]; !exists {
|
||||
versions[currentVersion] = &Version{
|
||||
Name: currentVersion,
|
||||
Date: commit.Date,
|
||||
CommitSHA: commit.SHA,
|
||||
Commits: []*Commit{},
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Check for PR merge pattern
|
||||
if matches := prPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
|
||||
prNumber, err := strconv.Atoi(matches[1])
|
||||
if err != nil {
|
||||
// Handle parsing error (e.g., log it or skip processing)
|
||||
return fmt.Errorf("failed to parse PR number: %v", err)
|
||||
}
|
||||
commit.PRNumber = prNumber
|
||||
|
||||
prNumbers[currentVersion] = append(prNumbers[currentVersion], prNumber)
|
||||
}
|
||||
|
||||
// Add commit to current version
|
||||
if _, exists := versions[currentVersion]; !exists {
|
||||
versions[currentVersion] = &Version{
|
||||
Name: currentVersion,
|
||||
Date: time.Time{}, // Zero value, will be set by version commit
|
||||
CommitSHA: "",
|
||||
Commits: []*Commit{},
|
||||
}
|
||||
}
|
||||
|
||||
versions[currentVersion].Commits = append(versions[currentVersion].Commits, commit)
|
||||
return nil
|
||||
})
|
||||
|
||||
// Handle the stop condition - storer.ErrStop is expected
|
||||
if err == storer.ErrStop {
|
||||
err = nil
|
||||
}
|
||||
|
||||
// Assign collected PR numbers to each version
|
||||
for version, prs := range prNumbers {
|
||||
versions[version].PRNumbers = dedupInts(prs)
|
||||
}
|
||||
|
||||
return versions, err
|
||||
}
|
||||
|
||||
func dedupInts(ints []int) []int {
|
||||
seen := make(map[int]bool)
|
||||
result := []int{}
|
||||
|
||||
for _, i := range ints {
|
||||
if !seen[i] {
|
||||
seen[i] = true
|
||||
result = append(result, i)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// Worktree returns the git worktree for performing git operations
|
||||
func (w *Walker) Worktree() (*git.Worktree, error) {
|
||||
return w.repo.Worktree()
|
||||
}
|
||||
|
||||
// Repository returns the underlying git repository
|
||||
func (w *Walker) Repository() *git.Repository {
|
||||
return w.repo
|
||||
}
|
||||
|
||||
// IsWorkingDirectoryClean checks if the working directory has any uncommitted changes
|
||||
func (w *Walker) IsWorkingDirectoryClean() (bool, error) {
|
||||
worktree, err := w.repo.Worktree()
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to get worktree: %w", err)
|
||||
}
|
||||
|
||||
status, err := worktree.Status()
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to get git status: %w", err)
|
||||
}
|
||||
|
||||
return status.IsClean(), nil
|
||||
}
|
||||
|
||||
// GetStatusDetails returns a detailed status of the working directory
|
||||
func (w *Walker) GetStatusDetails() (string, error) {
|
||||
worktree, err := w.repo.Worktree()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to get worktree: %w", err)
|
||||
}
|
||||
|
||||
status, err := worktree.Status()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to get git status: %w", err)
|
||||
}
|
||||
|
||||
if status.IsClean() {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
var details strings.Builder
|
||||
for file, fileStatus := range status {
|
||||
details.WriteString(fmt.Sprintf(" %c%c %s\n", fileStatus.Staging, fileStatus.Worktree, file))
|
||||
}
|
||||
|
||||
return details.String(), nil
|
||||
}
|
||||
|
||||
// AddFile adds a file to the git index
|
||||
func (w *Walker) AddFile(filename string) error {
|
||||
worktree, err := w.repo.Worktree()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get worktree: %w", err)
|
||||
}
|
||||
|
||||
_, err = worktree.Add(filename)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to add file %s: %w", filename, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// CommitChanges creates a commit with the given message
|
||||
func (w *Walker) CommitChanges(message string) (plumbing.Hash, error) {
|
||||
worktree, err := w.repo.Worktree()
|
||||
if err != nil {
|
||||
return plumbing.ZeroHash, fmt.Errorf("failed to get worktree: %w", err)
|
||||
}
|
||||
|
||||
// Get git config for author information
|
||||
cfg, err := w.repo.Config()
|
||||
if err != nil {
|
||||
return plumbing.ZeroHash, fmt.Errorf("failed to get git config: %w", err)
|
||||
}
|
||||
|
||||
var authorName, authorEmail string
|
||||
if cfg.User.Name != "" {
|
||||
authorName = cfg.User.Name
|
||||
} else {
|
||||
authorName = "Changelog Bot"
|
||||
}
|
||||
if cfg.User.Email != "" {
|
||||
authorEmail = cfg.User.Email
|
||||
} else {
|
||||
authorEmail = "bot@changelog.local"
|
||||
}
|
||||
|
||||
commit, err := worktree.Commit(message, &git.CommitOptions{
|
||||
Author: &object.Signature{
|
||||
Name: authorName,
|
||||
Email: authorEmail,
|
||||
When: time.Now(),
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return plumbing.ZeroHash, fmt.Errorf("failed to commit: %w", err)
|
||||
}
|
||||
|
||||
return commit, nil
|
||||
}
|
||||
|
||||
// PushToRemote pushes the current branch to the remote repository
|
||||
// It automatically detects GitHub repositories and uses token authentication when available
|
||||
func (w *Walker) PushToRemote() error {
|
||||
pushOptions := &git.PushOptions{}
|
||||
|
||||
// Check if we have a GitHub token for authentication
|
||||
if githubToken := util.GetTokenFromEnv(""); githubToken != "" {
|
||||
// Get remote URL to check if it's a GitHub repository
|
||||
remotes, err := w.repo.Remotes()
|
||||
if err == nil && len(remotes) > 0 {
|
||||
// Get the origin remote (or first remote if origin doesn't exist)
|
||||
var remote *git.Remote
|
||||
for _, r := range remotes {
|
||||
if r.Config().Name == "origin" {
|
||||
remote = r
|
||||
break
|
||||
}
|
||||
}
|
||||
if remote == nil {
|
||||
remote = remotes[0]
|
||||
}
|
||||
|
||||
// Check if this is a GitHub repository
|
||||
urls := remote.Config().URLs
|
||||
if len(urls) > 0 {
|
||||
url := urls[0]
|
||||
if strings.Contains(url, "github.com") {
|
||||
// Use token authentication for GitHub repositories
|
||||
pushOptions.Auth = &http.BasicAuth{
|
||||
Username: "token", // GitHub expects "token" as username
|
||||
Password: githubToken,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
err := w.repo.Push(pushOptions)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to push: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// RemoveFile removes a file from both the working directory and git index
|
||||
func (w *Walker) RemoveFile(filename string) error {
|
||||
worktree, err := w.repo.Worktree()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get worktree: %w", err)
|
||||
}
|
||||
|
||||
_, err = worktree.Remove(filename)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to remove file %s: %w", filename, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
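Not part of the diff above: a minimal sketch of how the Walker in walker.go might be used, again assuming a caller under cmd/generate_changelog; the repository path and logging are illustrative.

package main

import (
	"fmt"
	"log"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
)

func main() {
	w, err := git.NewWalker(".") // open the repository in the current directory
	if err != nil {
		log.Fatalf("open repo: %v", err)
	}

	tag, err := w.GetLatestTag() // most recent tag by committer date, e.g. "v1.4.245"
	if err != nil {
		log.Fatalf("latest tag: %v", err)
	}

	unreleased, err := w.WalkCommitsSinceTag(tag) // commits between the tag and HEAD
	if err != nil {
		log.Fatalf("walk since %s: %v", tag, err)
	}

	fmt.Printf("%d commits and %d PRs since %s\n",
		len(unreleased.Commits), len(unreleased.PRNumbers), tag)
}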
cmd/generate_changelog/internal/github/client.go (new file, 431 lines)
@@ -0,0 +1,431 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/google/go-github/v66/github"
|
||||
"github.com/hasura/go-graphql-client"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
type Client struct {
|
||||
client *github.Client
|
||||
graphqlClient *graphql.Client
|
||||
owner string
|
||||
repo string
|
||||
token string
|
||||
}
|
||||
|
||||
func NewClient(token, owner, repo string) *Client {
|
||||
var githubClient *github.Client
|
||||
var httpClient *http.Client
|
||||
var gqlClient *graphql.Client
|
||||
|
||||
if token != "" {
|
||||
ts := oauth2.StaticTokenSource(
|
||||
&oauth2.Token{AccessToken: token},
|
||||
)
|
||||
httpClient = oauth2.NewClient(context.Background(), ts)
|
||||
githubClient = github.NewClient(httpClient)
|
||||
gqlClient = graphql.NewClient("https://api.github.com/graphql", httpClient)
|
||||
} else {
|
||||
httpClient = http.DefaultClient
|
||||
githubClient = github.NewClient(nil)
|
||||
gqlClient = graphql.NewClient("https://api.github.com/graphql", httpClient)
|
||||
}
|
||||
|
||||
return &Client{
|
||||
client: githubClient,
|
||||
graphqlClient: gqlClient,
|
||||
owner: owner,
|
||||
repo: repo,
|
||||
token: token,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) FetchPRs(prNumbers []int) ([]*PR, error) {
|
||||
if len(prNumbers) == 0 {
|
||||
return []*PR{}, nil
|
||||
}
|
||||
|
||||
ctx := context.Background()
|
||||
prs := make([]*PR, 0, len(prNumbers))
|
||||
prsChan := make(chan *PR, len(prNumbers))
|
||||
errChan := make(chan error, len(prNumbers))
|
||||
|
||||
var wg sync.WaitGroup
|
||||
semaphore := make(chan struct{}, 10)
|
||||
|
||||
for _, prNumber := range prNumbers {
|
||||
wg.Add(1)
|
||||
go func(num int) {
|
||||
defer wg.Done()
|
||||
|
||||
semaphore <- struct{}{}
|
||||
defer func() { <-semaphore }()
|
||||
|
||||
pr, err := c.fetchSinglePR(ctx, num)
|
||||
if err != nil {
|
||||
errChan <- fmt.Errorf("failed to fetch PR #%d: %w", num, err)
|
||||
return
|
||||
}
|
||||
prsChan <- pr
|
||||
}(prNumber)
|
||||
}
|
||||
|
||||
go func() {
|
||||
wg.Wait()
|
||||
close(prsChan)
|
||||
close(errChan)
|
||||
}()
|
||||
|
||||
var errors []error
|
||||
for pr := range prsChan {
|
||||
prs = append(prs, pr)
|
||||
}
|
||||
for err := range errChan {
|
||||
errors = append(errors, err)
|
||||
}
|
||||
|
||||
if len(errors) > 0 {
|
||||
return prs, fmt.Errorf("some PRs failed to fetch: %v", errors)
|
||||
}
|
||||
|
||||
return prs, nil
|
||||
}
|
||||
|
||||
// GetPRValidationDetails fetches only the data needed for validation (lightweight).
|
||||
func (c *Client) GetPRValidationDetails(prNumber int) (*PRDetails, error) {
|
||||
ctx := context.Background()
|
||||
ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get PR %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
// Only return validation data, no commits fetched
|
||||
details := &PRDetails{
|
||||
PR: nil, // Will be populated later if needed
|
||||
State: getString(ghPR.State),
|
||||
Mergeable: ghPR.Mergeable != nil && *ghPR.Mergeable,
|
||||
}
|
||||
|
||||
return details, nil
|
||||
}
|
||||
|
||||
// GetPRWithCommits fetches the full PR and its commits.
|
||||
func (c *Client) GetPRWithCommits(prNumber int) (*PR, error) {
|
||||
ctx := context.Background()
|
||||
ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get PR %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
return c.buildPRWithCommits(ctx, ghPR)
|
||||
}
|
||||
|
||||
// GetPRDetails fetches a comprehensive set of details for a single PR.
|
||||
// Deprecated: Use GetPRValidationDetails + GetPRWithCommits for better performance
|
||||
func (c *Client) GetPRDetails(prNumber int) (*PRDetails, error) {
|
||||
ctx := context.Background()
|
||||
ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get PR %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
// Reuse the existing logic to build the base PR object
|
||||
pr, err := c.buildPRWithCommits(ctx, ghPR)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to build PR details for %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
details := &PRDetails{
|
||||
PR: pr,
|
||||
State: getString(ghPR.State),
|
||||
Mergeable: ghPR.Mergeable != nil && *ghPR.Mergeable,
|
||||
}
|
||||
|
||||
return details, nil
|
||||
}
|
||||
|
||||
// buildPRWithCommits fetches commits and constructs a PR object from a GitHub API response
|
||||
func (c *Client) buildPRWithCommits(ctx context.Context, ghPR *github.PullRequest) (*PR, error) {
|
||||
commits, _, err := c.client.PullRequests.ListCommits(ctx, c.owner, c.repo, *ghPR.Number, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch commits for PR %d: %w", *ghPR.Number, err)
|
||||
}
|
||||
|
||||
return c.convertGitHubPR(ghPR, commits), nil
|
||||
}
|
||||
|
||||
// convertGitHubPR transforms GitHub API data into our internal PR struct (pure function)
|
||||
func (c *Client) convertGitHubPR(ghPR *github.PullRequest, commits []*github.RepositoryCommit) *PR {
|
||||
|
||||
result := &PR{
|
||||
Number: *ghPR.Number,
|
||||
Title: getString(ghPR.Title),
|
||||
Body: getString(ghPR.Body),
|
||||
URL: getString(ghPR.HTMLURL),
|
||||
Commits: make([]PRCommit, 0, len(commits)),
|
||||
}
|
||||
|
||||
if ghPR.MergedAt != nil {
|
||||
result.MergedAt = ghPR.MergedAt.Time
|
||||
}
|
||||
|
||||
if ghPR.User != nil {
|
||||
result.Author = getString(ghPR.User.Login)
|
||||
result.AuthorURL = getString(ghPR.User.HTMLURL)
|
||||
userType := getString(ghPR.User.Type)
|
||||
|
||||
switch userType {
|
||||
case "User":
|
||||
result.AuthorType = "user"
|
||||
case "Organization":
|
||||
result.AuthorType = "organization"
|
||||
case "Bot":
|
||||
result.AuthorType = "bot"
|
||||
default:
|
||||
result.AuthorType = "user"
|
||||
}
|
||||
}
|
||||
|
||||
if ghPR.MergeCommitSHA != nil {
|
||||
result.MergeCommit = *ghPR.MergeCommitSHA
|
||||
}
|
||||
|
||||
for _, commit := range commits {
|
||||
if commit.Commit != nil {
|
||||
prCommit := PRCommit{
|
||||
SHA: getString(commit.SHA),
|
||||
Message: strings.TrimSpace(getString(commit.Commit.Message)),
|
||||
}
|
||||
if commit.Commit.Author != nil {
|
||||
prCommit.Author = getString(commit.Commit.Author.Name)
|
||||
prCommit.Email = getString(commit.Commit.Author.Email) // Extract author email from GitHub API response
|
||||
// Capture actual commit timestamp from GitHub API
|
||||
if commit.Commit.Author.Date != nil {
|
||||
prCommit.Date = commit.Commit.Author.Date.Time
|
||||
}
|
||||
}
|
||||
// Capture parent commit SHAs for merge detection
|
||||
if commit.Parents != nil {
|
||||
for _, parent := range commit.Parents {
|
||||
if parent.SHA != nil {
|
||||
prCommit.Parents = append(prCommit.Parents, *parent.SHA)
|
||||
}
|
||||
}
|
||||
}
|
||||
result.Commits = append(result.Commits, prCommit)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func (c *Client) fetchSinglePR(ctx context.Context, prNumber int) (*PR, error) {
|
||||
ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return c.buildPRWithCommits(ctx, ghPR)
|
||||
}
|
||||
|
||||
func getString(s *string) string {
|
||||
if s == nil {
|
||||
return ""
|
||||
}
|
||||
return *s
|
||||
}
|
||||
|
||||
// FetchAllMergedPRs fetches all merged PRs using GitHub's search API
|
||||
// This is much more efficient than fetching PRs individually
|
||||
func (c *Client) FetchAllMergedPRs(since time.Time) ([]*PR, error) {
|
||||
ctx := context.Background()
|
||||
var allPRs []*PR
|
||||
|
||||
// Build search query for merged PRs
|
||||
query := fmt.Sprintf("repo:%s/%s is:pr is:merged", c.owner, c.repo)
|
||||
if !since.IsZero() {
|
||||
query += fmt.Sprintf(" merged:>=%s", since.Format("2006-01-02"))
|
||||
}
|
||||
|
||||
opts := &github.SearchOptions{
|
||||
Sort: "created",
|
||||
Order: "desc",
|
||||
ListOptions: github.ListOptions{
|
||||
PerPage: 100, // Maximum allowed
|
||||
},
|
||||
}
|
||||
|
||||
for {
|
||||
result, resp, err := c.client.Search.Issues(ctx, query, opts)
|
||||
if err != nil {
|
||||
return allPRs, fmt.Errorf("failed to search PRs: %w", err)
|
||||
}
|
||||
|
||||
// Process PRs in parallel
|
||||
prsChan := make(chan *PR, len(result.Issues))
|
||||
errChan := make(chan error, len(result.Issues))
|
||||
var wg sync.WaitGroup
|
||||
semaphore := make(chan struct{}, 10) // Limit concurrent requests
|
||||
|
||||
for _, issue := range result.Issues {
|
||||
if issue.PullRequestLinks == nil {
|
||||
continue // Not a PR
|
||||
}
|
||||
|
||||
wg.Add(1)
|
||||
go func(prNumber int) {
|
||||
defer wg.Done()
|
||||
|
||||
semaphore <- struct{}{}
|
||||
defer func() { <-semaphore }()
|
||||
|
||||
pr, err := c.fetchSinglePR(ctx, prNumber)
|
||||
if err != nil {
|
||||
errChan <- fmt.Errorf("failed to fetch PR #%d: %w", prNumber, err)
|
||||
return
|
||||
}
|
||||
prsChan <- pr
|
||||
}(*issue.Number)
|
||||
}
|
||||
|
||||
go func() {
|
||||
wg.Wait()
|
||||
close(prsChan)
|
||||
close(errChan)
|
||||
}()
|
||||
|
||||
// Collect results
|
||||
for pr := range prsChan {
|
||||
allPRs = append(allPRs, pr)
|
||||
}
|
||||
|
||||
// Check for errors
|
||||
for err := range errChan {
|
||||
// Log error but continue processing
|
||||
fmt.Fprintf(os.Stderr, "Warning: %v\n", err)
|
||||
}
|
||||
|
||||
if resp.NextPage == 0 {
|
||||
break
|
||||
}
|
||||
opts.Page = resp.NextPage
|
||||
}
|
||||
|
||||
return allPRs, nil
|
||||
}
|
||||
|
||||
// FetchAllMergedPRsGraphQL fetches all merged PRs with their commits using GraphQL
|
||||
// This is the ultimate optimization - gets everything in ~5-10 API calls
|
||||
func (c *Client) FetchAllMergedPRsGraphQL(since time.Time) ([]*PR, error) {
|
||||
ctx := context.Background()
|
||||
var allPRs []*PR
|
||||
var after *string
|
||||
totalFetched := 0
|
||||
|
||||
for {
|
||||
// Prepare variables
|
||||
variables := map[string]interface{}{
|
||||
"owner": graphql.String(c.owner),
|
||||
"repo": graphql.String(c.repo),
|
||||
"after": (*graphql.String)(after),
|
||||
}
|
||||
|
||||
// Execute GraphQL query
|
||||
var query PullRequestsQuery
|
||||
err := c.graphqlClient.Query(ctx, &query, variables)
|
||||
if err != nil {
|
||||
return allPRs, fmt.Errorf("GraphQL query failed: %w", err)
|
||||
}
|
||||
|
||||
prs := query.Repository.PullRequests.Nodes
|
||||
fmt.Fprintf(os.Stderr, "Fetched %d PRs via GraphQL (page %d)\n", len(prs), (totalFetched/100)+1)
|
||||
|
||||
// Convert GraphQL PRs to our PR struct
|
||||
for _, gqlPR := range prs {
|
||||
// If we have a since filter, stop when we reach older PRs
|
||||
if !since.IsZero() && gqlPR.MergedAt.Before(since) {
|
||||
fmt.Fprintf(os.Stderr, "Reached PRs older than %s, stopping\n", since.Format("2006-01-02"))
|
||||
return allPRs, nil
|
||||
}
|
||||
|
||||
pr := &PR{
|
||||
Number: gqlPR.Number,
|
||||
Title: gqlPR.Title,
|
||||
Body: gqlPR.Body,
|
||||
URL: gqlPR.URL,
|
||||
MergedAt: gqlPR.MergedAt,
|
||||
Commits: make([]PRCommit, 0, len(gqlPR.Commits.Nodes)),
|
||||
}
|
||||
|
||||
// Handle author - check if it's nil first
|
||||
if gqlPR.Author != nil {
|
||||
pr.Author = gqlPR.Author.Login
|
||||
pr.AuthorURL = gqlPR.Author.URL
|
||||
|
||||
switch gqlPR.Author.Typename {
|
||||
case "Bot":
|
||||
pr.AuthorType = "bot"
|
||||
case "Organization":
|
||||
pr.AuthorType = "organization"
|
||||
case "User":
|
||||
pr.AuthorType = "user"
|
||||
default:
|
||||
pr.AuthorType = "user" // fallback
|
||||
if gqlPR.Author.Typename != "" {
|
||||
fmt.Fprintf(os.Stderr, "PR #%d: Unknown author typename '%s'\n", gqlPR.Number, gqlPR.Author.Typename)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Author is nil - try to fetch from REST API as fallback
|
||||
fmt.Fprintf(os.Stderr, "PR #%d: Author is nil in GraphQL response, fetching from REST API\n", gqlPR.Number)
|
||||
|
||||
// Fetch this specific PR from REST API
|
||||
restPR, err := c.fetchSinglePR(ctx, gqlPR.Number)
|
||||
if err == nil && restPR != nil && restPR.Author != "" {
|
||||
pr.Author = restPR.Author
|
||||
pr.AuthorURL = restPR.AuthorURL
|
||||
pr.AuthorType = restPR.AuthorType
|
||||
} else {
|
||||
// Fallback if REST API also fails
|
||||
pr.Author = "[unknown]"
|
||||
pr.AuthorURL = ""
|
||||
pr.AuthorType = "user"
|
||||
}
|
||||
}
|
||||
|
||||
// Convert commits
|
||||
for _, commitNode := range gqlPR.Commits.Nodes {
|
||||
commit := PRCommit{
|
||||
SHA: commitNode.Commit.OID,
|
||||
Message: strings.TrimSpace(commitNode.Commit.Message),
|
||||
Author: commitNode.Commit.Author.Name,
|
||||
Date: commitNode.Commit.AuthoredDate, // Use actual commit timestamp
|
||||
}
|
||||
pr.Commits = append(pr.Commits, commit)
|
||||
}
|
||||
|
||||
allPRs = append(allPRs, pr)
|
||||
}
|
||||
|
||||
totalFetched += len(prs)
|
||||
|
||||
// Check if we need to fetch more pages
|
||||
if !query.Repository.PullRequests.PageInfo.HasNextPage {
|
||||
break
|
||||
}
|
||||
|
||||
after = &query.Repository.PullRequests.PageInfo.EndCursor
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "Total PRs fetched via GraphQL: %d\n", len(allPRs))
|
||||
return allPRs, nil
|
||||
}
|
||||
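Not part of the diff above: a minimal sketch of how the GitHub client in client.go might be used to fetch merged PRs. The GraphQL path effectively requires authentication, so reading a token from GITHUB_TOKEN here is an assumption, as is the 30-day window.

package main

import (
	"fmt"
	"log"
	"os"
	"time"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
)

func main() {
	client := github.NewClient(os.Getenv("GITHUB_TOKEN"), "danielmiessler", "fabric")

	// Fetch PRs merged in roughly the last 30 days; pass time.Time{} for no lower bound.
	since := time.Now().AddDate(0, 0, -30)
	prs, err := client.FetchAllMergedPRsGraphQL(since)
	if err != nil {
		log.Fatalf("fetch merged PRs: %v", err)
	}

	for _, pr := range prs {
		fmt.Printf("#%d %s (%d commits, merged %s)\n",
			pr.Number, pr.Title, len(pr.Commits), pr.MergedAt.Format("2006-01-02"))
	}
}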
cmd/generate_changelog/internal/github/email_test.go (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestPRCommitEmailHandling(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
commit PRCommit
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Valid email field",
|
||||
commit: PRCommit{
|
||||
SHA: "abc123",
|
||||
Message: "Fix bug in authentication",
|
||||
Author: "John Doe",
|
||||
Email: "john.doe@example.com",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"def456"},
|
||||
},
|
||||
expected: "john.doe@example.com",
|
||||
},
|
||||
{
|
||||
name: "Empty email field",
|
||||
commit: PRCommit{
|
||||
SHA: "abc123",
|
||||
Message: "Fix bug in authentication",
|
||||
Author: "John Doe",
|
||||
Email: "",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"def456"},
|
||||
},
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "Email field with proper initialization",
|
||||
commit: PRCommit{
|
||||
SHA: "def789",
|
||||
Message: "Add new feature",
|
||||
Author: "Jane Smith",
|
||||
Email: "jane.smith@company.org",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"ghi012"},
|
||||
},
|
||||
expected: "jane.smith@company.org",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if tt.commit.Email != tt.expected {
|
||||
t.Errorf("Expected email %q, got %q", tt.expected, tt.commit.Email)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
cmd/generate_changelog/internal/github/types.go (new file, 68 lines)
@@ -0,0 +1,68 @@
|
||||
package github
|
||||
|
||||
import "time"
|
||||
|
||||
type PR struct {
|
||||
Number int
|
||||
Title string
|
||||
Body string
|
||||
Author string
|
||||
AuthorURL string
|
||||
AuthorType string // "user", "organization", or "bot"
|
||||
URL string
|
||||
MergedAt time.Time
|
||||
Commits []PRCommit
|
||||
MergeCommit string
|
||||
}
|
||||
|
||||
// PRDetails encapsulates all relevant information about a Pull Request.
|
||||
type PRDetails struct {
|
||||
*PR
|
||||
State string
|
||||
Mergeable bool
|
||||
}
|
||||
|
||||
type PRCommit struct {
|
||||
SHA string
|
||||
Message string
|
||||
Author string
|
||||
Email string // Author email from GitHub API, empty if not public
|
||||
Date time.Time // Timestamp field
|
||||
Parents []string // Parent commits (for merge detection)
|
||||
}
|
||||
|
||||
// GraphQL query structures for hasura client
|
||||
type PullRequestsQuery struct {
|
||||
Repository struct {
|
||||
PullRequests struct {
|
||||
PageInfo struct {
|
||||
HasNextPage bool
|
||||
EndCursor string
|
||||
}
|
||||
Nodes []struct {
|
||||
Number int
|
||||
Title string
|
||||
Body string
|
||||
URL string
|
||||
MergedAt time.Time
|
||||
Author *struct {
|
||||
Typename string `graphql:"__typename"`
|
||||
Login string `graphql:"login"`
|
||||
URL string `graphql:"url"`
|
||||
}
|
||||
Commits struct {
|
||||
Nodes []struct {
|
||||
Commit struct {
|
||||
OID string `graphql:"oid"`
|
||||
Message string
|
||||
AuthoredDate time.Time `graphql:"authoredDate"`
|
||||
Author struct {
|
||||
Name string
|
||||
}
|
||||
}
|
||||
}
|
||||
} `graphql:"commits(first: 250)"`
|
||||
}
|
||||
} `graphql:"pullRequests(first: 100, after: $after, states: MERGED, orderBy: {field: UPDATED_AT, direction: DESC})"`
|
||||
} `graphql:"repository(owner: $owner, name: $repo)"`
|
||||
}
|
||||
cmd/generate_changelog/internal/release.go (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
package internal
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/cache"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
|
||||
"github.com/google/go-github/v66/github"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
type ReleaseManager struct {
|
||||
cache *cache.Cache
|
||||
githubToken string
|
||||
owner string
|
||||
repo string
|
||||
}
|
||||
|
||||
func NewReleaseManager(cfg *config.Config) (*ReleaseManager, error) {
|
||||
cache, err := cache.New(cfg.CacheFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create cache: %w", err)
|
||||
}
|
||||
|
||||
return &ReleaseManager{
|
||||
cache: cache,
|
||||
githubToken: cfg.GitHubToken,
|
||||
owner: "danielmiessler",
|
||||
repo: "fabric",
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (rm *ReleaseManager) Close() error {
|
||||
return rm.cache.Close()
|
||||
}
|
||||
|
||||
func (rm *ReleaseManager) UpdateReleaseDescription(version string) error {
|
||||
versions, err := rm.cache.GetVersions()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get versions from cache: %w", err)
|
||||
}
|
||||
|
||||
versionData, exists := versions[version]
|
||||
if !exists {
|
||||
return fmt.Errorf("version %s not found in versions table", version)
|
||||
}
|
||||
|
||||
if versionData.AISummary == "" {
|
||||
return fmt.Errorf("ai_summary is empty for version %s", version)
|
||||
}
|
||||
|
||||
releaseBody := fmt.Sprintf("## Changes\n\n%s", versionData.AISummary)
|
||||
|
||||
ctx := context.Background()
|
||||
var client *github.Client
|
||||
|
||||
if rm.githubToken != "" {
|
||||
ts := oauth2.StaticTokenSource(
|
||||
&oauth2.Token{AccessToken: rm.githubToken},
|
||||
)
|
||||
tc := oauth2.NewClient(ctx, ts)
|
||||
client = github.NewClient(tc)
|
||||
} else {
|
||||
client = github.NewClient(nil)
|
||||
}
|
||||
|
||||
release, _, err := client.Repositories.GetReleaseByTag(ctx, rm.owner, rm.repo, version)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get release for version %s: %w", version, err)
|
||||
}
|
||||
|
||||
release.Body = &releaseBody
|
||||
_, _, err = client.Repositories.EditRelease(ctx, rm.owner, rm.repo, *release.ID, release)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update release description for version %s: %w", version, err)
|
||||
}
|
||||
|
||||
fmt.Printf("Successfully updated release description for %s\n", version)
|
||||
return nil
|
||||
}
|
||||
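Not part of the diff above: a small sketch of invoking the ReleaseManager directly (main.go below wires this to the --release flag). The cache path mirrors the default flag value, and v1.4.262 is an example tag whose AI summary is assumed to already exist in the cache.

package main

import (
	"log"
	"os"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal"
	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
)

func main() {
	cfg := &config.Config{
		CacheFile:   "./cmd/generate_changelog/changelog.db", // matches the default flag in main.go
		GitHubToken: os.Getenv("GITHUB_TOKEN"),
	}

	rm, err := internal.NewReleaseManager(cfg)
	if err != nil {
		log.Fatalf("create release manager: %v", err)
	}
	defer rm.Close()

	if err := rm.UpdateReleaseDescription("v1.4.262"); err != nil {
		log.Fatalf("update release: %v", err)
	}
}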
cmd/generate_changelog/main.go (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/changelog"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/util"
|
||||
"github.com/joho/godotenv"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
cfg = &config.Config{}
|
||||
)
|
||||
|
||||
var rootCmd = &cobra.Command{
|
||||
Use: "generate_changelog",
|
||||
Short: "Generate changelog from git history and GitHub PRs",
|
||||
Long: `A high-performance changelog generator that walks git history,
|
||||
collects version information and pull requests, and generates a
|
||||
comprehensive changelog in markdown format.`,
|
||||
RunE: run,
|
||||
SilenceUsage: true, // Don't show usage on runtime errors, only on flag errors
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.Flags().StringVarP(&cfg.RepoPath, "repo", "r", ".", "Repository path")
|
||||
rootCmd.Flags().StringVarP(&cfg.OutputFile, "output", "o", "", "Output file (default: stdout)")
|
||||
rootCmd.Flags().IntVarP(&cfg.Limit, "limit", "l", 0, "Limit number of versions (0 = all)")
|
||||
rootCmd.Flags().StringVarP(&cfg.Version, "version", "v", "", "Generate changelog for specific version")
|
||||
rootCmd.Flags().BoolVar(&cfg.SaveData, "save-data", false, "Save version data to JSON for debugging")
|
||||
rootCmd.Flags().StringVar(&cfg.CacheFile, "cache", "./cmd/generate_changelog/changelog.db", "Cache database file")
|
||||
rootCmd.Flags().BoolVar(&cfg.NoCache, "no-cache", false, "Disable cache usage")
|
||||
rootCmd.Flags().BoolVar(&cfg.RebuildCache, "rebuild-cache", false, "Rebuild cache from scratch")
|
||||
rootCmd.Flags().StringVar(&cfg.GitHubToken, "token", "", "GitHub API token (or set GITHUB_TOKEN env var)")
|
||||
rootCmd.Flags().BoolVar(&cfg.ForcePRSync, "force-pr-sync", false, "Force a full PR sync from GitHub (ignores cache age)")
|
||||
rootCmd.Flags().BoolVar(&cfg.EnableAISummary, "ai-summarize", false, "Generate AI-enhanced summaries using Fabric")
|
||||
rootCmd.Flags().IntVar(&cfg.IncomingPR, "incoming-pr", 0, "Pre-process PR for changelog (provide PR number)")
|
||||
rootCmd.Flags().StringVar(&cfg.ProcessPRsVersion, "process-prs", "", "Process all incoming PR files for release (provide version like v1.4.262)")
|
||||
rootCmd.Flags().StringVar(&cfg.IncomingDir, "incoming-dir", "./cmd/generate_changelog/incoming", "Directory for incoming PR files")
|
||||
rootCmd.Flags().BoolVar(&cfg.Push, "push", false, "Enable automatic git push after creating an incoming entry")
|
||||
rootCmd.Flags().BoolVar(&cfg.SyncDB, "sync-db", false, "Synchronize and validate database integrity with git history and GitHub PRs")
|
||||
rootCmd.Flags().StringVar(&cfg.Release, "release", "", "Update GitHub release description with AI summary for version (e.g., v1.2.3)")
|
||||
}
|
||||
|
||||
func run(cmd *cobra.Command, args []string) error {
|
||||
if cfg.IncomingPR > 0 && cfg.ProcessPRsVersion != "" {
|
||||
return fmt.Errorf("--incoming-pr and --process-prs are mutually exclusive flags")
|
||||
}
|
||||
|
||||
if cfg.Release != "" && (cfg.IncomingPR > 0 || cfg.ProcessPRsVersion != "" || cfg.SyncDB) {
|
||||
return fmt.Errorf("--release cannot be used with other processing flags")
|
||||
}
|
||||
|
||||
cfg.GitHubToken = util.GetTokenFromEnv(cfg.GitHubToken)
|
||||
|
||||
generator, err := changelog.New(cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create changelog generator: %w", err)
|
||||
}
|
||||
|
||||
if cfg.IncomingPR > 0 {
|
||||
return generator.ProcessIncomingPR(cfg.IncomingPR)
|
||||
}
|
||||
|
||||
if cfg.ProcessPRsVersion != "" {
|
||||
return generator.CreateNewChangelogEntry(cfg.ProcessPRsVersion)
|
||||
}
|
||||
|
||||
if cfg.SyncDB {
|
||||
return generator.SyncDatabase()
|
||||
}
|
||||
|
||||
if cfg.Release != "" {
|
||||
releaseManager, err := internal.NewReleaseManager(cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create release manager: %w", err)
|
||||
}
|
||||
defer releaseManager.Close()
|
||||
return releaseManager.UpdateReleaseDescription(cfg.Release)
|
||||
}
|
||||
|
||||
output, err := generator.Generate()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate changelog: %w", err)
|
||||
}
|
||||
|
||||
if cfg.OutputFile != "" {
|
||||
if err := os.WriteFile(cfg.OutputFile, []byte(output), 0644); err != nil {
|
||||
return fmt.Errorf("failed to write output file: %w", err)
|
||||
}
|
||||
fmt.Printf("Changelog written to %s\n", cfg.OutputFile)
|
||||
} else {
|
||||
fmt.Print(output)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Load .env file from the same directory as the binary
|
||||
if exePath, err := os.Executable(); err == nil {
|
||||
envPath := filepath.Join(filepath.Dir(exePath), ".env")
|
||||
if _, err := os.Stat(envPath); err == nil {
|
||||
// .env file exists, load it
|
||||
if err := godotenv.Load(envPath); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to load .env file: %v\n", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
rootCmd.Execute()
|
||||
}
|
||||
cmd/generate_changelog/util/token.go (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
package util
|
||||
|
||||
import (
|
||||
"os"
|
||||
)
|
||||
|
||||
// GetTokenFromEnv returns a GitHub token based on the following precedence order:
|
||||
// 1. If tokenValue is non-empty, it is returned.
|
||||
// 2. Otherwise, if the GITHUB_TOKEN environment variable is set, its value is returned.
|
||||
// 3. Otherwise, if the GH_TOKEN environment variable is set, its value is returned.
|
||||
// 4. If none of the above are set, an empty string is returned.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// os.Setenv("GITHUB_TOKEN", "abc")
|
||||
// os.Setenv("GH_TOKEN", "def")
|
||||
// GetTokenFromEnv("xyz") // returns "xyz"
|
||||
// GetTokenFromEnv("") // returns "abc"
|
||||
// os.Unsetenv("GITHUB_TOKEN")
|
||||
// GetTokenFromEnv("") // returns "def"
|
||||
// os.Unsetenv("GH_TOKEN")
|
||||
// GetTokenFromEnv("") // returns ""
|
||||
func GetTokenFromEnv(tokenValue string) string {
|
||||
if tokenValue == "" {
|
||||
tokenValue = os.Getenv("GITHUB_TOKEN")
|
||||
if tokenValue == "" {
|
||||
tokenValue = os.Getenv("GH_TOKEN")
|
||||
}
|
||||
}
|
||||
return tokenValue
|
||||
}
|
||||
@@ -14,16 +14,19 @@ _fabric_models() {
|
||||
models=(${(f)"$(fabric --listmodels --shell-complete-list 2>/dev/null)"})
|
||||
compadd -X "Models:" ${models}
|
||||
}
|
||||
|
||||
_fabric_contexts() {
|
||||
local -a contexts
|
||||
contexts=(${(f)"$(fabric --listcontexts --shell-complete-list 2>/dev/null)"})
|
||||
compadd -X "Contexts:" ${contexts}
|
||||
}
|
||||
|
||||
_fabric_sessions() {
|
||||
local -a sessions
|
||||
sessions=(${(f)"$(fabric --listsessions --shell-complete-list 2>/dev/null)"})
|
||||
compadd -X "Sessions:" ${sessions}
|
||||
}
|
||||
|
||||
_fabric_strategies() {
|
||||
local -a strategies
|
||||
strategies=(${(f)"$(fabric --liststrategies --shell-complete-list 2>/dev/null)"})
|
||||
@@ -34,14 +37,12 @@ _fabric_extensions() {
|
||||
local -a extensions
|
||||
extensions=(${(f)"$(fabric --listextensions --shell-complete-list 2>/dev/null)"})
|
||||
compadd -X "Extensions:" ${extensions}
|
||||
'(-L --listmodels)'{-L,--listmodels}'[List all available models]:list models:_fabric_models' \
|
||||
'(-x --listcontexts)'{-x,--listcontexts}'[List all contexts]:list contexts:_fabric_contexts' \
|
||||
'(-X --listsessions)'{-X,--listsessions}'[List all sessions]:list sessions:_fabric_sessions' \
|
||||
'(--listextensions)--listextensions[List all registered extensions]' \
|
||||
'(--liststrategies)--liststrategies[List all strategies]:list strategies:_fabric_strategies' \
|
||||
'(--listvendors)--listvendors[List all vendors]' \
|
||||
vendors=(${(f)"$(fabric --listvendors 2>/dev/null)"})
|
||||
compadd -X "Vendors:" ${vendors}
|
||||
}
|
||||
|
||||
_fabric_gemini_voices() {
|
||||
local -a voices
|
||||
voices=(${(f)"$(fabric --list-gemini-voices --shell-complete-list 2>/dev/null)"})
|
||||
compadd -X "Gemini TTS Voices:" ${voices}
|
||||
}
|
||||
|
||||
_fabric() {
|
||||
@@ -109,10 +110,15 @@ _fabric() {
|
||||
'(--strategy)--strategy[Choose a strategy from the available strategies]:strategy:_fabric_strategies' \
|
||||
'(--liststrategies)--liststrategies[List all strategies]' \
|
||||
'(--listvendors)--listvendors[List all vendors]' \
|
||||
'(--voice)--voice[TTS voice name for supported models]:voice:_fabric_gemini_voices' \
|
||||
'(--list-gemini-voices)--list-gemini-voices[List all available Gemini TTS voices]' \
|
||||
'(--shell-complete-list)--shell-complete-list[Output raw list without headers/formatting (for shell completion)]' \
|
||||
'(--suppress-think)--suppress-think[Suppress text enclosed in thinking tags]' \
|
||||
'(--think-start-tag)--think-start-tag[Start tag for thinking sections (default: <think>)]:start tag:' \
|
||||
'(--think-end-tag)--think-end-tag[End tag for thinking sections (default: </think>)]:end tag:' \
|
||||
'(--disable-responses-api)--disable-responses-api[Disable OpenAI Responses API (default: false)]' \
|
||||
'(-h --help)'{-h,--help}'[Show this help message]' \
|
||||
'*:arguments:'
|
||||
}
|
||||
|
||||
_fabric "$@"
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ _fabric() {
|
||||
_get_comp_words_by_ref -n : cur prev words cword
|
||||
|
||||
# Define all possible options/flags
|
||||
local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --language -g --scrape_url -u --scrape_question -q --seed -e --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"
|
||||
local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --language -g --scrape_url -u --scrape_question -q --seed -e --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --suppress-think --think-start-tag --think-end-tag --disable-responses-api --voice --list-gemini-voices --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"
|
||||
|
||||
# Helper function for dynamic completions
|
||||
_fabric_get_list() {
|
||||
@@ -62,6 +62,10 @@ _fabric() {
|
||||
COMPREPLY=($(compgen -W "$(_fabric_get_list --liststrategies)" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--voice)
|
||||
COMPREPLY=($(compgen -W "$(_fabric_get_list --list-gemini-voices)" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
# Options requiring file/directory paths
|
||||
-a | --attachment | -o | --output | --config | --addextension | --image-file)
|
||||
_filedir
|
||||
@@ -81,7 +85,7 @@ _fabric() {
|
||||
return 0
|
||||
;;
|
||||
# Options requiring simple arguments (no specific completion logic here)
|
||||
-v | --variable | -t | --temperature | -T | --topp | -P | --presencepenalty | -F | --frequencypenalty | --modelContextLength | -n | --latest | -y | --youtube | -g | --language | -u | --scrape_url | -q | --scrape_question | -e | --seed | --address | --api-key | --search-location | --image-compression)
|
||||
-v | --variable | -t | --temperature | -T | --topp | -P | --presencepenalty | -F | --frequencypenalty | --modelContextLength | -n | --latest | -y | --youtube | -g | --language | -u | --scrape_url | -q | --scrape_question | -e | --seed | --address | --api-key | --search-location | --image-compression | --think-start-tag | --think-end-tag)
|
||||
# No specific completion suggestions, user types the value
|
||||
return 0
|
||||
;;
|
||||
|
||||
@@ -31,6 +31,10 @@ function __fabric_get_extensions
|
||||
fabric --listextensions --shell-complete-list 2>/dev/null
|
||||
end
|
||||
|
||||
function __fabric_get_gemini_voices
|
||||
fabric --list-gemini-voices --shell-complete-list 2>/dev/null
|
||||
end
|
||||
|
||||
# Main completion function
|
||||
complete -c fabric -f
|
||||
|
||||
@@ -69,6 +73,9 @@ complete -c fabric -l image-background -d "Background type: opaque, transparent
|
||||
complete -c fabric -l addextension -d "Register a new extension from config file path" -r -a "*.yaml *.yml"
|
||||
complete -c fabric -l rmextension -d "Remove a registered extension by name" -a "(__fabric_get_extensions)"
|
||||
complete -c fabric -l strategy -d "Choose a strategy from the available strategies" -a "(__fabric_get_strategies)"
|
||||
complete -c fabric -l think-start-tag -d "Start tag for thinking sections (default: <think>)"
|
||||
complete -c fabric -l think-end-tag -d "End tag for thinking sections (default: </think>)"
|
||||
complete -c fabric -l voice -d "TTS voice name for supported models (e.g., Kore, Charon, Puck)" -a "(__fabric_get_gemini_voices)"
|
||||
|
||||
# Boolean flags (no arguments)
|
||||
complete -c fabric -s S -l setup -d "Run setup for all reconfigurable parts of fabric"
|
||||
@@ -97,5 +104,8 @@ complete -c fabric -l version -d "Print current version"
|
||||
complete -c fabric -l listextensions -d "List all registered extensions"
|
||||
complete -c fabric -l liststrategies -d "List all strategies"
|
||||
complete -c fabric -l listvendors -d "List all vendors"
|
||||
complete -c fabric -l list-gemini-voices -d "List all available Gemini TTS voices"
|
||||
complete -c fabric -l shell-complete-list -d "Output raw list without headers/formatting (for shell completion)"
|
||||
complete -c fabric -l suppress-think -d "Suppress text enclosed in thinking tags"
|
||||
complete -c fabric -l disable-responses-api -d "Disable OpenAI Responses API (default: false)"
|
||||
complete -c fabric -s h -l help -d "Show this help message"
|
||||
|
||||
@@ -7,7 +7,7 @@ Generate code changes to an existing coding project using AI.
|
||||
After installing the `code_helper` binary:
|
||||
|
||||
```bash
|
||||
go install github.com/danielmiessler/fabric/plugins/tools/code_helper@latest
|
||||
go install github.com/danielmiessler/fabric/cmd/code_helper@latest
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
8
data/patterns/generate_code_rules/system.md
Normal file
@@ -0,0 +1,8 @@
|
||||
# IDENTITY AND PURPOSE
|
||||
|
||||
You are a senior developer and expert prompt engineer. Think ultra hard to distill the following transcription or tutorial into the smallest possible set of unique rules, intended as best-practices guidance for AI-assisted coding tools. Each rule must be a single sentence written as a direct instruction; avoid explanations and cosmetic language. Output in Markdown; I prefer dash bullets (-).
|
||||
|
||||
---
|
||||
|
||||
# TRANSCRIPT
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 8 16-word bullets describing how well or poorly I'm addressing my challenges. Call me out if I'm not putting work into them, and/or if you can see evidence of them affecting me in my journal or elsewhere.
|
||||
|
||||
# OUTPUT INSTRUCTIONS
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Check this person's Metrics or KPIs (M's or K's) to see their current state and if they've been improved recently.
|
||||
|
||||
# OUTPUT INSTRUCTIONS
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Analyze everything in my TELOS file and think about what I could and should do after my legacy corporate / technical skills are automated away. What can I contribute that's based on human-to-human interaction and exchanges of value?
|
||||
|
||||
# OUTPUT INSTRUCTIONS
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 4 32-word bullets describing who I am and what I do in a non-douchey way. Use the who I am, the problem I see in the world, and what I'm doing about it as the template. Something like:
|
||||
a. I'm a programmer by trade, and one thing that really bothers me is kids being so stuck inside of tech and games. So I started a school where I teach kids to build things with their hands.
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 5 16-word bullets describing this person's life outlook.
|
||||
|
||||
# OUTPUT INSTRUCTIONS
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 5 16-word bullets describing who this person is, what they do, and what they're working on. The goal is to concisely and confidently project who they are while being humble and grounded.
|
||||
|
||||
# OUTPUT INSTRUCTIONS
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 5 48-word bullet points, each including a 3-5 word panel title, that would be wonderful panels for this person to participate on.
|
||||
5. Write them so that they'd be good panels for others to participate in as well, not just me.
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 8 16-word bullets describing possible blindspots in my thinking, i.e., flaws in my frames or models that might leave me exposed to error or risk.
|
||||
|
||||
# OUTPUT INSTRUCTIONS
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 4 16-word bullets identifying negative thinking either in my main document or in my journal.
|
||||
5. Add some tough love encouragement (not fluff) to help get me out of that mindset.
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 5 16-word bullets describing which of their goals and/or projects don't seem to have been worked on recently.
|
||||
|
||||
# OUTPUT INSTRUCTIONS
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 8 16-word bullets looking at what I'm trying to do, and any progress I've made, and give some encouragement on the positive aspects and recommendations to continue the work.
|
||||
|
||||
# OUTPUT INSTRUCTIONS
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 4 16-word bullets red-teaming my thinking, models, frames, etc, especially as evidenced throughout my journal.
|
||||
5. Give a set of recommendations on how to fix the issues identified in the red-teaming.
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 8 16-word bullets threat modeling my life plan and what could go wrong.
|
||||
5. Provide recommendations on how to address the threats and improve the life plan.
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Create an ASCII art diagram of the relationship between my missions, goals, and projects.
|
||||
|
||||
# OUTPUT INSTRUCTIONS
|
||||
|
||||
@@ -6,7 +6,7 @@ You are an expert at understanding deep context about a person or entity, and th
|
||||
|
||||
1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity.
|
||||
2. Deeply study the input instruction or question.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible ouptut for the person who sent the input.
|
||||
3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input.
|
||||
4. Write 8 16-word bullets describing what you accomplished this year.
|
||||
5. End with an ASCII art visualization of what you worked on and accomplished vs. what you didn't work on or finish.
|
||||
|
||||
|
||||
@@ -45,7 +45,7 @@ Follow the following structure:
|
||||
|
||||
- Deeply understand the relationship between the HTTP requests provided. Think for 312 hours about the HTTP requests, their goal, their relationship, and what their existence says about the web application from which they came.
|
||||
|
||||
- Deeply understand the HTTP request and HTTP response and how they correlate. Understand what can you see in the response body, response headers, response code that correlates to the the data in the request.
|
||||
- Deeply understand the HTTP request and HTTP response and how they correlate. Understand what you can see in the response body, response headers, and response code that correlates to the data in the request.
|
||||
|
||||
- Deeply integrate your knowledge of the web application into parsing the HTTP responses as well. Integrate all knowledge consumed at this point together.
|
||||
|
||||
|
||||
373
docs/Automated-ChangeLog.md
Normal file
@@ -0,0 +1,373 @@
|
||||
# Automated CHANGELOG Entry System for CI/CD
|
||||
|
||||
## Overview
|
||||
|
||||
This document outlines a comprehensive system for automatically generating and maintaining CHANGELOG.md entries during the CI/CD process. The system builds upon the existing `generate_changelog` tool and integrates seamlessly with GitHub's pull request workflow.
|
||||
|
||||
## Current State Analysis
|
||||
|
||||
### Existing Infrastructure
|
||||
|
||||
The `generate_changelog` tool already provides:
|
||||
|
||||
- **High-performance Git history walking** with one-pass algorithm
|
||||
- **GitHub API integration** with GraphQL optimization and smart caching
|
||||
- **SQLite-based caching** for instant incremental updates
|
||||
- **AI-powered summaries** using Fabric integration
|
||||
- **Concurrent processing** for optimal performance
|
||||
- **Version detection** from git tags and commit patterns
|
||||
|
||||
### Key Components
|
||||
|
||||
- **Main entry point**: `cmd/generate_changelog/main.go`
|
||||
- **Core generation logic**: `internal/changelog/generator.go`
|
||||
- **AI summarization**: `internal/changelog/summarize.go`
|
||||
- **Caching system**: `internal/cache/cache.go`
|
||||
- **GitHub integration**: `internal/github/client.go`
|
||||
- **Git operations**: `internal/git/walker.go`
|
||||
|
||||
## Proposed Automated System
|
||||
|
||||
### Developer Workflow
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
A[Developer creates feature branch] --> B[Codes feature]
|
||||
B --> C[Creates Pull Request]
|
||||
C --> D[PR is open and ready]
|
||||
D --> E[Developer runs: generate_changelog --incoming-pr XXXX]
|
||||
E --> F[Tool validates PR is open/mergeable]
|
||||
F --> G[Tool creates incoming/XXXX.txt with AI summary]
|
||||
G --> H[Auto-commit and push to branch]
|
||||
H --> I[PR includes pre-processed changelog entry]
|
||||
I --> J[PR gets reviewed and merged]
|
||||
```
|
||||
|
||||
### CI/CD Integration
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
A[PR merged to main] --> B[Version bump workflow triggered]
|
||||
B --> C[generate_changelog --process-prs]
|
||||
C --> D[Scan incoming/ directory]
|
||||
D --> E[Concatenate all incoming/*.txt files]
|
||||
E --> F[Insert new version at top of CHANGELOG.md]
|
||||
F --> G[Store entry in versions table]
|
||||
G --> H[git rm incoming/*.txt files]
|
||||
H --> I[git add CHANGELOG.md and changelog.db, done by the tool]
|
||||
I --> J[Increment version number]
|
||||
J --> K[Commit and tag release]
|
||||
```
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Phase 1: Pre-Processing PRs
|
||||
|
||||
#### New Command: `--incoming-pr`
|
||||
|
||||
**Usage**: `generate_changelog --incoming-pr 1672`
|
||||
|
||||
**Functionality**:
|
||||
|
||||
1. **Validation**:
|
||||
- Verify PR exists and is open
|
||||
- Check PR is mergeable
|
||||
- Ensure branch is up-to-date
|
||||
- Verify that the current git repo is clean (everything committed); do not continue otherwise.
|
||||
|
||||
2. **Content Generation**:
|
||||
- Extract PR metadata (title, author, description)
|
||||
- Collect all commit messages from the PR
|
||||
- Use existing `SummarizeVersionContent` function for AI enhancement
|
||||
- Format as standard changelog entry
|
||||
|
||||
3. **File Creation**:
|
||||
- Generate `./cmd/generate_changelog/incoming/{PR#}.txt`
|
||||
- Include PR header: `### PR [#1672](url) by [author](profile): Title` (as is done currently in the code)
|
||||
- Consider extracting the existing header code for PRs into a helper function for re-use.
|
||||
- Include the AI-summarized changes (generated when we ran all the commit messages through `SummarizeVersionContent`)
|
||||
|
||||
4. **Auto-commit**:
|
||||
- Commit file with message: `chore: incoming 1672 changelog entry`
|
||||
- Optionally push to current branch (use `--push` flag)
|
||||
|
||||
(The PR is now fully ready to be merged, with its CHANGELOG entry already prepared.)
|
||||
|
||||
#### File Format Example
|
||||
|
||||
```markdown
|
||||
### PR [#1672](https://github.com/danielmiessler/Fabric/pull/1672) by [ksylvan](https://github.com/ksylvan): Changelog Generator Enhancement
|
||||
|
||||
- Added automated CI/CD integration for changelog generation
|
||||
- Implemented pre-processing of PR entries during development
|
||||
- Enhanced caching system for better performance
|
||||
- Added validation for mergeable PR states
|
||||
```
|
||||
|
||||
### Phase 2: Release Processing
|
||||
|
||||
#### New Command: `--process-prs`
|
||||
|
||||
**Usage**: `generate_changelog --process-prs`
|
||||
|
||||
**Integration Point**: `.github/workflows/update-version-and-create-tag.yml`
|
||||
|
||||
(we can do this AFTER the "Update gomod2nix.toml file" step in the workflow, where we
|
||||
already have generated the next version in the "version.nix" file)
|
||||
|
||||
**Functionality**:
|
||||
|
||||
1. **Discovery**: Scan `./cmd/generate_changelog/incoming/` directory
|
||||
2. **Aggregation**: Read and concatenate all `*.txt` files
|
||||
3. **Version Creation**: Generate new version header with current date
|
||||
4. **CHANGELOG Update**: Insert new version at top of existing CHANGELOG.md
|
||||
5. **Database Update**: Store complete entry in `versions` table as `ai_summary`
|
||||
6. **Cleanup**: Remove all processed incoming files
|
||||
7. **Stage Changes**: Add modified files to git staging area
|
||||
|
||||
#### Example Output in CHANGELOG.md
|
||||
|
||||
```markdown
|
||||
# Changelog
|
||||
|
||||
## v1.4.259 (2025-07-18)
|
||||
|
||||
### PR [#1672](https://github.com/danielmiessler/Fabric/pull/1672) by [ksylvan](https://github.com/ksylvan): Changelog Generator Enhancement
|
||||
|
||||
- Added automated CI/CD integration for changelog generation
|
||||
- Implemented pre-processing of PR entries during development
|
||||
- Enhanced caching system for better performance
|
||||
|
||||
### PR [#1671](https://github.com/danielmiessler/Fabric/pull/1671) by [contributor](https://github.com/contributor): Bug Fix
|
||||
|
||||
- Fixed memory leak in caching system
|
||||
- Improved error handling for GitHub API failures
|
||||
|
||||
## v1.4.258 (2025-07-14)
|
||||
[... rest of file ...]
|
||||
```
|
||||
|
||||
## Technical Implementation
|
||||
|
||||
### Configuration Extensions
|
||||
|
||||
Add to `internal/config/config.go`:
|
||||
|
||||
```go
|
||||
type Config struct {
|
||||
// ... existing fields
|
||||
IncomingPR int // PR number for --incoming-pr
|
||||
ProcessPRsVersion string // Flag for --process-prs (new version string)
|
||||
IncomingDir string // Directory for incoming files (default: ./cmd/generate_changelog/incoming/)
|
||||
}
|
||||
```
|
||||
|
||||
### New Command Line Flags
|
||||
|
||||
```go
|
||||
rootCmd.Flags().IntVar(&cfg.IncomingPR, "incoming-pr", 0, "Pre-process PR for changelog (provide PR number)")
|
||||
rootCmd.Flags().StringVar(&cfg.ProcessPRsVersion, "process-prs", "", "Process all incoming PR files for release (provide version like v1.4.262)")
|
||||
rootCmd.Flags().StringVar(&cfg.IncomingDir, "incoming-dir", "./cmd/generate_changelog/incoming", "Directory for incoming PR files")
|
||||
```
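
The mutual exclusivity between the two new flags (also documented in the usage guide) could be enforced with a small validation step before the generator runs. This is a sketch only; `validateFlags` is a hypothetical helper, and the field names come from the `Config` extension shown above:

```go
// Sketch only: enforce that --incoming-pr and --process-prs are not combined.
// validateFlags is a hypothetical helper, not part of the current tool.
func validateFlags(cfg *config.Config) error {
	if cfg.IncomingPR != 0 && cfg.ProcessPRsVersion != "" {
		return errors.New("--incoming-pr and --process-prs are mutually exclusive")
	}
	return nil
}
```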
|
||||
|
||||
### Core Logic Extensions
|
||||
|
||||
#### PR Pre-processing
|
||||
|
||||
```go
|
||||
func (g *Generator) ProcessIncomingPR(prNumber int) error {
|
||||
// 1. Validate PR state via GitHub API
|
||||
pr, err := g.ghClient.GetPR(prNumber)
|
||||
if err != nil || pr.State != "open" || !pr.Mergeable {
|
||||
return fmt.Errorf("PR %d is not in valid state for processing", prNumber)
|
||||
}
|
||||
|
||||
// 2. Generate changelog content using existing logic
|
||||
content := g.formatPR(pr)
|
||||
|
||||
// 3. Apply AI summarization if enabled
|
||||
if g.cfg.EnableAISummary {
|
||||
content, _ = SummarizeVersionContent(content)
|
||||
}
|
||||
|
||||
// 4. Write to incoming file
|
||||
filename := filepath.Join(g.cfg.IncomingDir, fmt.Sprintf("%d.txt", prNumber))
|
||||
err = os.WriteFile(filename, []byte(content), 0644)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write incoming file: %w", err)
|
||||
}
|
||||
|
||||
// 5. Auto-commit and push
|
||||
return g.commitAndPushIncoming(prNumber, filename)
|
||||
}
|
||||
```
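
The `commitAndPushIncoming` helper called in step 5 is referenced but not shown. A minimal sketch, shelling out to `git` and assuming a `Push` field on the config behind the `--push` flag (the real tool may use go-git instead), could look like this:

```go
// Sketch only: commit the incoming file with the message format described
// above and optionally push. Uses os/exec and fmt from the standard library;
// g.cfg.Push standing in for the --push flag is an assumption.
func (g *Generator) commitAndPushIncoming(prNumber int, filename string) error {
	msg := fmt.Sprintf("chore: incoming %d changelog entry", prNumber)

	for _, args := range [][]string{
		{"add", filename},
		{"commit", "-m", msg},
	} {
		if out, err := exec.Command("git", args...).CombinedOutput(); err != nil {
			return fmt.Errorf("git %v failed: %w\n%s", args, err, out)
		}
	}

	if !g.cfg.Push {
		return nil // local commit only; --push not requested
	}
	if out, err := exec.Command("git", "push").CombinedOutput(); err != nil {
		return fmt.Errorf("git push failed: %w\n%s", err, out)
	}
	return nil
}
```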
|
||||
|
||||
#### Release Processing
|
||||
|
||||
```go
|
||||
func (g *Generator) ProcessIncomingPRs(version string) error {
|
||||
// 1. Scan incoming directory
|
||||
files, err := filepath.Glob(filepath.Join(g.cfg.IncomingDir, "*.txt"))
|
||||
if err != nil || len(files) == 0 {
|
||||
return fmt.Errorf("no incoming PR files found")
|
||||
}
|
||||
|
||||
// 2. Read and concatenate all files
|
||||
var content strings.Builder
|
||||
for _, file := range files {
|
||||
data, err := os.ReadFile(file)
|
||||
if err == nil {
|
||||
content.WriteString(string(data))
|
||||
content.WriteString("\n")
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Generate version entry
|
||||
entry := fmt.Sprintf("\n## %s (%s)\n\n%s",
|
||||
version, time.Now().Format("2006-01-02"), content.String())
|
||||
|
||||
// 4. Update CHANGELOG.md
|
||||
err = g.insertVersionAtTop(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update CHANGELOG.md: %w", err)
|
||||
}
|
||||
|
||||
// 5. Update database
|
||||
err = g.cache.SaveVersionEntry(version, content.String())
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save to database: %w", err)
|
||||
}
|
||||
|
||||
// 6. Cleanup incoming files
|
||||
for _, file := range files {
|
||||
os.Remove(file)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
```
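
The `insertVersionAtTop` call in step 4 is likewise not shown above. A minimal sketch, assuming CHANGELOG.md begins with a `# Changelog` heading and that the path is known, might be:

```go
// Sketch only: place the new version entry directly below the "# Changelog"
// heading. Uses only os, strings, and fmt from the standard library.
func insertVersionAtTop(changelogPath, entry string) error {
	data, err := os.ReadFile(changelogPath)
	if err != nil {
		return fmt.Errorf("failed to read %s: %w", changelogPath, err)
	}

	const heading = "# Changelog\n"
	text := string(data)
	if idx := strings.Index(text, heading); idx >= 0 {
		cut := idx + len(heading)
		text = text[:cut] + entry + "\n" + text[cut:]
	} else {
		// No heading found: prepend the entry to the whole file.
		text = entry + "\n" + text
	}

	return os.WriteFile(changelogPath, []byte(text), 0644)
}
```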
|
||||
|
||||
## Workflow Integration
|
||||
|
||||
### GitHub Actions Modification
|
||||
|
||||
Update `.github/workflows/update-version-and-create-tag.yml`.
|
||||
|
||||
```yaml
|
||||
- name: Generate Changelog Entry
|
||||
run: |
|
||||
# Process all incoming PR entries
|
||||
./cmd/generate_changelog/generate_changelog --process-prs
|
||||
|
||||
# The tool will make the needed changes in the CHANGELOG.md,
|
||||
# and the changelog.db, and will remove the PR#.txt file(s)
|
||||
# In effect, doing the following:
|
||||
# 1. Generate the new CHANGELOG (and store the entry in the changelog.db)
|
||||
# 2. git add CHANGELOG.md
|
||||
# 3. git add ./cmd/generate_changelog/changelog.db
|
||||
# 4. git rm -rf ./cmd/generate_changelog/incoming/
|
||||
#
|
||||
```
|
||||
|
||||
### Developer Instructions
|
||||
|
||||
1. **During Development**:
|
||||
|
||||
```bash
|
||||
# After PR is ready for review (commit locally only)
|
||||
generate_changelog --incoming-pr 1672 --ai-summarize
|
||||
|
||||
# Or to automatically push to remote
|
||||
generate_changelog --incoming-pr 1672 --ai-summarize --push
|
||||
```
|
||||
|
||||
2. **Validation**:
|
||||
- Check that `incoming/1672.txt` was created
|
||||
- Verify auto-commit occurred
|
||||
- Confirm file is included in PR
|
||||
- Scan the file and make any changes you need to the auto-generated summary
|
||||
|
||||
## Benefits
|
||||
|
||||
### For Developers
|
||||
|
||||
- **Automated changelog entries** - no manual CHANGELOG.md editing
|
||||
- **AI-enhanced summaries** - professional, consistent formatting
|
||||
- **Early visibility** - changelog content visible during PR review
|
||||
- **Reduced merge conflicts** - no multiple PRs editing CHANGELOG.md
|
||||
|
||||
### For Project Maintainers
|
||||
|
||||
- **Consistent formatting** - all entries follow same structure
|
||||
- **Complete coverage** - no missed changelog entries
|
||||
- **Automated releases** - seamless integration with version bumps
|
||||
- **Historical accuracy** - each PR's contribution properly documented
|
||||
|
||||
### For CI/CD
|
||||
|
||||
- **Deterministic process** - reliable, repeatable changelog generation
|
||||
- **Performance optimized** - leverages existing caching and AI systems
|
||||
- **Error resilience** - validates PR states before processing
|
||||
- **Clean integration** - minimal changes to existing workflows
|
||||
|
||||
## Implementation Strategy
|
||||
|
||||
### Phase 1: Implement Developer Tooling
|
||||
|
||||
- [x] Add new command line flags and configuration
|
||||
- [x] Implement `--incoming-pr` functionality
|
||||
- [x] Add validation for PR states and git status
|
||||
- [x] Create auto-commit logic
|
||||
|
||||
### Phase 2: Integration (CI/CD) Readiness
|
||||
|
||||
- [x] Implement `--process-prs` functionality
|
||||
- [x] Add CHANGELOG.md insertion logic
|
||||
- [x] Update database storage for version entries
|
||||
|
||||
### Phase 3: Deployment
|
||||
|
||||
- [x] Update GitHub Actions workflow
|
||||
- [x] Create developer documentation in ./docs/ directory
|
||||
- [x] Test full end-to-end workflow (the PR that includes these modifications can be its first production test)
|
||||
|
||||
### Phase 4: Adoption
|
||||
|
||||
- [ ] Train development team - Consider creating a full tutorial blog post/page to fully walk developers through the process.
|
||||
- [ ] Monitor first few releases
|
||||
- [ ] Gather feedback and iterate
|
||||
- [ ] Document lessons learned
|
||||
|
||||
## Error Handling
|
||||
|
||||
### PR Validation Failures
|
||||
|
||||
- **Closed/Merged PR**: Error with suggestion to check PR status
|
||||
- **Non-mergeable PR**: Error with instruction to resolve conflicts
|
||||
- **Missing PR**: Error with verification of PR number
|
||||
|
||||
### File System Issues
|
||||
|
||||
- **Permission errors**: Clear error with directory permission requirements
|
||||
- **Disk space**: Graceful handling with cleanup suggestions
|
||||
- **Network failures**: Retry logic with exponential backoff
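
A minimal sketch of the retry behavior mentioned for network failures (the attempt count, base delay, and helper name are illustrative, not the tool's actual implementation):

```go
// Sketch only: retry a transient operation (e.g. a GitHub API call) with
// exponential backoff between attempts.
func withRetry(attempts int, fn func() error) error {
	var err error
	delay := time.Second
	for i := 0; i < attempts; i++ {
		if err = fn(); err == nil {
			return nil
		}
		time.Sleep(delay)
		delay *= 2 // double the wait between attempts
	}
	return fmt.Errorf("giving up after %d attempts: %w", attempts, err)
}
```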
|
||||
|
||||
### Git Operations
|
||||
|
||||
- **Commit failures**: Check for dirty working directory
|
||||
- **Push failures**: Handle authentication and remote issues
|
||||
- **Merge conflicts**: Clear instructions for manual resolution
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
### Advanced Features
|
||||
|
||||
- **Custom categorization** - group changes by type (feat/fix/docs)
|
||||
- **Breaking change detection** - special handling for BREAKING CHANGE commits
|
||||
- **Release notes generation** - enhanced formatting for GitHub releases (our release pages are pretty bare)
|
||||
|
||||
## Conclusion
|
||||
|
||||
This automated changelog system builds upon the robust foundation of the existing `generate_changelog` tool while providing a seamless developer experience and reliable CI/CD integration. By pre-processing PR entries during development and aggregating them during releases, we achieve both accuracy and automation without sacrificing quality or developer productivity.
|
||||
|
||||
The phased approach ensures smooth adoption while the extensive error handling and validation provide confidence in production deployment. The system's design leverages existing infrastructure and patterns, making it a natural evolution of the current changelog generation capabilities.
|
||||
195
docs/Automated-Changelog-Usage.md
Normal file
@@ -0,0 +1,195 @@
|
||||
# Automated Changelog System - Developer Guide
|
||||
|
||||
This guide explains how to use the new automated changelog system for the Fabric project.
|
||||
|
||||
## Overview
|
||||
|
||||
The automated changelog system allows developers to pre-process their PR changelog entries during development, which are then automatically aggregated during the release process. This eliminates manual CHANGELOG.md editing and reduces merge conflicts.
|
||||
|
||||
## Developer Workflow
|
||||
|
||||
### Step 1: Create Your Feature Branch and PR
|
||||
|
||||
Work on your feature as usual and create a pull request.
|
||||
|
||||
### Step 2: Generate Changelog Entry
|
||||
|
||||
Once your PR is ready for review, generate a changelog entry:
|
||||
|
||||
```bash
|
||||
cd cmd/generate_changelog
|
||||
go build -o generate_changelog .
|
||||
./generate_changelog --incoming-pr YOUR_PR_NUMBER
|
||||
```
|
||||
|
||||
For example, if your PR number is 1672:
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672
|
||||
```
|
||||
|
||||
### Step 3: Validation
|
||||
|
||||
The tool will validate:
|
||||
|
||||
- ✅ PR exists and is open
|
||||
- ✅ PR is mergeable (no conflicts)
|
||||
- ✅ Your working directory is clean
|
||||
|
||||
If any validation fails, fix the issues and try again.
|
||||
|
||||
### Step 4: Review Generated Entry
|
||||
|
||||
The tool will:
|
||||
|
||||
1. Create `./cmd/generate_changelog/incoming/1672.txt`
|
||||
2. Generate an AI-enhanced summary (if `--ai-summarize` is enabled)
|
||||
3. Auto-commit the file to your branch (use `--push` to also push to remote)
|
||||
|
||||
Review the generated file and edit if needed:
|
||||
|
||||
```bash
|
||||
cat ./cmd/generate_changelog/incoming/1672.txt
|
||||
```
|
||||
|
||||
### Step 5: Include in PR
|
||||
|
||||
The incoming changelog entry is now part of your PR and will be reviewed along with your code changes.
|
||||
|
||||
## Example Generated Entry
|
||||
|
||||
```markdown
|
||||
### PR [#1672](https://github.com/danielmiessler/fabric/pull/1672) by [ksylvan](https://github.com/ksylvan): Changelog Generator Enhancement
|
||||
|
||||
- Added automated CI/CD integration for changelog generation
|
||||
- Implemented pre-processing of PR entries during development
|
||||
- Enhanced caching system for better performance
|
||||
- Added validation for mergeable PR states
|
||||
```
|
||||
|
||||
## Command Options
|
||||
|
||||
### `--incoming-pr`
|
||||
|
||||
Pre-process a specific PR for changelog generation.
|
||||
|
||||
**Usage**: `./generate_changelog --incoming-pr PR_NUMBER`
|
||||
|
||||
**Requirements**:
|
||||
|
||||
- PR must be open
|
||||
- PR must be mergeable (no conflicts)
|
||||
- Working directory must be clean (no uncommitted changes)
|
||||
- GitHub token must be available (`GITHUB_TOKEN` env var or `--token` flag)
|
||||
|
||||
**Mutual Exclusivity**: Cannot be used with `--process-prs` flag
|
||||
|
||||
### `--incoming-dir`
|
||||
|
||||
Specify custom directory for incoming PR files (default: `./cmd/generate_changelog/incoming`).
|
||||
|
||||
**Usage**: `./generate_changelog --incoming-pr 1672 --incoming-dir ./custom/path`
|
||||
|
||||
### `--process-prs`
|
||||
|
||||
Process all incoming PR files for release aggregation. Used by CI/CD during release creation.
|
||||
|
||||
**Usage**: `./generate_changelog --process-prs {new_version_string}`
|
||||
|
||||
**Mutual Exclusivity**: Cannot be used with `--incoming-pr` flag
|
||||
|
||||
### `--ai-summarize`
|
||||
|
||||
Enable AI-enhanced summaries using Fabric integration.
|
||||
|
||||
**Usage**: `./generate_changelog --incoming-pr 1672 --ai-summarize`
|
||||
|
||||
### `--push`
|
||||
|
||||
Enable automatic git push after creating an incoming entry. By default, the commit is created locally but not pushed to the remote repository.
|
||||
|
||||
**Usage**: `./generate_changelog --incoming-pr 1672 --push`
|
||||
|
||||
**Note**: When using `--push`, ensure you have proper authentication configured (SSH keys or GITHUB_TOKEN environment variable).
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "PR is not open"
|
||||
|
||||
Your PR has been closed or merged. Only open PRs can be processed.
|
||||
|
||||
### "PR is not mergeable"
|
||||
|
||||
Your PR has merge conflicts or other issues preventing it from being merged. Resolve conflicts and ensure the PR is in a mergeable state.
|
||||
|
||||
### "Working directory is not clean"
|
||||
|
||||
You have uncommitted changes. Commit or stash them before running the tool.
|
||||
|
||||
### "Failed to fetch PR"
|
||||
|
||||
Check your GitHub token and network connection. Ensure the PR number exists.
|
||||
|
||||
## CI/CD Integration
|
||||
|
||||
The system automatically processes all incoming PR files during the release workflow. No manual intervention is required.
|
||||
|
||||
When a release is created:
|
||||
|
||||
1. All `incoming/*.txt` files are aggregated using `--process-prs`
|
||||
2. Version is detected from `version.nix` or latest git tag
|
||||
3. A new version entry is created in CHANGELOG.md
|
||||
4. Incoming files are cleaned up (removed)
|
||||
5. Changes are staged for the release commit (CHANGELOG.md and cache file)
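
A rough sketch of the version detection in step 2, assuming `version.nix` holds a quoted version string and falling back to `git describe` (both assumptions; the workflow may implement this differently):

```go
// Sketch only: prefer the version recorded in version.nix, fall back to the
// latest git tag. The quoted-string format of version.nix is an assumption.
func detectVersion(repoPath string) (string, error) {
	if data, err := os.ReadFile(filepath.Join(repoPath, "version.nix")); err == nil {
		if v := strings.Trim(strings.TrimSpace(string(data)), `"`); v != "" {
			return v, nil
		}
	}
	out, err := exec.Command("git", "-C", repoPath, "describe", "--tags", "--abbrev=0").Output()
	if err != nil {
		return "", fmt.Errorf("could not detect version: %w", err)
	}
	return strings.TrimSpace(string(out)), nil
}
```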
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Run early**: Generate your changelog entry as soon as your PR is ready for review
|
||||
2. **Review content**: Always review the generated entry and edit if necessary
|
||||
3. **Keep it updated**: If you make significant changes to your PR, regenerate the entry
|
||||
4. **Use AI summaries**: Enable `--ai-summarize` for more professional, consistent formatting
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Custom GitHub Token
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672 --token YOUR_GITHUB_TOKEN
|
||||
```
|
||||
|
||||
### Custom Repository Path
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672 --repo /path/to/repo
|
||||
```
|
||||
|
||||
### Disable Caching
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672 --no-cache
|
||||
```
|
||||
|
||||
### Enable Auto-Push
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672 --push
|
||||
```
|
||||
|
||||
This creates the commit locally and pushes it to the remote repository. By default, commits are only created locally, allowing you to review changes before pushing manually.
|
||||
|
||||
**Authentication**: The tool automatically detects GitHub repositories and uses the GITHUB_TOKEN environment variable for authentication when pushing. For SSH repositories, ensure your SSH keys are properly configured.
|
||||
|
||||
## Integration with Existing Workflow
|
||||
|
||||
This system is fully backward compatible. The existing changelog generation continues to work unchanged. The new features are opt-in and only activated when using the new flags.
|
||||
|
||||
## Support
|
||||
|
||||
If you encounter issues:
|
||||
|
||||
1. Check this documentation
|
||||
2. Verify your GitHub token has appropriate permissions
|
||||
3. Ensure your PR meets the validation requirements
|
||||
4. Check the tool's help: `./generate_changelog --help`
|
||||
|
||||
For bugs or feature requests, please create an issue in the repository.
|
||||
155
docs/Gemini-TTS.md
Normal file
@@ -0,0 +1,155 @@
|
||||
# Gemini Text-to-Speech (TTS) Guide
|
||||
|
||||
Fabric supports Google Gemini's text-to-speech (TTS) capabilities, allowing you to convert text into high-quality audio using various AI-generated voices.
|
||||
|
||||
## Overview
|
||||
|
||||
The Gemini TTS feature in Fabric allows you to:
|
||||
|
||||
- Convert text input into audio using Google's Gemini TTS models
|
||||
- Choose from 30+ different AI voices with varying characteristics
|
||||
- Generate high-quality WAV audio files
|
||||
- Integrate TTS generation into your existing Fabric workflows
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic TTS Generation
|
||||
|
||||
To generate audio from text using TTS:
|
||||
|
||||
```bash
|
||||
# Basic TTS with default voice (Kore)
|
||||
echo "Hello, this is a test of Gemini TTS" | fabric -m gemini-2.5-flash-preview-tts -o output.wav
|
||||
|
||||
# Using a specific voice
|
||||
echo "Hello, this is a test with the Charon voice" | fabric -m gemini-2.5-flash-preview-tts --voice Charon -o output.wav
|
||||
|
||||
# Using TTS with a pattern
|
||||
fabric -p summarize --voice Puck -m gemini-2.5-flash-preview-tts -o summary.wav < document.txt
|
||||
```
|
||||
|
||||
### Voice Selection
|
||||
|
||||
Use the `--voice` flag to specify which voice to use for TTS generation:
|
||||
|
||||
```bash
|
||||
fabric -m gemini-2.5-flash-preview-tts --voice Zephyr -o output.wav "Your text here"
|
||||
```
|
||||
|
||||
If no voice is specified, the default voice "Kore" will be used.
|
||||
|
||||
## Available Voices
|
||||
|
||||
Gemini TTS supports 30+ different voices, each with unique characteristics:
|
||||
|
||||
### Popular Voices
|
||||
|
||||
- **Kore** - Firm and confident (default)
|
||||
- **Charon** - Informative and clear
|
||||
- **Puck** - Upbeat and energetic
|
||||
- **Zephyr** - Bright and cheerful
|
||||
- **Leda** - Youthful and energetic
|
||||
- **Aoede** - Breezy and natural
|
||||
|
||||
### Complete Voice List
|
||||
|
||||
- Kore, Charon, Puck, Fenrir, Aoede, Leda, Orus, Zephyr
|
||||
- Autonoe, Callirhoe, Despina, Erinome, Gacrux, Laomedeia
|
||||
- Pulcherrima, Sulafat, Vindemiatrix, Achernar, Achird
|
||||
- Algenib, Algieba, Alnilam, Enceladus, Iapetus, Rasalgethi
|
||||
- Sadachbia, Zubenelgenubi, Vega, Capella, Lyra
|
||||
|
||||
### Listing Available Voices
|
||||
|
||||
To see all available voices with descriptions:
|
||||
|
||||
```bash
|
||||
# List all voices with characteristics
|
||||
fabric --list-gemini-voices
|
||||
|
||||
# List voice names only (for shell completion)
|
||||
fabric --list-gemini-voices --shell-complete-list
|
||||
```
|
||||
|
||||
## Rate Limits
|
||||
|
||||
Google Gemini TTS has usage quotas that vary by plan:
|
||||
|
||||
### Free Tier
|
||||
|
||||
- **15 requests per day** per project per TTS model
|
||||
- Quota resets daily
|
||||
- Applies to all TTS models (e.g., `gemini-2.5-flash-preview-tts`)
|
||||
|
||||
### Rate Limit Errors
|
||||
|
||||
If you exceed your quota, you'll see an error like:
|
||||
|
||||
```text
|
||||
Error 429: You exceeded your current quota, please check your plan and billing details
|
||||
```
|
||||
|
||||
**Solutions:**
|
||||
|
||||
- Wait for daily quota reset (typically at midnight UTC)
|
||||
- Upgrade to a paid plan for higher limits
|
||||
- Use TTS generation strategically for important content
|
||||
|
||||
For current rate limits and pricing, visit: <https://ai.google.dev/gemini-api/docs/rate-limits>
|
||||
|
||||
## Configuration
|
||||
|
||||
### Command Line Options
|
||||
|
||||
- `--voice <voice_name>` - Specify the TTS voice to use
|
||||
- `-o <filename.wav>` - Output audio file (required for TTS models)
|
||||
- `-m <tts_model>` - Specify a TTS-capable model (e.g., `gemini-2.5-flash-preview-tts`)
|
||||
|
||||
### YAML Configuration
|
||||
|
||||
You can also set a default voice in your Fabric configuration file (`~/.config/fabric/config.yaml`):
|
||||
|
||||
```yaml
|
||||
voice: "Charon" # Set your preferred default voice
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
- Valid Google Gemini API key configured in Fabric
|
||||
- TTS-capable Gemini model (models containing "tts" in the name)
|
||||
- Audio output must be specified with `-o filename.wav`
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
#### Error: "TTS model requires audio output"
|
||||
|
||||
- Solution: Always specify an output file with `-o filename.wav` when using TTS models
|
||||
|
||||
#### Error: "Invalid voice 'X'"
|
||||
|
||||
- Solution: Check that the voice name is spelled correctly and matches one of the supported voices listed above
|
||||
|
||||
#### Error: "TTS generation failed"
|
||||
|
||||
- Solution: Verify your Gemini API key is valid and you have sufficient quota
|
||||
|
||||
### Getting Help
|
||||
|
||||
For additional help with TTS features:
|
||||
|
||||
```bash
|
||||
fabric --help
|
||||
```
|
||||
|
||||
## Technical Details
|
||||
|
||||
- **Audio Format**: WAV files with 24kHz sample rate, 16-bit depth, mono channel
|
||||
- **Language Support**: Automatic language detection for 24+ languages
|
||||
- **Model Requirements**: Models must contain "tts", "preview-tts", or "text-to-speech" in the name
|
||||
- **Voice Selection**: Uses Google's PrebuiltVoiceConfig system for consistent voice quality
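
As a rough illustration of the model-requirements rule above, a check along these lines (a sketch, not Fabric's exact implementation) decides whether a model name should be routed to TTS handling:

```go
// Sketch only: treat a model as TTS-capable when its name carries one of the
// markers listed above. "preview-tts" is kept for clarity even though the
// plain "tts" check already matches it.
func isTTSModel(model string) bool {
	m := strings.ToLower(model)
	return strings.Contains(m, "tts") ||
		strings.Contains(m, "preview-tts") ||
		strings.Contains(m, "text-to-speech")
}
```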
|
||||
|
||||
---
|
||||
|
||||
For more information about Fabric, visit the [main documentation](../README.md).
|
||||
36
docs/voices/README.md
Normal file
@@ -0,0 +1,36 @@
|
||||
# Voice Samples
|
||||
|
||||
This directory contains sample audio files demonstrating different Gemini TTS voices.
|
||||
|
||||
## Sample Files
|
||||
|
||||
Each voice sample says "The quick brown fox jumped over the lazy dog" to demonstrate the voice characteristics:
|
||||
|
||||
- **Kore.wav** - Firm and confident (default voice)
|
||||
- **Charon.wav** - Informative and clear
|
||||
- **Vega.wav** - Smooth and pleasant
|
||||
- **Capella.wav** - Warm and welcoming
|
||||
- **Achird.wav** - Friendly and approachable
|
||||
- **Lyra.wav** - Melodic and expressive
|
||||
|
||||
## Generating Samples
|
||||
|
||||
To generate these samples, use the following commands:
|
||||
|
||||
```bash
|
||||
# Generate each voice sample
|
||||
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Kore -o docs/voices/Kore.wav
|
||||
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Charon -o docs/voices/Charon.wav
|
||||
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Vega -o docs/voices/Vega.wav
|
||||
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Capella -o docs/voices/Capella.wav
|
||||
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Achird -o docs/voices/Achird.wav
|
||||
echo "The quick brown fox jumped over the lazy dog" | fabric -m gemini-2.5-flash-preview-tts --voice Lyra -o docs/voices/Lyra.wav
|
||||
```
|
||||
|
||||
## Audio Format
|
||||
|
||||
- **Format**: WAV (uncompressed)
|
||||
- **Sample Rate**: 24kHz
|
||||
- **Bit Depth**: 16-bit
|
||||
- **Channels**: Mono
|
||||
- **Approximate Size**: ~500KB per sample
|
||||
@@ -71,6 +71,7 @@
|
||||
default = self.packages.${system}.fabric;
|
||||
fabric = pkgs.callPackage ./nix/pkgs/fabric {
|
||||
go = goVersion;
|
||||
inherit self;
|
||||
inherit (gomod2nix.legacyPackages.${system}) buildGoApplication;
|
||||
};
|
||||
inherit (gomod2nix.legacyPackages.${system}) gomod2nix;
|
||||
|
||||
30
go.mod
@@ -5,7 +5,7 @@ go 1.24.0
|
||||
toolchain go1.24.2
|
||||
|
||||
require (
|
||||
github.com/anthropics/anthropic-sdk-go v1.4.0
|
||||
github.com/anthropics/anthropic-sdk-go v1.7.0
|
||||
github.com/atotto/clipboard v0.1.4
|
||||
github.com/aws/aws-sdk-go-v2 v1.36.4
|
||||
github.com/aws/aws-sdk-go-v2/config v1.27.27
|
||||
@@ -15,29 +15,35 @@ require (
|
||||
github.com/gin-gonic/gin v1.10.1
|
||||
github.com/go-git/go-git/v5 v5.16.2
|
||||
github.com/go-shiori/go-readability v0.0.0-20250217085726-9f5bf5ca7612
|
||||
github.com/google/generative-ai-go v0.20.1
|
||||
github.com/google/go-github/v66 v66.0.0
|
||||
github.com/hasura/go-graphql-client v0.14.4
|
||||
github.com/jessevdk/go-flags v1.6.1
|
||||
github.com/joho/godotenv v1.5.1
|
||||
github.com/mattn/go-sqlite3 v1.14.28
|
||||
github.com/ollama/ollama v0.9.0
|
||||
github.com/openai/openai-go v1.8.2
|
||||
github.com/otiai10/copy v1.14.1
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/samber/lo v1.50.0
|
||||
github.com/sgaunet/perplexity-go/v2 v2.8.0
|
||||
github.com/spf13/cobra v1.9.1
|
||||
github.com/stretchr/testify v1.10.0
|
||||
golang.org/x/oauth2 v0.30.0
|
||||
golang.org/x/text v0.26.0
|
||||
golang.org/x/text v0.27.0
|
||||
google.golang.org/api v0.236.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.121.2 // indirect
|
||||
cloud.google.com/go/ai v0.12.1 // indirect
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
cloud.google.com/go/longrunning v0.6.7 // indirect
|
||||
dario.cat/mergo v1.0.2 // indirect
|
||||
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||
github.com/ProtonMail/go-crypto v1.3.0 // indirect
|
||||
@@ -59,6 +65,7 @@ require (
|
||||
github.com/bytedance/sonic/loader v0.2.4 // indirect
|
||||
github.com/cloudflare/circl v1.6.1 // indirect
|
||||
github.com/cloudwego/base64x v0.1.5 // indirect
|
||||
github.com/coder/websocket v1.8.13 // indirect
|
||||
github.com/cyphar/filepath-securejoin v0.4.1 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/emirpasic/gods v1.18.1 // indirect
|
||||
@@ -75,10 +82,12 @@ require (
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f // indirect
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
|
||||
github.com/google/go-querystring v1.1.0 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/kevinburke/ssh_config v1.2.0 // indirect
|
||||
@@ -89,10 +98,11 @@ require (
|
||||
github.com/modern-go/reflect2 v1.0.2 // indirect
|
||||
github.com/otiai10/mint v1.6.3 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||
github.com/pjbgf/sha1cd v0.3.2 // indirect
|
||||
github.com/pjbgf/sha1cd v0.4.0 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/sergi/go-diff v1.4.0 // indirect
|
||||
github.com/skeema/knownhosts v1.3.1 // indirect
|
||||
github.com/spf13/pflag v1.0.6 // indirect
|
||||
github.com/tidwall/gjson v1.18.0 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.1 // indirect
|
||||
@@ -101,17 +111,17 @@ require (
|
||||
github.com/ugorji/go/codec v1.2.14 // indirect
|
||||
github.com/xanzy/ssh-agent v0.3.3 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/otel v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.36.0 // indirect
|
||||
golang.org/x/arch v0.18.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20250531010427-b6e5de432a8b // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/sync v0.15.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/time v0.12.0 // indirect
|
||||
golang.org/x/sync v0.16.0 // indirect
|
||||
golang.org/x/sys v0.34.0 // indirect
|
||||
google.golang.org/genai v1.17.0
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
|
||||
56
go.sum
@@ -1,15 +1,11 @@
|
||||
cloud.google.com/go v0.121.2 h1:v2qQpN6Dx9x2NmwrqlesOt3Ys4ol5/lFZ6Mg1B7OJCg=
|
||||
cloud.google.com/go v0.121.2/go.mod h1:nRFlrHq39MNVWu+zESP2PosMWA0ryJw8KUBZ2iZpxbw=
|
||||
cloud.google.com/go/ai v0.12.1 h1:m1n/VjUuHS+pEO/2R4/VbuuEIkgk0w67fDQvFaMngM0=
|
||||
cloud.google.com/go/ai v0.12.1/go.mod h1:5vIPNe1ZQsVZqCliXIPL4QnhObQQY4d9hAGHdVc4iw4=
|
||||
cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4=
|
||||
cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA=
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc=
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c=
|
||||
cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU=
|
||||
cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo=
|
||||
cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE=
|
||||
cloud.google.com/go/longrunning v0.6.7/go.mod h1:EAFV3IZAKmM56TyiE6VAP3VoTzhZzySwI/YI1s/nRsY=
|
||||
dario.cat/mergo v1.0.2 h1:85+piFYR1tMbRrLcDwR18y4UKJ3aH1Tbzi24VRW1TK8=
|
||||
dario.cat/mergo v1.0.2/go.mod h1:E/hbnu0NxMFBjpMIE34DRGLWqDy0g5FuKDhCb31ngxA=
|
||||
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
|
||||
@@ -23,6 +19,8 @@ github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFI
|
||||
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
|
||||
github.com/anthropics/anthropic-sdk-go v1.4.0 h1:fU1jKxYbQdQDiEXCxeW5XZRIOwKevn/PMg8Ay1nnUx0=
|
||||
github.com/anthropics/anthropic-sdk-go v1.4.0/go.mod h1:AapDW22irxK2PSumZiQXYUFvsdQgkwIWlpESweWZI/c=
|
||||
github.com/anthropics/anthropic-sdk-go v1.7.0 h1:5iVf5fG/2gqVsOce8mq02r/WdgqpokM/8DXg2Ue6C9Y=
|
||||
github.com/anthropics/anthropic-sdk-go v1.7.0/go.mod h1:3qSNQ5NrAmjC8A2ykuruSQttfqfdEYNZY5o8c0XSHB8=
|
||||
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de h1:FxWPpzIjnTlhPwqqXc4/vE0f7GvRjuAsbW+HOIe8KnA=
|
||||
github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de/go.mod h1:DCaWoUhZrYW9p1lxo/cm8EmUOOzAPSEZNGF2DK1dJgw=
|
||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
|
||||
@@ -71,6 +69,9 @@ github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZ
|
||||
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
|
||||
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
||||
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
|
||||
github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE=
|
||||
github.com/coder/websocket v1.8.13/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
|
||||
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@@ -123,11 +124,14 @@ github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8J
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
|
||||
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
|
||||
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
|
||||
github.com/google/generative-ai-go v0.20.1 h1:6dEIujpgN2V0PgLhr6c/M1ynRdc7ARtiIDPFzj45uNQ=
|
||||
github.com/google/generative-ai-go v0.20.1/go.mod h1:TjOnZJmZKzarWbjUJgy+r3Ee7HGBRVLhOIgupnwR4Bg=
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/go-github/v66 v66.0.0 h1:ADJsaXj9UotwdgK8/iFZtv7MLc8E8WBl62WLd/D/9+M=
|
||||
github.com/google/go-github/v66 v66.0.0/go.mod h1:+4SO9Zkuyf8ytMj0csN1NR/5OTR+MfqPp8P8dVlcvY4=
|
||||
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
|
||||
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
|
||||
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
|
||||
@@ -137,6 +141,12 @@ github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0=
|
||||
github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/hasura/go-graphql-client v0.14.4 h1:bYU7/+V50T2YBGdNQXt6l4f2cMZPECPUd8cyCR+ixtw=
|
||||
github.com/hasura/go-graphql-client v0.14.4/go.mod h1:jfSZtBER3or+88Q9vFhWHiFMPppfYILRyl+0zsgPIIw=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
|
||||
github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4=
|
||||
@@ -163,6 +173,8 @@ github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjS
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
|
||||
github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEum7A=
|
||||
github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
@@ -180,8 +192,8 @@ github.com/otiai10/mint v1.6.3 h1:87qsV/aw1F5as1eH1zS/yqHY85ANKVMgkDrf9rcxbQs=
|
||||
github.com/otiai10/mint v1.6.3/go.mod h1:MJm72SBthJjz8qhefc4z1PYEieWmy8Bku7CjcAqyUSM=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
|
||||
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
|
||||
github.com/pjbgf/sha1cd v0.4.0 h1:NXzbL1RvjTUi6kgYZCX3fPwwl27Q1LJndxtUDVfJGRY=
|
||||
github.com/pjbgf/sha1cd v0.4.0/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
@@ -189,6 +201,7 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN
|
||||
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/samber/lo v1.50.0 h1:XrG0xOeHs+4FQ8gJR97zDz5uOFMW7OwFWiFVzqopKgY=
|
||||
github.com/samber/lo v1.50.0/go.mod h1:RjZyNk6WSnUFRKK6EyOhsRJMqft3G+pg7dCWHQCWvsc=
|
||||
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
|
||||
@@ -199,6 +212,10 @@ github.com/sgaunet/perplexity-go/v2 v2.8.0/go.mod h1:MSks4RNuivCi0GqJyylhFdgSJFV
|
||||
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
||||
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
||||
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
|
||||
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
|
||||
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
|
||||
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
@@ -230,8 +247,6 @@ github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
|
||||
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ=
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
|
||||
go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg=
|
||||
@@ -255,8 +270,8 @@ golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM=
|
||||
golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U=
|
||||
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa h1:t2QcU6V556bFjYgu4L6C+6VrCPyJZ+eyRsABUPs1mz4=
|
||||
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
|
||||
golang.org/x/exp v0.0.0-20250531010427-b6e5de432a8b h1:QoALfVG9rhQ/M7vYDScfPdWjGL9dlsVVM5VGh7aKoAA=
|
||||
golang.org/x/exp v0.0.0-20250531010427-b6e5de432a8b/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
@@ -283,8 +298,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
|
||||
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -301,8 +316,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
|
||||
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
@@ -324,10 +339,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
|
||||
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
|
||||
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
|
||||
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
|
||||
golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4=
|
||||
golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
@@ -335,8 +348,11 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/api v0.236.0 h1:CAiEiDVtO4D/Qja2IA9VzlFrgPnK3XVMmRoJZlSWbc0=
|
||||
google.golang.org/api v0.236.0/go.mod h1:X1WF9CU2oTc+Jml1tiIxGmWFK/UZezdqEu09gcxZAj4=
|
||||
google.golang.org/genai v1.17.0 h1:lXYSnWShPYjxTouxRj0zF8RsNmSF+SKo7SQ7dM35NlI=
|
||||
google.golang.org/genai v1.17.0/go.mod h1:QPj5NGJw+3wEOHg+PrsWwJKvG6UC84ex5FR7qAYsN/M=
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78=
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY=
|
||||
|
||||
@@ -3,6 +3,7 @@ package cli
import (
"fmt"
"os"
"path/filepath"
"strings"

"github.com/danielmiessler/fabric/internal/core"
@@ -35,15 +36,54 @@ func handleChatProcessing(currentFlags *Flags, registry *core.PluginRegistry, me
if chatOptions, err = currentFlags.BuildChatOptions(); err != nil {
return
}

// Check if user is requesting audio output or using a TTS model
isAudioOutput := currentFlags.Output != "" && IsAudioFormat(currentFlags.Output)
isTTSModel := isTTSModel(currentFlags.Model)

if isTTSModel && !isAudioOutput {
err = fmt.Errorf("TTS model '%s' requires audio output. Please specify an audio output file with -o flag (e.g., -o output.wav)", currentFlags.Model)
return
}

if isAudioOutput && !isTTSModel {
err = fmt.Errorf("audio output file '%s' specified but model '%s' is not a TTS model. Please use a TTS model like gemini-2.5-flash-preview-tts", currentFlags.Output, currentFlags.Model)
return
}

// For TTS models, check if output file already exists BEFORE processing
if isTTSModel && isAudioOutput {
outputFile := currentFlags.Output
// Add .wav extension if not provided
if filepath.Ext(outputFile) == "" {
outputFile += ".wav"
}
if _, err = os.Stat(outputFile); err == nil {
err = fmt.Errorf("file %s already exists. Please choose a different filename or remove the existing file", outputFile)
return
}
}

// Set audio options in chat config
chatOptions.AudioOutput = isAudioOutput
if isAudioOutput {
chatOptions.AudioFormat = "wav" // Default to WAV format
}

if session, err = chatter.Send(chatReq, chatOptions); err != nil {
return
}

result := session.GetLastMessage().Content

if !currentFlags.Stream {
// print the result if it was not streamed already
fmt.Println(result)
if !currentFlags.Stream || currentFlags.SuppressThink {
// For TTS models with audio output, show a user-friendly message instead of raw data
if isTTSModel && isAudioOutput && strings.HasPrefix(result, "FABRIC_AUDIO_DATA:") {
fmt.Printf("TTS audio generated successfully and saved to: %s\n", currentFlags.Output)
} else {
// print the result if it was not streamed already or suppress-think disabled streaming output
fmt.Println(result)
}
}

// if the copy flag is set, copy the message to the clipboard
@@ -59,8 +99,29 @@ func handleChatProcessing(currentFlags *Flags, registry *core.PluginRegistry, me
sessionAsString := session.String()
err = CreateOutputFile(sessionAsString, currentFlags.Output)
} else {
err = CreateOutputFile(result, currentFlags.Output)
// For TTS models, we need to handle audio output differently
if isTTSModel && isAudioOutput {
// Check if result contains actual audio data
if strings.HasPrefix(result, "FABRIC_AUDIO_DATA:") {
// Extract the binary audio data
audioData := result[len("FABRIC_AUDIO_DATA:"):]
err = CreateAudioOutputFile([]byte(audioData), currentFlags.Output)
} else {
// Fallback for any error messages or unexpected responses
err = CreateOutputFile(result, currentFlags.Output)
}
} else {
err = CreateOutputFile(result, currentFlags.Output)
}
}
}
return
}

// isTTSModel checks if the model is a text-to-speech model
func isTTSModel(modelName string) bool {
lowerModel := strings.ToLower(modelName)
return strings.Contains(lowerModel, "tts") ||
strings.Contains(lowerModel, "preview-tts") ||
strings.Contains(lowerModel, "text-to-speech")
}

@@ -7,6 +7,7 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/danielmiessler/fabric/internal/core"
|
||||
"github.com/danielmiessler/fabric/internal/plugins/ai/openai"
|
||||
"github.com/danielmiessler/fabric/internal/tools/converter"
|
||||
"github.com/danielmiessler/fabric/internal/tools/youtube"
|
||||
)
|
||||
@@ -18,6 +19,12 @@ func Cli(version string) (err error) {
|
||||
return
|
||||
}
|
||||
|
||||
if currentFlags.Setup {
|
||||
if err = ensureEnvFile(); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if currentFlags.Version {
|
||||
fmt.Println(version)
|
||||
return
|
||||
@@ -36,6 +43,11 @@ func Cli(version string) (err error) {
|
||||
}
|
||||
}
|
||||
|
||||
// Configure OpenAI Responses API setting based on CLI flag
|
||||
if registry != nil {
|
||||
configureOpenAIResponsesAPI(registry, currentFlags.DisableResponsesAPI)
|
||||
}
|
||||
|
||||
// Handle setup and server commands
|
||||
var handled bool
|
||||
if handled, err = handleSetupAndServerCommands(currentFlags, registry, version); err != nil || handled {
|
||||
@@ -142,3 +154,21 @@ func WriteOutput(message string, outputFile string) (err error) {
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// configureOpenAIResponsesAPI configures the OpenAI client's Responses API setting based on the CLI flag
|
||||
func configureOpenAIResponsesAPI(registry *core.PluginRegistry, disableResponsesAPI bool) {
|
||||
// Find the OpenAI vendor in the registry
|
||||
if registry != nil && registry.VendorsAll != nil {
|
||||
for _, vendor := range registry.VendorsAll.Vendors {
|
||||
if vendor.GetName() == "OpenAI" {
|
||||
// Type assertion to access the OpenAI-specific method
|
||||
if openaiClient, ok := vendor.(*openai.Client); ok {
|
||||
// Invert the disable flag to get the enable flag
|
||||
enableResponsesAPI := !disableResponsesAPI
|
||||
openaiClient.SetResponsesAPIEnabled(enableResponsesAPI)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,4 +18,13 @@ temperature: 0.88
seed: 42

stream: true
raw: false
raw: false

# suppress vendor thinking output
suppressThink: false
thinkStartTag: "<think>"
thinkEndTag: "</think>"

# OpenAI Responses API settings
# (use this for llama-server or other OpenAI-compatible local servers)
disableResponsesAPI: true

@@ -20,6 +20,8 @@ import (
|
||||
)
|
||||
|
||||
// Flags create flags struct. the users flags go into this, this will be passed to the chat struct in cli
|
||||
// Chat parameter defaults set in the struct tags must match domain.Default* constants
|
||||
|
||||
type Flags struct {
|
||||
Pattern string `short:"p" long:"pattern" yaml:"pattern" description:"Choose a pattern from the available patterns" default:""`
|
||||
PatternVariables map[string]string `short:"v" long:"variable" description:"Values for pattern variables, e.g. -v=#role:expert -v=#points:30"`
|
||||
@@ -83,6 +85,12 @@ type Flags struct {
|
||||
ImageQuality string `long:"image-quality" description:"Image quality: low, medium, high, auto (default: auto)"`
|
||||
ImageCompression int `long:"image-compression" description:"Compression level 0-100 for JPEG/WebP formats (default: not set)"`
|
||||
ImageBackground string `long:"image-background" description:"Background type: opaque, transparent (default: opaque, only for PNG/WebP)"`
|
||||
SuppressThink bool `long:"suppress-think" yaml:"suppressThink" description:"Suppress text enclosed in thinking tags"`
|
||||
ThinkStartTag string `long:"think-start-tag" yaml:"thinkStartTag" description:"Start tag for thinking sections" default:"<think>"`
|
||||
ThinkEndTag string `long:"think-end-tag" yaml:"thinkEndTag" description:"End tag for thinking sections" default:"</think>"`
|
||||
DisableResponsesAPI bool `long:"disable-responses-api" yaml:"disableResponsesAPI" description:"Disable OpenAI Responses API (default: false)"`
|
||||
Voice string `long:"voice" yaml:"voice" description:"TTS voice name for supported models (e.g., Kore, Charon, Puck)" default:"Kore"`
|
||||
ListGeminiVoices bool `long:"list-gemini-voices" description:"List all available Gemini TTS voices"`
|
||||
}
|
||||
|
||||
var debug = false
|
||||
@@ -99,26 +107,34 @@ func Init() (ret *Flags, err error) {
|
||||
usedFlags := make(map[string]bool)
|
||||
yamlArgsScan := os.Args[1:]
|
||||
|
||||
// Get list of fields that have yaml tags, could be in yaml config
|
||||
yamlFields := make(map[string]bool)
|
||||
// Create mapping from flag names (both short and long) to yaml tag names
|
||||
flagToYamlTag := make(map[string]string)
|
||||
t := reflect.TypeOf(Flags{})
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
if yamlTag := t.Field(i).Tag.Get("yaml"); yamlTag != "" {
|
||||
yamlFields[yamlTag] = true
|
||||
//Debugf("Found yaml-configured field: %s\n", yamlTag)
|
||||
field := t.Field(i)
|
||||
yamlTag := field.Tag.Get("yaml")
|
||||
if yamlTag != "" {
|
||||
longTag := field.Tag.Get("long")
|
||||
shortTag := field.Tag.Get("short")
|
||||
if longTag != "" {
|
||||
flagToYamlTag[longTag] = yamlTag
|
||||
Debugf("Mapped long flag %s to yaml tag %s\n", longTag, yamlTag)
|
||||
}
|
||||
if shortTag != "" {
|
||||
flagToYamlTag[shortTag] = yamlTag
|
||||
Debugf("Mapped short flag %s to yaml tag %s\n", shortTag, yamlTag)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Scan args for that are provided by cli and might be in yaml
|
||||
for _, arg := range yamlArgsScan {
|
||||
if strings.HasPrefix(arg, "--") {
|
||||
flag := strings.TrimPrefix(arg, "--")
|
||||
if i := strings.Index(flag, "="); i > 0 {
|
||||
flag = flag[:i]
|
||||
}
|
||||
if yamlFields[flag] {
|
||||
usedFlags[flag] = true
|
||||
Debugf("CLI flag used: %s\n", flag)
|
||||
flag := extractFlag(arg)
|
||||
|
||||
if flag != "" {
|
||||
if yamlTag, exists := flagToYamlTag[flag]; exists {
|
||||
usedFlags[yamlTag] = true
|
||||
Debugf("CLI flag used: %s (yaml: %s)\n", flag, yamlTag)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -131,6 +147,16 @@ func Init() (ret *Flags, err error) {
|
||||
return
|
||||
}
|
||||
|
||||
// Check to see if a ~/.config/fabric/config.yaml config file exists (only when user didn't specify a config)
|
||||
if ret.Config == "" {
|
||||
// Default to ~/.config/fabric/config.yaml if no config specified
|
||||
if defaultConfigPath, err := util.GetDefaultConfigPath(); err == nil && defaultConfigPath != "" {
|
||||
ret.Config = defaultConfigPath
|
||||
} else if err != nil {
|
||||
Debugf("Could not determine default config path: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
// If config specified, load and apply YAML for unused flags
|
||||
if ret.Config != "" {
|
||||
var yamlFlags *Flags
|
||||
@@ -165,7 +191,6 @@ func Init() (ret *Flags, err error) {
|
||||
}
|
||||
}
|
||||
|
||||
// Handle stdin and messages
|
||||
// Handle stdin and messages
|
||||
info, _ := os.Stdin.Stat()
|
||||
pipedToStdin := (info.Mode() & os.ModeCharDevice) == 0
|
||||
@@ -185,6 +210,22 @@ func Init() (ret *Flags, err error) {
|
||||
return
|
||||
}
|
||||
|
||||
func extractFlag(arg string) string {
|
||||
var flag string
|
||||
if strings.HasPrefix(arg, "--") {
|
||||
flag = strings.TrimPrefix(arg, "--")
|
||||
if i := strings.Index(flag, "="); i > 0 {
|
||||
flag = flag[:i]
|
||||
}
|
||||
} else if strings.HasPrefix(arg, "-") && len(arg) > 1 {
|
||||
flag = strings.TrimPrefix(arg, "-")
|
||||
if i := strings.Index(flag, "="); i > 0 {
|
||||
flag = flag[:i]
|
||||
}
|
||||
}
|
||||
return flag
|
||||
}
|
||||
|
||||
func assignWithConversion(targetField, sourceField reflect.Value) error {
|
||||
// Handle string source values
|
||||
if sourceField.Kind() == reflect.String {
|
||||
@@ -376,6 +417,15 @@ func (o *Flags) BuildChatOptions() (ret *domain.ChatOptions, err error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
startTag := o.ThinkStartTag
|
||||
if startTag == "" {
|
||||
startTag = "<think>"
|
||||
}
|
||||
endTag := o.ThinkEndTag
|
||||
if endTag == "" {
|
||||
endTag = "</think>"
|
||||
}
|
||||
|
||||
ret = &domain.ChatOptions{
|
||||
Model: o.Model,
|
||||
Temperature: o.Temperature,
|
||||
@@ -392,6 +442,10 @@ func (o *Flags) BuildChatOptions() (ret *domain.ChatOptions, err error) {
|
||||
ImageQuality: o.ImageQuality,
|
||||
ImageCompression: o.ImageCompression,
|
||||
ImageBackground: o.ImageBackground,
|
||||
SuppressThink: o.SuppressThink,
|
||||
ThinkStartTag: startTag,
|
||||
ThinkEndTag: endTag,
|
||||
Voice: o.Voice,
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -64,6 +64,9 @@ func TestBuildChatOptions(t *testing.T) {
|
||||
FrequencyPenalty: 0.2,
|
||||
Raw: false,
|
||||
Seed: 1,
|
||||
SuppressThink: false,
|
||||
ThinkStartTag: "<think>",
|
||||
ThinkEndTag: "</think>",
|
||||
}
|
||||
options, err := flags.BuildChatOptions()
|
||||
assert.NoError(t, err)
|
||||
@@ -85,12 +88,29 @@ func TestBuildChatOptionsDefaultSeed(t *testing.T) {
|
||||
FrequencyPenalty: 0.2,
|
||||
Raw: false,
|
||||
Seed: 0,
|
||||
SuppressThink: false,
|
||||
ThinkStartTag: "<think>",
|
||||
ThinkEndTag: "</think>",
|
||||
}
|
||||
options, err := flags.BuildChatOptions()
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, expectedOptions, options)
|
||||
}
|
||||
|
||||
func TestBuildChatOptionsSuppressThink(t *testing.T) {
|
||||
flags := &Flags{
|
||||
SuppressThink: true,
|
||||
ThinkStartTag: "[[t]]",
|
||||
ThinkEndTag: "[[/t]]",
|
||||
}
|
||||
|
||||
options, err := flags.BuildChatOptions()
|
||||
assert.NoError(t, err)
|
||||
assert.True(t, options.SuppressThink)
|
||||
assert.Equal(t, "[[t]]", options.ThinkStartTag)
|
||||
assert.Equal(t, "[[/t]]", options.ThinkEndTag)
|
||||
}
|
||||
|
||||
func TestInitWithYAMLConfig(t *testing.T) {
|
||||
// Create a temporary YAML config file
|
||||
configContent := `
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
@@ -8,6 +9,9 @@ import (
|
||||
"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
|
||||
)
|
||||
|
||||
const ConfigDirPerms os.FileMode = 0755
|
||||
const EnvFilePerms os.FileMode = 0644
|
||||
|
||||
// initializeFabric initializes the fabric database and plugin registry
|
||||
func initializeFabric() (registry *core.PluginRegistry, err error) {
|
||||
var homedir string
|
||||
@@ -26,3 +30,27 @@ func initializeFabric() (registry *core.PluginRegistry, err error) {
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// ensureEnvFile checks for the default ~/.config/fabric/.env file and creates it
|
||||
// along with the parent directory if it does not exist.
|
||||
func ensureEnvFile() (err error) {
|
||||
var homedir string
|
||||
if homedir, err = os.UserHomeDir(); err != nil {
|
||||
return fmt.Errorf("could not determine user home directory: %w", err)
|
||||
}
|
||||
configDir := filepath.Join(homedir, ".config", "fabric")
|
||||
envPath := filepath.Join(configDir, ".env")
|
||||
|
||||
if _, statErr := os.Stat(envPath); statErr != nil {
|
||||
if !os.IsNotExist(statErr) {
|
||||
return fmt.Errorf("could not stat .env file: %w", statErr)
|
||||
}
|
||||
if err = os.MkdirAll(configDir, ConfigDirPerms); err != nil {
|
||||
return fmt.Errorf("could not create config directory: %w", err)
|
||||
}
|
||||
if err = os.WriteFile(envPath, []byte{}, EnvFilePerms); err != nil {
|
||||
return fmt.Errorf("could not create .env file: %w", err)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/danielmiessler/fabric/internal/core"
|
||||
"github.com/danielmiessler/fabric/internal/plugins/ai"
|
||||
"github.com/danielmiessler/fabric/internal/plugins/ai/gemini"
|
||||
"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
|
||||
)
|
||||
|
||||
@@ -58,5 +60,11 @@ func handleListingCommands(currentFlags *Flags, fabricDb *fsdb.Db, registry *cor
|
||||
return true, err
|
||||
}
|
||||
|
||||
if currentFlags.ListGeminiVoices {
|
||||
voicesList := gemini.ListGeminiVoices(currentFlags.ShellCompleteOutput)
|
||||
fmt.Print(voicesList)
|
||||
return true, nil
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
@@ -3,6 +3,8 @@ package cli
import (
"fmt"
"os"
"path/filepath"
"strings"

"github.com/atotto/clipboard"
)
@@ -15,6 +17,10 @@ func CopyToClipboard(message string) (err error) {
}

func CreateOutputFile(message string, fileName string) (err error) {
if _, err = os.Stat(fileName); err == nil {
err = fmt.Errorf("file %s already exists, not overwriting. Rename the existing file or choose a different name", fileName)
return
}
var file *os.File
if file, err = os.Create(fileName); err != nil {
err = fmt.Errorf("error creating file: %v", err)
@@ -24,7 +30,41 @@ func CreateOutputFile(message string, fileName string) (err error) {
if _, err = file.WriteString(message); err != nil {
err = fmt.Errorf("error writing to file: %v", err)
} else {
fmt.Printf("\n\n... written to %s\n", fileName)
fmt.Fprintf(os.Stderr, "\n\n[Output also written to %s]\n", fileName)
}
return
}

// CreateAudioOutputFile creates a binary file for audio data
func CreateAudioOutputFile(audioData []byte, fileName string) (err error) {
// If no extension is provided, default to .wav
if filepath.Ext(fileName) == "" {
fileName += ".wav"
}

// File existence check is now done in the CLI layer before TTS generation
var file *os.File
if file, err = os.Create(fileName); err != nil {
err = fmt.Errorf("error creating audio file: %v", err)
return
}
defer file.Close()

if _, err = file.Write(audioData); err != nil {
err = fmt.Errorf("error writing audio data to file: %v", err)
}
// No redundant output message here - the CLI layer handles success messaging
return
}

// IsAudioFormat checks if the filename suggests an audio format
func IsAudioFormat(fileName string) bool {
ext := strings.ToLower(filepath.Ext(fileName))
audioExts := []string{".wav", ".mp3", ".m4a", ".aac", ".ogg", ".flac"}
for _, audioExt := range audioExts {
if ext == audioExt {
return true
}
}
return false
}

@@ -79,7 +79,9 @@ func (o *Chatter) Send(request *domain.ChatRequest, opts *domain.ChatOptions) (s

for response := range responseChan {
message += response
fmt.Print(response)
if !opts.SuppressThink {
fmt.Print(response)
}
}

// Wait for goroutine to finish
@@ -101,6 +103,10 @@ func (o *Chatter) Send(request *domain.ChatRequest, opts *domain.ChatOptions) (s
}
}

if opts.SuppressThink && !o.DryRun {
message = domain.StripThinkBlocks(message, opts.ThinkStartTag, opts.ThinkEndTag)
}

if message == "" {
session = nil
err = fmt.Errorf("empty response")

@@ -15,6 +15,7 @@ import (
|
||||
type mockVendor struct {
|
||||
sendStreamError error
|
||||
streamChunks []string
|
||||
sendFunc func(context.Context, []*chat.ChatCompletionMessage, *domain.ChatOptions) (string, error)
|
||||
}
|
||||
|
||||
func (m *mockVendor) GetName() string {
|
||||
@@ -57,6 +58,9 @@ func (m *mockVendor) SendStream(messages []*chat.ChatCompletionMessage, opts *do
|
||||
}
|
||||
|
||||
func (m *mockVendor) Send(ctx context.Context, messages []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (string, error) {
|
||||
if m.sendFunc != nil {
|
||||
return m.sendFunc(ctx, messages, opts)
|
||||
}
|
||||
return "test response", nil
|
||||
}
|
||||
|
||||
@@ -64,6 +68,51 @@ func (m *mockVendor) NeedsRawMode(modelName string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func TestChatter_Send_SuppressThink(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
db := fsdb.NewDb(tempDir)
|
||||
|
||||
mockVendor := &mockVendor{}
|
||||
|
||||
chatter := &Chatter{
|
||||
db: db,
|
||||
Stream: false,
|
||||
vendor: mockVendor,
|
||||
model: "test-model",
|
||||
}
|
||||
|
||||
request := &domain.ChatRequest{
|
||||
Message: &chat.ChatCompletionMessage{
|
||||
Role: chat.ChatMessageRoleUser,
|
||||
Content: "test",
|
||||
},
|
||||
}
|
||||
|
||||
opts := &domain.ChatOptions{
|
||||
Model: "test-model",
|
||||
SuppressThink: true,
|
||||
ThinkStartTag: "<think>",
|
||||
ThinkEndTag: "</think>",
|
||||
}
|
||||
|
||||
// custom send function returning a message with think tags
|
||||
mockVendor.sendFunc = func(ctx context.Context, msgs []*chat.ChatCompletionMessage, o *domain.ChatOptions) (string, error) {
|
||||
return "<think>hidden</think> visible", nil
|
||||
}
|
||||
|
||||
session, err := chatter.Send(request, opts)
|
||||
if err != nil {
|
||||
t.Fatalf("Send returned error: %v", err)
|
||||
}
|
||||
if session == nil {
|
||||
t.Fatal("expected session")
|
||||
}
|
||||
last := session.GetLastMessage()
|
||||
if last.Content != "visible" {
|
||||
t.Errorf("expected filtered content 'visible', got %q", last.Content)
|
||||
}
|
||||
}
|
||||
|
||||
func TestChatter_Send_StreamingErrorPropagation(t *testing.T) {
|
||||
// Create a temporary database for testing
|
||||
tempDir := t.TempDir()
|
||||
|
||||
@@ -37,12 +37,16 @@ import (
"github.com/danielmiessler/fabric/internal/util"
)

// hasAWSCredentials checks if any AWS credentials are present either in the
// environment variables or in the default/shared credentials file. It doesn't
// attempt to verify the validity of the credentials, but simply ensures that a
// potential authentication source exists so we can safely initialize the
// Bedrock client without causing the AWS SDK to search for credentials.
// hasAWSCredentials checks if Bedrock is properly configured by ensuring both
// AWS credentials and BEDROCK_AWS_REGION are present. This prevents the Bedrock
// client from being initialized when AWS credentials exist for other purposes.
func hasAWSCredentials() bool {
// First check if BEDROCK_AWS_REGION is set - this is required for Bedrock
if os.Getenv("BEDROCK_AWS_REGION") == "" {
return false
}

// Then check if AWS credentials are available
if os.Getenv("AWS_PROFILE") != "" ||
os.Getenv("AWS_ROLE_SESSION_NAME") != "" ||
(os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "") {
@@ -259,8 +263,24 @@ func (o *PluginRegistry) GetModels() (ret *ai.VendorsModels, err error) {
|
||||
func (o *PluginRegistry) Configure() (err error) {
|
||||
o.ConfigureVendors()
|
||||
_ = o.Defaults.Configure()
|
||||
if err := o.CustomPatterns.Configure(); err != nil {
|
||||
return fmt.Errorf("error configuring CustomPatterns: %w", err)
|
||||
}
|
||||
_ = o.PatternsLoader.Configure()
|
||||
|
||||
// Refresh the database custom patterns directory after custom patterns plugin is configured
|
||||
customPatternsDir := os.Getenv("CUSTOM_PATTERNS_DIRECTORY")
|
||||
if customPatternsDir != "" {
|
||||
// Expand home directory if needed
|
||||
if strings.HasPrefix(customPatternsDir, "~/") {
|
||||
if homeDir, err := os.UserHomeDir(); err == nil {
|
||||
customPatternsDir = filepath.Join(homeDir, customPatternsDir[2:])
|
||||
}
|
||||
}
|
||||
o.Db.Patterns.CustomPatternsDir = customPatternsDir
|
||||
o.PatternsLoader.Patterns.CustomPatternsDir = customPatternsDir
|
||||
}
|
||||
|
||||
//YouTube and Jina are not mandatory, so ignore not configured error
|
||||
_ = o.YouTube.Configure()
|
||||
_ = o.Jina.Configure()
|
||||
|
||||
@@ -4,6 +4,14 @@ import "github.com/danielmiessler/fabric/internal/chat"

const ChatMessageRoleMeta = "meta"

// Default values for chat options (must match cli/flags.go defaults)
const (
DefaultTemperature = 0.7
DefaultTopP = 0.9
DefaultPresencePenalty = 0.0
DefaultFrequencyPenalty = 0.0
)

type ChatRequest struct {
ContextName string
SessionName string
@@ -33,6 +41,12 @@ type ChatOptions struct {
ImageQuality string
ImageCompression int
ImageBackground string
SuppressThink bool
ThinkStartTag string
ThinkEndTag string
AudioOutput bool
AudioFormat string
Voice string
}

// NormalizeMessages remove empty messages and ensure messages order user-assist-user

internal/domain/think.go (new file)
@@ -0,0 +1,32 @@
package domain

import (
    "regexp"
    "sync"
)

// StripThinkBlocks removes any content between the provided start and end tags
// from the input string. Whitespace following the end tag is also removed so
// output resumes at the next non-empty line.
var (
    regexCache = make(map[string]*regexp.Regexp)
    cacheMutex sync.Mutex
)

func StripThinkBlocks(input, startTag, endTag string) string {
    if startTag == "" || endTag == "" {
        return input
    }

    cacheKey := startTag + "|" + endTag
    cacheMutex.Lock()
    re, exists := regexCache[cacheKey]
    if !exists {
        pattern := "(?s)" + regexp.QuoteMeta(startTag) + ".*?" + regexp.QuoteMeta(endTag) + "\\s*"
        re = regexp.MustCompile(pattern)
        regexCache[cacheKey] = re
    }
    cacheMutex.Unlock()

    return re.ReplaceAllString(input, "")
}
internal/domain/think_test.go (new file)
@@ -0,0 +1,19 @@
package domain

import "testing"

func TestStripThinkBlocks(t *testing.T) {
    input := "<think>internal</think>\n\nresult"
    got := StripThinkBlocks(input, "<think>", "</think>")
    if got != "result" {
        t.Errorf("expected %q, got %q", "result", got)
    }
}

func TestStripThinkBlocksCustomTags(t *testing.T) {
    input := "[[t]]hidden[[/t]] visible"
    got := StripThinkBlocks(input, "[[t]]", "[[/t]]")
    if got != "visible" {
        t.Errorf("expected %q, got %q", "visible", got)
    }
}

@@ -46,6 +46,7 @@ func NewClient() (ret *Client) {
|
||||
string(anthropic.ModelClaude_3_5_Sonnet_20240620), string(anthropic.ModelClaude3OpusLatest),
|
||||
string(anthropic.ModelClaude_3_Opus_20240229), string(anthropic.ModelClaude_3_Haiku_20240307),
|
||||
string(anthropic.ModelClaudeOpus4_20250514), string(anthropic.ModelClaudeSonnet4_20250514),
|
||||
string(anthropic.ModelClaudeOpus4_1_20250805),
|
||||
}
|
||||
|
||||
return
|
||||
@@ -181,11 +182,19 @@ func (an *Client) buildMessageParams(msgs []anthropic.MessageParam, opts *domain
|
||||
params anthropic.MessageNewParams) {
|
||||
|
||||
params = anthropic.MessageNewParams{
|
||||
Model: anthropic.Model(opts.Model),
|
||||
MaxTokens: int64(an.maxTokens),
|
||||
TopP: anthropic.Opt(opts.TopP),
|
||||
Temperature: anthropic.Opt(opts.Temperature),
|
||||
Messages: msgs,
|
||||
Model: anthropic.Model(opts.Model),
|
||||
MaxTokens: int64(an.maxTokens),
|
||||
Messages: msgs,
|
||||
}
|
||||
|
||||
// Only set one of Temperature or TopP as some models don't allow both
|
||||
// Always set temperature to ensure consistent behavior (Anthropic default is 1.0, Fabric default is 0.7)
|
||||
if opts.TopP != domain.DefaultTopP {
|
||||
// User explicitly set TopP, so use that instead of temperature
|
||||
params.TopP = anthropic.Opt(opts.TopP)
|
||||
} else {
|
||||
// Use temperature (always set to ensure Fabric's default of 0.7, not Anthropic's 1.0)
|
||||
params.Temperature = anthropic.Opt(opts.Temperature)
|
||||
}
|
||||
|
||||
// Add Claude Code spoofing system message for OAuth authentication
|
||||
|
||||
@@ -72,7 +72,8 @@ func TestBuildMessageParams_WithoutSearch(t *testing.T) {
|
||||
client := NewClient()
|
||||
opts := &domain.ChatOptions{
|
||||
Model: "claude-3-5-sonnet-latest",
|
||||
Temperature: 0.7,
|
||||
Temperature: 0.8, // Use non-default value to ensure it gets set
|
||||
TopP: domain.DefaultTopP, // Use default TopP so temperature takes precedence
|
||||
Search: false,
|
||||
}
|
||||
|
||||
@@ -90,6 +91,7 @@ func TestBuildMessageParams_WithoutSearch(t *testing.T) {
|
||||
t.Errorf("Expected model %s, got %s", opts.Model, params.Model)
|
||||
}
|
||||
|
||||
// When using non-default temperature, it should be set in params
|
||||
if params.Temperature.Value != opts.Temperature {
|
||||
t.Errorf("Expected temperature %f, got %f", opts.Temperature, params.Temperature.Value)
|
||||
}
|
||||
@@ -99,7 +101,8 @@ func TestBuildMessageParams_WithSearch(t *testing.T) {
|
||||
client := NewClient()
|
||||
opts := &domain.ChatOptions{
|
||||
Model: "claude-3-5-sonnet-latest",
|
||||
Temperature: 0.7,
|
||||
Temperature: 0.8, // Use non-default value
|
||||
TopP: domain.DefaultTopP, // Use default TopP so temperature takes precedence
|
||||
Search: true,
|
||||
}
|
||||
|
||||
@@ -135,7 +138,8 @@ func TestBuildMessageParams_WithSearchAndLocation(t *testing.T) {
|
||||
client := NewClient()
|
||||
opts := &domain.ChatOptions{
|
||||
Model: "claude-3-5-sonnet-latest",
|
||||
Temperature: 0.7,
|
||||
Temperature: 0.8, // Use non-default value
|
||||
TopP: domain.DefaultTopP, // Use default TopP so temperature takes precedence
|
||||
Search: true,
|
||||
SearchLocation: "America/Los_Angeles",
|
||||
}
|
||||
@@ -256,3 +260,59 @@ func TestCitationFormatting(t *testing.T) {
|
||||
t.Errorf("Expected 2 unique citations, got %d", citationCount)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildMessageParams_DefaultValues(t *testing.T) {
|
||||
client := NewClient()
|
||||
|
||||
// Test with default temperature - should always set temperature unless TopP is explicitly set
|
||||
opts := &domain.ChatOptions{
|
||||
Model: "claude-3-5-sonnet-latest",
|
||||
Temperature: domain.DefaultTemperature, // 0.7 - should be set to override Anthropic's 1.0 default
|
||||
TopP: domain.DefaultTopP, // 0.9 - default, so temperature takes precedence
|
||||
Search: false,
|
||||
}
|
||||
|
||||
messages := []anthropic.MessageParam{
|
||||
anthropic.NewUserMessage(anthropic.NewTextBlock("Hello")),
|
||||
}
|
||||
|
||||
params := client.buildMessageParams(messages, opts)
|
||||
|
||||
// Temperature should be set when using default value to override Anthropic's 1.0 default
|
||||
if params.Temperature.Value != opts.Temperature {
|
||||
t.Errorf("Expected temperature %f, got %f", opts.Temperature, params.Temperature.Value)
|
||||
}
|
||||
|
||||
// TopP should not be set when using default value (temperature takes precedence)
|
||||
if params.TopP.Value != 0 {
|
||||
t.Errorf("Expected TopP to not be set (0), but got %f", params.TopP.Value)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildMessageParams_ExplicitTopP(t *testing.T) {
|
||||
client := NewClient()
|
||||
|
||||
// Test with explicit TopP - should set TopP instead of temperature
|
||||
opts := &domain.ChatOptions{
|
||||
Model: "claude-3-5-sonnet-latest",
|
||||
Temperature: domain.DefaultTemperature, // 0.7 - ignored when TopP is explicitly set
|
||||
TopP: 0.5, // Non-default - should be set
|
||||
Search: false,
|
||||
}
|
||||
|
||||
messages := []anthropic.MessageParam{
|
||||
anthropic.NewUserMessage(anthropic.NewTextBlock("Hello")),
|
||||
}
|
||||
|
||||
params := client.buildMessageParams(messages, opts)
|
||||
|
||||
// Temperature should not be set when TopP is explicitly set
|
||||
if params.Temperature.Value != 0 {
|
||||
t.Errorf("Expected temperature to not be set (0), but got %f", params.Temperature.Value)
|
||||
}
|
||||
|
||||
// TopP should be set when using non-default value
|
||||
if params.TopP.Value != opts.TopP {
|
||||
t.Errorf("Expected TopP %f, got %f", opts.TopP, params.TopP.Value)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"time"
|
||||
@@ -71,7 +72,7 @@ func (t *OAuthTransport) getValidToken(tokenIdentifier string) (string, error) {
|
||||
}
|
||||
// If no token exists, run OAuth flow
|
||||
if token == nil {
|
||||
fmt.Println("No OAuth token found, initiating authentication...")
|
||||
fmt.Fprintln(os.Stderr, "No OAuth token found, initiating authentication...")
|
||||
newAccessToken, err := RunOAuthFlow(tokenIdentifier)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to authenticate: %w", err)
|
||||
@@ -81,11 +82,11 @@ func (t *OAuthTransport) getValidToken(tokenIdentifier string) (string, error) {
|
||||
|
||||
// Check if token needs refresh (5 minute buffer)
|
||||
if token.IsExpired(5) {
|
||||
fmt.Println("OAuth token expired, refreshing...")
|
||||
fmt.Fprintln(os.Stderr, "OAuth token expired, refreshing...")
|
||||
newAccessToken, err := RefreshToken(tokenIdentifier)
|
||||
if err != nil {
|
||||
// If refresh fails, try re-authentication
|
||||
fmt.Println("Token refresh failed, re-authenticating...")
|
||||
fmt.Fprintln(os.Stderr, "Token refresh failed, re-authenticating...")
|
||||
newAccessToken, err = RunOAuthFlow(tokenIdentifier)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to refresh or re-authenticate: %w", err)
|
||||
@@ -137,13 +138,13 @@ func RunOAuthFlow(tokenIdentifier string) (token string, err error) {
|
||||
if err == nil && existingToken != nil {
|
||||
// If token exists but is expired, try refreshing first
|
||||
if existingToken.IsExpired(5) {
|
||||
fmt.Println("Found expired OAuth token, attempting refresh...")
|
||||
fmt.Fprintln(os.Stderr, "Found expired OAuth token, attempting refresh...")
|
||||
refreshedToken, refreshErr := RefreshToken(tokenIdentifier)
|
||||
if refreshErr == nil {
|
||||
fmt.Println("Token refresh successful")
|
||||
fmt.Fprintln(os.Stderr, "Token refresh successful")
|
||||
return refreshedToken, nil
|
||||
}
|
||||
fmt.Printf("Token refresh failed (%v), proceeding with full OAuth flow...\n", refreshErr)
|
||||
fmt.Fprintf(os.Stderr, "Token refresh failed (%v), proceeding with full OAuth flow...\n", refreshErr)
|
||||
} else {
|
||||
// Token exists and is still valid
|
||||
return existingToken.AccessToken, nil
|
||||
@@ -170,10 +171,10 @@ func RunOAuthFlow(tokenIdentifier string) (token string, err error) {
|
||||
oauth2.SetAuthURLParam("state", verifier),
|
||||
)
|
||||
|
||||
fmt.Println("Open the following URL in your browser. Fabric would like to authorize:")
|
||||
fmt.Println(authURL)
|
||||
fmt.Fprintln(os.Stderr, "Open the following URL in your browser. Fabric would like to authorize:")
|
||||
fmt.Fprintln(os.Stderr, authURL)
|
||||
openBrowser(authURL)
|
||||
fmt.Print("Paste the authorization code here: ")
|
||||
fmt.Fprint(os.Stderr, "Paste the authorization code here: ")
|
||||
var code string
|
||||
fmt.Scanln(&code)
|
||||
parts := strings.SplitN(code, "#", 2)
|
||||
|
||||
@@ -153,7 +153,7 @@ func (c *BedrockClient) ListModels() ([]string, error) {
|
||||
return models, nil
|
||||
}
|
||||
|
||||
// SendStream sends the messages to the the Bedrock ConverseStream API
|
||||
// SendStream sends the messages to the Bedrock ConverseStream API
|
||||
func (c *BedrockClient) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) (err error) {
|
||||
// Ensure channel is closed on all exit paths to prevent goroutine leaks
|
||||
defer func() {
|
||||
|
||||
@@ -12,6 +12,8 @@ import (
|
||||
"github.com/danielmiessler/fabric/internal/plugins"
|
||||
)
|
||||
|
||||
const DryRunResponse = "Dry run: Fake response sent by DryRun plugin\n"
|
||||
|
||||
type Client struct {
|
||||
*plugins.PluginBase
|
||||
}
|
||||
@@ -85,27 +87,37 @@ func (c *Client) formatOptions(opts *domain.ChatOptions) string {
|
||||
if opts.ImageFile != "" {
|
||||
builder.WriteString(fmt.Sprintf("ImageFile: %s\n", opts.ImageFile))
|
||||
}
|
||||
if opts.SuppressThink {
|
||||
builder.WriteString("SuppressThink: enabled\n")
|
||||
builder.WriteString(fmt.Sprintf("Thinking Start Tag: %s\n", opts.ThinkStartTag))
|
||||
builder.WriteString(fmt.Sprintf("Thinking End Tag: %s\n", opts.ThinkEndTag))
|
||||
}
|
||||
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
func (c *Client) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) error {
|
||||
func (c *Client) constructRequest(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) string {
|
||||
var builder strings.Builder
|
||||
builder.WriteString("Dry run: Would send the following request:\n\n")
|
||||
builder.WriteString(c.formatMessages(msgs))
|
||||
builder.WriteString(c.formatOptions(opts))
|
||||
|
||||
channel <- builder.String()
|
||||
close(channel)
|
||||
return builder.String()
|
||||
}
|
||||
|
||||
func (c *Client) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) error {
|
||||
defer close(channel)
|
||||
request := c.constructRequest(msgs, opts)
|
||||
channel <- request
|
||||
channel <- "\n"
|
||||
channel <- DryRunResponse
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Client) Send(_ context.Context, msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (string, error) {
|
||||
fmt.Println("Dry run: Would send the following request:")
|
||||
fmt.Print(c.formatMessages(msgs))
|
||||
fmt.Print(c.formatOptions(opts))
|
||||
request := c.constructRequest(msgs, opts)
|
||||
|
||||
return "", nil
|
||||
return request + "\n" + DryRunResponse, nil
|
||||
}
|
||||
|
||||
func (c *Client) Setup() error {
|
||||
|
||||
@@ -13,6 +13,7 @@ func NewClient() (ret *Client) {
|
||||
ret = &Client{}
|
||||
ret.Client = openai.NewClientCompatibleNoSetupQuestions("Exolab", ret.configure)
|
||||
|
||||
ret.ApiKey = ret.AddSetupQuestion("API Key", false)
|
||||
ret.ApiBaseURL = ret.AddSetupQuestion("API Base URL", true)
|
||||
ret.ApiBaseURL.Value = "http://localhost:52415"
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
package gemini
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
@@ -10,12 +11,20 @@ import (
|
||||
"github.com/danielmiessler/fabric/internal/plugins"
|
||||
|
||||
"github.com/danielmiessler/fabric/internal/domain"
|
||||
"github.com/google/generative-ai-go/genai"
|
||||
"google.golang.org/api/iterator"
|
||||
"google.golang.org/api/option"
|
||||
"google.golang.org/genai"
|
||||
)
|
||||
|
||||
const modelsNamePrefix = "models/"
|
||||
// WAV audio constants
|
||||
const (
|
||||
DefaultChannels = 1
|
||||
DefaultSampleRate = 24000
|
||||
DefaultBitsPerSample = 16
|
||||
WAVHeaderSize = 44
|
||||
RIFFHeaderSize = 36
|
||||
MaxAudioDataSize = 100 * 1024 * 1024 // 100MB limit for security
|
||||
MinAudioDataSize = 44 // Minimum viable audio data
|
||||
	AudioDataPrefix = "FABRIC_AUDIO_DATA:"
)

func NewClient() (ret *Client) {
	vendorName := "Gemini"
@@ -39,107 +48,104 @@ type Client struct {
func (o *Client) ListModels() (ret []string, err error) {
	ctx := context.Background()
	var client *genai.Client
	if client, err = genai.NewClient(ctx, option.WithAPIKey(o.ApiKey.Value)); err != nil {
	if client, err = genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  o.ApiKey.Value,
		Backend: genai.BackendGeminiAPI,
	}); err != nil {
		return
	}
	defer client.Close()

	iter := client.ListModels(ctx)
	for {
		var resp *genai.ModelInfo
		if resp, err = iter.Next(); err != nil {
			if errors.Is(err, iterator.Done) {
				err = nil
			}
			break
		}
	// List available models using the correct API
	resp, err := client.Models.List(ctx, &genai.ListModelsConfig{})
	if err != nil {
		return nil, err
	}

		name := o.buildModelNameSimple(resp.Name)
		ret = append(ret, name)
	for _, model := range resp.Items {
		// Strip the "models/" prefix for user convenience
		modelName := strings.TrimPrefix(model.Name, "models/")
		ret = append(ret, modelName)
	}
	return
}

func (o *Client) Send(ctx context.Context, msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (ret string, err error) {
	systemInstruction, messages := toMessages(msgs)
	// Check if this is a TTS model request
	if o.isTTSModel(opts.Model) {
		if !opts.AudioOutput {
			err = fmt.Errorf("TTS model '%s' requires audio output. Please specify an audio output file with -o flag ending in .wav", opts.Model)
			return
		}

		// Handle TTS generation
		return o.generateTTSAudio(ctx, msgs, opts)
	}

	// Regular text generation
	var client *genai.Client
	if client, err = genai.NewClient(ctx, option.WithAPIKey(o.ApiKey.Value)); err != nil {
		return
	}
	defer client.Close()

	model := client.GenerativeModel(o.buildModelNameFull(opts.Model))
	model.SetTemperature(float32(opts.Temperature))
	model.SetTopP(float32(opts.TopP))
	model.SystemInstruction = systemInstruction

	var response *genai.GenerateContentResponse
	if response, err = model.GenerateContent(ctx, messages...); err != nil {
	if client, err = genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  o.ApiKey.Value,
		Backend: genai.BackendGeminiAPI,
	}); err != nil {
		return
	}

	ret = o.extractText(response)
	// Convert messages to new SDK format
	contents := o.convertMessages(msgs)

	// Generate content
	temperature := float32(opts.Temperature)
	topP := float32(opts.TopP)
	response, err := client.Models.GenerateContent(ctx, o.buildModelNameFull(opts.Model), contents, &genai.GenerateContentConfig{
		Temperature:     &temperature,
		TopP:            &topP,
		MaxOutputTokens: int32(opts.ModelContextLength),
	})
	if err != nil {
		return "", err
	}

	// Extract text from response
	ret = o.extractTextFromResponse(response)
	return
}

func (o *Client) buildModelNameSimple(fullModelName string) string {
	return strings.TrimPrefix(fullModelName, modelsNamePrefix)
}

func (o *Client) buildModelNameFull(modelName string) string {
	return fmt.Sprintf("%v%v", modelsNamePrefix, modelName)
}

func (o *Client) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) (err error) {
	ctx := context.Background()
	var client *genai.Client
	if client, err = genai.NewClient(ctx, option.WithAPIKey(o.ApiKey.Value)); err != nil {
	if client, err = genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  o.ApiKey.Value,
		Backend: genai.BackendGeminiAPI,
	}); err != nil {
		return
	}
	defer client.Close()

	systemInstruction, messages := toMessages(msgs)
	// Convert messages to new SDK format
	contents := o.convertMessages(msgs)

	model := client.GenerativeModel(o.buildModelNameFull(opts.Model))
	model.SetTemperature(float32(opts.Temperature))
	model.SetTopP(float32(opts.TopP))
	model.SystemInstruction = systemInstruction
	// Generate streaming content
	temperature := float32(opts.Temperature)
	topP := float32(opts.TopP)
	stream := client.Models.GenerateContentStream(ctx, o.buildModelNameFull(opts.Model), contents, &genai.GenerateContentConfig{
		Temperature:     &temperature,
		TopP:            &topP,
		MaxOutputTokens: int32(opts.ModelContextLength),
	})

	iter := model.GenerateContentStream(ctx, messages...)
	for {
		if resp, iterErr := iter.Next(); iterErr == nil {
			for _, candidate := range resp.Candidates {
				if candidate.Content != nil {
					for _, part := range candidate.Content.Parts {
						if text, ok := part.(genai.Text); ok {
							channel <- string(text)
						}
					}
				}
			}
		} else {
			if !errors.Is(iterErr, iterator.Done) {
				channel <- fmt.Sprintf("%v\n", iterErr)
			}
	for response, err := range stream {
		if err != nil {
			channel <- fmt.Sprintf("Error: %v\n", err)
			close(channel)
			break
		}
	}
	return
}

func (o *Client) extractText(response *genai.GenerateContentResponse) (ret string) {
	for _, candidate := range response.Candidates {
		if candidate.Content == nil {
			break
		}
		for _, part := range candidate.Content.Parts {
			if text, ok := part.(genai.Text); ok {
				ret += string(text)
			}
		text := o.extractTextFromResponse(response)
		if text != "" {
			channel <- text
		}
	}
	close(channel)

	return
}

@@ -147,18 +153,223 @@ func (o *Client) NeedsRawMode(modelName string) bool {
	return false
}

func toMessages(msgs []*chat.ChatCompletionMessage) (systemInstruction *genai.Content, messages []genai.Part) {
	if len(msgs) >= 2 {
		systemInstruction = &genai.Content{
			Parts: []genai.Part{
				genai.Text(msgs[0].Content),
			},
		}
		for _, msg := range msgs[1:] {
			messages = append(messages, genai.Text(msg.Content))
		}
	} else {
		messages = append(messages, genai.Text(msgs[0].Content))
// buildModelNameFull adds the "models/" prefix for API calls
func (o *Client) buildModelNameFull(modelName string) string {
	if strings.HasPrefix(modelName, "models/") {
		return modelName
	}
	return
	return "models/" + modelName
}

// isTTSModel checks if the model is a text-to-speech model
func (o *Client) isTTSModel(modelName string) bool {
	lowerModel := strings.ToLower(modelName)
	return strings.Contains(lowerModel, "tts") ||
		strings.Contains(lowerModel, "preview-tts") ||
		strings.Contains(lowerModel, "text-to-speech")
}

// extractTextForTTS extracts text content from chat messages for TTS generation
func (o *Client) extractTextForTTS(msgs []*chat.ChatCompletionMessage) (string, error) {
	for i := len(msgs) - 1; i >= 0; i-- {
		if msgs[i].Role == chat.ChatMessageRoleUser && msgs[i].Content != "" {
			return msgs[i].Content, nil
		}
	}
	return "", fmt.Errorf("no text content found for TTS generation")
}

// createGenaiClient creates a new GenAI client for TTS operations
func (o *Client) createGenaiClient(ctx context.Context) (*genai.Client, error) {
	return genai.NewClient(ctx, &genai.ClientConfig{
		APIKey:  o.ApiKey.Value,
		Backend: genai.BackendGeminiAPI,
	})
}

// generateTTSAudio handles TTS audio generation using the new SDK
func (o *Client) generateTTSAudio(ctx context.Context, msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (ret string, err error) {
	textToSpeak, err := o.extractTextForTTS(msgs)
	if err != nil {
		return "", err
	}

	// Validate voice name before making API call
	if opts.Voice != "" && !IsValidGeminiVoice(opts.Voice) {
		validVoices := GetGeminiVoiceNames()
		return "", fmt.Errorf("invalid voice '%s'. Valid voices are: %v", opts.Voice, validVoices)
	}

	client, err := o.createGenaiClient(ctx)
	if err != nil {
		return "", err
	}

	return o.performTTSGeneration(ctx, client, textToSpeak, opts)
}

// performTTSGeneration performs the actual TTS generation and audio processing
func (o *Client) performTTSGeneration(ctx context.Context, client *genai.Client, textToSpeak string, opts *domain.ChatOptions) (string, error) {

	// Create content for TTS
	contents := []*genai.Content{{
		Parts: []*genai.Part{{Text: textToSpeak}},
	}}

	// Configure for TTS generation
	voiceName := opts.Voice
	if voiceName == "" {
		voiceName = "Kore" // Default voice if none specified
	}

	config := &genai.GenerateContentConfig{
		ResponseModalities: []string{"AUDIO"},
		SpeechConfig: &genai.SpeechConfig{
			VoiceConfig: &genai.VoiceConfig{
				PrebuiltVoiceConfig: &genai.PrebuiltVoiceConfig{
					VoiceName: voiceName,
				},
			},
		},
	}

	// Generate TTS content
	response, err := client.Models.GenerateContent(ctx, o.buildModelNameFull(opts.Model), contents, config)
	if err != nil {
		return "", fmt.Errorf("TTS generation failed: %w", err)
	}

	// Extract and process audio data
	if len(response.Candidates) > 0 && response.Candidates[0].Content != nil && len(response.Candidates[0].Content.Parts) > 0 {
		part := response.Candidates[0].Content.Parts[0]
		if part.InlineData != nil && len(part.InlineData.Data) > 0 {
			// Validate audio data format and size
			if part.InlineData.MIMEType != "" && !strings.HasPrefix(part.InlineData.MIMEType, "audio/") {
				return "", fmt.Errorf("unexpected data type: %s, expected audio data", part.InlineData.MIMEType)
			}

			pcmData := part.InlineData.Data
			if len(pcmData) < MinAudioDataSize {
				return "", fmt.Errorf("audio data too small: %d bytes, minimum required: %d", len(pcmData), MinAudioDataSize)
			}

			// Generate WAV file with proper headers and return the binary data
			wavData, err := o.generateWAVFile(pcmData)
			if err != nil {
				return "", fmt.Errorf("failed to generate WAV file: %w", err)
			}

			// Validate generated WAV data
			if len(wavData) < WAVHeaderSize {
				return "", fmt.Errorf("generated WAV data is invalid: %d bytes, minimum required: %d", len(wavData), WAVHeaderSize)
			}

			// Store the binary audio data in a special format that the CLI can detect
			// Use more efficient string concatenation
			return AudioDataPrefix + string(wavData), nil
		}
	}

	return "", fmt.Errorf("no audio data received from TTS model")
}

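The `FABRIC_AUDIO_DATA:` prefix returned by `performTTSGeneration` is how binary WAV output is signalled to the caller instead of ordinary text. A minimal, hypothetical consumer-side sketch of that contract (the helper name and flow are assumptions, not part of this patch):

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// Mirrors the AudioDataPrefix constant defined in the gemini package above.
const audioDataPrefix = "FABRIC_AUDIO_DATA:"

// writeResultOrAudio is a hypothetical helper: when the model result carries
// the audio marker, it strips the prefix and writes the WAV bytes to disk;
// otherwise it treats the result as plain text.
func writeResultOrAudio(result, outputPath string) error {
	if strings.HasPrefix(result, audioDataPrefix) {
		wavBytes := []byte(strings.TrimPrefix(result, audioDataPrefix))
		return os.WriteFile(outputPath, wavBytes, 0644)
	}
	_, err := fmt.Println(result)
	return err
}

func main() {
	// Example: a plain-text result falls through to stdout.
	_ = writeResultOrAudio("Hello from Gemini", "output.wav")
}
```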
// generateWAVFile creates WAV data from PCM data with proper headers
func (o *Client) generateWAVFile(pcmData []byte) ([]byte, error) {
	// Validate input size to prevent potential security issues
	if len(pcmData) == 0 {
		return nil, fmt.Errorf("empty PCM data provided")
	}
	if len(pcmData) > MaxAudioDataSize {
		return nil, fmt.Errorf("PCM data too large: %d bytes, maximum allowed: %d", len(pcmData), MaxAudioDataSize)
	}

	// WAV file parameters (Gemini TTS default specs)
	channels := DefaultChannels
	sampleRate := DefaultSampleRate
	bitsPerSample := DefaultBitsPerSample

	// Calculate required values
	byteRate := sampleRate * channels * bitsPerSample / 8
	blockAlign := channels * bitsPerSample / 8
	dataLen := uint32(len(pcmData))
	riffSize := RIFFHeaderSize + dataLen

	// Pre-allocate buffer with known size for better performance
	totalSize := int(riffSize + 8) // +8 for RIFF header
	buf := bytes.NewBuffer(make([]byte, 0, totalSize))

	// RIFF header
	buf.WriteString("RIFF")
	binary.Write(buf, binary.LittleEndian, riffSize)
	buf.WriteString("WAVE")

	// fmt chunk
	buf.WriteString("fmt ")
	binary.Write(buf, binary.LittleEndian, uint32(16))            // subchunk1Size
	binary.Write(buf, binary.LittleEndian, uint16(1))             // audioFormat = PCM
	binary.Write(buf, binary.LittleEndian, uint16(channels))      // numChannels
	binary.Write(buf, binary.LittleEndian, uint32(sampleRate))    // sampleRate
	binary.Write(buf, binary.LittleEndian, uint32(byteRate))      // byteRate
	binary.Write(buf, binary.LittleEndian, uint16(blockAlign))    // blockAlign
	binary.Write(buf, binary.LittleEndian, uint16(bitsPerSample)) // bitsPerSample

	// data chunk
	buf.WriteString("data")
	binary.Write(buf, binary.LittleEndian, dataLen)

	// Write PCM data to buffer
	buf.Write(pcmData)

	// Validate generated WAV data
	result := buf.Bytes()
	if len(result) < WAVHeaderSize {
		return nil, fmt.Errorf("generated WAV data is invalid: %d bytes, minimum required: %d", len(result), WAVHeaderSize)
	}

	return result, nil
}

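For a quick sanity check of the header arithmetic above, assume Gemini TTS returns mono 16-bit PCM at 24 kHz (the actual `DefaultChannels`, `DefaultSampleRate`, and `DefaultBitsPerSample` constants are defined elsewhere in the package, so these values are assumptions) and that `RIFFHeaderSize` is the conventional 36 bytes:

```go
package main

import "fmt"

func main() {
	// Assumed Gemini TTS PCM format: mono, 24 kHz, 16-bit samples.
	channels, sampleRate, bitsPerSample := 1, 24000, 16
	dataLen := 24000 * 2 // one second of audio: 24000 samples x 2 bytes each

	byteRate := sampleRate * channels * bitsPerSample / 8 // 48000 bytes per second
	blockAlign := channels * bitsPerSample / 8            // 2 bytes per sample frame
	riffSize := 36 + dataLen                              // 36 stands in for RIFFHeaderSize

	fmt.Println(byteRate, blockAlign, riffSize) // 48000 2 48036
}
```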
// convertMessages converts fabric chat messages to genai Content format
func (o *Client) convertMessages(msgs []*chat.ChatCompletionMessage) []*genai.Content {
	var contents []*genai.Content

	for _, msg := range msgs {
		content := &genai.Content{Parts: []*genai.Part{}}

		if msg.Content != "" {
			content.Parts = append(content.Parts, &genai.Part{Text: msg.Content})
		}

		// Handle multi-content messages (images, etc.)
		for _, part := range msg.MultiContent {
			switch part.Type {
			case chat.ChatMessagePartTypeText:
				content.Parts = append(content.Parts, &genai.Part{Text: part.Text})
			case chat.ChatMessagePartTypeImageURL:
				// TODO: Handle image URLs if needed
				// This would require downloading and converting to inline data
			}
		}

		contents = append(contents, content)
	}

	return contents
}

// extractTextFromResponse extracts text content from the response
func (o *Client) extractTextFromResponse(response *genai.GenerateContentResponse) string {
	var result strings.Builder

	for _, candidate := range response.Candidates {
		if candidate.Content != nil {
			for _, part := range candidate.Content.Parts {
				if part.Text != "" {
					result.WriteString(part.Text)
				}
			}
		}
	}

	return result.String()
}

@@ -3,32 +3,40 @@ package gemini
import (
	"testing"

	"github.com/google/generative-ai-go/genai"
	"google.golang.org/genai"
)

// Test generated using Keploy
func TestBuildModelNameSimple(t *testing.T) {
// Test buildModelNameFull method
func TestBuildModelNameFull(t *testing.T) {
	client := &Client{}
	fullModelName := "models/chat-bison-001"
	expected := "chat-bison-001"

	result := client.buildModelNameSimple(fullModelName)
	tests := []struct {
		input    string
		expected string
	}{
		{"chat-bison-001", "models/chat-bison-001"},
		{"models/chat-bison-001", "models/chat-bison-001"},
		{"gemini-2.5-flash-preview-tts", "models/gemini-2.5-flash-preview-tts"},
	}

	if result != expected {
		t.Errorf("Expected %v, got %v", expected, result)
	for _, test := range tests {
		result := client.buildModelNameFull(test.input)
		if result != test.expected {
			t.Errorf("For input %v, expected %v, got %v", test.input, test.expected, result)
		}
	}
}

// Test generated using Keploy
func TestExtractText(t *testing.T) {
// Test extractTextFromResponse method
func TestExtractTextFromResponse(t *testing.T) {
	client := &Client{}
	response := &genai.GenerateContentResponse{
		Candidates: []*genai.Candidate{
			{
				Content: &genai.Content{
					Parts: []genai.Part{
						genai.Text("Hello, "),
						genai.Text("world!"),
					Parts: []*genai.Part{
						{Text: "Hello, "},
						{Text: "world!"},
					},
				},
			},
@@ -36,9 +44,56 @@ func TestExtractText(t *testing.T) {
	}
	expected := "Hello, world!"

	result := client.extractText(response)
	result := client.extractTextFromResponse(response)

	if result != expected {
		t.Errorf("Expected %v, got %v", expected, result)
	}
}

// Test isTTSModel method
func TestIsTTSModel(t *testing.T) {
	client := &Client{}

	tests := []struct {
		modelName string
		expected  bool
	}{
		{"gemini-2.5-flash-preview-tts", true},
		{"text-to-speech-model", true},
		{"TTS-MODEL", true},
		{"gemini-pro", false},
		{"chat-bison", false},
		{"", false},
	}

	for _, test := range tests {
		result := client.isTTSModel(test.modelName)
		if result != test.expected {
			t.Errorf("For model %v, expected %v, got %v", test.modelName, test.expected, result)
		}
	}
}

// Test generateWAVFile method (basic test)
func TestGenerateWAVFile(t *testing.T) {
	client := &Client{}

	// Test with minimal PCM data
	pcmData := []byte{0x00, 0x01, 0x02, 0x03}

	result, err := client.generateWAVFile(pcmData)
	if err != nil {
		t.Errorf("generateWAVFile failed: %v", err)
	}

	// Check that we got some data back
	if len(result) == 0 {
		t.Error("generateWAVFile returned empty data")
	}

	// Check that it starts with RIFF header
	if len(result) >= 4 && string(result[0:4]) != "RIFF" {
		t.Error("Generated WAV data doesn't start with RIFF header")
	}
}

218	internal/plugins/ai/gemini/voices.go	Normal file
@@ -0,0 +1,218 @@
package gemini

import (
	"fmt"
	"sort"
)

// GeminiVoice represents a Gemini TTS voice with its characteristics
type GeminiVoice struct {
	Name            string
	Description     string
	Characteristics []string
}

// GetGeminiVoices returns the current list of supported Gemini TTS voices
// This list is maintained based on official Google Gemini documentation
// https://ai.google.dev/gemini-api/docs/speech-generation
func GetGeminiVoices() []GeminiVoice {
	return []GeminiVoice{
		// Firm voices
		{Name: "Kore", Description: "Firm and confident", Characteristics: []string{"firm", "confident", "default"}},
		{Name: "Orus", Description: "Firm and decisive", Characteristics: []string{"firm", "decisive"}},
		{Name: "Alnilam", Description: "Firm and strong", Characteristics: []string{"firm", "strong"}},

		// Upbeat voices
		{Name: "Puck", Description: "Upbeat and energetic", Characteristics: []string{"upbeat", "energetic"}},
		{Name: "Laomedeia", Description: "Upbeat and lively", Characteristics: []string{"upbeat", "lively"}},

		// Bright voices
		{Name: "Zephyr", Description: "Bright and cheerful", Characteristics: []string{"bright", "cheerful"}},
		{Name: "Autonoe", Description: "Bright and optimistic", Characteristics: []string{"bright", "optimistic"}},

		// Informative voices
		{Name: "Charon", Description: "Informative and clear", Characteristics: []string{"informative", "clear"}},
		{Name: "Rasalgethi", Description: "Informative and professional", Characteristics: []string{"informative", "professional"}},

		// Natural voices
		{Name: "Aoede", Description: "Breezy and natural", Characteristics: []string{"breezy", "natural"}},
		{Name: "Leda", Description: "Youthful and energetic", Characteristics: []string{"youthful", "energetic"}},

		// Gentle voices
		{Name: "Vindemiatrix", Description: "Gentle and kind", Characteristics: []string{"gentle", "kind"}},
		{Name: "Achernar", Description: "Soft and gentle", Characteristics: []string{"soft", "gentle"}},
		{Name: "Enceladus", Description: "Breathy and soft", Characteristics: []string{"breathy", "soft"}},

		// Warm voices
		{Name: "Sulafat", Description: "Warm and welcoming", Characteristics: []string{"warm", "welcoming"}},
		{Name: "Capella", Description: "Warm and approachable", Characteristics: []string{"warm", "approachable"}},

		// Clear voices
		{Name: "Iapetus", Description: "Clear and articulate", Characteristics: []string{"clear", "articulate"}},
		{Name: "Erinome", Description: "Clear and precise", Characteristics: []string{"clear", "precise"}},

		// Pleasant voices
		{Name: "Algieba", Description: "Smooth and pleasant", Characteristics: []string{"smooth", "pleasant"}},
		{Name: "Vega", Description: "Smooth and flowing", Characteristics: []string{"smooth", "flowing"}},

		// Textured voices
		{Name: "Algenib", Description: "Gravelly texture", Characteristics: []string{"gravelly", "textured"}},

		// Relaxed voices
		{Name: "Callirrhoe", Description: "Easy-going and relaxed", Characteristics: []string{"relaxed", "easy-going"}},
		{Name: "Despina", Description: "Calm and serene", Characteristics: []string{"calm", "serene"}},

		// Mature voices
		{Name: "Gacrux", Description: "Mature and experienced", Characteristics: []string{"mature", "experienced"}},

		// Expressive voices
		{Name: "Pulcherrima", Description: "Forward and expressive", Characteristics: []string{"forward", "expressive"}},
		{Name: "Lyra", Description: "Melodic and expressive", Characteristics: []string{"melodic", "expressive"}},

		// Dynamic voices
		{Name: "Fenrir", Description: "Excitable and dynamic", Characteristics: []string{"excitable", "dynamic"}},
		{Name: "Sadachbia", Description: "Lively and animated", Characteristics: []string{"lively", "animated"}},

		// Friendly voices
		{Name: "Achird", Description: "Friendly and approachable", Characteristics: []string{"friendly", "approachable"}},

		// Casual voices
		{Name: "Zubenelgenubi", Description: "Casual and conversational", Characteristics: []string{"casual", "conversational"}},

		// Additional voices from latest API
		{Name: "Sadaltager", Description: "Experimental voice with a calm and neutral tone", Characteristics: []string{"experimental", "calm", "neutral"}},
		{Name: "Schedar", Description: "Experimental voice with a warm and engaging tone", Characteristics: []string{"experimental", "warm", "engaging"}},
		{Name: "Umbriel", Description: "Experimental voice with a deep and resonant tone", Characteristics: []string{"experimental", "deep", "resonant"}},
	}
}

// GetGeminiVoiceNames returns just the voice names in alphabetical order
func GetGeminiVoiceNames() []string {
	voices := GetGeminiVoices()
	names := make([]string, len(voices))
	for i, voice := range voices {
		names[i] = voice.Name
	}
	sort.Strings(names)
	return names
}

// IsValidGeminiVoice checks if a voice name is valid
func IsValidGeminiVoice(voiceName string) bool {
	if voiceName == "" {
		return true // Empty voice is valid (will use default)
	}

	for _, voice := range GetGeminiVoices() {
		if voice.Name == voiceName {
			return true
		}
	}
	return false
}

// GetGeminiVoiceByName returns a specific voice by name
func GetGeminiVoiceByName(name string) (*GeminiVoice, error) {
	for _, voice := range GetGeminiVoices() {
		if voice.Name == name {
			return &voice, nil
		}
	}
	return nil, fmt.Errorf("voice '%s' not found", name)
}

// ListGeminiVoices formats the voice list for display
func ListGeminiVoices(shellCompleteMode bool) string {
	if shellCompleteMode {
		// For shell completion, just return voice names
		names := GetGeminiVoiceNames()
		result := ""
		for _, name := range names {
			result += name + "\n"
		}
		return result
	}

	// For human-readable output
	voices := GetGeminiVoices()
	result := "Available Gemini Text-to-Speech voices:\n\n"

	// Group by characteristics for better readability
	groups := map[string][]GeminiVoice{
		"Firm & Confident":     {},
		"Bright & Cheerful":    {},
		"Warm & Welcoming":     {},
		"Clear & Professional": {},
		"Natural & Expressive": {},
		"Other Voices":         {},
	}

	for _, voice := range voices {
		placed := false
		for _, char := range voice.Characteristics {
			switch char {
			case "firm", "confident", "decisive", "strong":
				if !placed {
					groups["Firm & Confident"] = append(groups["Firm & Confident"], voice)
					placed = true
				}
			case "bright", "cheerful", "upbeat", "energetic", "lively":
				if !placed {
					groups["Bright & Cheerful"] = append(groups["Bright & Cheerful"], voice)
					placed = true
				}
			case "warm", "welcoming", "friendly", "approachable":
				if !placed {
					groups["Warm & Welcoming"] = append(groups["Warm & Welcoming"], voice)
					placed = true
				}
			case "clear", "informative", "professional", "articulate":
				if !placed {
					groups["Clear & Professional"] = append(groups["Clear & Professional"], voice)
					placed = true
				}
			case "natural", "expressive", "melodic", "breezy":
				if !placed {
					groups["Natural & Expressive"] = append(groups["Natural & Expressive"], voice)
					placed = true
				}
			}
		}
		if !placed {
			groups["Other Voices"] = append(groups["Other Voices"], voice)
		}
	}

	// Output grouped voices
	for groupName, groupVoices := range groups {
		if len(groupVoices) > 0 {
			result += fmt.Sprintf("%s:\n", groupName)
			for _, voice := range groupVoices {
				defaultStr := ""
				if voice.Name == "Kore" {
					defaultStr = " (default)"
				}
				result += fmt.Sprintf("  %-15s - %s%s\n", voice.Name, voice.Description, defaultStr)
			}
			result += "\n"
		}
	}

	result += "Use --voice <voice_name> to select a specific voice.\n"
	result += "Example: fabric --voice Charon -m gemini-2.5-flash-preview-tts -o output.wav \"Hello world\"\n"

	return result
}

// NOTE: This implementation maintains a curated list based on official Google documentation.
// In the future, if Google provides a dynamic voice discovery API, this can be updated
// to make API calls for real-time voice discovery.
//
// The current approach ensures:
// 1. Fast response times (no API calls needed)
// 2. Reliable voice information with descriptions
// 3. Easy maintenance when new voices are added
// 4. Offline functionality
//
// To update voices: Monitor Google's Gemini TTS documentation at:
// https://ai.google.dev/gemini-api/docs/speech-generation
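Taken together, the helpers compose as in the test-style sketch below: validate the user-supplied voice, then look up its metadata. The example function itself is illustrative and would live in a `_test.go` file inside the package; only `IsValidGeminiVoice`, `GetGeminiVoiceNames`, and `GetGeminiVoiceByName` come from the file above.

```go
package gemini

import "fmt"

// ExampleVoiceSelection is a hypothetical example showing the intended call pattern.
func ExampleVoiceSelection() {
	requested := "Charon"
	if !IsValidGeminiVoice(requested) {
		fmt.Println("valid voices:", GetGeminiVoiceNames())
		return
	}
	voice, err := GetGeminiVoiceByName(requested)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("%s - %s\n", voice.Name, voice.Description)
	// Output: Charon - Informative and clear
}
```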
@@ -5,6 +5,7 @@ import (
	"fmt"
	"net/http"
	"net/url"
	"os"
	"strings"
	"time"

@@ -61,6 +62,11 @@ func (t *transport_sec) RoundTrip(req *http.Request) (*http.Response, error) {
	return t.underlyingTransport.RoundTrip(req)
}

// IsConfigured returns true only if OLLAMA_API_URL environment variable is explicitly set
func (o *Client) IsConfigured() bool {
	return os.Getenv("OLLAMA_API_URL") != ""
}

func (o *Client) configure() (err error) {
	if o.apiUrl, err = url.Parse(o.ApiUrl.Value); err != nil {
		fmt.Printf("cannot parse URL: %s: %v\n", o.ApiUrl.Value, err)
@@ -160,6 +166,7 @@ func (o *Client) NeedsRawMode(modelName string) bool {
	ollamaPrefixes := []string{
		"llama3",
		"llama2",
		"mistral",
	}
	for _, prefix := range ollamaPrefixes {
		if strings.HasPrefix(modelName, prefix) {

@@ -66,6 +66,11 @@ type Client struct {
	ImplementsResponses bool // Whether this provider supports the Responses API
}

// SetResponsesAPIEnabled configures whether to use the Responses API
func (o *Client) SetResponsesAPIEnabled(enabled bool) {
	o.ImplementsResponses = enabled
}

func (o *Client) configure() (ret error) {
	opts := []option.RequestOption{option.WithAPIKey(o.ApiKey.Value)}
	if o.ApiBaseURL.Value != "" {
@@ -159,6 +164,7 @@ func (o *Client) NeedsRawMode(modelName string) bool {
		"o1",
		"o3",
		"o4",
		"gpt-5",
	}
	openAIModelsNeedingRaw := []string{
		"gpt-4o-mini-search-preview",

105	internal/plugins/ai/openai_compatible/direct_models_call.go	Normal file
@@ -0,0 +1,105 @@
package openai_compatible

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"
)

// Model represents a model returned by the API
type Model struct {
	ID string `json:"id"`
}

// ErrorResponseLimit defines the maximum length of error response bodies for truncation.
const errorResponseLimit = 1024 // Limit for error response body size

// DirectlyGetModels is used to fetch models directly from the API
// when the standard OpenAI SDK method fails due to a nonstandard format.
// This is useful for providers like Together that return a direct array of models.
func (c *Client) DirectlyGetModels(ctx context.Context) ([]string, error) {
	if ctx == nil {
		ctx = context.Background()
	}
	baseURL := c.ApiBaseURL.Value
	if baseURL == "" {
		return nil, fmt.Errorf("API base URL not configured for provider %s", c.GetName())
	}

	// Build the /models endpoint URL
	fullURL, err := url.JoinPath(baseURL, "models")
	if err != nil {
		return nil, fmt.Errorf("failed to create models URL: %w", err)
	}

	req, err := http.NewRequestWithContext(ctx, "GET", fullURL, nil)
	if err != nil {
		return nil, err
	}

	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.ApiKey.Value))
	req.Header.Set("Accept", "application/json")

	// TODO: Consider reusing a single http.Client instance (e.g., as a field on Client) instead of allocating a new one for each request.

	client := &http.Client{
		Timeout: 10 * time.Second,
	}
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Read the response body for debugging
		bodyBytes, _ := io.ReadAll(resp.Body)
		bodyString := string(bodyBytes)
		if len(bodyString) > errorResponseLimit { // Truncate if too large
			bodyString = bodyString[:errorResponseLimit] + "..."
		}
		return nil, fmt.Errorf("unexpected status code: %d from provider %s, response body: %s",
			resp.StatusCode, c.GetName(), bodyString)
	}

	// Read the response body once
	bodyBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	// Try to parse as an object with data field (OpenAI format)
	var openAIFormat struct {
		Data []Model `json:"data"`
	}
	// Try to parse as a direct array (Together format)
	var directArray []Model

	if err := json.Unmarshal(bodyBytes, &openAIFormat); err == nil && len(openAIFormat.Data) > 0 {
		return extractModelIDs(openAIFormat.Data), nil
	}

	if err := json.Unmarshal(bodyBytes, &directArray); err == nil && len(directArray) > 0 {
		return extractModelIDs(directArray), nil
	}

	var truncatedBody string
	if len(bodyBytes) > errorResponseLimit {
		truncatedBody = string(bodyBytes[:errorResponseLimit]) + "..."
	} else {
		truncatedBody = string(bodyBytes)
	}
	return nil, fmt.Errorf("unable to parse models response; raw response: %s", truncatedBody)
}

func extractModelIDs(models []Model) []string {
	modelIDs := make([]string, 0, len(models))
	for _, model := range models {
		modelIDs = append(modelIDs, model.ID)
	}
	return modelIDs
}

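The two unmarshal attempts in `DirectlyGetModels` correspond to the two response shapes it has to accept: the OpenAI-style `{"data": [...]}` envelope and the bare array some providers such as Together return. A standalone sketch with made-up model IDs:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type Model struct {
	ID string `json:"id"`
}

func main() {
	// OpenAI-style envelope.
	openAIBody := []byte(`{"data":[{"id":"gpt-4o"},{"id":"gpt-4o-mini"}]}`)
	// Bare-array style, as returned by some providers.
	directBody := []byte(`[{"id":"meta-llama/Llama-3-70b-chat-hf"}]`)

	var envelope struct {
		Data []Model `json:"data"`
	}
	var direct []Model

	_ = json.Unmarshal(openAIBody, &envelope)
	_ = json.Unmarshal(directBody, &direct)

	fmt.Println(len(envelope.Data), len(direct)) // 2 1
}
```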
@@ -1,6 +1,7 @@
package openai_compatible

import (
	"context"
	"os"
	"strings"

@@ -31,6 +32,19 @@ func NewClient(providerConfig ProviderConfig) *Client {
	return client
}

// ListModels overrides the default ListModels to handle different response formats
func (c *Client) ListModels() ([]string, error) {
	// First try the standard OpenAI SDK approach
	models, err := c.Client.ListModels()
	if err == nil && len(models) > 0 { // only return if OpenAI SDK returns models
		return models, nil
	}

	// TODO: Handle context properly in Fabric by accepting and propagating a context.Context
	// instead of creating a new one here.
	return c.DirectlyGetModels(context.Background())
}

// ProviderMap is a map of provider name to ProviderConfig for O(1) lookup
var ProviderMap = map[string]ProviderConfig{
	"AIML": {
@@ -83,6 +97,11 @@ var ProviderMap = map[string]ProviderConfig{
		BaseURL:             "https://api.siliconflow.cn/v1",
		ImplementsResponses: false,
	},
	"Together": {
		Name:                "Together",
		BaseURL:             "https://api.together.xyz/v1",
		ImplementsResponses: false,
	},
}

// GetProviderByName returns the provider configuration for a given name with O(1) lookup

@@ -86,9 +86,10 @@ func (o *Session) String() (ret string) {
	ret += fmt.Sprintf("\n--- \n[%v]\n%v", message.Role, message.Content)
	if message.MultiContent != nil {
		for _, part := range message.MultiContent {
			if part.Type == chat.ChatMessagePartTypeImageURL {
			switch part.Type {
			case chat.ChatMessagePartTypeImageURL:
				ret += fmt.Sprintf("\n%v: %v", part.Type, *part.ImageURL)
			} else if part.Type == chat.ChatMessagePartTypeText {
			case chat.ChatMessagePartTypeText:
				ret += fmt.Sprintf("\n%v: %v", part.Type, part.Text)
			}
		}

@@ -4,6 +4,8 @@ import (
	"fmt"
	"os"
	"path/filepath"
	"sort"
	"strings"

	"github.com/danielmiessler/fabric/internal/plugins"
	"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
@@ -107,6 +109,12 @@ func (o *PatternsLoader) PopulateDB() (err error) {
	}

	fmt.Printf("✅ Successfully downloaded and installed patterns to %s\n", o.Patterns.Dir)

	// Create the unique patterns file after patterns are successfully moved
	if err = o.createUniquePatternsFile(); err != nil {
		return fmt.Errorf("failed to create unique patterns file: %w", err)
	}

	return
}

@@ -301,3 +309,60 @@ func (o *PatternsLoader) countPatternsInDirectory(dir string) (int, error) {

	return patternCount, nil
}

// createUniquePatternsFile creates the unique_patterns.txt file with all pattern names
func (o *PatternsLoader) createUniquePatternsFile() (err error) {
	// Read patterns from the main patterns directory
	entries, err := os.ReadDir(o.Patterns.Dir)
	if err != nil {
		return fmt.Errorf("failed to read patterns directory: %w", err)
	}

	patternNamesMap := make(map[string]bool) // Use map to avoid duplicates

	// Add patterns from main directory
	for _, entry := range entries {
		if entry.IsDir() {
			patternNamesMap[entry.Name()] = true
		}
	}

	// Add patterns from custom patterns directory if it exists
	if o.Patterns.CustomPatternsDir != "" {
		if customEntries, customErr := os.ReadDir(o.Patterns.CustomPatternsDir); customErr == nil {
			for _, entry := range customEntries {
				if entry.IsDir() {
					patternNamesMap[entry.Name()] = true
				}
			}
			fmt.Fprintf(os.Stderr, "📂 Also included patterns from custom directory: %s\n", o.Patterns.CustomPatternsDir)
		} else {
			fmt.Fprintf(os.Stderr, "Warning: Could not read custom patterns directory %s: %v\n", o.Patterns.CustomPatternsDir, customErr)
		}
	}

	if len(patternNamesMap) == 0 {
		if o.Patterns.CustomPatternsDir != "" {
			return fmt.Errorf("no patterns found in directories %s and %s", o.Patterns.Dir, o.Patterns.CustomPatternsDir)
		}
		return fmt.Errorf("no patterns found in directory %s", o.Patterns.Dir)
	}

	// Convert map to sorted slice
	var patternNames []string
	for name := range patternNamesMap {
		patternNames = append(patternNames, name)
	}

	// Sort patterns alphabetically for consistent output
	sort.Strings(patternNames)

	// Join pattern names with newlines
	content := strings.Join(patternNames, "\n") + "\n"
	if err = os.WriteFile(o.Patterns.UniquePatternsFilePath, []byte(content), 0644); err != nil {
		return fmt.Errorf("failed to write unique patterns file: %w", err)
	}

	fmt.Printf("📝 Created unique patterns file with %d patterns\n", len(patternNames))
	return nil
}

61	internal/tools/youtube/timestamp_test.go	Normal file
@@ -0,0 +1,61 @@
package youtube

import (
	"testing"
)

func TestParseTimestampToSeconds(t *testing.T) {
	tests := []struct {
		timestamp string
		expected  int
		shouldErr bool
	}{
		{"00:30", 30, false},
		{"01:30", 90, false},
		{"01:05:30", 3930, false}, // 1 hour 5 minutes 30 seconds
		{"10:00", 600, false},
		{"invalid", 0, true},
		{"1:2:3:4", 0, true}, // too many parts
	}

	for _, test := range tests {
		result, err := parseTimestampToSeconds(test.timestamp)

		if test.shouldErr {
			if err == nil {
				t.Errorf("Expected error for timestamp %s, but got none", test.timestamp)
			}
		} else {
			if err != nil {
				t.Errorf("Unexpected error for timestamp %s: %v", test.timestamp, err)
			}
			if result != test.expected {
				t.Errorf("For timestamp %s, expected %d seconds, got %d", test.timestamp, test.expected, result)
			}
		}
	}
}

func TestShouldIncludeRepeat(t *testing.T) {
	tests := []struct {
		lastTimestamp    string
		currentTimestamp string
		expected         bool
		description      string
	}{
		{"00:30", "01:30", true, "60 second gap should allow repeat"},
		{"00:30", "00:45", true, "15 second gap should allow repeat"},
		{"01:00", "01:10", true, "10 second gap should allow repeat (boundary case)"},
		{"01:00", "01:09", false, "9 second gap should not allow repeat"},
		{"00:30", "00:35", false, "5 second gap should not allow repeat"},
		{"invalid", "01:30", true, "invalid timestamp should err on side of inclusion"},
		{"01:30", "invalid", true, "invalid timestamp should err on side of inclusion"},
	}

	for _, test := range tests {
		result := shouldIncludeRepeat(test.lastTimestamp, test.currentTimestamp)
		if result != test.expected {
			t.Errorf("%s: expected %v, got %v", test.description, test.expected, result)
		}
	}
}
@@ -29,6 +29,15 @@ import (
	"google.golang.org/api/youtube/v3"
)

var timestampRegex *regexp.Regexp

const TimeGapForRepeats = 10 // seconds

func init() {
	// Match timestamps like "00:00:01.234" or just numbers or sequence numbers
	timestampRegex = regexp.MustCompile(`^\d+$|^\d{1,2}:\d{2}(:\d{2})?(\.\d{3})?$`)
}

func NewYouTube() (ret *YouTube) {

	label := "YouTube"
@@ -180,6 +189,7 @@ func (o *YouTube) readAndCleanVTTFile(filename string) (ret string, err error) {
	// Convert VTT to plain text
	lines := strings.Split(string(content), "\n")
	var textBuilder strings.Builder
	seenSegments := make(map[string]struct{})

	for _, line := range lines {
		line = strings.TrimSpace(line)
@@ -193,8 +203,11 @@ func (o *YouTube) readAndCleanVTTFile(filename string) (ret string, err error) {
		// Remove VTT formatting tags
		line = removeVTTTags(line)
		if line != "" {
			textBuilder.WriteString(line)
			textBuilder.WriteString(" ")
			if _, exists := seenSegments[line]; !exists {
				textBuilder.WriteString(line)
				textBuilder.WriteString(" ")
				seenSegments[line] = struct{}{}
			}
		}
	}

@@ -215,6 +228,10 @@ func (o *YouTube) readAndFormatVTTWithTimestamps(filename string) (ret string, err error) {
	lines := strings.Split(string(content), "\n")
	var textBuilder strings.Builder
	var currentTimestamp string
	// Track content with timestamps to allow repeats after significant time gaps
	// This preserves legitimate repeated content (choruses, recurring phrases, etc.)
	// while still filtering out immediate duplicates from VTT formatting issues
	seenSegments := make(map[string]string) // text -> last timestamp seen

	for _, line := range lines {
		line = strings.TrimSpace(line)
@@ -246,7 +263,20 @@ func (o *YouTube) readAndFormatVTTWithTimestamps(filename string) (ret string, err error) {
		// Remove VTT formatting tags
		cleanText := removeVTTTags(line)
		if cleanText != "" && currentTimestamp != "" {
			textBuilder.WriteString(fmt.Sprintf("[%s] %s\n", currentTimestamp, cleanText))
			// Check if we should include this segment
			shouldInclude := true
			if lastTimestamp, exists := seenSegments[cleanText]; exists {
				// Calculate time difference to determine if this is a legitimate repeat
				if !shouldIncludeRepeat(lastTimestamp, currentTimestamp) {
					shouldInclude = false
				}
			}

			if shouldInclude {
				timestampedLine := fmt.Sprintf("[%s] %s", currentTimestamp, cleanText)
				textBuilder.WriteString(timestampedLine + "\n")
				seenSegments[cleanText] = currentTimestamp
			}
		}
	}
}
@@ -268,8 +298,6 @@ func formatVTTTimestamp(vttTime string) string {
}

func isTimeStamp(s string) bool {
	// Match timestamps like "00:00:01.234" or just numbers
	timestampRegex := regexp.MustCompile(`^\d+$|^\d{2}:\d{2}:\d{2}`)
	return timestampRegex.MatchString(s)
}

@@ -279,6 +307,76 @@ func removeVTTTags(s string) string {
	return tagRegex.ReplaceAllString(s, "")
}

// shouldIncludeRepeat determines if repeated content should be included based on time gap
func shouldIncludeRepeat(lastTimestamp, currentTimestamp string) bool {
	// Parse timestamps to calculate time difference
	lastSeconds, err1 := parseTimestampToSeconds(lastTimestamp)
	currentSeconds, err2 := parseTimestampToSeconds(currentTimestamp)

	if err1 != nil || err2 != nil {
		// If we can't parse timestamps, err on the side of inclusion
		return true
	}

	// Allow repeats if there's at least a TimeGapForRepeats gap
	// This threshold can be adjusted based on use case:
	// - 10 seconds works well for most content
	// - Could be made configurable in the future
	timeDiffSeconds := currentSeconds - lastSeconds
	return timeDiffSeconds >= TimeGapForRepeats
}

// parseTimestampToSeconds converts timestamp string (HH:MM:SS or MM:SS) to total seconds
func parseTimestampToSeconds(timestamp string) (int, error) {
	parts := strings.Split(timestamp, ":")
	if len(parts) < 2 || len(parts) > 3 {
		return 0, fmt.Errorf("invalid timestamp format: %s", timestamp)
	}

	var hours, minutes, seconds int
	var err error

	if len(parts) == 3 {
		// HH:MM:SS format
		if hours, err = strconv.Atoi(parts[0]); err != nil {
			return 0, err
		}
		if minutes, err = strconv.Atoi(parts[1]); err != nil {
			return 0, err
		}
		if seconds, err = parseSeconds(parts[2]); err != nil {
			return 0, err
		}
	} else {
		// MM:SS format
		if minutes, err = strconv.Atoi(parts[0]); err != nil {
			return 0, err
		}
		if seconds, err = parseSeconds(parts[1]); err != nil {
			return 0, err
		}
	}

	return hours*3600 + minutes*60 + seconds, nil
}

func parseSeconds(seconds_str string) (int, error) {
	var seconds int
	var err error
	if strings.Contains(seconds_str, ".") {
		// Handle fractional seconds
		second_parts := strings.Split(seconds_str, ".")
		if seconds, err = strconv.Atoi(second_parts[0]); err != nil {
			return 0, err
		}
	} else {
		if seconds, err = strconv.Atoi(seconds_str); err != nil {
			return 0, err
		}
	}
	return seconds, nil
}

func (o *YouTube) GrabComments(videoId string) (ret []string, err error) {
	if err = o.initService(); err != nil {
		return

@@ -71,3 +71,21 @@ func IsSymlinkToDir(path string) bool {

	return false // Regular directories should not be treated as symlinks
}

// GetDefaultConfigPath returns the default path for the configuration file
// if it exists, otherwise returns an empty string.
func GetDefaultConfigPath() (string, error) {
	homeDir, err := os.UserHomeDir()
	if err != nil {
		return "", fmt.Errorf("could not determine user home directory: %w", err)
	}

	defaultConfigPath := filepath.Join(homeDir, ".config", "fabric", "config.yaml")
	if _, err := os.Stat(defaultConfigPath); err != nil {
		if os.IsNotExist(err) {
			return "", nil // Return no error for non-existent config path
		}
		return "", fmt.Errorf("error accessing default config path: %w", err)
	}
	return defaultConfigPath, nil
}

@@ -1,4 +1,5 @@
{
  self,
  lib,
  buildGoApplication,
  go,
@@ -8,8 +9,8 @@
buildGoApplication {
  pname = "fabric-ai";
  version = import ./version.nix;
  src = ../../../.;
  pwd = ../../../.;
  src = self;
  pwd = self;
  modules = ./gomod2nix.toml;

  doCheck = false;

@@ -4,9 +4,6 @@ schema = 3
  [mod."cloud.google.com/go"]
    version = "v0.121.2"
    hash = "sha256-BCgGHxKti8slH98UDDurtgzX3lgcYEklsmj4ImPpwlc="
  [mod."cloud.google.com/go/ai"]
    version = "v0.12.1"
    hash = "sha256-wg3oLMS68E/v7EdNzywbjwEmpk+u6U8LTnIc1pq8edo="
  [mod."cloud.google.com/go/auth"]
    version = "v0.16.2"
    hash = "sha256-BAU9WGFKe0pd5Eu3l/Mbts+QeCOjS+lChr5hrPBCzdA="
@@ -16,9 +13,6 @@ schema = 3
  [mod."cloud.google.com/go/compute/metadata"]
    version = "v0.7.0"
    hash = "sha256-jJZDW+hibqjMiY8OiJhgJALbGwEq+djLOxfYR7upQyE="
  [mod."cloud.google.com/go/longrunning"]
    version = "v0.6.7"
    hash = "sha256-9I0Nc2KWAEVoxDngNkqFUdASmZIAySfMEELlPh3Q3xA="
  [mod."dario.cat/mergo"]
    version = "v1.0.2"
    hash = "sha256-p6jdiHlLEfZES8vJnDywG4aVzIe16p0CU6iglglIweA="
@@ -32,8 +26,8 @@ schema = 3
    version = "v1.3.3"
    hash = "sha256-jv7ZshpSd7FZzKKN6hqlUgiR8C3y85zNIS/hq7g76Ho="
  [mod."github.com/anthropics/anthropic-sdk-go"]
    version = "v1.4.0"
    hash = "sha256-4kwFw9gt/sRIlTo0fC2PbfLnCyc4lCOtmfQelhpORX8="
    version = "v1.7.0"
    hash = "sha256-DvpFXlUE04HeMbqQX4HIC/KMJYPXJ8rEaZkNJb1rWxs="
  [mod."github.com/araddon/dateparse"]
    version = "v0.0.0-20210429162001-6b43995a97de"
    hash = "sha256-UuX84naeRGMsFOgIgRoBHG5sNy1CzBkWPKmd6VbLwFw="
@@ -100,6 +94,9 @@ schema = 3
  [mod."github.com/cloudwego/base64x"]
    version = "v0.1.5"
    hash = "sha256-MyUYTveN48DhnL8mwAgCRuMExLct98uzSPsmYlfaa4I="
  [mod."github.com/coder/websocket"]
    version = "v1.8.13"
    hash = "sha256-NbF0aPhy8YR3jRM6LMMQTtkeGTFba0eIBPAUsqI9KOk="
  [mod."github.com/cyphar/filepath-securejoin"]
    version = "v0.4.1"
    hash = "sha256-NOV6MfbkcQbfhNmfADQw2SJmZ6q1nw0wwg8Pm2tf2DM="
@@ -160,9 +157,15 @@ schema = 3
  [mod."github.com/golang/groupcache"]
    version = "v0.0.0-20241129210726-2c02b8208cf8"
    hash = "sha256-AdLZ3dJLe/yduoNvZiXugZxNfmwJjNQyQGsIdzYzH74="
  [mod."github.com/google/generative-ai-go"]
    version = "v0.20.1"
    hash = "sha256-9bSpEs4kByhgyTKiHdOY5muYjGBTluA1LvEjw2gSoLI="
  [mod."github.com/google/go-cmp"]
    version = "v0.7.0"
    hash = "sha256-JbxZFBFGCh/Rj5XZ1vG94V2x7c18L8XKB0N9ZD5F2rM="
  [mod."github.com/google/go-github/v66"]
    version = "v66.0.0"
    hash = "sha256-o4usfbApXwTuwIFMECagJwK2H4UMJbCpdyGdWZ5VUpI="
  [mod."github.com/google/go-querystring"]
    version = "v1.1.0"
    hash = "sha256-itsKgKghuX26czU79cK6C2n+lc27jm5Dw1XbIRgwZJY="
  [mod."github.com/google/s2a-go"]
    version = "v0.1.9"
    hash = "sha256-0AdSpSTso4bATmM/9qamWzKrVtOLDf7afvDhoiT/UpA="
@@ -175,6 +178,15 @@ schema = 3
  [mod."github.com/googleapis/gax-go/v2"]
    version = "v2.14.2"
    hash = "sha256-QyY7wuCkrOJCJIf9Q884KD/BC3vk/QtQLXeLeNPt750="
  [mod."github.com/gorilla/websocket"]
    version = "v1.5.3"
    hash = "sha256-vTIGEFMEi+30ZdO6ffMNJ/kId6pZs5bbyqov8xe9BM0="
  [mod."github.com/hasura/go-graphql-client"]
    version = "v0.14.4"
    hash = "sha256-TBNYIfC2CI0cVu7aZcHSWc6ZkgdkWSSfoCXqoAJT8jw="
  [mod."github.com/inconshreveable/mousetrap"]
    version = "v1.1.0"
    hash = "sha256-XWlYH0c8IcxAwQTnIi6WYqq44nOKUylSWxWO/vi+8pE="
  [mod."github.com/jbenet/go-context"]
    version = "v0.0.0-20150711004518-d14ea06fba99"
    hash = "sha256-VANNCWNNpARH/ILQV9sCQsBWgyL2iFT+4AHZREpxIWE="
@@ -199,6 +211,9 @@ schema = 3
  [mod."github.com/mattn/go-isatty"]
    version = "v0.0.20"
    hash = "sha256-qhw9hWtU5wnyFyuMbKx+7RB8ckQaFQ8D+8GKPkN3HHQ="
  [mod."github.com/mattn/go-sqlite3"]
    version = "v1.14.28"
    hash = "sha256-mskU1xki6J1Fj6ItNgY/XNetB4Ta4jufEr4+JvTd7qs="
  [mod."github.com/modern-go/concurrent"]
    version = "v0.0.0-20180306012644-bacd9c7ef1dd"
    hash = "sha256-OTySieAgPWR4oJnlohaFTeK1tRaVp/b0d1rYY8xKMzo="
@@ -221,8 +236,8 @@ schema = 3
    version = "v2.2.4"
    hash = "sha256-8qQIPldbsS5RO8v/FW/se3ZsAyvLzexiivzJCbGRg2Q="
  [mod."github.com/pjbgf/sha1cd"]
    version = "v0.3.2"
    hash = "sha256-jdbiRhU8xc1C5c8m7BSCj71PUXHY3f7TWFfxDKKpUMk="
    version = "v0.4.0"
    hash = "sha256-a+KXfvy1KEna9yJZ+rKXzyTT0A3hg6+yvgqQKD0xXFQ="
  [mod."github.com/pkg/errors"]
    version = "v0.9.1"
    hash = "sha256-mNfQtcrQmu3sNg/7IwiieKWOgFQOVVe2yXgKBpe/wZw="
@@ -241,6 +256,12 @@ schema = 3
  [mod."github.com/skeema/knownhosts"]
    version = "v1.3.1"
    hash = "sha256-kjqQDzuncQNTuOYegqVZExwuOt/Z73m2ST7NZFEKixI="
  [mod."github.com/spf13/cobra"]
    version = "v1.9.1"
    hash = "sha256-dzEqquABE3UqZmJuj99244QjvfojS8cFlsPr/MXQGj0="
  [mod."github.com/spf13/pflag"]
    version = "v1.0.6"
    hash = "sha256-NjrK0FZPIfO/p2xtL1J7fOBQNTZAPZOC6Cb4aMMvhxI="
  [mod."github.com/stretchr/testify"]
    version = "v1.10.0"
    hash = "sha256-fJ4gnPr0vnrOhjQYQwJ3ARDKPsOtA7d4olQmQWR+wpI="
@@ -268,9 +289,6 @@ schema = 3
  [mod."go.opentelemetry.io/auto/sdk"]
    version = "v1.1.0"
    hash = "sha256-cA9qCCu8P1NSJRxgmpfkfa5rKyn9X+Y/9FSmSd5xjyo="
  [mod."go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc"]
    version = "v0.61.0"
    hash = "sha256-o5w9k3VbqP3gaXI3Aelw93LLHH53U4PnkYVwc3MaY3Y="
  [mod."go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp"]
    version = "v0.61.0"
    hash = "sha256-4pfXD7ErXhexSynXiEEQSAkWoPwHd7PEDE3M1Zi5gLM="
@@ -289,6 +307,9 @@ schema = 3
  [mod."golang.org/x/crypto"]
    version = "v0.39.0"
    hash = "sha256-FtwjbVoAhZkx7F2hmzi9Y0J87CVVhWcrZzun+zWQLzc="
  [mod."golang.org/x/exp"]
    version = "v0.0.0-20250531010427-b6e5de432a8b"
    hash = "sha256-QaFfjyB+pogCkUkJskR9xnXwkCOU828XJRrzwwLm6Ms="
  [mod."golang.org/x/net"]
    version = "v0.41.0"
    hash = "sha256-6/pi8rNmGvBFzkJQXkXkMfL1Bjydhg3BgAMYDyQ/Uvg="
@@ -296,20 +317,20 @@ schema = 3
    version = "v0.30.0"
    hash = "sha256-btD7BUtQpOswusZY5qIU90uDo38buVrQ0tmmQ8qNHDg="
  [mod."golang.org/x/sync"]
    version = "v0.15.0"
    hash = "sha256-Jf4ehm8H8YAWY6mM151RI5CbG7JcOFtmN0AZx4bE3UE="
    version = "v0.16.0"
    hash = "sha256-sqKDRESeMzLe0jWGWltLZL/JIgrn0XaIeBWCzVN3Bks="
  [mod."golang.org/x/sys"]
    version = "v0.33.0"
    hash = "sha256-wlOzIOUgAiGAtdzhW/KPl/yUVSH/lvFZfs5XOuJ9LOQ="
    version = "v0.34.0"
    hash = "sha256-5rZ7p8IaGli5X1sJbfIKOcOEwY4c0yQhinJPh2EtK50="
  [mod."golang.org/x/text"]
    version = "v0.26.0"
    hash = "sha256-N+27nBCyGvje0yCTlUzZoVZ0LRxx4AJ+eBlrFQVRlFQ="
  [mod."golang.org/x/time"]
    version = "v0.12.0"
    hash = "sha256-Cp3oxrCMH2wyxjzr5SHVmyhgaoUuSl56Uy00Q7DYEpw="
    version = "v0.27.0"
    hash = "sha256-VX0rOh6L3qIvquKSGjfZQFU8URNtGvkNvxE7OZtboW8="
  [mod."google.golang.org/api"]
    version = "v0.236.0"
    hash = "sha256-tP1RSUSnQ4a0axgZQwEZgKF1E13nL02FSP1NPSZr0Rc="
  [mod."google.golang.org/genai"]
    version = "v1.17.0"
    hash = "sha256-Iw09DYpWuGR8E++dsFCBs702oKJPZLBEEGv0g4a4AhA="
  [mod."google.golang.org/genproto/googleapis/api"]
    version = "v0.0.0-20250603155806-513f23925822"
    hash = "sha256-0CS432v9zVhkVLqFpZtxBX8rvVqP67lb7qQ3es7RqIU="

@@ -1 +1 @@
"1.4.243"
"1.4.276"

@@ -16,16 +16,16 @@
    gomod2nix
    goEnv

    (pkgs.writeShellScriptBin "update" ''
    (pkgs.writeShellScriptBin "update-mod" ''
      go get -u
      go mod tidy
      gomod2nix generate
      gomod2nix generate --outdir nix/pkgs/fabric
    '')
  ];

  shellHook = ''
    echo -e "\033[0;32;4mHeper commands:\033[0m"
    echo "'update' instead of 'go get -u && go mod tidy'"
    echo -e "\033[0;32;4mHelper commands:\033[0m"
    echo "'update-mod' instead of 'go get -u && go mod tidy && gomod2nix generate --outdir nix/pkgs/fabric'"
  '';
};
}

116	scripts/docker-test/README.md	Normal file
@@ -0,0 +1,116 @@
# Docker Test Environment for API Configuration Fix
|
||||
|
||||
This directory contains a Docker-based testing setup for fixing the issue where Fabric calls Ollama and Bedrock APIs even when not configured. This addresses the problem where unconfigured services show error messages during model listing.
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
./scripts/docker-test/test-runner.sh
|
||||
|
||||
# Interactive mode - pick which test to run
|
||||
./scripts/docker-test/test-runner.sh -i
|
||||
|
||||
# Run specific test case
|
||||
./scripts/docker-test/test-runner.sh gemini-only
|
||||
|
||||
# Shell into test environment
|
||||
./scripts/docker-test/test-runner.sh -s gemini-only
|
||||
|
||||
# Build image only (for development)
|
||||
./scripts/docker-test/test-runner.sh -b
|
||||
|
||||
# Show help
|
||||
./scripts/docker-test/test-runner.sh -h
|
||||
```
|
||||
|
||||
## Test Cases
|
||||
|
||||
1. **no-config**: No APIs configured
|
||||
2. **gemini-only**: Only Gemini configured (reproduces original issue #1195)
|
||||
3. **openai-only**: Only OpenAI configured
|
||||
4. **ollama-only**: Only Ollama configured
|
||||
5. **bedrock-only**: Only Bedrock configured
|
||||
6. **mixed**: Multiple APIs configured (Gemini + OpenAI + Ollama)
|
||||
|
||||
## Environment Files
|
||||
|
||||
Each test case has a corresponding environment file in `scripts/docker-test/env/`:
|
||||
|
||||
- `env.no-config` - Empty configuration
|
||||
- `env.gemini-only` - Only Gemini API key
|
||||
- `env.openai-only` - Only OpenAI API key
|
||||
- `env.ollama-only` - Only Ollama URL
|
||||
- `env.bedrock-only` - Only Bedrock configuration
|
||||
- `env.mixed` - Multiple API configurations
|
||||
|
||||
These files are volume-mounted into the Docker container and persist changes made with `fabric -S`.
|
||||
|
||||
## Interactive Mode & Shell Access
|
||||
|
||||
The interactive mode (`-i`) provides several options:
|
||||
|
||||
```text
|
||||
Available test cases:
|
||||
|
||||
1) No APIs configured (no-config)
|
||||
2) Only Gemini configured (gemini-only)
|
||||
3) Only OpenAI configured (openai-only)
|
||||
4) Only Ollama configured (ollama-only)
|
||||
5) Only Bedrock configured (bedrock-only)
|
||||
6) Mixed configuration (mixed)
|
||||
7) Run all tests
|
||||
0) Exit
|
||||
|
||||
Add '!' after number to shell into test environment (e.g., '1!' to shell into no-config)
|
||||
```
|
||||
|
||||
### Shell Mode
|
||||
|
||||
- Use `1!`, `2!`, etc. to shell into any test environment
|
||||
- Run `fabric -S` to configure APIs interactively
|
||||
- Run `fabric --listmodels` or `fabric -L` to test model listing
|
||||
- Changes persist in the environment files
|
||||
- Type `exit` to return to test runner
|
||||
|
||||
## Expected Results
|
||||
|
||||
**Before Fix:**
|
||||
|
||||
- `no-config` and `gemini-only` tests show Ollama connection errors
|
||||
- Tests show Bedrock authentication errors when BEDROCK_AWS_REGION not set
|
||||
- Error: `Ollama Get "http://localhost:11434/api/tags": dial tcp...`
|
||||
- Error: `Bedrock failed to list foundation models...`
|
||||
|
||||
**After Fix:**
|
||||
|
||||
- Clean output with no error messages for unconfigured services
|
||||
- Only configured services appear in model listings
|
||||
- Ollama only initialized when `OLLAMA_API_URL` is set
|
||||
- Bedrock only initialized when `BEDROCK_AWS_REGION` is set
|
||||
|
||||
## Implementation Details

- **Volume-mounted configs**: Environment files are mounted to `/home/testuser/.config/fabric/.env` (see the command sketch after this list)
- **Persistent state**: Configuration changes survive between test runs
- **Single Docker image**: Built once from `scripts/docker-test/base/Dockerfile`, reused for all tests
- **Isolated environments**: Each test uses its own environment file
- **Cross-platform**: Works on macOS, Linux, and Windows with Docker
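Concretely, each test boils down to a single `docker run` with the case's env file mounted read-only over the container's Fabric config; this mirrors the invocation in `test-runner.sh` further down:

```bash
docker run --rm \
  -e HOME=/home/testuser -e USER=testuser \
  -v "$(pwd)/scripts/docker-test/env/env.gemini-only:/home/testuser/.config/fabric/.env:ro" \
  fabric-test-setup --listmodels
```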
## Development Workflow

1. Make code changes to fix API initialization logic
2. Run `./scripts/docker-test/test-runner.sh no-config` to test the main issue
3. Use `./scripts/docker-test/test-runner.sh -i` for interactive testing
4. Shell into environments (`1!`, `2!`, etc.) to debug specific configurations
5. Run all tests before submitting PR: `./scripts/docker-test/test-runner.sh`
## Architecture

The fix involves:

1. **Ollama**: Override `IsConfigured()` method to check for `OLLAMA_API_URL` env var
2. **Bedrock**: Modify `hasAWSCredentials()` to require `BEDROCK_AWS_REGION`
3. **Plugin Registry**: Only initialize providers when properly configured

This prevents unnecessary API calls and eliminates confusing error messages for users.
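As a rough sketch of the idea (illustrative type and function names only, not the actual Fabric source), each provider reports itself as configured only when its required environment variable is present, and the registry skips anything unconfigured:

```go
// Illustrative sketch of conditional provider initialization; names are
// hypothetical and the real Fabric plugin registry differs in detail.
package main

import (
	"fmt"
	"os"
)

type Provider interface {
	Name() string
	IsConfigured() bool
}

// Ollama counts as configured only when OLLAMA_API_URL is set.
type Ollama struct{}

func (Ollama) Name() string       { return "Ollama" }
func (Ollama) IsConfigured() bool { return os.Getenv("OLLAMA_API_URL") != "" }

// Bedrock counts as configured only when BEDROCK_AWS_REGION is set.
type Bedrock struct{}

func (Bedrock) Name() string       { return "Bedrock" }
func (Bedrock) IsConfigured() bool { return os.Getenv("BEDROCK_AWS_REGION") != "" }

func main() {
	// Unconfigured providers are never initialized, so model listing
	// makes no calls to services the user has not set up.
	for _, p := range []Provider{Ollama{}, Bedrock{}} {
		if !p.IsConfigured() {
			continue
		}
		fmt.Println("initializing", p.Name())
	}
}
```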
scripts/docker-test/base/Dockerfile (new file, 30 lines)
@@ -0,0 +1,30 @@
FROM golang:1.24-alpine AS builder

WORKDIR /app
COPY go.mod go.sum ./
RUN go mod download

COPY ./cmd/fabric ./cmd/fabric
COPY ./internal ./internal
RUN go build -o fabric ./cmd/fabric

FROM alpine:latest
RUN apk --no-cache add ca-certificates

# Create a test user
RUN adduser -D -s /bin/sh testuser

# Switch to test user
USER testuser
WORKDIR /home/testuser

# Set environment variables for the test user
ENV HOME=/home/testuser
ENV USER=testuser

COPY --from=builder /app/fabric .

# Create fabric config directory and empty .env file
RUN mkdir -p .config/fabric && touch .config/fabric/.env

ENTRYPOINT ["./fabric"]
scripts/docker-test/test-runner.sh (new executable file, 235 lines)
@@ -0,0 +1,235 @@
#!/usr/bin/env bash

set -e

# Get the directory where this script is located
top_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
base_name="$(basename "$top_dir")"
cd "$top_dir"/../.. || exit 1

# Check if bash version supports associative arrays
if [[ ${BASH_VERSION%%.*} -lt 4 ]]; then
    echo "This script requires bash 4.0 or later for associative arrays."
    echo "Current version: $BASH_VERSION"
    exit 1
fi

IMAGE_NAME="fabric-test-setup"
ENV_DIR="scripts/${base_name}/env"

# Test case descriptions
declare -A test_descriptions=(
    ["no-config"]="No APIs configured"
    ["gemini-only"]="Only Gemini configured (reproduces original issue)"
    ["openai-only"]="Only OpenAI configured"
    ["ollama-only"]="Only Ollama configured"
    ["bedrock-only"]="Only Bedrock configured"
    ["mixed"]="Mixed configuration (Gemini + OpenAI + Ollama)"
)

# Test case order for consistent display
test_order=("no-config" "gemini-only" "openai-only" "ollama-only" "bedrock-only" "mixed")

build_image() {
    echo "=== Building Docker image ==="
    docker build -f "${top_dir}/base/Dockerfile" -t "$IMAGE_NAME" .
    echo
}

check_env_file() {
    local test_name="$1"
    local env_file="$ENV_DIR/env.$test_name"

    if [[ ! -f "$env_file" ]]; then
        echo "Error: Environment file not found: $env_file"
        exit 1
    fi
}

run_test() {
    local test_name="$1"
    local description="${test_descriptions[$test_name]}"
    local env_file="$ENV_DIR/env.$test_name"

    check_env_file "$test_name"

    echo "===================="
    echo "Test: $description"
    echo "Config: $test_name"
    echo "Env file: $env_file"
    echo "===================="

    echo "Running test..."
    if docker run --rm \
        -e HOME=/home/testuser \
        -e USER=testuser \
        -v "$(pwd)/$env_file:/home/testuser/.config/fabric/.env:ro" \
        "$IMAGE_NAME" --listmodels 2>&1; then
        echo "✅ Test completed"
    else
        echo "❌ Test failed"
    fi
    echo
}

shell_into_env() {
    local test_name="$1"
    local description="${test_descriptions[$test_name]}"
    local env_file="$ENV_DIR/env.$test_name"

    check_env_file "$test_name"

    echo "===================="
    echo "Shelling into: $description"
    echo "Config: $test_name"
    echo "Env file: $env_file"
    echo "===================="
    echo "You can now run 'fabric -S' to configure, or 'fabric --listmodels' or 'fabric -L' to test."
    echo "Changes to .env will persist in $env_file"
    echo "Type 'exit' to return to the test runner."
    echo

    docker run -it --rm \
        -e HOME=/home/testuser \
        -e USER=testuser \
        -v "$(pwd)/$env_file:/home/testuser/.config/fabric/.env" \
        --entrypoint=/bin/sh \
        "$IMAGE_NAME"
}

interactive_mode() {
    echo "=== Interactive Mode ==="
    echo "Available test cases:"
    echo
    local i=1
    local cases=()
    for test_name in "${test_order[@]}"; do
        echo "$i) ${test_descriptions[$test_name]} ($test_name)"
        cases[i]="$test_name"
        ((i++))
    done
    echo "$i) Run all tests"
    echo "0) Exit"
    echo
    echo "Add '!' after number to shell into test environment (e.g., '1!' to shell into no-config)"
    echo

    while true; do
        read -r -p "Select test case (0-$i) [or 1!, etc. to shell into test environment]: " choice

        # Check for shell mode (! suffix)
        local shell_mode=false
        if [[ "$choice" == *"!" ]]; then
            shell_mode=true
            choice="${choice%!}" # Remove the ! suffix
        fi

        if [[ "$choice" == "0" ]]; then
            if [[ "$shell_mode" == true ]]; then
                echo "Cannot shell into exit option."
                continue
            fi
            echo "Exiting..."
            exit 0
        elif [[ "$choice" == "$i" ]]; then
            if [[ "$shell_mode" == true ]]; then
                echo "Cannot shell into 'run all tests' option."
                continue
            fi
            echo "Running all tests..."
            run_all_tests
            break
        elif [[ "$choice" -ge 1 && "$choice" -lt "$i" ]]; then
            local selected_test="${cases[$choice]}"
            if [[ "$shell_mode" == true ]]; then
                echo "Shelling into: ${test_descriptions[$selected_test]}"
                shell_into_env "$selected_test"
            else
                echo "Running: ${test_descriptions[$selected_test]}"
                run_test "$selected_test"
            fi

            read -r -p "Continue testing? (y/n): " again
            if [[ "$again" != "y" && "$again" != "Y" ]]; then
                break
            fi
            echo
        else
            echo "Invalid choice. Please select 0-$i (optionally with '!' for shell mode)."
        fi
    done
}

run_all_tests() {
    echo "=== Testing PR #1645: Conditional API initialization ==="
    echo

    for test_name in "${test_order[@]}"; do
        run_test "$test_name"
    done

    echo "=== Test run complete ==="
    echo "Review the output above to check:"
    echo "1. No Ollama connection errors when OLLAMA_API_URL not set"
    echo "2. No Bedrock authentication errors when BEDROCK_AWS_REGION not set"
    echo "3. Only configured services appear in model listings"
}

show_help() {
    echo "Usage: $0 [OPTIONS] [TEST_CASE]"
    echo
    echo "Test PR #1645 conditional API initialization"
    echo
    echo "Options:"
    echo "  -h, --help         Show this help message"
    echo "  -i, --interactive  Run in interactive mode"
    echo "  -b, --build-only   Build image only, don't run tests"
    echo "  -s, --shell TEST   Shell into test environment"
    echo
    echo "Test cases:"
    for test_name in "${test_order[@]}"; do
        echo "  $test_name: ${test_descriptions[$test_name]}"
    done
    echo
    echo "Examples:"
    echo "  $0                   # Run all tests"
    echo "  $0 -i                # Interactive mode"
    echo "  $0 gemini-only       # Run specific test"
    echo "  $0 -s gemini-only    # Shell into gemini-only environment"
    echo "  $0 -b                # Build image only"
    echo
    echo "Environment files are located in $ENV_DIR/ and can be edited directly."
}

# Parse command line arguments
if [[ $# -eq 0 ]]; then
    build_image
    run_all_tests
elif [[ "$1" == "-h" || "$1" == "--help" ]]; then
    show_help
elif [[ "$1" == "-i" || "$1" == "--interactive" ]]; then
    build_image
    interactive_mode
elif [[ "$1" == "-b" || "$1" == "--build-only" ]]; then
    build_image
elif [[ "$1" == "-s" || "$1" == "--shell" ]]; then
    if [[ -z "$2" ]]; then
        echo "Error: -s/--shell requires a test case name"
        echo "Use -h for help."
        exit 1
    fi
    if [[ -z "${test_descriptions[$2]}" ]]; then
        echo "Error: Unknown test case: $2"
        echo "Use -h for help."
        exit 1
    fi
    build_image
    shell_into_env "$2"
elif [[ -n "${test_descriptions[$1]}" ]]; then
    build_image
    run_test "$1"
else
    echo "Unknown test case or option: $1"
    echo "Use -h for help."
    exit 1
fi

@@ -1861,6 +1861,16 @@
        "CR THINKING",
        "SELF"
      ]
    },
    {
      "patternName": "generate_code_rules",
      "description": "Extracts a list of best practices rules for AI coding assisted tools.",
      "tags": [
        "ANALYSIS",
        "EXTRACT",
        "DEVELOPMENT",
        "AI"
      ]
    }
  ]
}
@@ -903,6 +903,10 @@
    {
      "patternName": "t_check_dunning_kruger",
      "pattern_extract": "# IDENTITY You are an expert at understanding deep context about a person or entity, and then creating wisdom from that context combined with the instruction or question given in the input. # STEPS 1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity. 2. Deeply study the input instruction or question. 3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input. 4. Evaluate the input against the Dunning-Kruger effect and input's prior beliefs. Explore cognitive bias, subjective ability and objective ability for: low-ability areas where the input owner overestimate their knowledge or skill; and the opposite, high-ability areas where the input owner underestimate their knowledge or skill. # EXAMPLE In education, students who overestimate their understanding of a topic may not seek help or put in the necessary effort, while high-achieving students might doubt their abilities. In healthcare, overconfident practitioners might make critical errors, and underconfident practitioners might delay crucial decisions. In politics, politicians with limited expertise might propose simplistic solutions and ignore expert advice. END OF EXAMPLE # OUTPUT - In a section called OVERESTIMATION OF COMPETENCE, output a set of 10, 16-word bullets, that capture the principal misinterpretation of lack of knowledge or skill which are leading the input owner to believe they are more knowledgeable or skilled than they actually are. - In a section called UNDERESTIMATION OF COMPETENCE, output a set of 10, 16-word bullets,that capture the principal misinterpreation of underestimation of their knowledge or skill which are preventing the input owner to see opportunities. - In a section called METACOGNITIVIVE SKILLS, output a set of 10-word bullets that expose areas where the input owner struggles to accuratelly assess their own performance and may not be aware of the gap between their actual ability and their perceived ability. - In a section called IMPACT ON DECISION MAKING, output a set of 10-word bullets exposing facts, biases, traces of behavior based on overinflated self-assessment, that can lead to poor decisions. - At the end summarize the findings and give the input owner a motivational and constructive perspective on how they can start to tackle principal 5 gaps in their perceived skills and knowledge competencies. Don't be over simplistic. # OUTPUT INSTRUCTIONS 1. Only output valid, basic Markdown. No special formatting or italics or bolding or anything. 2. Do not output any content other than the sections above. Nothing else."
    },
    {
      "patternName": "generate_code_rules",
      "pattern_extract": "# IDENTITY AND PURPOSE You are a senior developer and expert prompt engineer. Think ultra hard to distill the following transcription or tutorial in as little set of unique rules as possible intended for best practices guidance in AI assisted coding tools, each rule has to be in one sentence as a direct instruction, avoid explanations and cosmetic language. Output in Markdown, I prefer bullet dash (-). --- # TRANSCRIPT"
    }
  ]
}
@@ -1861,6 +1861,16 @@
        "CR THINKING",
        "SELF"
      ]
    },
    {
      "patternName": "generate_code_rules",
      "description": "Extracts a list of best practices rules for AI coding assisted tools.",
      "tags": [
        "ANALYSIS",
        "EXTRACT",
        "DEVELOPMENT",
        "AI"
      ]
    }
  ]
}
@@ -44,7 +44,7 @@ export default defineConfig({
      '/api': {
        target: FABRIC_BASE_URL,
        changeOrigin: true,
-       timeout: 30000,
+       timeout: 900000,
        rewrite: (path) => path.replace(/^\/api/, ''),
        configure: (proxy, _options) => {
          proxy.on('error', (err, req, res) => {
@@ -59,7 +59,7 @@ export default defineConfig({
      '^/(patterns|models|sessions)/names': {
        target: FABRIC_BASE_URL,
        changeOrigin: true,
-       timeout: 30000,
+       timeout: 900000,
        configure: (proxy, _options) => {
          proxy.on('error', (err, req, res) => {
            console.log('proxy error', err);