Mirror of https://github.com/danielmiessler/Fabric.git (synced 2026-01-09 22:38:10 -05:00)
Compare commits
118 Commits
Commit SHAs:

d37a1acc9b, 7254571501, c300262804, e8ba57be90, 15fad3da87, e2b0d3c368, 3de85eb50e, 58e635c873,
dde21d2337, e3fcbcb12b, 839296e3ba, 5b97b0e56a, 38ff2288da, 771a1ac2e6, f6fd6f535a, f548ca5f82,
616f51748e, db5aaf9da6, a922032756, a415409a48, 19d95b9014, 73c7a8c147, 4dc84bd64d, dd96014f9b,
3cf2557af3, fcda0338cb, ac19c81ef0, a83d57065f, 055ed32ab8, 8d62165444, 63bc7a7e79, f2b2501767,
be1e2485ee, 38c4211649, 71e6355c10, 64411cdc02, 9a2ff983a4, a522d4a411, 9bdd77c277, cc68dddfe8,
07ee7f8b21, 15a355f08a, c50486b611, edaca7a045, 28432a50f0, 8ab891fcff, cab6df88ea, 42afd92f31,
76d6b1721e, 7d562096d1, 91c1aca0dd, b8008a34fb, 482759ae72, b0d096d0ea, e56ecfb7ae, 951bd134eb,
7ff04658f3, 272f04dd32, 29cb3796bf, f51f9e75a9, 63475784c7, 1a7bb27370, 4badaa4c85, bf6be964fd,
cdbcb0a512, f81cf193a2, cba56fcde6, 72cbd13917, dc722f9724, 1a35f32a48, 65bd2753c2, 570c9a9404,
15151fe9ee, 2aad4caf9b, 289fda8c74, fd40778472, bc1641a68c, 5cf15d22d3, 2b2a25daaa, 75a7f25642,
8bab58f225, 8ec006e02c, 2508dc6397, 7670df35ad, 3b9782f942, 3fca3489fb, bb2d58eae0, 87df7dc383,
1d69afa1c9, 96c18b4c99, dd5173963b, da1c8ec979, ac97f9984f, 181b812eaf, fe94165d31, 16e92690aa,
1c33799aa8, 9559e618c3, ac32e8e64a, 82340e6126, 5dec53726a, b0eb136cbb, 63f4370ff1, b3cc2c737d,
e43b4191e4, 744c565120, 1473ac1465, c38c16f0db, a4b1db4193, d44bc19a84, a2e618e11c, cb90379b30,
4868687746, 85780fee76, 497b1ed682, 135433b749, f185dedb37, c74a157dcf
```diff
@@ -7,6 +7,11 @@ on:
     paths-ignore:
       - "data/patterns/**"
       - "**/*.md"
       - "data/strategies/**"
+      - "cmd/generate_changelog/*.db"
+      - "cmd/generate_changelog/incoming/*.txt"
+      - "scripts/pattern_descriptions/*.json"
+      - "web/static/data/pattern_descriptions.json"

 permissions:
   contents: write # Ensure the workflow has write permissions
@@ -79,14 +84,23 @@ jobs:
         run: |
           nix run .#gomod2nix -- --outdir nix/pkgs/fabric

+      - name: Generate Changelog Entry
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          go run ./cmd/generate_changelog --process-prs ${{ env.new_tag }}
       - name: Commit changes
         run: |
+          # These files are modified by the version bump process
           git add cmd/fabric/version.go
           git add nix/pkgs/fabric/version.nix
           git add nix/pkgs/fabric/gomod2nix.toml
-          git add .
+
+          # The changelog tool is responsible for staging CHANGELOG.md, changelog.db,
+          # and removing the incoming/ directory.

           if ! git diff --staged --quiet; then
-            git commit -m "Update version to ${{ env.new_tag }} and commit $commit_hash"
+            git commit -m "chore(release): Update version to ${{ env.new_tag }}"
           else
             echo "No changes to commit."
           fi
```
.gitignore (vendored, 3 changes)

```diff
@@ -350,3 +350,6 @@ web/static/*.png

 # Local tmp directory
 .tmp/
 tmp/
+
+# Ignore .claude/
+.claude/
```
.vscode/settings.json (vendored, 11 changes)

```diff
@@ -23,6 +23,7 @@
     "Eisler",
     "elif",
     "envrc",
+    "Errorf",
     "eugeis",
     "Eugen",
     "excalidraw",
@@ -53,6 +54,7 @@
     "hasura",
     "hormozi",
     "Hormozi's",
     "horts",
+    "HTMLURL",
     "jaredmontoya",
     "jessevdk",
@@ -74,10 +76,13 @@
     "markmap",
     "matplotlib",
     "mattn",
     "mbed",
     "Miessler",
     "nometa",
     "numpy",
     "ollama",
+    "ollamaapi",
+    "openaiapi",
+    "opencode",
     "openrouter",
     "otiai",
@@ -104,6 +109,7 @@
     "stretchr",
     "talkpanel",
     "Telos",
+    "testpattern",
     "Thacker",
     "tidwall",
     "topp",
@@ -113,11 +119,14 @@
     "updatepatterns",
     "videoid",
     "webp",
+    "WEBVTT",
     "wipecontext",
     "wipesession",
+    "Worktree",
     "writeups",
     "xclip",
-    "yourpatternname"
+    "yourpatternname",
+    "youtu"
   ],
   "cSpell.ignorePaths": ["go.mod", ".gitignore", "CHANGELOG.md"],
   "markdownlint.config": {
```
CHANGELOG.md (1463 changes; file diff suppressed because it is too large)
README.md (32 changes)

````diff
@@ -113,30 +113,9 @@ Keep in mind that many of these were recorded when Fabric was Python-based, so r

 ## Updates

-> [!NOTE]
->
-> July 4, 2025
->
-> - **Web Search**: Fabric now supports web search for Anthropic and OpenAI models using the `--search` and `--search-location` flags. This replaces the previous plugin-based search, so you may want to remove the old `ANTHROPIC_WEB_SEARCH_TOOL_*` variables from your `~/.config/fabric/.env` file.
-> - **Image Generation**: Fabric now has powerful image generation capabilities with OpenAI.
->   - Generate images from text prompts and save them using `--image-file`.
->   - Edit existing images by providing an input image with `--attachment`.
->   - Control image `size`, `quality`, `compression`, and `background` with the new `--image-*` flags.
->
-> June 17, 2025
->
-> - Fabric now supports Perplexity AI. Configure it by using `fabric -S` to add your Perplexity AI API Key,
->   and then try:
->
->   ```bash
->   fabric -m sonar-pro "What is the latest world news?"
->   ```
->
-> June 11, 2025
->
-> - Fabric's YouTube transcription now needs `yt-dlp` to be installed. Make sure to install the latest
->   version (2025.06.09 as of this note). The YouTube API key is only needed for comments (the `--comments` flag)
->   and metadata extraction (the `--metadata` flag).
+Fabric is evolving rapidly.
+
+Stay current with the latest features by reviewing the [CHANGELOG](./CHANGELOG.md) for all recent changes.

 ## Philosophy

@@ -565,10 +544,13 @@ Application Options:
       --image-compression=      Compression level 0-100 for JPEG/WebP formats (default: not set)
       --image-background=       Background type: opaque, transparent (default: opaque, only for
                                 PNG/WebP)
+      --suppress-think          Suppress text enclosed in thinking tags
+      --think-start-tag=        Start tag for thinking sections (default: <think>)
+      --think-end-tag=          End tag for thinking sections (default: </think>)
+      --disable-responses-api   Disable OpenAI Responses API (default: false)

 Help Options:
   -h, --help                    Show this help message

 ```
````
```diff
@@ -1,3 +1,3 @@
 package main

-var version = "v1.4.249"
+var version = "v1.4.264"
```
Binary file not shown.
cmd/generate_changelog/internal/cache/cache.go (vendored, 30 changes)
```diff
@@ -4,6 +4,7 @@ import (
 	"database/sql"
 	"encoding/json"
 	"fmt"
+	"os"
 	"time"

 	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
@@ -201,7 +202,14 @@ func (c *Cache) GetVersions() (map[string]*git.Version, error) {
 	}

 	if dateStr.Valid {
-		v.Date, _ = time.Parse(time.RFC3339, dateStr.String)
+		// Try RFC3339Nano first (for nanosecond precision), then fall back to RFC3339
+		v.Date, err = time.Parse(time.RFC3339Nano, dateStr.String)
+		if err != nil {
+			v.Date, err = time.Parse(time.RFC3339, dateStr.String)
+			if err != nil {
+				fmt.Fprintf(os.Stderr, "Error parsing date '%s' for version '%s': %v. Expected format: RFC3339 or RFC3339Nano.\n", dateStr.String, v.Name, err)
+			}
+		}
 	}

 	if prNumbersJSON != "" {
@@ -260,6 +268,26 @@ func (c *Cache) Clear() error {
 	return nil
 }

+// VersionExists checks if a version already exists in the cache
+func (c *Cache) VersionExists(version string) (bool, error) {
+	var count int
+	err := c.db.QueryRow("SELECT COUNT(*) FROM versions WHERE name = ?", version).Scan(&count)
+	if err != nil {
+		return false, err
+	}
+	return count > 0, nil
+}
+
+// CommitExists checks if a commit already exists in the cache
+func (c *Cache) CommitExists(hash string) (bool, error) {
+	var count int
+	err := c.db.QueryRow("SELECT COUNT(*) FROM commits WHERE sha = ?", hash).Scan(&count)
+	if err != nil {
+		return false, err
+	}
+	return count > 0, nil
+}
+
 // GetLastPRSync returns the timestamp of the last PR sync
 func (c *Cache) GetLastPRSync() (time.Time, error) {
 	var timestamp string
```
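A notable part of this change is that the old code discarded the parse error (`v.Date, _ = time.Parse(...)`), while the new code prefers `time.RFC3339Nano` and reports a failure on both formats instead of swallowing it. A minimal, runnable sketch of the same parse-with-fallback idea (the sample timestamps and version name are illustrative, not taken from the repository):

```go
package main

import (
	"fmt"
	"os"
	"time"
)

// parseVersionDate mirrors the cache's approach: try RFC3339Nano first,
// fall back to RFC3339, and report (rather than swallow) a double failure.
func parseVersionDate(s, name string) time.Time {
	t, err := time.Parse(time.RFC3339Nano, s)
	if err != nil {
		t, err = time.Parse(time.RFC3339, s)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Error parsing date '%s' for version '%s': %v\n", s, name, err)
		}
	}
	return t
}

func main() {
	fmt.Println(parseVersionDate("2025-07-04T12:30:45.123456789Z", "v1.4.264")) // nanosecond precision
	fmt.Println(parseVersionDate("2025-07-04T12:30:45Z", "v1.4.264"))           // plain RFC3339
	fmt.Println(parseVersionDate("not-a-date", "v1.4.264"))                     // logged, zero time returned
}
```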
```diff
@@ -65,7 +65,7 @@ func (g *Generator) Generate() (string, error) {
 		return "", fmt.Errorf("failed to collect data: %w", err)
 	}

-	if err := g.fetchPRs(); err != nil {
+	if err := g.fetchPRs(g.cfg.ForcePRSync); err != nil {
 		return "", fmt.Errorf("failed to fetch PRs: %w", err)
 	}

@@ -193,7 +193,7 @@ func (g *Generator) collectData() error {
 	return nil
 }

-func (g *Generator) fetchPRs() error {
+func (g *Generator) fetchPRs(forcePRSync bool) error {
 	// First, load all cached PRs
 	if g.cache != nil {
 		cachedPRs, err := g.cache.GetAllPRs()
@@ -229,7 +229,7 @@ func (g *Generator) fetchPRs() error {
 	}
 	// If we have never synced or it's been more than 24 hours, do a full sync
 	// Also sync if we have versions with PR numbers that aren't cached
-	needsSync := lastSync.IsZero() || time.Since(lastSync) > 24*time.Hour || g.cfg.ForcePRSync || missingPRs
+	needsSync := lastSync.IsZero() || time.Since(lastSync) > 24*time.Hour || forcePRSync || missingPRs

 	if !needsSync {
 		fmt.Fprintf(os.Stderr, "Using cached PR data (last sync: %s)\n", lastSync.Format("2006-01-02 15:04:05"))
@@ -697,3 +697,109 @@ func hashContent(content string) string {
 	hash := sha256.Sum256([]byte(content))
 	return fmt.Sprintf("%x", hash)
 }
+
+// SyncDatabase performs a comprehensive database synchronization and validation
+func (g *Generator) SyncDatabase() error {
+	if g.cache == nil {
+		return fmt.Errorf("cache is disabled, cannot sync database")
+	}
+
+	fmt.Fprintf(os.Stderr, "[SYNC] Starting database synchronization...\n")
+
+	// Step 1: Force PR sync (pass true explicitly)
+	fmt.Fprintf(os.Stderr, "[PR_SYNC] Forcing PR sync from GitHub...\n")
+	if err := g.fetchPRs(true); err != nil {
+		return fmt.Errorf("failed to sync PRs: %w", err)
+	}
+
+	// Step 2: Rebuild git history and verify versions/commits completeness
+	fmt.Fprintf(os.Stderr, "[VERIFY] Verifying git history and version completeness...\n")
+	if err := g.syncGitHistory(); err != nil {
+		return fmt.Errorf("failed to sync git history: %w", err)
+	}
+
+	// Step 3: Verify commit-PR mappings
+	fmt.Fprintf(os.Stderr, "[MAPPING] Verifying commit-PR mappings...\n")
+	if err := g.verifyCommitPRMappings(); err != nil {
+		return fmt.Errorf("failed to verify commit-PR mappings: %w", err)
+	}
+
+	fmt.Fprintf(os.Stderr, "[SUCCESS] Database synchronization completed successfully!\n")
+	return nil
+}
+
+// syncGitHistory walks the complete git history and ensures all versions and commits are cached
+func (g *Generator) syncGitHistory() error {
+	// Walk complete git history (reuse existing logic)
+	versions, err := g.gitWalker.WalkHistory()
+	if err != nil {
+		return fmt.Errorf("failed to walk git history: %w", err)
+	}
+
+	// Save only new versions and commits (preserve existing data)
+	var newVersions, newCommits int
+	for _, version := range versions {
+		// Only save version if it doesn't exist
+		exists, err := g.cache.VersionExists(version.Name)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Warning: Failed to check existence of version %s: %v. This may affect the completeness of the sync operation.\n", version.Name, err)
+			continue
+		}
+		if !exists {
+			if err := g.cache.SaveVersion(version); err != nil {
+				fmt.Fprintf(os.Stderr, "Warning: Failed to save version %s: %v\n", version.Name, err)
+			} else {
+				newVersions++
+			}
+		}
+
+		// Only save commits that don't exist
+		for _, commit := range version.Commits {
+			exists, err := g.cache.CommitExists(commit.SHA)
+			if err != nil {
+				fmt.Fprintf(os.Stderr, "Warning: Failed to check commit %s existence: %v\n", commit.SHA, err)
+				continue
+			}
+			if !exists {
+				if err := g.cache.SaveCommit(commit, version.Name); err != nil {
+					fmt.Fprintf(os.Stderr, "Warning: Failed to save commit %s: %v\n", commit.SHA, err)
+				} else {
+					newCommits++
+				}
+			}
+		}
+	}
+
+	// Update last processed tag
+	if latestTag, err := g.gitWalker.GetLatestTag(); err == nil && latestTag != "" {
+		if err := g.cache.SetLastProcessedTag(latestTag); err != nil {
+			fmt.Fprintf(os.Stderr, "Warning: Failed to update last processed tag: %v\n", err)
+		}
+	}
+
+	fmt.Fprintf(os.Stderr, "  Added %d new versions and %d new commits (preserved existing data)\n", newVersions, newCommits)
+	return nil
+}
+
+// verifyCommitPRMappings ensures all PR commits have proper mappings
+func (g *Generator) verifyCommitPRMappings() error {
+	// Get all cached PRs
+	allPRs, err := g.cache.GetAllPRs()
+	if err != nil {
+		return fmt.Errorf("failed to get cached PRs: %w", err)
+	}
+
+	// Convert to slice for batch operations (reuse existing logic)
+	var prSlice []*github.PR
+	for _, pr := range allPRs {
+		prSlice = append(prSlice, pr)
+	}
+
+	// Save commit-PR mappings (reuse existing logic)
+	if err := g.cache.SaveCommitPRMappings(prSlice); err != nil {
+		return fmt.Errorf("failed to save commit-PR mappings: %w", err)
+	}
+
+	fmt.Fprintf(os.Stderr, "  Verified mappings for %d PRs\n", len(prSlice))
+	return nil
+}
```
cmd/generate_changelog/internal/changelog/generator_test.go (new file, 115 lines)
```go
package changelog

import (
	"os"
	"path/filepath"
	"regexp"
	"testing"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
)

func TestDetectVersionFromNix(t *testing.T) {
	tempDir := t.TempDir()

	t.Run("version.nix exists", func(t *testing.T) {
		versionNixContent := `"1.2.3"`
		versionNixPath := filepath.Join(tempDir, "version.nix")
		err := os.WriteFile(versionNixPath, []byte(versionNixContent), 0644)
		if err != nil {
			t.Fatalf("Failed to write version.nix: %v", err)
		}

		data, err := os.ReadFile(versionNixPath)
		if err != nil {
			t.Fatalf("Failed to read version.nix: %v", err)
		}

		versionRegex := regexp.MustCompile(`"([^"]+)"`)
		matches := versionRegex.FindStringSubmatch(string(data))

		if len(matches) <= 1 {
			t.Fatalf("No version found in version.nix")
		}

		version := matches[1]
		if version != "1.2.3" {
			t.Errorf("Expected version 1.2.3, got %s", version)
		}
	})
}

func TestEnsureIncomingDir(t *testing.T) {
	tempDir := t.TempDir()
	incomingDir := filepath.Join(tempDir, "incoming")

	cfg := &config.Config{
		IncomingDir: incomingDir,
	}

	g := &Generator{cfg: cfg}

	err := g.ensureIncomingDir()
	if err != nil {
		t.Fatalf("ensureIncomingDir failed: %v", err)
	}

	if _, err := os.Stat(incomingDir); os.IsNotExist(err) {
		t.Errorf("Incoming directory was not created")
	}
}

func TestInsertVersionAtTop(t *testing.T) {
	tempDir := t.TempDir()
	changelogPath := filepath.Join(tempDir, "CHANGELOG.md")

	cfg := &config.Config{
		RepoPath: tempDir,
	}

	g := &Generator{cfg: cfg}

	t.Run("new changelog", func(t *testing.T) {
		entry := "## v1.0.0 (2025-01-01)\n\n- Initial release"

		err := g.insertVersionAtTop(entry)
		if err != nil {
			t.Fatalf("insertVersionAtTop failed: %v", err)
		}

		content, err := os.ReadFile(changelogPath)
		if err != nil {
			t.Fatalf("Failed to read changelog: %v", err)
		}

		expected := "# Changelog\n\n## v1.0.0 (2025-01-01)\n\n- Initial release\n"
		if string(content) != expected {
			t.Errorf("Expected:\n%s\nGot:\n%s", expected, string(content))
		}
	})

	t.Run("existing changelog", func(t *testing.T) {
		existingContent := "# Changelog\n\n## v0.9.0 (2024-12-01)\n\n- Previous release"
		err := os.WriteFile(changelogPath, []byte(existingContent), 0644)
		if err != nil {
			t.Fatalf("Failed to write existing changelog: %v", err)
		}

		entry := "## v1.0.0 (2025-01-01)\n\n- New release"

		err = g.insertVersionAtTop(entry)
		if err != nil {
			t.Fatalf("insertVersionAtTop failed: %v", err)
		}

		content, err := os.ReadFile(changelogPath)
		if err != nil {
			t.Fatalf("Failed to read changelog: %v", err)
		}

		expected := "# Changelog\n\n## v1.0.0 (2025-01-01)\n\n- New release\n## v0.9.0 (2024-12-01)\n\n- Previous release"
		if string(content) != expected {
			t.Errorf("Expected:\n%s\nGot:\n%s", expected, string(content))
		}
	})
}
```
@@ -0,0 +1,82 @@

```go
package changelog

import (
	"testing"
	"time"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
)

func TestIsMergeCommit(t *testing.T) {
	tests := []struct {
		name     string
		commit   github.PRCommit
		expected bool
	}{
		{
			name: "Regular commit with single parent",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Fix bug in user authentication",
				Author:  "John Doe",
				Date:    time.Now(),
				Parents: []string{"def456"},
			},
			expected: false,
		},
		{
			name: "Merge commit with multiple parents",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Merge pull request #42 from feature/auth",
				Author:  "GitHub",
				Date:    time.Now(),
				Parents: []string{"def456", "ghi789"},
			},
			expected: true,
		},
		{
			name: "Merge commit detected by message pattern only",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Merge pull request #123 from user/feature-branch",
				Author:  "GitHub",
				Date:    time.Now(),
				Parents: []string{}, // Empty parents - fallback to message detection
			},
			expected: true,
		},
		{
			name: "Merge branch commit pattern",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Merge branch 'feature' into main",
				Author:  "Developer",
				Date:    time.Now(),
				Parents: []string{"def456"}, // Single parent but merge pattern
			},
			expected: true,
		},
		{
			name: "Regular commit with no merge patterns",
			commit: github.PRCommit{
				SHA:     "abc123",
				Message: "Add new feature for user management",
				Author:  "Jane Doe",
				Date:    time.Now(),
				Parents: []string{"def456"},
			},
			expected: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := isMergeCommit(tt.commit)
			if result != tt.expected {
				t.Errorf("isMergeCommit() = %v, expected %v for commit: %s",
					result, tt.expected, tt.commit.Message)
			}
		})
	}
}
```
cmd/generate_changelog/internal/changelog/processing.go (new file, 521 lines)
```go
package changelog

import (
	"fmt"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
)

var (
	mergePatterns     []*regexp.Regexp
	mergePatternsOnce sync.Once
)

// getMergePatterns returns the compiled merge patterns, initializing them lazily
func getMergePatterns() []*regexp.Regexp {
	mergePatternsOnce.Do(func() {
		mergePatterns = []*regexp.Regexp{
			regexp.MustCompile(`^Merge pull request #\d+`),      // "Merge pull request #123 from..."
			regexp.MustCompile(`^Merge branch '.*' into .*`),    // "Merge branch 'feature' into main"
			regexp.MustCompile(`^Merge remote-tracking branch`), // "Merge remote-tracking branch..."
			regexp.MustCompile(`^Merge '.*' into .*`),           // "Merge 'feature' into main"
		}
	})
	return mergePatterns
}

// isMergeCommit determines if a commit is a merge commit based on its parents and message patterns.
func isMergeCommit(commit github.PRCommit) bool {
	// Primary method: Check parent count (merge commits have multiple parents)
	if len(commit.Parents) > 1 {
		return true
	}

	// Fallback method: Check commit message patterns
	mergePatterns := getMergePatterns()
	for _, pattern := range mergePatterns {
		if pattern.MatchString(commit.Message) {
			return true
		}
	}

	return false
}

// calculateVersionDate determines the version date based on the most recent commit date from the provided PRs.
//
// If no valid commit dates are found, the function falls back to the current time.
// The function iterates through the provided PRs and their associated commits, comparing commit dates
// to identify the most recent one. If a valid date is found, it is returned; otherwise, the fallback is used.
func calculateVersionDate(fetchedPRs []*github.PR) time.Time {
	versionDate := time.Now() // fallback to current time
	if len(fetchedPRs) > 0 {
		var mostRecentCommitDate time.Time
		for _, pr := range fetchedPRs {
			for _, commit := range pr.Commits {
				if commit.Date.After(mostRecentCommitDate) {
					mostRecentCommitDate = commit.Date
				}
			}
		}
		if !mostRecentCommitDate.IsZero() {
			versionDate = mostRecentCommitDate
		}
	}
	return versionDate
}

// ProcessIncomingPR processes a single PR for changelog entry creation
func (g *Generator) ProcessIncomingPR(prNumber int) error {
	if err := g.validatePRState(prNumber); err != nil {
		return fmt.Errorf("PR validation failed: %w", err)
	}

	if err := g.validateGitStatus(); err != nil {
		return fmt.Errorf("git status validation failed: %w", err)
	}

	// Now fetch the full PR with commits for content generation
	pr, err := g.ghClient.GetPRWithCommits(prNumber)
	if err != nil {
		return fmt.Errorf("failed to fetch PR %d: %w", prNumber, err)
	}

	content := g.formatPR(pr)

	if g.cfg.EnableAISummary {
		aiContent, err := SummarizeVersionContent(content)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Warning: AI summarization failed: %v\n", err)
		} else if !checkForAIError(aiContent) {
			content = strings.TrimSpace(aiContent)
		}
	}

	if err := g.ensureIncomingDir(); err != nil {
		return fmt.Errorf("failed to create incoming directory: %w", err)
	}

	filename := filepath.Join(g.cfg.IncomingDir, fmt.Sprintf("%d.txt", prNumber))

	// Ensure content ends with a single newline
	content = strings.TrimSpace(content) + "\n"

	if err := os.WriteFile(filename, []byte(content), 0644); err != nil {
		return fmt.Errorf("failed to write incoming file: %w", err)
	}

	if err := g.commitAndPushIncoming(prNumber, filename); err != nil {
		return fmt.Errorf("failed to commit and push: %w", err)
	}

	fmt.Printf("Successfully created incoming changelog entry: %s\n", filename)
	return nil
}

// CreateNewChangelogEntry aggregates all incoming PR files for release and includes direct commits
func (g *Generator) CreateNewChangelogEntry(version string) error {
	files, err := filepath.Glob(filepath.Join(g.cfg.IncomingDir, "*.txt"))
	if err != nil {
		return fmt.Errorf("failed to scan incoming directory: %w", err)
	}

	var content strings.Builder
	var processingErrors []string

	// First, aggregate all incoming PR files
	for _, file := range files {
		data, err := os.ReadFile(file)
		if err != nil {
			processingErrors = append(processingErrors, fmt.Sprintf("failed to read %s: %v", file, err))
			continue // Continue to attempt processing other files
		}
		content.WriteString(string(data))
		content.WriteString("\n")
	}

	if len(processingErrors) > 0 {
		return fmt.Errorf("encountered errors while processing incoming files: %s", strings.Join(processingErrors, "; "))
	}

	// Extract PR numbers and their commit SHAs from processed files to avoid including their commits as "direct"
	processedPRs := make(map[int]bool)
	processedCommitSHAs := make(map[string]bool)
	var fetchedPRs []*github.PR
	var prNumbers []int

	for _, file := range files {
		// Extract PR number from filename (e.g., "1640.txt" -> 1640)
		filename := filepath.Base(file)
		if prNumStr := strings.TrimSuffix(filename, ".txt"); prNumStr != filename {
			if prNum, err := strconv.Atoi(prNumStr); err == nil {
				processedPRs[prNum] = true
				prNumbers = append(prNumbers, prNum)

				// Fetch the PR to get its commit SHAs
				if pr, err := g.ghClient.GetPRWithCommits(prNum); err == nil {
					fetchedPRs = append(fetchedPRs, pr)
					for _, commit := range pr.Commits {
						processedCommitSHAs[commit.SHA] = true
					}
				}
			}
		}
	}

	// Now add direct commits since the last release, excluding commits from processed PRs
	directCommitsContent, err := g.getDirectCommitsSinceLastRelease(processedPRs, processedCommitSHAs)
	if err != nil {
		return fmt.Errorf("failed to get direct commits since last release: %w", err)
	}
	content.WriteString(directCommitsContent)

	// Check if we have any content at all
	if content.Len() == 0 {
		if len(files) == 0 {
			fmt.Fprintf(os.Stderr, "No incoming PR files found in %s and no direct commits since last release\n", g.cfg.IncomingDir)
		} else {
			fmt.Fprintf(os.Stderr, "No content found in incoming files and no direct commits since last release\n")
		}
		return nil
	}

	// Calculate the version date for the changelog entry as the most recent commit date from processed PRs
	versionDate := calculateVersionDate(fetchedPRs)

	entry := fmt.Sprintf("## %s (%s)\n\n%s",
		version, versionDate.Format("2006-01-02"), strings.TrimLeft(content.String(), "\n"))

	if err := g.insertVersionAtTop(entry); err != nil {
		return fmt.Errorf("failed to update CHANGELOG.md: %w", err)
	}

	if g.cache != nil {
		// Cache the fetched PRs using the same logic as normal changelog generation
		if len(fetchedPRs) > 0 {
			// Save PRs to cache
			if err := g.cache.SavePRBatch(fetchedPRs); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: Failed to save PR batch to cache: %v\n", err)
			}

			// Save SHA→PR mappings for lightning-fast git operations
			if err := g.cache.SaveCommitPRMappings(fetchedPRs); err != nil {
				fmt.Fprintf(os.Stderr, "Warning: Failed to cache commit mappings: %v\n", err)
			}

			// Save individual commits to cache for each PR
			for _, pr := range fetchedPRs {
				for _, commit := range pr.Commits {
					// Use actual commit timestamp, with fallback to current time if invalid
					commitDate := commit.Date
					if commitDate.IsZero() {
						commitDate = time.Now()
						fmt.Fprintf(os.Stderr, "Warning: Commit %s has invalid timestamp, using current time as fallback\n", commit.SHA)
					}

					// Convert github.PRCommit to git.Commit
					gitCommit := &git.Commit{
						SHA:      commit.SHA,
						Message:  commit.Message,
						Author:   commit.Author,
						Email:    commit.Email,          // Use email from GitHub API
						Date:     commitDate,            // Use actual commit timestamp from GitHub API
						IsMerge:  isMergeCommit(commit), // Detect merge commits using parents and message patterns
						PRNumber: pr.Number,
					}
					if err := g.cache.SaveCommit(gitCommit, version); err != nil {
						fmt.Fprintf(os.Stderr, "Warning: Failed to save commit %s to cache: %v\n", commit.SHA, err)
					}
				}
			}
		}

		// Create a proper new version entry for the database
		newVersionEntry := &git.Version{
			Name:      version,
			Date:      versionDate, // Use most recent commit date instead of current time
			CommitSHA: "",          // Will be set when the release commit is made
			PRNumbers: prNumbers,   // Now we have the actual PR numbers
			AISummary: content.String(),
		}

		if err := g.cache.SaveVersion(newVersionEntry); err != nil {
			return fmt.Errorf("failed to save new version entry to database: %w", err)
		}
	}

	for _, file := range files {
		// Convert to relative path for git operations
		relativeFile, err := filepath.Rel(g.cfg.RepoPath, file)
		if err != nil {
			relativeFile = file
		}

		// Use git remove to handle both filesystem and git index
		if err := g.gitWalker.RemoveFile(relativeFile); err != nil {
			fmt.Fprintf(os.Stderr, "Warning: Failed to remove %s from git index: %v\n", relativeFile, err)
			// Fallback to filesystem-only removal
			if err := os.Remove(file); err != nil {
				fmt.Fprintf(os.Stderr, "Error: Failed to remove %s from the filesystem after failing to remove it from the git index.\n", relativeFile)
				fmt.Fprintf(os.Stderr, "Filesystem error: %v\n", err)
				fmt.Fprintf(os.Stderr, "Manual intervention required:\n")
				fmt.Fprintf(os.Stderr, "  1. Remove the file %s manually (using the OS-specific command)\n", file)
				fmt.Fprintf(os.Stderr, "  2. Remove from git index: git rm --cached %s\n", relativeFile)
				fmt.Fprintf(os.Stderr, "  3. Or reset git index: git reset HEAD %s\n", relativeFile)
			}
		}
	}

	if err := g.stageChangesForRelease(); err != nil {
		return fmt.Errorf("critical: failed to stage changes for release: %w", err)
	}

	fmt.Printf("Successfully processed %d incoming PR files for version %s\n", len(files), version)
	return nil
}

// getDirectCommitsSinceLastRelease gets all direct commits (not part of PRs) since the last release
func (g *Generator) getDirectCommitsSinceLastRelease(processedPRs map[int]bool, processedCommitSHAs map[string]bool) (string, error) {
	// Get the latest tag to determine what commits are unreleased
	latestTag, err := g.gitWalker.GetLatestTag()
	if err != nil {
		return "", fmt.Errorf("failed to get latest tag: %w", err)
	}

	// Get all commits since the latest tag
	unreleasedVersion, err := g.gitWalker.WalkCommitsSinceTag(latestTag)
	if err != nil {
		return "", fmt.Errorf("failed to walk commits since tag %s: %w", latestTag, err)
	}

	if unreleasedVersion == nil || len(unreleasedVersion.Commits) == 0 {
		return "", nil // No unreleased commits
	}

	// Filter out commits that are part of PRs (we already have those from incoming files)
	// and format the direct commits
	var directCommits []*git.Commit
	for _, commit := range unreleasedVersion.Commits {
		// Skip version bump commits
		if commit.IsVersion {
			continue
		}

		// Skip commits that belong to PRs we've already processed from incoming files (by PR number)
		if commit.PRNumber > 0 && processedPRs[commit.PRNumber] {
			continue
		}

		// Skip commits whose SHA is already included in processed PRs (this catches commits
		// that might not have been detected as part of a PR but are actually in the PR)
		if processedCommitSHAs[commit.SHA] {
			continue
		}

		// Only include commits that are NOT part of any PR (direct commits)
		if commit.PRNumber == 0 {
			directCommits = append(directCommits, commit)
		}
	}

	if len(directCommits) == 0 {
		return "", nil // No direct commits
	}

	// Format the direct commits similar to how it's done in generateRawVersionContent
	var sb strings.Builder
	sb.WriteString("### Direct commits\n\n")

	// Sort direct commits by date (newest first) for consistent ordering
	sort.Slice(directCommits, func(i, j int) bool {
		return directCommits[i].Date.After(directCommits[j].Date)
	})

	for _, commit := range directCommits {
		message := g.formatCommitMessage(strings.TrimSpace(commit.Message))
		if message != "" && !g.isDuplicateMessage(message, directCommits) {
			sb.WriteString(fmt.Sprintf("- %s\n", message))
		}
	}

	return sb.String(), nil
}

// validatePRState validates that a PR is in the correct state for processing
func (g *Generator) validatePRState(prNumber int) error {
	// Use lightweight validation call that doesn't fetch commits
	details, err := g.ghClient.GetPRValidationDetails(prNumber)
	if err != nil {
		return fmt.Errorf("failed to fetch PR %d: %w", prNumber, err)
	}

	if details.State != "open" {
		return fmt.Errorf("PR %d is not open (current state: %s)", prNumber, details.State)
	}

	if !details.Mergeable {
		return fmt.Errorf("PR %d is not mergeable - please resolve conflicts first", prNumber)
	}

	return nil
}

// validateGitStatus ensures the working directory is clean
func (g *Generator) validateGitStatus() error {
	isClean, err := g.gitWalker.IsWorkingDirectoryClean()
	if err != nil {
		return fmt.Errorf("failed to check git status: %w", err)
	}

	if !isClean {
		// Get detailed status for better error message
		statusDetails, statusErr := g.gitWalker.GetStatusDetails()
		if statusErr == nil && statusDetails != "" {
			return fmt.Errorf("working directory is not clean - please commit or stash changes before proceeding:\n%s", statusDetails)
		}
		return fmt.Errorf("working directory is not clean - please commit or stash changes before proceeding")
	}

	return nil
}

// ensureIncomingDir creates the incoming directory if it doesn't exist
func (g *Generator) ensureIncomingDir() error {
	if err := os.MkdirAll(g.cfg.IncomingDir, 0755); err != nil {
		return fmt.Errorf("failed to create directory %s: %w", g.cfg.IncomingDir, err)
	}
	return nil
}

// commitAndPushIncoming commits and optionally pushes the incoming changelog file
func (g *Generator) commitAndPushIncoming(prNumber int, filename string) error {
	relativeFilename, err := filepath.Rel(g.cfg.RepoPath, filename)
	if err != nil {
		relativeFilename = filename
	}

	// Add file to git index
	if err := g.gitWalker.AddFile(relativeFilename); err != nil {
		return fmt.Errorf("failed to add file %s: %w", relativeFilename, err)
	}

	// Commit changes
	commitMessage := fmt.Sprintf("chore: incoming %d changelog entry", prNumber)
	_, err = g.gitWalker.CommitChanges(commitMessage)
	if err != nil {
		return fmt.Errorf("failed to commit changes: %w", err)
	}

	// Push to remote if enabled
	if g.cfg.Push {
		if err := g.gitWalker.PushToRemote(); err != nil {
			return fmt.Errorf("failed to push to remote: %w", err)
		}
	} else {
		fmt.Println("Commit created successfully. Please review and push manually.")
	}

	return nil
}

// detectVersion detects the current version from version.nix or git tags
func (g *Generator) detectVersion() (string, error) {
	versionNixPath := filepath.Join(g.cfg.RepoPath, "version.nix")
	if _, err := os.Stat(versionNixPath); err == nil {
		data, err := os.ReadFile(versionNixPath)
		if err != nil {
			return "", fmt.Errorf("failed to read version.nix: %w", err)
		}

		versionRegex := regexp.MustCompile(`"([^"]+)"`)
		matches := versionRegex.FindStringSubmatch(string(data))
		if len(matches) > 1 {
			return matches[1], nil
		}
	}

	latestTag, err := g.gitWalker.GetLatestTag()
	if err != nil {
		return "", fmt.Errorf("failed to get latest tag: %w", err)
	}

	if latestTag == "" {
		return "v1.0.0", nil
	}

	return latestTag, nil
}

// insertVersionAtTop inserts a new version entry at the top of CHANGELOG.md
func (g *Generator) insertVersionAtTop(entry string) error {
	changelogPath := filepath.Join(g.cfg.RepoPath, "CHANGELOG.md")
	header := "# Changelog"
	headerRegex := regexp.MustCompile(`(?m)^# Changelog\s*`)

	existingContent, err := os.ReadFile(changelogPath)
	if err != nil {
		if !os.IsNotExist(err) {
			return fmt.Errorf("failed to read existing CHANGELOG.md: %w", err)
		}
		// File doesn't exist, create it.
		newContent := fmt.Sprintf("%s\n\n%s\n", header, entry)
		return os.WriteFile(changelogPath, []byte(newContent), 0644)
	}

	contentStr := string(existingContent)
	var newContent string

	if loc := headerRegex.FindStringIndex(contentStr); loc != nil {
		// Found the header, insert after it.
		insertionPoint := loc[1]
		// Skip any existing newlines after the header to avoid double spacing
		for insertionPoint < len(contentStr) && (contentStr[insertionPoint] == '\n' || contentStr[insertionPoint] == '\r') {
			insertionPoint++
		}
		// Insert with proper spacing: single newline after header, then entry, then newline before existing content
		newContent = contentStr[:loc[1]] + entry + "\n" + contentStr[insertionPoint:]
	} else {
		// Header not found, prepend everything.
		newContent = fmt.Sprintf("%s\n\n%s\n\n%s", header, entry, contentStr)
	}

	return os.WriteFile(changelogPath, []byte(newContent), 0644)
}

// stageChangesForRelease stages the modified files for the release commit
func (g *Generator) stageChangesForRelease() error {
	changelogPath := filepath.Join(g.cfg.RepoPath, "CHANGELOG.md")
	relativeChangelog, err := filepath.Rel(g.cfg.RepoPath, changelogPath)
	if err != nil {
		relativeChangelog = "CHANGELOG.md"
	}

	relativeCacheFile, err := filepath.Rel(g.cfg.RepoPath, g.cfg.CacheFile)
	if err != nil {
		relativeCacheFile = g.cfg.CacheFile
	}

	// Add CHANGELOG.md to git index
	if err := g.gitWalker.AddFile(relativeChangelog); err != nil {
		return fmt.Errorf("failed to add %s: %w", relativeChangelog, err)
	}

	// Add cache file to git index
	if err := g.gitWalker.AddFile(relativeCacheFile); err != nil {
		return fmt.Errorf("failed to add %s: %w", relativeCacheFile, err)
	}

	// Note: Individual incoming files are now removed during the main processing loop
	// No need to remove the entire directory here

	return nil
}
```
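This compare view does not show how the new entry points are invoked from the command line. The sketch below is a hypothetical driver, assuming flags map one-to-one onto the new Config fields (shown in the config diff further down); `SyncDatabase`, `ProcessIncomingPR`, `CreateNewChangelogEntry`, and `Generate` are the methods from this change, while `NewGenerator`, the `--sync-db` and `--incoming-pr` flags, and the dispatch order are assumptions. Only `--process-prs` is confirmed by the release workflow above.

```go
package main

import (
	"fmt"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/changelog"
	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
)

// run is an illustrative dispatcher, not the repository's actual main.go.
func run(cfg *config.Config) error {
	g, err := changelog.NewGenerator(cfg) // assumed constructor
	if err != nil {
		return err
	}

	switch {
	case cfg.SyncDB:
		// --sync-db (assumed flag): force a PR sync, rebuild git history,
		// and verify commit-PR mappings.
		return g.SyncDatabase()
	case cfg.IncomingPR > 0:
		// --incoming-pr (assumed flag): stage a per-PR entry under incoming/.
		return g.ProcessIncomingPR(cfg.IncomingPR)
	case cfg.ProcessPRsVersion != "":
		// --process-prs <tag>, as invoked by the release workflow above.
		return g.CreateNewChangelogEntry(cfg.ProcessPRsVersion)
	default:
		out, err := g.Generate()
		if err != nil {
			return err
		}
		fmt.Print(out)
		return nil
	}
}
```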
cmd/generate_changelog/internal/changelog/processing_test.go (new file, 262 lines)
```go
package changelog

import (
	"os"
	"path/filepath"
	"strings"
	"testing"

	"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
)

func TestDetectVersion(t *testing.T) {
	tempDir := t.TempDir()

	tests := []struct {
		name              string
		versionNixContent string
		expectedVersion   string
		shouldError       bool
	}{
		{
			name:              "valid version.nix",
			versionNixContent: `"1.2.3"`,
			expectedVersion:   "1.2.3",
			shouldError:       false,
		},
		{
			name:              "version with extra whitespace",
			versionNixContent: `"1.2.3" `,
			expectedVersion:   "1.2.3",
			shouldError:       false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Create version.nix file
			versionNixPath := filepath.Join(tempDir, "version.nix")
			if err := os.WriteFile(versionNixPath, []byte(tt.versionNixContent), 0644); err != nil {
				t.Fatalf("Failed to create version.nix: %v", err)
			}

			cfg := &config.Config{
				RepoPath: tempDir,
			}

			g := &Generator{cfg: cfg}

			version, err := g.detectVersion()
			if tt.shouldError && err == nil {
				t.Errorf("Expected error but got none")
			}
			if !tt.shouldError && err != nil {
				t.Errorf("Unexpected error: %v", err)
			}
			if version != tt.expectedVersion {
				t.Errorf("Expected version '%s', got '%s'", tt.expectedVersion, version)
			}

			// Clean up
			os.Remove(versionNixPath)
		})
	}
}

func TestInsertVersionAtTop_ImprovedRobustness(t *testing.T) {
	tempDir := t.TempDir()
	changelogPath := filepath.Join(tempDir, "CHANGELOG.md")

	cfg := &config.Config{
		RepoPath: tempDir,
	}

	g := &Generator{cfg: cfg}

	tests := []struct {
		name            string
		existingContent string
		entry           string
		expectedContent string
	}{
		{
			name:            "header with trailing spaces",
			existingContent: "# Changelog \n\n## v1.0.0\n- Old content",
			entry:           "## v2.0.0\n- New content",
			expectedContent: "# Changelog \n\n## v2.0.0\n- New content\n## v1.0.0\n- Old content",
		},
		{
			name:            "header with different line endings",
			existingContent: "# Changelog\r\n\r\n## v1.0.0\r\n- Old content",
			entry:           "## v2.0.0\n- New content",
			expectedContent: "# Changelog\r\n\r\n## v2.0.0\n- New content\n## v1.0.0\r\n- Old content",
		},
		{
			name:            "no existing header",
			existingContent: "Some existing content without header",
			entry:           "## v1.0.0\n- New content",
			expectedContent: "# Changelog\n\n## v1.0.0\n- New content\n\nSome existing content without header",
		},
		{
			name:            "new file creation",
			existingContent: "",
			entry:           "## v1.0.0\n- Initial release",
			expectedContent: "# Changelog\n\n## v1.0.0\n- Initial release\n",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Write existing content (or create empty file)
			if tt.existingContent != "" {
				if err := os.WriteFile(changelogPath, []byte(tt.existingContent), 0644); err != nil {
					t.Fatalf("Failed to write existing content: %v", err)
				}
			} else {
				// Remove file if it exists to test new file creation
				os.Remove(changelogPath)
			}

			// Insert new version
			if err := g.insertVersionAtTop(tt.entry); err != nil {
				t.Fatalf("insertVersionAtTop failed: %v", err)
			}

			// Read result
			result, err := os.ReadFile(changelogPath)
			if err != nil {
				t.Fatalf("Failed to read result: %v", err)
			}

			if string(result) != tt.expectedContent {
				t.Errorf("Expected:\n%q\nGot:\n%q", tt.expectedContent, string(result))
			}
		})
	}
}

func TestProcessIncomingPRs_FileAggregation(t *testing.T) {
	tempDir := t.TempDir()
	incomingDir := filepath.Join(tempDir, "incoming")

	// Create incoming directory and files
	if err := os.MkdirAll(incomingDir, 0755); err != nil {
		t.Fatalf("Failed to create incoming dir: %v", err)
	}

	// Create test incoming files
	file1Content := "## PR #1\n- Feature A"
	file2Content := "## PR #2\n- Feature B"

	if err := os.WriteFile(filepath.Join(incomingDir, "1.txt"), []byte(file1Content), 0644); err != nil {
		t.Fatalf("Failed to create test file: %v", err)
	}
	if err := os.WriteFile(filepath.Join(incomingDir, "2.txt"), []byte(file2Content), 0644); err != nil {
		t.Fatalf("Failed to create test file: %v", err)
	}

	// Test file aggregation logic by calling the internal functions
	files, err := filepath.Glob(filepath.Join(incomingDir, "*.txt"))
	if err != nil {
		t.Fatalf("Failed to glob files: %v", err)
	}

	if len(files) != 2 {
		t.Fatalf("Expected 2 files, got %d", len(files))
	}

	// Test content aggregation
	var content strings.Builder
	var processingErrors []string
	for _, file := range files {
		data, err := os.ReadFile(file)
		if err != nil {
			processingErrors = append(processingErrors, err.Error())
			continue
		}
		content.WriteString(string(data))
		content.WriteString("\n")
	}

	if len(processingErrors) > 0 {
		t.Fatalf("Unexpected processing errors: %v", processingErrors)
	}

	aggregatedContent := content.String()
	if !strings.Contains(aggregatedContent, "Feature A") {
		t.Errorf("Aggregated content should contain 'Feature A'")
	}
	if !strings.Contains(aggregatedContent, "Feature B") {
		t.Errorf("Aggregated content should contain 'Feature B'")
	}
}

func TestFileProcessing_ErrorHandling(t *testing.T) {
	tempDir := t.TempDir()
	incomingDir := filepath.Join(tempDir, "incoming")

	// Create incoming directory with one good file and one unreadable file
	if err := os.MkdirAll(incomingDir, 0755); err != nil {
		t.Fatalf("Failed to create incoming dir: %v", err)
	}

	// Create a good file
	if err := os.WriteFile(filepath.Join(incomingDir, "1.txt"), []byte("content"), 0644); err != nil {
		t.Fatalf("Failed to create test file: %v", err)
	}

	// Create an unreadable file (simulate permission error)
	unreadableFile := filepath.Join(incomingDir, "2.txt")
	if err := os.WriteFile(unreadableFile, []byte("content"), 0000); err != nil {
		t.Fatalf("Failed to create unreadable file: %v", err)
	}
	defer os.Chmod(unreadableFile, 0644) // Clean up

	// Test error aggregation logic
	files, err := filepath.Glob(filepath.Join(incomingDir, "*.txt"))
	if err != nil {
		t.Fatalf("Failed to glob files: %v", err)
	}

	var content strings.Builder
	var processingErrors []string
	for _, file := range files {
		data, err := os.ReadFile(file)
		if err != nil {
			processingErrors = append(processingErrors, err.Error())
			continue
		}
		content.WriteString(string(data))
		content.WriteString("\n")
	}

	if len(processingErrors) == 0 {
		t.Errorf("Expected processing errors due to unreadable file")
	}

	// Verify error message format
	errorMsg := strings.Join(processingErrors, "; ")
	if !strings.Contains(errorMsg, "2.txt") {
		t.Errorf("Error message should mention the problematic file")
	}
}

func TestEnsureIncomingDirCreation(t *testing.T) {
	tempDir := t.TempDir()
	incomingDir := filepath.Join(tempDir, "incoming")

	cfg := &config.Config{
		IncomingDir: incomingDir,
	}

	g := &Generator{cfg: cfg}

	err := g.ensureIncomingDir()
	if err != nil {
		t.Fatalf("ensureIncomingDir failed: %v", err)
	}

	if _, err := os.Stat(incomingDir); os.IsNotExist(err) {
		t.Errorf("Incoming directory was not created")
	}
}
```
```diff
@@ -1,15 +1,20 @@
 package config

 type Config struct {
-	RepoPath        string
-	OutputFile      string
-	Limit           int
-	Version         string
-	SaveData        bool
-	CacheFile       string
-	NoCache         bool
-	RebuildCache    bool
-	GitHubToken     string
-	ForcePRSync     bool
-	EnableAISummary bool
+	RepoPath          string
+	OutputFile        string
+	Limit             int
+	Version           string
+	SaveData          bool
+	CacheFile         string
+	NoCache           bool
+	RebuildCache      bool
+	GitHubToken       string
+	ForcePRSync       bool
+	EnableAISummary   bool
+	IncomingPR        int
+	ProcessPRsVersion string
+	IncomingDir       string
+	Push              bool
+	SyncDB            bool
 }
```
```diff
@@ -2,6 +2,7 @@ package git

 import (
 	"fmt"
+	"os"
 	"regexp"
 	"strconv"
 	"strings"
@@ -11,10 +12,19 @@ import (
 	"github.com/go-git/go-git/v5/plumbing"
 	"github.com/go-git/go-git/v5/plumbing/object"
 	"github.com/go-git/go-git/v5/plumbing/storer"
+	"github.com/go-git/go-git/v5/plumbing/transport/http"
 )

 var (
-	versionPattern = regexp.MustCompile(`Update version to (v\d+\.\d+\.\d+)`)
+	// The versionPattern matches version commit messages with or without the optional "chore(release): " prefix.
+	// Examples of matching commit messages:
+	// - "chore(release): Update version to v1.2.3"
+	// - "Update version to v1.2.3"
+	// Examples of non-matching commit messages:
+	// - "fix: Update version to v1.2.3" (missing "chore(release): " or "Update version to")
+	// - "chore(release): Update version to 1.2.3" (missing "v" prefix in version)
+	// - "Update version to v1.2" (incomplete version number)
+	versionPattern = regexp.MustCompile(`(?:chore\(release\): )?Update version to (v\d+\.\d+\.\d+)`)
 	prPattern      = regexp.MustCompile(`Merge pull request #(\d+)`)
 )
```
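A quick, runnable check of the widened pattern against some of the documented examples (this standalone sketch reuses the regex from the diff; the output formatting is illustrative):

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as in the diff: the "chore(release): " prefix is optional,
// and the capture group extracts the full semantic version.
var versionPattern = regexp.MustCompile(`(?:chore\(release\): )?Update version to (v\d+\.\d+\.\d+)`)

func main() {
	for _, msg := range []string{
		"chore(release): Update version to v1.2.3", // matches; captures "v1.2.3"
		"Update version to v1.2.3",                 // matches without the prefix
		"chore(release): Update version to 1.2.3",  // no match: missing "v" prefix
		"Update version to v1.2",                   // no match: incomplete version
	} {
		fmt.Printf("%q -> %v\n", msg, versionPattern.FindStringSubmatch(msg))
	}
}
```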
```diff
@@ -400,3 +410,165 @@ func dedupInts(ints []int) []int {

 	return result
 }
+
+// Worktree returns the git worktree for performing git operations
+func (w *Walker) Worktree() (*git.Worktree, error) {
+	return w.repo.Worktree()
+}
+
+// Repository returns the underlying git repository
+func (w *Walker) Repository() *git.Repository {
+	return w.repo
+}
+
+// IsWorkingDirectoryClean checks if the working directory has any uncommitted changes
+func (w *Walker) IsWorkingDirectoryClean() (bool, error) {
+	worktree, err := w.repo.Worktree()
+	if err != nil {
+		return false, fmt.Errorf("failed to get worktree: %w", err)
+	}
+
+	status, err := worktree.Status()
+	if err != nil {
+		return false, fmt.Errorf("failed to get git status: %w", err)
+	}
+
+	return status.IsClean(), nil
+}
+
+// GetStatusDetails returns a detailed status of the working directory
+func (w *Walker) GetStatusDetails() (string, error) {
+	worktree, err := w.repo.Worktree()
+	if err != nil {
+		return "", fmt.Errorf("failed to get worktree: %w", err)
+	}
+
+	status, err := worktree.Status()
+	if err != nil {
+		return "", fmt.Errorf("failed to get git status: %w", err)
+	}
+
+	if status.IsClean() {
+		return "", nil
+	}
+
+	var details strings.Builder
+	for file, fileStatus := range status {
+		details.WriteString(fmt.Sprintf("  %c%c %s\n", fileStatus.Staging, fileStatus.Worktree, file))
+	}
+
+	return details.String(), nil
+}
+
+// AddFile adds a file to the git index
+func (w *Walker) AddFile(filename string) error {
+	worktree, err := w.repo.Worktree()
+	if err != nil {
+		return fmt.Errorf("failed to get worktree: %w", err)
+	}
+
+	_, err = worktree.Add(filename)
+	if err != nil {
+		return fmt.Errorf("failed to add file %s: %w", filename, err)
+	}
+
+	return nil
+}
+
+// CommitChanges creates a commit with the given message
+func (w *Walker) CommitChanges(message string) (plumbing.Hash, error) {
+	worktree, err := w.repo.Worktree()
+	if err != nil {
+		return plumbing.ZeroHash, fmt.Errorf("failed to get worktree: %w", err)
+	}
+
+	// Get git config for author information
+	cfg, err := w.repo.Config()
+	if err != nil {
+		return plumbing.ZeroHash, fmt.Errorf("failed to get git config: %w", err)
+	}
+
+	var authorName, authorEmail string
+	if cfg.User.Name != "" {
+		authorName = cfg.User.Name
+	} else {
+		authorName = "Changelog Bot"
+	}
+	if cfg.User.Email != "" {
+		authorEmail = cfg.User.Email
+	} else {
+		authorEmail = "bot@changelog.local"
+	}
+
+	commit, err := worktree.Commit(message, &git.CommitOptions{
+		Author: &object.Signature{
+			Name:  authorName,
+			Email: authorEmail,
+			When:  time.Now(),
+		},
+	})
+	if err != nil {
+		return plumbing.ZeroHash, fmt.Errorf("failed to commit: %w", err)
+	}
+
+	return commit, nil
+}
+
+// PushToRemote pushes the current branch to the remote repository
+// It automatically detects GitHub repositories and uses token authentication when available
+func (w *Walker) PushToRemote() error {
+	pushOptions := &git.PushOptions{}
+
+	// Check if we have a GitHub token for authentication
+	if githubToken := os.Getenv("GITHUB_TOKEN"); githubToken != "" {
+		// Get remote URL to check if it's a GitHub repository
+		remotes, err := w.repo.Remotes()
+		if err == nil && len(remotes) > 0 {
+			// Get the origin remote (or first remote if origin doesn't exist)
+			var remote *git.Remote
+			for _, r := range remotes {
+				if r.Config().Name == "origin" {
+					remote = r
+					break
+				}
+			}
+			if remote == nil {
+				remote = remotes[0]
+			}
+
+			// Check if this is a GitHub repository
+			urls := remote.Config().URLs
+			if len(urls) > 0 {
+				url := urls[0]
+				if strings.Contains(url, "github.com") {
+					// Use token authentication for GitHub repositories
+					pushOptions.Auth = &http.BasicAuth{
+						Username: "token", // GitHub expects "token" as username
+						Password: githubToken,
+					}
+				}
+			}
+		}
+	}
+
+	err := w.repo.Push(pushOptions)
+	if err != nil {
+		return fmt.Errorf("failed to push: %w", err)
+	}
+	return nil
+}
+
+// RemoveFile removes a file from both the working directory and git index
+func (w *Walker) RemoveFile(filename string) error {
+	worktree, err := w.repo.Worktree()
+	if err != nil {
+		return fmt.Errorf("failed to get worktree: %w", err)
+	}
+
+	_, err = worktree.Remove(filename)
+	if err != nil {
+		return fmt.Errorf("failed to remove file %s: %w", filename, err)
+	}
+
+	return nil
+}
```
@@ -100,35 +100,89 @@ func (c *Client) FetchPRs(prNumbers []int) ([]*PR, error) {
|
||||
return prs, nil
|
||||
}
|
||||
|
||||
func (c *Client) fetchSinglePR(ctx context.Context, prNumber int) (*PR, error) {
|
||||
pr, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
|
||||
// GetPRValidationDetails fetches only the data needed for validation (lightweight).
|
||||
func (c *Client) GetPRValidationDetails(prNumber int) (*PRDetails, error) {
|
||||
ctx := context.Background()
|
||||
ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("failed to get PR %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
commits, _, err := c.client.PullRequests.ListCommits(ctx, c.owner, c.repo, prNumber, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch commits: %w", err)
|
||||
// Only return validation data, no commits fetched
|
||||
details := &PRDetails{
|
||||
PR: nil, // Will be populated later if needed
|
||||
State: getString(ghPR.State),
|
||||
Mergeable: ghPR.Mergeable != nil && *ghPR.Mergeable,
|
||||
}
|
||||
|
||||
return details, nil
|
||||
}
|
||||
|
||||
// GetPRWithCommits fetches the full PR and its commits.
|
||||
func (c *Client) GetPRWithCommits(prNumber int) (*PR, error) {
|
||||
ctx := context.Background()
|
||||
ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get PR %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
return c.buildPRWithCommits(ctx, ghPR)
|
||||
}
|
||||
|
||||
// GetPRDetails fetches a comprehensive set of details for a single PR.
|
||||
// Deprecated: Use GetPRValidationDetails + GetPRWithCommits for better performance
|
||||
func (c *Client) GetPRDetails(prNumber int) (*PRDetails, error) {
|
||||
ctx := context.Background()
|
||||
ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get PR %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
// Reuse the existing logic to build the base PR object
|
||||
pr, err := c.buildPRWithCommits(ctx, ghPR)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to build PR details for %d: %w", prNumber, err)
|
||||
}
|
||||
|
||||
details := &PRDetails{
|
||||
PR: pr,
|
||||
State: getString(ghPR.State),
|
||||
Mergeable: ghPR.Mergeable != nil && *ghPR.Mergeable,
|
||||
}
|
||||
|
||||
return details, nil
|
||||
}
|
||||
|
||||
// buildPRWithCommits fetches commits and constructs a PR object from a GitHub API response
|
||||
func (c *Client) buildPRWithCommits(ctx context.Context, ghPR *github.PullRequest) (*PR, error) {
|
||||
commits, _, err := c.client.PullRequests.ListCommits(ctx, c.owner, c.repo, *ghPR.Number, nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fetch commits for PR %d: %w", *ghPR.Number, err)
|
||||
}
|
||||
|
||||
return c.convertGitHubPR(ghPR, commits), nil
|
||||
}
|
||||
|
||||
// convertGitHubPR transforms GitHub API data into our internal PR struct (pure function)
|
||||
func (c *Client) convertGitHubPR(ghPR *github.PullRequest, commits []*github.RepositoryCommit) *PR {
|
||||
|
||||
result := &PR{
|
||||
Number: prNumber,
|
||||
Title: getString(pr.Title),
|
||||
Body: getString(pr.Body),
|
||||
URL: getString(pr.HTMLURL),
|
||||
Number: *ghPR.Number,
|
||||
Title: getString(ghPR.Title),
|
||||
Body: getString(ghPR.Body),
|
||||
URL: getString(ghPR.HTMLURL),
|
||||
Commits: make([]PRCommit, 0, len(commits)),
|
||||
}
|
||||
|
||||
if pr.MergedAt != nil {
|
||||
result.MergedAt = pr.MergedAt.Time
|
||||
if ghPR.MergedAt != nil {
|
||||
result.MergedAt = ghPR.MergedAt.Time
|
||||
}
|
||||
|
||||
if pr.User != nil {
|
||||
result.Author = getString(pr.User.Login)
|
||||
result.AuthorURL = getString(pr.User.HTMLURL)
|
||||
userType := getString(pr.User.Type) // GitHub API returns "User", "Organization", or "Bot"
|
||||
if ghPR.User != nil {
|
||||
result.Author = getString(ghPR.User.Login)
|
||||
result.AuthorURL = getString(ghPR.User.HTMLURL)
|
||||
userType := getString(ghPR.User.Type)
|
||||
|
||||
// Convert GitHub API type to lowercase
|
||||
switch userType {
|
||||
case "User":
|
||||
result.AuthorType = "user"
|
||||
@@ -137,12 +191,12 @@ func (c *Client) fetchSinglePR(ctx context.Context, prNumber int) (*PR, error) {
|
||||
case "Bot":
|
||||
result.AuthorType = "bot"
|
||||
default:
|
||||
result.AuthorType = "user" // Default fallback
|
||||
result.AuthorType = "user"
|
||||
}
|
||||
}
|
||||
|
||||
if pr.MergeCommitSHA != nil {
|
||||
result.MergeCommit = *pr.MergeCommitSHA
|
||||
if ghPR.MergeCommitSHA != nil {
|
||||
result.MergeCommit = *ghPR.MergeCommitSHA
|
||||
}
|
||||
|
||||
for _, commit := range commits {
|
||||
@@ -153,12 +207,34 @@ func (c *Client) fetchSinglePR(ctx context.Context, prNumber int) (*PR, error) {
|
||||
}
|
||||
if commit.Commit.Author != nil {
|
||||
prCommit.Author = getString(commit.Commit.Author.Name)
|
||||
prCommit.Email = getString(commit.Commit.Author.Email) // Extract author email from GitHub API response
|
||||
// Capture actual commit timestamp from GitHub API
|
||||
if commit.Commit.Author.Date != nil {
|
||||
prCommit.Date = commit.Commit.Author.Date.Time
|
||||
}
|
||||
}
|
||||
// Capture parent commit SHAs for merge detection
|
||||
if commit.Parents != nil {
|
||||
for _, parent := range commit.Parents {
|
||||
if parent.SHA != nil {
|
||||
prCommit.Parents = append(prCommit.Parents, *parent.SHA)
|
||||
}
|
||||
}
|
||||
}
|
||||
result.Commits = append(result.Commits, prCommit)
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
return result
|
||||
}
|
||||
|
||||
func (c *Client) fetchSinglePR(ctx context.Context, prNumber int) (*PR, error) {
|
||||
ghPR, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return c.buildPRWithCommits(ctx, ghPR)
|
||||
}
|
||||
|
||||
func getString(s *string) string {
|
||||
@@ -332,6 +408,7 @@ func (c *Client) FetchAllMergedPRsGraphQL(since time.Time) ([]*PR, error) {
|
||||
SHA: commitNode.Commit.OID,
|
||||
Message: strings.TrimSpace(commitNode.Commit.Message),
|
||||
Author: commitNode.Commit.Author.Name,
|
||||
Date: commitNode.Commit.AuthoredDate, // Use actual commit timestamp
|
||||
}
|
||||
pr.Commits = append(pr.Commits, commit)
|
||||
}
|
||||
|
||||
59
cmd/generate_changelog/internal/github/email_test.go
Normal file
59
cmd/generate_changelog/internal/github/email_test.go
Normal file
@@ -0,0 +1,59 @@
|
||||
package github
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestPRCommitEmailHandling(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
commit PRCommit
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "Valid email field",
|
||||
commit: PRCommit{
|
||||
SHA: "abc123",
|
||||
Message: "Fix bug in authentication",
|
||||
Author: "John Doe",
|
||||
Email: "john.doe@example.com",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"def456"},
|
||||
},
|
||||
expected: "john.doe@example.com",
|
||||
},
|
||||
{
|
||||
name: "Empty email field",
|
||||
commit: PRCommit{
|
||||
SHA: "abc123",
|
||||
Message: "Fix bug in authentication",
|
||||
Author: "John Doe",
|
||||
Email: "",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"def456"},
|
||||
},
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "Email field with proper initialization",
|
||||
commit: PRCommit{
|
||||
SHA: "def789",
|
||||
Message: "Add new feature",
|
||||
Author: "Jane Smith",
|
||||
Email: "jane.smith@company.org",
|
||||
Date: time.Now(),
|
||||
Parents: []string{"ghi012"},
|
||||
},
|
||||
expected: "jane.smith@company.org",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if tt.commit.Email != tt.expected {
|
||||
t.Errorf("Expected email %q, got %q", tt.expected, tt.commit.Email)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -15,10 +15,20 @@ type PR struct {
|
||||
MergeCommit string
|
||||
}
|
||||
|
||||
// PRDetails encapsulates all relevant information about a Pull Request.
|
||||
type PRDetails struct {
|
||||
*PR
|
||||
State string
|
||||
Mergeable bool
|
||||
}
|
||||
|
||||
type PRCommit struct {
|
||||
SHA string
|
||||
Message string
|
||||
Author string
|
||||
Email string // Author email from GitHub API, empty if not public
|
||||
Date time.Time // Timestamp field
|
||||
Parents []string // Parent commits (for merge detection)
|
||||
}
|
||||
|
||||
// GraphQL query structures for hasura client
|
||||
@@ -43,9 +53,10 @@ type PullRequestsQuery struct {
|
||||
Commits struct {
|
||||
Nodes []struct {
|
||||
Commit struct {
|
||||
OID string `graphql:"oid"`
|
||||
Message string
|
||||
Author struct {
|
||||
OID string `graphql:"oid"`
|
||||
Message string
|
||||
AuthoredDate time.Time `graphql:"authoredDate"`
|
||||
Author struct {
|
||||
Name string
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,7 +21,8 @@ var rootCmd = &cobra.Command{
|
||||
Long: `A high-performance changelog generator that walks git history,
|
||||
collects version information and pull requests, and generates a
|
||||
comprehensive changelog in markdown format.`,
|
||||
RunE: run,
|
||||
RunE: run,
|
||||
SilenceUsage: true, // Don't show usage on runtime errors, only on flag errors
|
||||
}
|
||||
|
||||
func init() {
|
||||
@@ -36,9 +37,18 @@ func init() {
|
||||
rootCmd.Flags().StringVar(&cfg.GitHubToken, "token", "", "GitHub API token (or set GITHUB_TOKEN env var)")
|
||||
rootCmd.Flags().BoolVar(&cfg.ForcePRSync, "force-pr-sync", false, "Force a full PR sync from GitHub (ignores cache age)")
|
||||
rootCmd.Flags().BoolVar(&cfg.EnableAISummary, "ai-summarize", false, "Generate AI-enhanced summaries using Fabric")
|
||||
rootCmd.Flags().IntVar(&cfg.IncomingPR, "incoming-pr", 0, "Pre-process PR for changelog (provide PR number)")
|
||||
rootCmd.Flags().StringVar(&cfg.ProcessPRsVersion, "process-prs", "", "Process all incoming PR files for release (provide version like v1.4.262)")
|
||||
rootCmd.Flags().StringVar(&cfg.IncomingDir, "incoming-dir", "./cmd/generate_changelog/incoming", "Directory for incoming PR files")
|
||||
rootCmd.Flags().BoolVar(&cfg.Push, "push", false, "Enable automatic git push after creating an incoming entry")
|
||||
rootCmd.Flags().BoolVar(&cfg.SyncDB, "sync-db", false, "Synchronize and validate database integrity with git history and GitHub PRs")
|
||||
}
|
||||
|
||||
func run(cmd *cobra.Command, args []string) error {
|
||||
if cfg.IncomingPR > 0 && cfg.ProcessPRsVersion != "" {
|
||||
return fmt.Errorf("--incoming-pr and --process-prs are mutually exclusive flags")
|
||||
}
|
||||
|
||||
if cfg.GitHubToken == "" {
|
||||
cfg.GitHubToken = os.Getenv("GITHUB_TOKEN")
|
||||
}
|
||||
@@ -48,6 +58,18 @@ func run(cmd *cobra.Command, args []string) error {
|
||||
return fmt.Errorf("failed to create changelog generator: %w", err)
|
||||
}
|
||||
|
||||
if cfg.IncomingPR > 0 {
|
||||
return generator.ProcessIncomingPR(cfg.IncomingPR)
|
||||
}
|
||||
|
||||
if cfg.ProcessPRsVersion != "" {
|
||||
return generator.CreateNewChangelogEntry(cfg.ProcessPRsVersion)
|
||||
}
|
||||
|
||||
if cfg.SyncDB {
|
||||
return generator.SyncDatabase()
|
||||
}
|
||||
|
||||
output, err := generator.Generate()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate changelog: %w", err)
|
||||
@@ -77,8 +99,5 @@ func main() {
|
||||
}
|
||||
}
|
||||
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
rootCmd.Execute()
|
||||
}
|
||||
|
||||
@@ -110,6 +110,10 @@ _fabric() {
|
||||
'(--liststrategies)--liststrategies[List all strategies]' \
|
||||
'(--listvendors)--listvendors[List all vendors]' \
|
||||
'(--shell-complete-list)--shell-complete-list[Output raw list without headers/formatting (for shell completion)]' \
|
||||
'(--suppress-think)--suppress-think[Suppress text enclosed in thinking tags]' \
|
||||
'(--think-start-tag)--think-start-tag[Start tag for thinking sections (default: <think>)]:start tag:' \
|
||||
'(--think-end-tag)--think-end-tag[End tag for thinking sections (default: </think>)]:end tag:' \
|
||||
'(--disable-responses-api)--disable-responses-api[Disable OpenAI Responses API (default: false)]' \
|
||||
'(-h --help)'{-h,--help}'[Show this help message]' \
|
||||
'*:arguments:'
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ _fabric() {
|
||||
_get_comp_words_by_ref -n : cur prev words cword
|
||||
|
||||
# Define all possible options/flags
|
||||
local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --language -g --scrape_url -u --scrape_question -q --seed -e --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"
|
||||
local opts="--pattern -p --variable -v --context -C --session --attachment -a --setup -S --temperature -t --topp -T --stream -s --presencepenalty -P --raw -r --frequencypenalty -F --listpatterns -l --listmodels -L --listcontexts -x --listsessions -X --updatepatterns -U --copy -c --model -m --modelContextLength --output -o --output-session --latest -n --changeDefaultModel -d --youtube -y --playlist --transcript --transcript-with-timestamps --comments --metadata --language -g --scrape_url -u --scrape_question -q --seed -e --wipecontext -w --wipesession -W --printcontext --printsession --readability --input-has-vars --dry-run --serve --serveOllama --address --api-key --config --search --search-location --image-file --image-size --image-quality --image-compression --image-background --suppress-think --think-start-tag --think-end-tag --disable-responses-api --version --listextensions --addextension --rmextension --strategy --liststrategies --listvendors --shell-complete-list --help -h"
|
||||
|
||||
# Helper function for dynamic completions
|
||||
_fabric_get_list() {
|
||||
@@ -81,7 +81,7 @@ _fabric() {
|
||||
return 0
|
||||
;;
|
||||
# Options requiring simple arguments (no specific completion logic here)
|
||||
-v | --variable | -t | --temperature | -T | --topp | -P | --presencepenalty | -F | --frequencypenalty | --modelContextLength | -n | --latest | -y | --youtube | -g | --language | -u | --scrape_url | -q | --scrape_question | -e | --seed | --address | --api-key | --search-location | --image-compression)
|
||||
-v | --variable | -t | --temperature | -T | --topp | -P | --presencepenalty | -F | --frequencypenalty | --modelContextLength | -n | --latest | -y | --youtube | -g | --language | -u | --scrape_url | -q | --scrape_question | -e | --seed | --address | --api-key | --search-location | --image-compression | --think-start-tag | --think-end-tag)
|
||||
# No specific completion suggestions, user types the value
|
||||
return 0
|
||||
;;
|
||||
|
||||
@@ -69,6 +69,8 @@ complete -c fabric -l image-background -d "Background type: opaque, transparent
|
||||
complete -c fabric -l addextension -d "Register a new extension from config file path" -r -a "*.yaml *.yml"
|
||||
complete -c fabric -l rmextension -d "Remove a registered extension by name" -a "(__fabric_get_extensions)"
|
||||
complete -c fabric -l strategy -d "Choose a strategy from the available strategies" -a "(__fabric_get_strategies)"
|
||||
complete -c fabric -l think-start-tag -d "Start tag for thinking sections (default: <think>)"
|
||||
complete -c fabric -l think-end-tag -d "End tag for thinking sections (default: </think>)"
|
||||
|
||||
# Boolean flags (no arguments)
|
||||
complete -c fabric -s S -l setup -d "Run setup for all reconfigurable parts of fabric"
|
||||
@@ -98,4 +100,6 @@ complete -c fabric -l listextensions -d "List all registered extensions"
|
||||
complete -c fabric -l liststrategies -d "List all strategies"
|
||||
complete -c fabric -l listvendors -d "List all vendors"
|
||||
complete -c fabric -l shell-complete-list -d "Output raw list without headers/formatting (for shell completion)"
|
||||
complete -c fabric -l suppress-think -d "Suppress text enclosed in thinking tags"
|
||||
complete -c fabric -l disable-responses-api -d "Disable OpenAI Responses API (default: false)"
|
||||
complete -c fabric -s h -l help -d "Show this help message"
|
||||
|
||||
8
data/patterns/generate_code_rules/system.md
Normal file
8
data/patterns/generate_code_rules/system.md
Normal file
@@ -0,0 +1,8 @@
|
||||
# IDENTITY AND PURPOSE
|
||||
|
||||
You are a senior developer and expert prompt engineer. Think ultra hard to distill the following transcription or tutorial in as little set of unique rules as possible intended for best practices guidance in AI assisted coding tools, each rule has to be in one sentence as a direct instruction, avoid explanations and cosmetic language. Output in Markdown, I prefer bullet dash (-).
|
||||
|
||||
---
|
||||
|
||||
# TRANSCRIPT
|
||||
|
||||
373
docs/Automated-ChangeLog.md
Normal file
373
docs/Automated-ChangeLog.md
Normal file
@@ -0,0 +1,373 @@
|
||||
# Automated CHANGELOG Entry System for CI/CD
|
||||
|
||||
## Overview
|
||||
|
||||
This document outlines a comprehensive system for automatically generating and maintaining CHANGELOG.md entries during the CI/CD process. The system builds upon the existing `generate_changelog` tool and integrates seamlessly with GitHub's pull request workflow.
|
||||
|
||||
## Current State Analysis
|
||||
|
||||
### Existing Infrastructure
|
||||
|
||||
The `generate_changelog` tool already provides:
|
||||
|
||||
- **High-performance Git history walking** with one-pass algorithm
|
||||
- **GitHub API integration** with GraphQL optimization and smart caching
|
||||
- **SQLite-based caching** for instant incremental updates
|
||||
- **AI-powered summaries** using Fabric integration
|
||||
- **Concurrent processing** for optimal performance
|
||||
- **Version detection** from git tags and commit patterns
|
||||
|
||||
### Key Components
|
||||
|
||||
- **Main entry point**: `cmd/generate_changelog/main.go`
|
||||
- **Core generation logic**: `internal/changelog/generator.go`
|
||||
- **AI summarization**: `internal/changelog/summarize.go`
|
||||
- **Caching system**: `internal/cache/cache.go`
|
||||
- **GitHub integration**: `internal/github/client.go`
|
||||
- **Git operations**: `internal/git/walker.go`
|
||||
|
||||
## Proposed Automated System
|
||||
|
||||
### Developer Workflow
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
A[Developer creates feature branch] --> B[Codes feature]
|
||||
B --> C[Creates Pull Request]
|
||||
C --> D[PR is open and ready]
|
||||
D --> E[Developer runs: generate_changelog --incoming-pr XXXX]
|
||||
E --> F[Tool validates PR is open/mergeable]
|
||||
F --> G[Tool creates incoming/XXXX.txt with AI summary]
|
||||
G --> H[Auto-commit and push to branch]
|
||||
H --> I[PR includes pre-processed changelog entry]
|
||||
I --> J[PR gets reviewed and merged]
|
||||
```
|
||||
|
||||
### CI/CD Integration
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
A[PR merged to main] --> B[Version bump workflow triggered]
|
||||
B --> C[generate_changelog --process-prs]
|
||||
C --> D[Scan incoming/ directory]
|
||||
D --> E[Concatenate all incoming/*.txt files]
|
||||
E --> F[Insert new version at top of CHANGELOG.md]
|
||||
F --> G[Store entry in versions table]
|
||||
G --> H[git rm incoming/*.txt files]
|
||||
H --> I[git add CHANGELOG.md and changelog.db, done by the tool]
|
||||
I --> J[Increment version number]
|
||||
J --> K[Commit and tag release]
|
||||
```
|
||||
|
||||
## Implementation Details
|
||||
|
||||
### Phase 1: Pre-Processing PRs
|
||||
|
||||
#### New Command: `--incoming-pr`
|
||||
|
||||
**Usage**: `generate_changelog --incoming-pr 1672`
|
||||
|
||||
**Functionality**:
|
||||
|
||||
1. **Validation**:
|
||||
- Verify PR exists and is open
|
||||
- Check PR is mergeable
|
||||
- Ensure branch is up-to-date
|
||||
- Verify that current git repo is clean (everything committed); do not continue otherwise.
|
||||
|
||||
2. **Content Generation**:
|
||||
- Extract PR metadata (title, author, description)
|
||||
- Collect all commit messages from the PR
|
||||
- Use existing `SummarizeVersionContent` function for AI enhancement
|
||||
- Format as standard changelog entry
|
||||
|
||||
3. **File Creation**:
|
||||
- Generate `./cmd/generate_changelog/incoming/{PR#}.txt`
|
||||
- Include PR header: `### PR [#1672](url) by [author](profile): Title` (as is done currently in the code)
|
||||
- Consider extracting the existing header code for PRs into a helper function for re-use.
|
||||
- Include the AI-summarized changes (generated when we ran all the commit messages through `SummarizeVersionContent`)
|
||||
|
||||
4. **Auto-commit**:
|
||||
- Commit file with message: `chore: incoming 1672 changelog entry`
|
||||
- Optionally push to current branch (use `--push` flag)
|
||||
|
||||
(The PR is now completely ready to be merged with integrated CHANGELOG entry updating)
|
||||
|
||||
#### File Format Example
|
||||
|
||||
```markdown
|
||||
### PR [#1672](https://github.com/danielmiessler/Fabric/pull/1672) by [ksylvan](https://github.com/ksylvan): Changelog Generator Enhancement
|
||||
|
||||
- Added automated CI/CD integration for changelog generation
|
||||
- Implemented pre-processing of PR entries during development
|
||||
- Enhanced caching system for better performance
|
||||
- Added validation for mergeable PR states
|
||||
```
|
||||
|
||||
### Phase 2: Release Processing
|
||||
|
||||
#### New Command: `--process-prs`
|
||||
|
||||
**Usage**: `generate_changelog --process-prs`
|
||||
|
||||
**Integration Point**: `.github/workflows/update-version-and-create-tag.yml`
|
||||
|
||||
(we can do this AFTER the "Update gomod2nix.toml file" step in the workflow, where we
|
||||
already have generated the next version in the "version.nix" file)
|
||||
|
||||
**Functionality**:
|
||||
|
||||
1. **Discovery**: Scan `./cmd/generate_changelog/incoming/` directory
|
||||
2. **Aggregation**: Read and concatenate all `*.txt` files
|
||||
3. **Version Creation**: Generate new version header with current date
|
||||
4. **CHANGELOG Update**: Insert new version at top of existing CHANGELOG.md
|
||||
5. **Database Update**: Store complete entry in `versions` table as `ai_summary`
|
||||
6. **Cleanup**: Remove all processed incoming files
|
||||
7. **Stage Changes**: Add modified files to git staging area
|
||||
|
||||
#### Example Output in CHANGELOG.md
|
||||
|
||||
```markdown
|
||||
# Changelog
|
||||
|
||||
## v1.4.259 (2025-07-18)
|
||||
|
||||
### PR [#1672](https://github.com/danielmiessler/Fabric/pull/1672) by [ksylvan](https://github.com/ksylvan): Changelog Generator Enhancement
|
||||
|
||||
- Added automated CI/CD integration for changelog generation
|
||||
- Implemented pre-processing of PR entries during development
|
||||
- Enhanced caching system for better performance
|
||||
|
||||
### PR [#1671](https://github.com/danielmiessler/Fabric/pull/1671) by [contributor](https://github.com/contributor): Bug Fix
|
||||
|
||||
- Fixed memory leak in caching system
|
||||
- Improved error handling for GitHub API failures
|
||||
|
||||
## v1.4.258 (2025-07-14)
|
||||
[... rest of file ...]
|
||||
```
|
||||
|
||||
## Technical Implementation
|
||||
|
||||
### Configuration Extensions
|
||||
|
||||
Add to `internal/config/config.go`:
|
||||
|
||||
```go
|
||||
type Config struct {
|
||||
// ... existing fields
|
||||
IncomingPR int // PR number for --incoming-pr
|
||||
ProcessPRsVersion string // Flag for --process-prs (new version string)
|
||||
IncomingDir string // Directory for incoming files (default: ./cmd/generate_changelog/incoming/)
|
||||
}
|
||||
```
|
||||
|
||||
### New Command Line Flags
|
||||
|
||||
```go
|
||||
rootCmd.Flags().IntVar(&cfg.IncomingPR, "incoming-pr", 0, "Pre-process PR for changelog (provide PR number)")
|
||||
rootCmd.Flags().StringVar(&cfg.ProcessPRsVersion, "process-prs", "", "Process all incoming PR files for release (provide version like v1.4.262)")
|
||||
rootCmd.Flags().StringVar(&cfg.IncomingDir, "incoming-dir", "./cmd/generate_changelog/incoming", "Directory for incoming PR files")
|
||||
```
|
||||
|
||||
### Core Logic Extensions
|
||||
|
||||
#### PR Pre-processing
|
||||
|
||||
```go
|
||||
func (g *Generator) ProcessIncomingPR(prNumber int) error {
|
||||
// 1. Validate PR state via GitHub API
|
||||
pr, err := g.ghClient.GetPR(prNumber)
|
||||
if err != nil || pr.State != "open" || !pr.Mergeable {
|
||||
return fmt.Errorf("PR %d is not in valid state for processing", prNumber)
|
||||
}
|
||||
|
||||
// 2. Generate changelog content using existing logic
|
||||
content := g.formatPR(pr)
|
||||
|
||||
// 3. Apply AI summarization if enabled
|
||||
if g.cfg.EnableAISummary {
|
||||
content, _ = SummarizeVersionContent(content)
|
||||
}
|
||||
|
||||
// 4. Write to incoming file
|
||||
filename := filepath.Join(g.cfg.IncomingDir, fmt.Sprintf("%d.txt", prNumber))
|
||||
err = os.WriteFile(filename, []byte(content), 0644)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write incoming file: %w", err)
|
||||
}
|
||||
|
||||
// 5. Auto-commit and push
|
||||
return g.commitAndPushIncoming(prNumber, filename)
|
||||
}
|
||||
```
|
||||
|
||||
#### Release Processing
|
||||
|
||||
```go
|
||||
func (g *Generator) ProcessIncomingPRs(version string) error {
|
||||
// 1. Scan incoming directory
|
||||
files, err := filepath.Glob(filepath.Join(g.cfg.IncomingDir, "*.txt"))
|
||||
if err != nil || len(files) == 0 {
|
||||
return fmt.Errorf("no incoming PR files found")
|
||||
}
|
||||
|
||||
// 2. Read and concatenate all files
|
||||
var content strings.Builder
|
||||
for _, file := range files {
|
||||
data, err := os.ReadFile(file)
|
||||
if err == nil {
|
||||
content.WriteString(string(data))
|
||||
content.WriteString("\n")
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Generate version entry
|
||||
entry := fmt.Sprintf("\n## %s (%s)\n\n%s",
|
||||
version, time.Now().Format("2006-01-02"), content.String())
|
||||
|
||||
// 4. Update CHANGELOG.md
|
||||
err = g.insertVersionAtTop(entry)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update CHANGELOG.md: %w", err)
|
||||
}
|
||||
|
||||
// 5. Update database
|
||||
err = g.cache.SaveVersionEntry(version, content.String())
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save to database: %w", err)
|
||||
}
|
||||
|
||||
// 6. Cleanup incoming files
|
||||
for _, file := range files {
|
||||
os.Remove(file)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
```
|
||||
|
||||
## Workflow Integration
|
||||
|
||||
### GitHub Actions Modification
|
||||
|
||||
Update `.github/workflows/update-version-and-create-tag.yml`.
|
||||
|
||||
```yaml
|
||||
- name: Generate Changelog Entry
|
||||
run: |
|
||||
# Process all incoming PR entries
|
||||
./cmd/generate_changelog/generate_changelog --process-prs
|
||||
|
||||
# The tool will make the needed changes in the CHANGELOG.md,
|
||||
# and the changelog.db, and will remove the PR#.txt file(s)
|
||||
# In effect, doing the following:
|
||||
# 1. Generate the new CHANGELOG (and store the entry in the changelog.db)
|
||||
# 2. git add CHANGELOG.md
|
||||
# 3. git add ./cmd/generate_changelog/changelog.db
|
||||
# 4. git rm -rf ./cmd/generate_changelog/incoming/
|
||||
#
|
||||
```
|
||||
|
||||
### Developer Instructions
|
||||
|
||||
1. **During Development**:
|
||||
|
||||
```bash
|
||||
# After PR is ready for review (commit locally only)
|
||||
generate_changelog --incoming-pr 1672 --ai-summarize
|
||||
|
||||
# Or to automatically push to remote
|
||||
generate_changelog --incoming-pr 1672 --ai-summarize --push
|
||||
```
|
||||
|
||||
2. **Validation**:
|
||||
- Check that `incoming/1672.txt` was created
|
||||
- Verify auto-commit occurred
|
||||
- Confirm file is included in PR
|
||||
- Scan the file and make any changes you need to the auto-generated summary
|
||||
|
||||
## Benefits
|
||||
|
||||
### For Developers
|
||||
|
||||
- **Automated changelog entries** - no manual CHANGELOG.md editing
|
||||
- **AI-enhanced summaries** - professional, consistent formatting
|
||||
- **Early visibility** - changelog content visible during PR review
|
||||
- **Reduced merge conflicts** - no multiple PRs editing CHANGELOG.md
|
||||
|
||||
### For Project Maintainers
|
||||
|
||||
- **Consistent formatting** - all entries follow same structure
|
||||
- **Complete coverage** - no missed changelog entries
|
||||
- **Automated releases** - seamless integration with version bumps
|
||||
- **Historical accuracy** - each PR's contribution properly documented
|
||||
|
||||
### For CI/CD
|
||||
|
||||
- **Deterministic process** - reliable, repeatable changelog generation
|
||||
- **Performance optimized** - leverages existing caching and AI systems
|
||||
- **Error resilience** - validates PR states before processing
|
||||
- **Clean integration** - minimal changes to existing workflows
|
||||
|
||||
## Implementation Strategy
|
||||
|
||||
### Phase 1: Implement Developer Tooling
|
||||
|
||||
- [x] Add new command line flags and configuration
|
||||
- [x] Implement `--incoming-pr` functionality
|
||||
- [x] Add validation for PR states and git status
|
||||
- [x] Create auto-commit logic
|
||||
|
||||
### Phase 2: Integration (CI/CD) Readiness
|
||||
|
||||
- [x] Implement `--process-prs` functionality
|
||||
- [x] Add CHANGELOG.md insertion logic
|
||||
- [x] Update database storage for version entries
|
||||
|
||||
### Phase 3: Deployment
|
||||
|
||||
- [x] Update GitHub Actions workflow
|
||||
- [x] Create developer documentation in ./docs/ directory
|
||||
- [x] Test full end-to-end workflow (the PR that includes these modifications can be its first production test)
|
||||
|
||||
### Phase 4: Adoption
|
||||
|
||||
- [ ] Train development team - Consider creating a full tutorial blog post/page to fully walk developers through the process.
|
||||
- [ ] Monitor first few releases
|
||||
- [ ] Gather feedback and iterate
|
||||
- [ ] Document lessons learned
|
||||
|
||||
## Error Handling
|
||||
|
||||
### PR Validation Failures
|
||||
|
||||
- **Closed/Merged PR**: Error with suggestion to check PR status
|
||||
- **Non-mergeable PR**: Error with instruction to resolve conflicts
|
||||
- **Missing PR**: Error with verification of PR number
|
||||
|
||||
### File System Issues
|
||||
|
||||
- **Permission errors**: Clear error with directory permission requirements
|
||||
- **Disk space**: Graceful handling with cleanup suggestions
|
||||
- **Network failures**: Retry logic with exponential backoff
|
||||
|
||||
### Git Operations
|
||||
|
||||
- **Commit failures**: Check for dirty working directory
|
||||
- **Push failures**: Handle authentication and remote issues
|
||||
- **Merge conflicts**: Clear instructions for manual resolution
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
### Advanced Features
|
||||
|
||||
- **Custom categorization** - group changes by type (feat/fix/docs)
|
||||
- **Breaking change detection** - special handling for BREAKING CHANGE commits
|
||||
- **Release notes generation** - enhanced formatting for GitHub releases (our release pages are pretty bare)
|
||||
|
||||
## Conclusion
|
||||
|
||||
This automated changelog system builds upon the robust foundation of the existing `generate_changelog` tool while providing a seamless developer experience and reliable CI/CD integration. By pre-processing PR entries during development and aggregating them during releases, we achieve both accuracy and automation without sacrificing quality or developer productivity.
|
||||
|
||||
The phased approach ensures smooth adoption while the extensive error handling and validation provide confidence in production deployment. The system's design leverages existing infrastructure and patterns, making it a natural evolution of the current changelog generation capabilities.
|
||||
195
docs/Automated-Changelog-Usage.md
Normal file
195
docs/Automated-Changelog-Usage.md
Normal file
@@ -0,0 +1,195 @@
|
||||
# Automated Changelog System - Developer Guide
|
||||
|
||||
This guide explains how to use the new automated changelog system for the Fabric project.
|
||||
|
||||
## Overview
|
||||
|
||||
The automated changelog system allows developers to pre-process their PR changelog entries during development, which are then automatically aggregated during the release process. This eliminates manual CHANGELOG.md editing and reduces merge conflicts.
|
||||
|
||||
## Developer Workflow
|
||||
|
||||
### Step 1: Create Your Feature Branch and PR
|
||||
|
||||
Work on your feature as usual and create a pull request.
|
||||
|
||||
### Step 2: Generate Changelog Entry
|
||||
|
||||
Once your PR is ready for review, generate a changelog entry:
|
||||
|
||||
```bash
|
||||
cd cmd/generate_changelog
|
||||
go build -o generate_changelog .
|
||||
./generate_changelog --incoming-pr YOUR_PR_NUMBER
|
||||
```
|
||||
|
||||
For example, if your PR number is 1672:
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672
|
||||
```
|
||||
|
||||
### Step 3: Validation
|
||||
|
||||
The tool will validate:
|
||||
|
||||
- ✅ PR exists and is open
|
||||
- ✅ PR is mergeable (no conflicts)
|
||||
- ✅ Your working directory is clean
|
||||
|
||||
If any validation fails, fix the issues and try again.
|
||||
|
||||
### Step 4: Review Generated Entry
|
||||
|
||||
The tool will:
|
||||
|
||||
1. Create `./cmd/generate_changelog/incoming/1672.txt`
|
||||
2. Generate an AI-enhanced summary (if `--ai-summarize` is enabled)
|
||||
3. Auto-commit the file to your branch (use `--push` to also push to remote)
|
||||
|
||||
Review the generated file and edit if needed:
|
||||
|
||||
```bash
|
||||
cat ./cmd/generate_changelog/incoming/1672.txt
|
||||
```
|
||||
|
||||
### Step 5: Include in PR
|
||||
|
||||
The incoming changelog entry is now part of your PR and will be reviewed along with your code changes.
|
||||
|
||||
## Example Generated Entry
|
||||
|
||||
```markdown
|
||||
### PR [#1672](https://github.com/danielmiessler/fabric/pull/1672) by [ksylvan](https://github.com/ksylvan): Changelog Generator Enhancement
|
||||
|
||||
- Added automated CI/CD integration for changelog generation
|
||||
- Implemented pre-processing of PR entries during development
|
||||
- Enhanced caching system for better performance
|
||||
- Added validation for mergeable PR states
|
||||
```
|
||||
|
||||
## Command Options
|
||||
|
||||
### `--incoming-pr`
|
||||
|
||||
Pre-process a specific PR for changelog generation.
|
||||
|
||||
**Usage**: `./generate_changelog --incoming-pr PR_NUMBER`
|
||||
|
||||
**Requirements**:
|
||||
|
||||
- PR must be open
|
||||
- PR must be mergeable (no conflicts)
|
||||
- Working directory must be clean (no uncommitted changes)
|
||||
- GitHub token must be available (`GITHUB_TOKEN` env var or `--token` flag)
|
||||
|
||||
**Mutual Exclusivity**: Cannot be used with `--process-prs` flag
|
||||
|
||||
### `--incoming-dir`
|
||||
|
||||
Specify custom directory for incoming PR files (default: `./cmd/generate_changelog/incoming`).
|
||||
|
||||
**Usage**: `./generate_changelog --incoming-pr 1672 --incoming-dir ./custom/path`
|
||||
|
||||
### `--process-prs`
|
||||
|
||||
Process all incoming PR files for release aggregation. Used by CI/CD during release creation.
|
||||
|
||||
**Usage**: `./generate_changelog --process-prs {new_version_string}`
|
||||
|
||||
**Mutual Exclusivity**: Cannot be used with `--incoming-pr` flag
|
||||
|
||||
### `--ai-summarize`
|
||||
|
||||
Enable AI-enhanced summaries using Fabric integration.
|
||||
|
||||
**Usage**: `./generate_changelog --incoming-pr 1672 --ai-summarize`
|
||||
|
||||
### `--push`
|
||||
|
||||
Enable automatic git push after creating an incoming entry. By default, the commit is created locally but not pushed to the remote repository.
|
||||
|
||||
**Usage**: `./generate_changelog --incoming-pr 1672 --push`
|
||||
|
||||
**Note**: When using `--push`, ensure you have proper authentication configured (SSH keys or GITHUB_TOKEN environment variable).
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### "PR is not open"
|
||||
|
||||
Your PR has been closed or merged. Only open PRs can be processed.
|
||||
|
||||
### "PR is not mergeable"
|
||||
|
||||
Your PR has merge conflicts or other issues preventing it from being merged. Resolve conflicts and ensure the PR is in a mergeable state.
|
||||
|
||||
### "Working directory is not clean"
|
||||
|
||||
You have uncommitted changes. Commit or stash them before running the tool.
|
||||
|
||||
### "Failed to fetch PR"
|
||||
|
||||
Check your GitHub token and network connection. Ensure the PR number exists.
|
||||
|
||||
## CI/CD Integration
|
||||
|
||||
The system automatically processes all incoming PR files during the release workflow. No manual intervention is required.
|
||||
|
||||
When a release is created:
|
||||
|
||||
1. All `incoming/*.txt` files are aggregated using `--process-prs`
|
||||
2. Version is detected from `version.nix` or latest git tag
|
||||
3. A new version entry is created in CHANGELOG.md
|
||||
4. Incoming files are cleaned up (removed)
|
||||
5. Changes are staged for the release commit (CHANGELOG.md and cache file)
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Run early**: Generate your changelog entry as soon as your PR is ready for review
|
||||
2. **Review content**: Always review the generated entry and edit if necessary
|
||||
3. **Keep it updated**: If you make significant changes to your PR, regenerate the entry
|
||||
4. **Use AI summaries**: Enable `--ai-summarize` for more professional, consistent formatting
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Custom GitHub Token
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672 --token YOUR_GITHUB_TOKEN
|
||||
```
|
||||
|
||||
### Custom Repository Path
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672 --repo /path/to/repo
|
||||
```
|
||||
|
||||
### Disable Caching
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672 --no-cache
|
||||
```
|
||||
|
||||
### Enable Auto-Push
|
||||
|
||||
```bash
|
||||
./generate_changelog --incoming-pr 1672 --push
|
||||
```
|
||||
|
||||
This creates the commit locally and pushes it to the remote repository. By default, commits are only created locally, allowing you to review changes before pushing manually.
|
||||
|
||||
**Authentication**: The tool automatically detects GitHub repositories and uses the GITHUB_TOKEN environment variable for authentication when pushing. For SSH repositories, ensure your SSH keys are properly configured.
|
||||
|
||||
## Integration with Existing Workflow
|
||||
|
||||
This system is fully backward compatible. The existing changelog generation continues to work unchanged. The new features are opt-in and only activated when using the new flags.
|
||||
|
||||
## Support
|
||||
|
||||
If you encounter issues:
|
||||
|
||||
1. Check this documentation
|
||||
2. Verify your GitHub token has appropriate permissions
|
||||
3. Ensure your PR meets the validation requirements
|
||||
4. Check the tool's help: `./generate_changelog --help`
|
||||
|
||||
For bugs or feature requests, please create an issue in the repository.
|
||||
@@ -41,8 +41,8 @@ func handleChatProcessing(currentFlags *Flags, registry *core.PluginRegistry, me
|
||||
|
||||
result := session.GetLastMessage().Content
|
||||
|
||||
if !currentFlags.Stream {
|
||||
// print the result if it was not streamed already
|
||||
if !currentFlags.Stream || currentFlags.SuppressThink {
|
||||
// print the result if it was not streamed already or suppress-think disabled streaming output
|
||||
fmt.Println(result)
|
||||
}
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"strings"
|
||||
|
||||
"github.com/danielmiessler/fabric/internal/core"
|
||||
"github.com/danielmiessler/fabric/internal/plugins/ai/openai"
|
||||
"github.com/danielmiessler/fabric/internal/tools/converter"
|
||||
"github.com/danielmiessler/fabric/internal/tools/youtube"
|
||||
)
|
||||
@@ -18,6 +19,12 @@ func Cli(version string) (err error) {
|
||||
return
|
||||
}
|
||||
|
||||
if currentFlags.Setup {
|
||||
if err = ensureEnvFile(); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if currentFlags.Version {
|
||||
fmt.Println(version)
|
||||
return
|
||||
@@ -36,6 +43,11 @@ func Cli(version string) (err error) {
|
||||
}
|
||||
}
|
||||
|
||||
// Configure OpenAI Responses API setting based on CLI flag
|
||||
if registry != nil {
|
||||
configureOpenAIResponsesAPI(registry, currentFlags.DisableResponsesAPI)
|
||||
}
|
||||
|
||||
// Handle setup and server commands
|
||||
var handled bool
|
||||
if handled, err = handleSetupAndServerCommands(currentFlags, registry, version); err != nil || handled {
|
||||
@@ -142,3 +154,21 @@ func WriteOutput(message string, outputFile string) (err error) {
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// configureOpenAIResponsesAPI configures the OpenAI client's Responses API setting based on the CLI flag
|
||||
func configureOpenAIResponsesAPI(registry *core.PluginRegistry, disableResponsesAPI bool) {
|
||||
// Find the OpenAI vendor in the registry
|
||||
if registry != nil && registry.VendorsAll != nil {
|
||||
for _, vendor := range registry.VendorsAll.Vendors {
|
||||
if vendor.GetName() == "OpenAI" {
|
||||
// Type assertion to access the OpenAI-specific method
|
||||
if openaiClient, ok := vendor.(*openai.Client); ok {
|
||||
// Invert the disable flag to get the enable flag
|
||||
enableResponsesAPI := !disableResponsesAPI
|
||||
openaiClient.SetResponsesAPIEnabled(enableResponsesAPI)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,4 +18,13 @@ temperature: 0.88
|
||||
seed: 42
|
||||
|
||||
stream: true
|
||||
raw: false
|
||||
raw: false
|
||||
|
||||
# suppress vendor thinking output
|
||||
suppressThink: false
|
||||
thinkStartTag: "<think>"
|
||||
thinkEndTag: "</think>"
|
||||
|
||||
# OpenAI Responses API settings
|
||||
# (use this for llama-server or other OpenAI-compatible local servers)
|
||||
disableResponsesAPI: true
|
||||
|
||||
@@ -83,6 +83,10 @@ type Flags struct {
|
||||
ImageQuality string `long:"image-quality" description:"Image quality: low, medium, high, auto (default: auto)"`
|
||||
ImageCompression int `long:"image-compression" description:"Compression level 0-100 for JPEG/WebP formats (default: not set)"`
|
||||
ImageBackground string `long:"image-background" description:"Background type: opaque, transparent (default: opaque, only for PNG/WebP)"`
|
||||
SuppressThink bool `long:"suppress-think" yaml:"suppressThink" description:"Suppress text enclosed in thinking tags"`
|
||||
ThinkStartTag string `long:"think-start-tag" yaml:"thinkStartTag" description:"Start tag for thinking sections" default:"<think>"`
|
||||
ThinkEndTag string `long:"think-end-tag" yaml:"thinkEndTag" description:"End tag for thinking sections" default:"</think>"`
|
||||
DisableResponsesAPI bool `long:"disable-responses-api" yaml:"disableResponsesAPI" description:"Disable OpenAI Responses API (default: false)"`
|
||||
}
|
||||
|
||||
var debug = false
|
||||
@@ -99,26 +103,34 @@ func Init() (ret *Flags, err error) {
|
||||
usedFlags := make(map[string]bool)
|
||||
yamlArgsScan := os.Args[1:]
|
||||
|
||||
// Get list of fields that have yaml tags, could be in yaml config
|
||||
yamlFields := make(map[string]bool)
|
||||
// Create mapping from flag names (both short and long) to yaml tag names
|
||||
flagToYamlTag := make(map[string]string)
|
||||
t := reflect.TypeOf(Flags{})
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
if yamlTag := t.Field(i).Tag.Get("yaml"); yamlTag != "" {
|
||||
yamlFields[yamlTag] = true
|
||||
//Debugf("Found yaml-configured field: %s\n", yamlTag)
|
||||
field := t.Field(i)
|
||||
yamlTag := field.Tag.Get("yaml")
|
||||
if yamlTag != "" {
|
||||
longTag := field.Tag.Get("long")
|
||||
shortTag := field.Tag.Get("short")
|
||||
if longTag != "" {
|
||||
flagToYamlTag[longTag] = yamlTag
|
||||
Debugf("Mapped long flag %s to yaml tag %s\n", longTag, yamlTag)
|
||||
}
|
||||
if shortTag != "" {
|
||||
flagToYamlTag[shortTag] = yamlTag
|
||||
Debugf("Mapped short flag %s to yaml tag %s\n", shortTag, yamlTag)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Scan args for that are provided by cli and might be in yaml
|
||||
for _, arg := range yamlArgsScan {
|
||||
if strings.HasPrefix(arg, "--") {
|
||||
flag := strings.TrimPrefix(arg, "--")
|
||||
if i := strings.Index(flag, "="); i > 0 {
|
||||
flag = flag[:i]
|
||||
}
|
||||
if yamlFields[flag] {
|
||||
usedFlags[flag] = true
|
||||
Debugf("CLI flag used: %s\n", flag)
|
||||
flag := extractFlag(arg)
|
||||
|
||||
if flag != "" {
|
||||
if yamlTag, exists := flagToYamlTag[flag]; exists {
|
||||
usedFlags[yamlTag] = true
|
||||
Debugf("CLI flag used: %s (yaml: %s)\n", flag, yamlTag)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -131,6 +143,16 @@ func Init() (ret *Flags, err error) {
|
||||
return
|
||||
}
|
||||
|
||||
// Check to see if a ~/.config/fabric/config.yaml config file exists (only when user didn't specify a config)
|
||||
if ret.Config == "" {
|
||||
// Default to ~/.config/fabric/config.yaml if no config specified
|
||||
if defaultConfigPath, err := util.GetDefaultConfigPath(); err == nil && defaultConfigPath != "" {
|
||||
ret.Config = defaultConfigPath
|
||||
} else if err != nil {
|
||||
Debugf("Could not determine default config path: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
// If config specified, load and apply YAML for unused flags
|
||||
if ret.Config != "" {
|
||||
var yamlFlags *Flags
|
||||
@@ -165,7 +187,6 @@ func Init() (ret *Flags, err error) {
|
||||
}
|
||||
}
|
||||
|
||||
// Handle stdin and messages
|
||||
// Handle stdin and messages
|
||||
info, _ := os.Stdin.Stat()
|
||||
pipedToStdin := (info.Mode() & os.ModeCharDevice) == 0
|
||||
@@ -185,6 +206,22 @@ func Init() (ret *Flags, err error) {
|
||||
return
|
||||
}
|
||||
|
||||
func extractFlag(arg string) string {
|
||||
var flag string
|
||||
if strings.HasPrefix(arg, "--") {
|
||||
flag = strings.TrimPrefix(arg, "--")
|
||||
if i := strings.Index(flag, "="); i > 0 {
|
||||
flag = flag[:i]
|
||||
}
|
||||
} else if strings.HasPrefix(arg, "-") && len(arg) > 1 {
|
||||
flag = strings.TrimPrefix(arg, "-")
|
||||
if i := strings.Index(flag, "="); i > 0 {
|
||||
flag = flag[:i]
|
||||
}
|
||||
}
|
||||
return flag
|
||||
}
|
||||
|
||||
func assignWithConversion(targetField, sourceField reflect.Value) error {
|
||||
// Handle string source values
|
||||
if sourceField.Kind() == reflect.String {
|
||||
@@ -376,6 +413,15 @@ func (o *Flags) BuildChatOptions() (ret *domain.ChatOptions, err error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
startTag := o.ThinkStartTag
|
||||
if startTag == "" {
|
||||
startTag = "<think>"
|
||||
}
|
||||
endTag := o.ThinkEndTag
|
||||
if endTag == "" {
|
||||
endTag = "</think>"
|
||||
}
|
||||
|
||||
ret = &domain.ChatOptions{
|
||||
Model: o.Model,
|
||||
Temperature: o.Temperature,
|
||||
@@ -392,6 +438,9 @@ func (o *Flags) BuildChatOptions() (ret *domain.ChatOptions, err error) {
|
||||
ImageQuality: o.ImageQuality,
|
||||
ImageCompression: o.ImageCompression,
|
||||
ImageBackground: o.ImageBackground,
|
||||
SuppressThink: o.SuppressThink,
|
||||
ThinkStartTag: startTag,
|
||||
ThinkEndTag: endTag,
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -64,6 +64,9 @@ func TestBuildChatOptions(t *testing.T) {
|
||||
FrequencyPenalty: 0.2,
|
||||
Raw: false,
|
||||
Seed: 1,
|
||||
SuppressThink: false,
|
||||
ThinkStartTag: "<think>",
|
||||
ThinkEndTag: "</think>",
|
||||
}
|
||||
options, err := flags.BuildChatOptions()
|
||||
assert.NoError(t, err)
|
||||
@@ -85,12 +88,29 @@ func TestBuildChatOptionsDefaultSeed(t *testing.T) {
|
||||
FrequencyPenalty: 0.2,
|
||||
Raw: false,
|
||||
Seed: 0,
|
||||
SuppressThink: false,
|
||||
ThinkStartTag: "<think>",
|
||||
ThinkEndTag: "</think>",
|
||||
}
|
||||
options, err := flags.BuildChatOptions()
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, expectedOptions, options)
|
||||
}
|
||||
|
||||
func TestBuildChatOptionsSuppressThink(t *testing.T) {
|
||||
flags := &Flags{
|
||||
SuppressThink: true,
|
||||
ThinkStartTag: "[[t]]",
|
||||
ThinkEndTag: "[[/t]]",
|
||||
}
|
||||
|
||||
options, err := flags.BuildChatOptions()
|
||||
assert.NoError(t, err)
|
||||
assert.True(t, options.SuppressThink)
|
||||
assert.Equal(t, "[[t]]", options.ThinkStartTag)
|
||||
assert.Equal(t, "[[/t]]", options.ThinkEndTag)
|
||||
}
|
||||
|
||||
func TestInitWithYAMLConfig(t *testing.T) {
|
||||
// Create a temporary YAML config file
|
||||
configContent := `
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
@@ -8,6 +9,9 @@ import (
|
||||
"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
|
||||
)
|
||||
|
||||
const ConfigDirPerms os.FileMode = 0755
|
||||
const EnvFilePerms os.FileMode = 0644
|
||||
|
||||
// initializeFabric initializes the fabric database and plugin registry
|
||||
func initializeFabric() (registry *core.PluginRegistry, err error) {
|
||||
var homedir string
|
||||
@@ -26,3 +30,27 @@ func initializeFabric() (registry *core.PluginRegistry, err error) {
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// ensureEnvFile checks for the default ~/.config/fabric/.env file and creates it
|
||||
// along with the parent directory if it does not exist.
|
||||
func ensureEnvFile() (err error) {
|
||||
var homedir string
|
||||
if homedir, err = os.UserHomeDir(); err != nil {
|
||||
return fmt.Errorf("could not determine user home directory: %w", err)
|
||||
}
|
||||
configDir := filepath.Join(homedir, ".config", "fabric")
|
||||
envPath := filepath.Join(configDir, ".env")
|
||||
|
||||
if _, statErr := os.Stat(envPath); statErr != nil {
|
||||
if !os.IsNotExist(statErr) {
|
||||
return fmt.Errorf("could not stat .env file: %w", statErr)
|
||||
}
|
||||
if err = os.MkdirAll(configDir, ConfigDirPerms); err != nil {
|
||||
return fmt.Errorf("could not create config directory: %w", err)
|
||||
}
|
||||
if err = os.WriteFile(envPath, []byte{}, EnvFilePerms); err != nil {
|
||||
return fmt.Errorf("could not create .env file: %w", err)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -79,7 +79,9 @@ func (o *Chatter) Send(request *domain.ChatRequest, opts *domain.ChatOptions) (s
|
||||
|
||||
for response := range responseChan {
|
||||
message += response
|
||||
fmt.Print(response)
|
||||
if !opts.SuppressThink {
|
||||
fmt.Print(response)
|
||||
}
|
||||
}
|
||||
|
||||
// Wait for goroutine to finish
|
||||
@@ -101,6 +103,10 @@ func (o *Chatter) Send(request *domain.ChatRequest, opts *domain.ChatOptions) (s
|
||||
}
|
||||
}
|
||||
|
||||
if opts.SuppressThink && !o.DryRun {
|
||||
message = domain.StripThinkBlocks(message, opts.ThinkStartTag, opts.ThinkEndTag)
|
||||
}
|
||||
|
||||
if message == "" {
|
||||
session = nil
|
||||
err = fmt.Errorf("empty response")
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
type mockVendor struct {
|
||||
sendStreamError error
|
||||
streamChunks []string
|
||||
sendFunc func(context.Context, []*chat.ChatCompletionMessage, *domain.ChatOptions) (string, error)
|
||||
}
|
||||
|
||||
func (m *mockVendor) GetName() string {
|
||||
@@ -57,6 +58,9 @@ func (m *mockVendor) SendStream(messages []*chat.ChatCompletionMessage, opts *do
|
||||
}
|
||||
|
||||
func (m *mockVendor) Send(ctx context.Context, messages []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (string, error) {
|
||||
if m.sendFunc != nil {
|
||||
return m.sendFunc(ctx, messages, opts)
|
||||
}
|
||||
return "test response", nil
|
||||
}
|
||||
|
||||
@@ -64,6 +68,51 @@ func (m *mockVendor) NeedsRawMode(modelName string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func TestChatter_Send_SuppressThink(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
db := fsdb.NewDb(tempDir)
|
||||
|
||||
mockVendor := &mockVendor{}
|
||||
|
||||
chatter := &Chatter{
|
||||
db: db,
|
||||
Stream: false,
|
||||
vendor: mockVendor,
|
||||
model: "test-model",
|
||||
}
|
||||
|
||||
request := &domain.ChatRequest{
|
||||
Message: &chat.ChatCompletionMessage{
|
||||
Role: chat.ChatMessageRoleUser,
|
||||
Content: "test",
|
||||
},
|
||||
}
|
||||
|
||||
opts := &domain.ChatOptions{
|
||||
Model: "test-model",
|
||||
SuppressThink: true,
|
||||
ThinkStartTag: "<think>",
|
||||
ThinkEndTag: "</think>",
|
||||
}
|
||||
|
||||
// custom send function returning a message with think tags
|
||||
mockVendor.sendFunc = func(ctx context.Context, msgs []*chat.ChatCompletionMessage, o *domain.ChatOptions) (string, error) {
|
||||
return "<think>hidden</think> visible", nil
|
||||
}
|
||||
|
||||
session, err := chatter.Send(request, opts)
|
||||
if err != nil {
|
||||
t.Fatalf("Send returned error: %v", err)
|
||||
}
|
||||
if session == nil {
|
||||
t.Fatal("expected session")
|
||||
}
|
||||
last := session.GetLastMessage()
|
||||
if last.Content != "visible" {
|
||||
t.Errorf("expected filtered content 'visible', got %q", last.Content)
|
||||
}
|
||||
}
|
||||
|
||||
func TestChatter_Send_StreamingErrorPropagation(t *testing.T) {
|
||||
// Create a temporary database for testing
|
||||
tempDir := t.TempDir()
|
||||
|
||||
@@ -33,6 +33,9 @@ type ChatOptions struct {
|
||||
ImageQuality string
|
||||
ImageCompression int
|
||||
ImageBackground string
|
||||
SuppressThink bool
|
||||
ThinkStartTag string
|
||||
ThinkEndTag string
|
||||
}
|
||||
|
||||
// NormalizeMessages remove empty messages and ensure messages order user-assist-user
|
||||
|
||||
32
internal/domain/think.go
Normal file
32
internal/domain/think.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package domain
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// StripThinkBlocks removes any content between the provided start and end tags
|
||||
// from the input string. Whitespace following the end tag is also removed so
|
||||
// output resumes at the next non-empty line.
|
||||
var (
|
||||
regexCache = make(map[string]*regexp.Regexp)
|
||||
cacheMutex sync.Mutex
|
||||
)
|
||||
|
||||
func StripThinkBlocks(input, startTag, endTag string) string {
|
||||
if startTag == "" || endTag == "" {
|
||||
return input
|
||||
}
|
||||
|
||||
cacheKey := startTag + "|" + endTag
|
||||
cacheMutex.Lock()
|
||||
re, exists := regexCache[cacheKey]
|
||||
if !exists {
|
||||
pattern := "(?s)" + regexp.QuoteMeta(startTag) + ".*?" + regexp.QuoteMeta(endTag) + "\\s*"
|
||||
re = regexp.MustCompile(pattern)
|
||||
regexCache[cacheKey] = re
|
||||
}
|
||||
cacheMutex.Unlock()
|
||||
|
||||
return re.ReplaceAllString(input, "")
|
||||
}
|
||||
19
internal/domain/think_test.go
Normal file
19
internal/domain/think_test.go
Normal file
@@ -0,0 +1,19 @@
|
||||
package domain

import "testing"

func TestStripThinkBlocks(t *testing.T) {
	input := "<think>internal</think>\n\nresult"
	got := StripThinkBlocks(input, "<think>", "</think>")
	if got != "result" {
		t.Errorf("expected %q, got %q", "result", got)
	}
}

func TestStripThinkBlocksCustomTags(t *testing.T) {
	input := "[[t]]hidden[[/t]] visible"
	got := StripThinkBlocks(input, "[[t]]", "[[/t]]")
	if got != "visible" {
		t.Errorf("expected %q, got %q", "visible", got)
	}
}
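One behavior the tests above do not cover, but which follows from the `(?s)` flag and the non-greedy `.*?`: multiple think blocks in one response are each removed independently. A hypothetical extra case, for illustration only:

func TestStripThinkBlocksMultiple(t *testing.T) {
	// Each block is matched separately; trailing whitespace after a tag is trimmed.
	got := StripThinkBlocks("<think>a</think>one <think>b</think>two", "<think>", "</think>")
	if got != "one two" {
		t.Errorf("expected %q, got %q", "one two", got)
	}
}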
@@ -12,6 +12,8 @@ import (
	"github.com/danielmiessler/fabric/internal/plugins"
)

+const DryRunResponse = "Dry run: Fake response sent by DryRun plugin\n"

type Client struct {
	*plugins.PluginBase
}
@@ -85,27 +87,37 @@ func (c *Client) formatOptions(opts *domain.ChatOptions) string {
	if opts.ImageFile != "" {
		builder.WriteString(fmt.Sprintf("ImageFile: %s\n", opts.ImageFile))
	}
+	if opts.SuppressThink {
+		builder.WriteString("SuppressThink: enabled\n")
+		builder.WriteString(fmt.Sprintf("Thinking Start Tag: %s\n", opts.ThinkStartTag))
+		builder.WriteString(fmt.Sprintf("Thinking End Tag: %s\n", opts.ThinkEndTag))
+	}

	return builder.String()
}

-func (c *Client) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) error {
+func (c *Client) constructRequest(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) string {
	var builder strings.Builder
	builder.WriteString("Dry run: Would send the following request:\n\n")
	builder.WriteString(c.formatMessages(msgs))
	builder.WriteString(c.formatOptions(opts))

-	channel <- builder.String()
-	close(channel)
+	return builder.String()
+}
+
+func (c *Client) SendStream(msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions, channel chan string) error {
+	defer close(channel)
+	request := c.constructRequest(msgs, opts)
+	channel <- request
+	channel <- "\n"
+	channel <- DryRunResponse
	return nil
}

func (c *Client) Send(_ context.Context, msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (string, error) {
-	fmt.Println("Dry run: Would send the following request:")
-	fmt.Print(c.formatMessages(msgs))
-	fmt.Print(c.formatOptions(opts))
+	request := c.constructRequest(msgs, opts)

-	return "", nil
+	return request + "\n" + DryRunResponse, nil
}

func (c *Client) Setup() error {

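This refactor makes constructRequest the single source of truth, so streaming and non-streaming dry runs emit identical text, and Send now records the dry-run output instead of returning an empty string. A minimal sketch of that invariant, with a dryRunner interface declared here purely for illustration (the concrete Client satisfies it under these assumptions):

type dryRunner interface {
	Send(context.Context, []*chat.ChatCompletionMessage, *domain.ChatOptions) (string, error)
	SendStream([]*chat.ChatCompletionMessage, *domain.ChatOptions, chan string) error
}

func dryRunOutputsMatch(ctx context.Context, c dryRunner, msgs []*chat.ChatCompletionMessage, opts *domain.ChatOptions) (bool, error) {
	ch := make(chan string)
	done := make(chan struct{})
	var streamed strings.Builder
	go func() {
		for chunk := range ch { // SendStream closes ch via its deferred close
			streamed.WriteString(chunk)
		}
		close(done)
	}()
	if err := c.SendStream(msgs, opts, ch); err != nil {
		return false, err
	}
	<-done
	sent, err := c.Send(ctx, msgs, opts)
	if err != nil {
		return false, err
	}
	// Both paths concatenate to request + "\n" + DryRunResponse.
	return streamed.String() == sent, nil
}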
@@ -13,6 +13,7 @@ func NewClient() (ret *Client) {
	ret = &Client{}
	ret.Client = openai.NewClientCompatibleNoSetupQuestions("Exolab", ret.configure)

	ret.ApiKey = ret.AddSetupQuestion("API Key", false)
	ret.ApiBaseURL = ret.AddSetupQuestion("API Base URL", true)
	ret.ApiBaseURL.Value = "http://localhost:52415"

@@ -160,6 +160,7 @@ func (o *Client) NeedsRawMode(modelName string) bool {
	ollamaPrefixes := []string{
		"llama3",
		"llama2",
		"mistral",
	}
	for _, prefix := range ollamaPrefixes {
		if strings.HasPrefix(modelName, prefix) {

@@ -66,6 +66,11 @@ type Client struct {
	ImplementsResponses bool // Whether this provider supports the Responses API
}

// SetResponsesAPIEnabled configures whether to use the Responses API
func (o *Client) SetResponsesAPIEnabled(enabled bool) {
	o.ImplementsResponses = enabled
}

func (o *Client) configure() (ret error) {
	opts := []option.RequestOption{option.WithAPIKey(o.ApiKey.Value)}
	if o.ApiBaseURL.Value != "" {

internal/tools/youtube/timestamp_test.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package youtube

import (
	"testing"
)

func TestParseTimestampToSeconds(t *testing.T) {
	tests := []struct {
		timestamp string
		expected  int
		shouldErr bool
	}{
		{"00:30", 30, false},
		{"01:30", 90, false},
		{"01:05:30", 3930, false}, // 1 hour 5 minutes 30 seconds
		{"10:00", 600, false},
		{"invalid", 0, true},
		{"1:2:3:4", 0, true}, // too many parts
	}

	for _, test := range tests {
		result, err := parseTimestampToSeconds(test.timestamp)

		if test.shouldErr {
			if err == nil {
				t.Errorf("Expected error for timestamp %s, but got none", test.timestamp)
			}
		} else {
			if err != nil {
				t.Errorf("Unexpected error for timestamp %s: %v", test.timestamp, err)
			}
			if result != test.expected {
				t.Errorf("For timestamp %s, expected %d seconds, got %d", test.timestamp, test.expected, result)
			}
		}
	}
}

func TestShouldIncludeRepeat(t *testing.T) {
	tests := []struct {
		lastTimestamp    string
		currentTimestamp string
		expected         bool
		description      string
	}{
		{"00:30", "01:30", true, "60 second gap should allow repeat"},
		{"00:30", "00:45", true, "15 second gap should allow repeat"},
		{"01:00", "01:10", true, "10 second gap should allow repeat (boundary case)"},
		{"01:00", "01:09", false, "9 second gap should not allow repeat"},
		{"00:30", "00:35", false, "5 second gap should not allow repeat"},
		{"invalid", "01:30", true, "invalid timestamp should err on side of inclusion"},
		{"01:30", "invalid", true, "invalid timestamp should err on side of inclusion"},
	}

	for _, test := range tests {
		result := shouldIncludeRepeat(test.lastTimestamp, test.currentTimestamp)
		if result != test.expected {
			t.Errorf("%s: expected %v, got %v", test.description, test.expected, result)
		}
	}
}
@@ -29,6 +29,15 @@ import (
	"google.golang.org/api/youtube/v3"
)

var timestampRegex *regexp.Regexp

const TimeGapForRepeats = 10 // seconds

func init() {
	// Match timestamps like "00:00:01.234" or just numbers or sequence numbers
	timestampRegex = regexp.MustCompile(`^\d+$|^\d{1,2}:\d{2}(:\d{2})?(\.\d{3})?$`)
}

func NewYouTube() (ret *YouTube) {

	label := "YouTube"
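To make the pattern concrete, here is a standalone sketch (not part of the diff) exercising the same expression:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern the diff compiles once in init().
	re := regexp.MustCompile(`^\d+$|^\d{1,2}:\d{2}(:\d{2})?(\.\d{3})?$`)
	// "42" (sequence number), "7:05", "01:23:45", and "00:00:01.234" match;
	// "1:2:3:4" and "abc" do not.
	for _, s := range []string{"42", "7:05", "01:23:45", "00:00:01.234", "1:2:3:4", "abc"} {
		fmt.Printf("%-14s %v\n", s, re.MatchString(s))
	}
}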
@@ -180,6 +189,7 @@ func (o *YouTube) readAndCleanVTTFile(filename string) (ret string, err error) {
	// Convert VTT to plain text
	lines := strings.Split(string(content), "\n")
	var textBuilder strings.Builder
+	seenSegments := make(map[string]struct{})

	for _, line := range lines {
		line = strings.TrimSpace(line)
@@ -193,8 +203,11 @@ func (o *YouTube) readAndCleanVTTFile(filename string) (ret string, err error) {
		// Remove VTT formatting tags
		line = removeVTTTags(line)
		if line != "" {
-			textBuilder.WriteString(line)
-			textBuilder.WriteString(" ")
+			if _, exists := seenSegments[line]; !exists {
+				textBuilder.WriteString(line)
+				textBuilder.WriteString(" ")
+				seenSegments[line] = struct{}{}
+			}
		}
	}

@@ -215,6 +228,10 @@ func (o *YouTube) readAndFormatVTTWithTimestamps(filename string) (ret string, err error) {
	lines := strings.Split(string(content), "\n")
	var textBuilder strings.Builder
	var currentTimestamp string
+	// Track content with timestamps to allow repeats after significant time gaps
+	// This preserves legitimate repeated content (choruses, recurring phrases, etc.)
+	// while still filtering out immediate duplicates from VTT formatting issues
+	seenSegments := make(map[string]string) // text -> last timestamp seen

	for _, line := range lines {
		line = strings.TrimSpace(line)
@@ -246,7 +263,20 @@ func (o *YouTube) readAndFormatVTTWithTimestamps(filename string) (ret string, err error) {
		// Remove VTT formatting tags
		cleanText := removeVTTTags(line)
		if cleanText != "" && currentTimestamp != "" {
-			textBuilder.WriteString(fmt.Sprintf("[%s] %s\n", currentTimestamp, cleanText))
+			// Check if we should include this segment
+			shouldInclude := true
+			if lastTimestamp, exists := seenSegments[cleanText]; exists {
+				// Calculate time difference to determine if this is a legitimate repeat
+				if !shouldIncludeRepeat(lastTimestamp, currentTimestamp) {
+					shouldInclude = false
+				}
+			}
+
+			if shouldInclude {
+				timestampedLine := fmt.Sprintf("[%s] %s", currentTimestamp, cleanText)
+				textBuilder.WriteString(timestampedLine + "\n")
+				seenSegments[cleanText] = currentTimestamp
+			}
		}
	}

@@ -268,8 +298,6 @@ func formatVTTTimestamp(vttTime string) string {
}

func isTimeStamp(s string) bool {
-	// Match timestamps like "00:00:01.234" or just numbers
-	timestampRegex := regexp.MustCompile(`^\d+$|^\d{2}:\d{2}:\d{2}`)
	return timestampRegex.MatchString(s)
}

@@ -279,6 +307,76 @@ func removeVTTTags(s string) string {
	return tagRegex.ReplaceAllString(s, "")
}

// shouldIncludeRepeat determines if repeated content should be included based on time gap
func shouldIncludeRepeat(lastTimestamp, currentTimestamp string) bool {
	// Parse timestamps to calculate time difference
	lastSeconds, err1 := parseTimestampToSeconds(lastTimestamp)
	currentSeconds, err2 := parseTimestampToSeconds(currentTimestamp)

	if err1 != nil || err2 != nil {
		// If we can't parse timestamps, err on the side of inclusion
		return true
	}

	// Allow repeats if there's at least a TimeGapForRepeats gap
	// This threshold can be adjusted based on use case:
	// - 10 seconds works well for most content
	// - Could be made configurable in the future
	timeDiffSeconds := currentSeconds - lastSeconds
	return timeDiffSeconds >= TimeGapForRepeats
}

// parseTimestampToSeconds converts timestamp string (HH:MM:SS or MM:SS) to total seconds
func parseTimestampToSeconds(timestamp string) (int, error) {
	parts := strings.Split(timestamp, ":")
	if len(parts) < 2 || len(parts) > 3 {
		return 0, fmt.Errorf("invalid timestamp format: %s", timestamp)
	}

	var hours, minutes, seconds int
	var err error

	if len(parts) == 3 {
		// HH:MM:SS format
		if hours, err = strconv.Atoi(parts[0]); err != nil {
			return 0, err
		}
		if minutes, err = strconv.Atoi(parts[1]); err != nil {
			return 0, err
		}
		if seconds, err = parseSeconds(parts[2]); err != nil {
			return 0, err
		}
	} else {
		// MM:SS format
		if minutes, err = strconv.Atoi(parts[0]); err != nil {
			return 0, err
		}
		if seconds, err = parseSeconds(parts[1]); err != nil {
			return 0, err
		}
	}

	return hours*3600 + minutes*60 + seconds, nil
}

func parseSeconds(seconds_str string) (int, error) {
	var seconds int
	var err error
	if strings.Contains(seconds_str, ".") {
		// Handle fractional seconds
		second_parts := strings.Split(seconds_str, ".")
		if seconds, err = strconv.Atoi(second_parts[0]); err != nil {
			return 0, err
		}
	} else {
		if seconds, err = strconv.Atoi(seconds_str); err != nil {
			return 0, err
		}
	}
	return seconds, nil
}

func (o *YouTube) GrabComments(videoId string) (ret []string, err error) {
	if err = o.initService(); err != nil {
		return

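To make the 10-second threshold concrete: a chorus line at [00:30] that recurs at [01:30] is 60 seconds apart and is kept, while the same caption at [01:00] and [01:05] is treated as a VTT duplicate and dropped. A tiny self-contained sketch of the rule, re-implemented here for illustration only (the real logic lives in the unexported helpers above):

package main

import "fmt"

const timeGapForRepeats = 10 // seconds, mirroring the diff's constant

func main() {
	cases := []struct{ last, curr int }{ // occurrences of the same caption, in seconds
		{30, 90}, // 60 s apart: legitimate repeat, kept
		{60, 70}, // exactly at the boundary: kept
		{60, 65}, // 5 s apart: likely a duplicate, dropped
	}
	for _, c := range cases {
		fmt.Printf("gap %2d s -> include: %v\n", c.curr-c.last, c.curr-c.last >= timeGapForRepeats)
	}
}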
@@ -71,3 +71,21 @@ func IsSymlinkToDir(path string) bool {

	return false // Regular directories should not be treated as symlinks
}

// GetDefaultConfigPath returns the default path for the configuration file
// if it exists, otherwise returns an empty string.
func GetDefaultConfigPath() (string, error) {
	homeDir, err := os.UserHomeDir()
	if err != nil {
		return "", fmt.Errorf("could not determine user home directory: %w", err)
	}

	defaultConfigPath := filepath.Join(homeDir, ".config", "fabric", "config.yaml")
	if _, err := os.Stat(defaultConfigPath); err != nil {
		if os.IsNotExist(err) {
			return "", nil // Return no error for non-existent config path
		}
		return "", fmt.Errorf("error accessing default config path: %w", err)
	}
	return defaultConfigPath, nil
}

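A short sketch of the intended call pattern, assuming this utility is imported as util (the wrapper function is hypothetical):

// loadConfigPath shows the contract: an empty path with a nil error
// means "no config file exists, proceed with flags and defaults".
func loadConfigPath() (string, error) {
	path, err := util.GetDefaultConfigPath()
	if err != nil {
		return "", err // home dir unavailable or config unreadable
	}
	if path == "" {
		return "", nil // no ~/.config/fabric/config.yaml
	}
	return path, nil
}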
@@ -1 +1 @@
-"1.4.249"
+"1.4.264"

@@ -1861,6 +1861,16 @@
        "CR THINKING",
        "SELF"
      ]
    },
    {
      "patternName": "generate_code_rules",
      "description": "Extracts a list of best-practice rules for AI-assisted coding tools.",
      "tags": [
        "ANALYSIS",
        "EXTRACT",
        "DEVELOPMENT",
        "AI"
      ]
    }
  ]
}
@@ -903,6 +903,10 @@
    {
      "patternName": "t_check_dunning_kruger",
      "pattern_extract": "# IDENTITY You are an expert at understanding deep context about a person or entity, and then creating wisdom from that context combined with the instruction or question given in the input. # STEPS 1. Read the incoming TELOS File thoroughly. Fully understand everything about this person or entity. 2. Deeply study the input instruction or question. 3. Spend significant time and effort thinking about how these two are related, and what would be the best possible output for the person who sent the input. 4. Evaluate the input against the Dunning-Kruger effect and the input's prior beliefs. Explore cognitive bias, subjective ability and objective ability for: low-ability areas where the input owner overestimates their knowledge or skill; and the opposite, high-ability areas where the input owner underestimates their knowledge or skill. # EXAMPLE In education, students who overestimate their understanding of a topic may not seek help or put in the necessary effort, while high-achieving students might doubt their abilities. In healthcare, overconfident practitioners might make critical errors, and underconfident practitioners might delay crucial decisions. In politics, politicians with limited expertise might propose simplistic solutions and ignore expert advice. END OF EXAMPLE # OUTPUT - In a section called OVERESTIMATION OF COMPETENCE, output a set of 10 16-word bullets that capture the principal misinterpretations of lack of knowledge or skill which are leading the input owner to believe they are more knowledgeable or skilled than they actually are. - In a section called UNDERESTIMATION OF COMPETENCE, output a set of 10 16-word bullets that capture the principal misinterpretations of underestimation of their knowledge or skill which are preventing the input owner from seeing opportunities. - In a section called METACOGNITIVE SKILLS, output a set of 10-word bullets that expose areas where the input owner struggles to accurately assess their own performance and may not be aware of the gap between their actual ability and their perceived ability. - In a section called IMPACT ON DECISION MAKING, output a set of 10-word bullets exposing facts, biases, and traces of behavior based on overinflated self-assessment that can lead to poor decisions. - At the end, summarize the findings and give the input owner a motivational and constructive perspective on how they can start to tackle the 5 principal gaps in their perceived skills and knowledge competencies. Don't be overly simplistic. # OUTPUT INSTRUCTIONS 1. Only output valid, basic Markdown. No special formatting or italics or bolding or anything. 2. Do not output any content other than the sections above. Nothing else."
    },
    {
      "patternName": "generate_code_rules",
      "pattern_extract": "# IDENTITY AND PURPOSE You are a senior developer and expert prompt engineer. Think ultra hard to distill the following transcription or tutorial into as small a set of unique rules as possible, intended as best-practice guidance for AI-assisted coding tools; each rule has to be one sentence phrased as a direct instruction; avoid explanations and cosmetic language. Output in Markdown; I prefer bullet dash (-). --- # TRANSCRIPT"
    }
  ]
}
@@ -1861,6 +1861,16 @@
        "CR THINKING",
        "SELF"
      ]
    },
    {
      "patternName": "generate_code_rules",
      "description": "Extracts a list of best-practice rules for AI-assisted coding tools.",
      "tags": [
        "ANALYSIS",
        "EXTRACT",
        "DEVELOPMENT",
        "AI"
      ]
    }
  ]
}
@@ -44,7 +44,7 @@ export default defineConfig({
    '/api': {
      target: FABRIC_BASE_URL,
      changeOrigin: true,
-     timeout: 30000,
+     timeout: 900000,
      rewrite: (path) => path.replace(/^\/api/, ''),
      configure: (proxy, _options) => {
        proxy.on('error', (err, req, res) => {
@@ -59,7 +59,7 @@ export default defineConfig({
    '^/(patterns|models|sessions)/names': {
      target: FABRIC_BASE_URL,
      changeOrigin: true,
-     timeout: 30000,
+     timeout: 900000,
      configure: (proxy, _options) => {
        proxy.on('error', (err, req, res) => {
          console.log('proxy error', err);

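For scale: 900000 ms is 15 minutes, up from the previous 30 seconds, presumably so the dev-server proxy no longer aborts long-running model responses.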