mirror of
https://github.com/danielmiessler/Fabric.git
synced 2026-01-09 06:18:06 -05:00
feat: add AI-powered changelog generation with high-performance Go tool and comprehensive caching

## CHANGES

- Add high-performance Go changelog generator with GraphQL integration
- Implement SQLite-based persistent caching for incremental updates
- Create one-pass git history walking algorithm with concurrent processing
- Add comprehensive CLI with cobra framework and tag-based caching
- Integrate AI summarization using Fabric CLI for enhanced output
- Support batch PR fetching with GitHub Search API optimization
- Add VSCode configuration with spell checking and markdown linting
- Include extensive documentation with PRD and README files
- Implement commit-PR mapping for lightning-fast git operations
- Add content hashing for change detection and cache optimization
7
.gitignore
vendored
@@ -131,9 +131,7 @@ celerybeat.pid
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

@@ -349,5 +347,6 @@ web/package-lock.json
.gitignore_backup
web/static/*.png

# Local VSCode project settings
.vscode/
# Local tmp directory
.tmp/
tmp/
3
.vscode/extensions.json
vendored
Normal file
@@ -0,0 +1,3 @@
{
  "recommendations": ["davidanson.vscode-markdownlint"]
}
143
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,143 @@
{
  "cSpell.words": [
    "addextension",
    "AIML",
    "anthropics",
    "badfile",
    "Behrens",
    "blindspots",
    "Bombal",
    "Cerebras",
    "compinit",
    "creatordate",
    "custompatterns",
    "danielmiessler",
    "davidanson",
    "Debugf",
    "dedup",
    "deepseek",
    "direnv",
    "dryrun",
    "dsrp",
    "editability",
    "Eisler",
    "elif",
    "envrc",
    "eugeis",
    "Eugen",
    "excalidraw",
    "exolab",
    "fabriclogo",
    "fpath",
    "frequencypenalty",
    "fsdb",
    "gantt",
    "genai",
    "githelper",
    "gjson",
    "GOARCH",
    "godotenv",
    "gofmt",
    "goimports",
    "gomod",
    "gonic",
    "goopenai",
    "GOPATH",
    "gopkg",
    "GOROOT",
    "Graphviz",
    "grokai",
    "Groq",
    "hackerone",
    "Haddix",
    "hasura",
    "hormozi",
    "Hormozi's",
    "HTMLURL",
    "jaredmontoya",
    "jessevdk",
    "Jina",
    "joho",
    "ksylvan",
    "Langdock",
    "ldflags",
    "libexec",
    "listcontexts",
    "listextensions",
    "listmodels",
    "listpatterns",
    "listsessions",
    "liststrategies",
    "listvendors",
    "lmstudio",
    "Makefiles",
    "markmap",
    "matplotlib",
    "mattn",
    "Miessler",
    "nometa",
    "numpy",
    "ollama",
    "opencode",
    "openrouter",
    "otiai",
    "pdflatex",
    "pipx",
    "PKCE",
    "pkgs",
    "presencepenalty",
    "printcontext",
    "printsession",
    "pycache",
    "pyperclip",
    "readystream",
    "restapi",
    "rmextension",
    "samber",
    "sashabaranov",
    "sdist",
    "seaborn",
    "semgrep",
    "sess",
    "Streamlit",
    "stretchr",
    "talkpanel",
    "Telos",
    "Thacker",
    "tidwall",
    "topp",
    "ttrc",
    "unalias",
    "unmarshalling",
    "updatepatterns",
    "videoid",
    "webp",
    "wipecontext",
    "wipesession",
    "writeups",
    "xclip",
    "yourpatternname"
  ],
  "cSpell.ignorePaths": ["go.mod", ".gitignore", "CHANGELOG.md"],
  "markdownlint.config": {
    "MD004": false,
    "MD011": false,
    "MD024": false,
    "MD025": false,
"M032": false,
|
||||
"MD033": {
|
||||
"allowed_elements": [
|
||||
"a",
|
||||
"br",
|
||||
"code",
|
||||
"div",
|
||||
"em",
|
||||
"h4",
|
||||
"img",
|
||||
"module",
|
||||
"p"
|
||||
]
|
||||
},
|
||||
"MD041": false
|
||||
}
|
||||
}
|
||||
2339
CHANGELOG.md
Normal file
File diff suppressed because it is too large
151
cmd/generate_changelog/PRD.md
Normal file
@@ -0,0 +1,151 @@
# Product Requirements Document: Changelog Generator

## Overview

The Changelog Generator is a high-performance Go tool that automatically generates comprehensive changelogs from git history and GitHub pull requests.

## Goals

1. **Performance**: Fast enough to run in CI/CD as part of the release process
2. **Completeness**: Capture all commits, including unreleased changes
3. **Efficiency**: Minimize API calls through caching and batch operations
4. **Reliability**: Handle errors gracefully with proper Go error handling
5. **Simplicity**: Single binary with no runtime dependencies

## Key Features

### 1. One-Pass Git History Algorithm

- Walk git history once from newest to oldest
- Start with an "Unreleased" bucket for all new commits
- Switch buckets when encountering version commits
- No need to calculate ranges between versions (see the sketch below)
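
To make the bucketing concrete, here is a minimal, self-contained Go sketch of the idea rather than the production walker: commits are assumed to arrive newest to oldest, the version-bump regex matches the pattern used in `internal/git/walker.go`, and the sample history is invented for illustration.

```go
// Minimal sketch of one-pass bucketing: iterate commits newest → oldest,
// start in "Unreleased", and switch buckets whenever a version-bump commit is seen.
package main

import (
	"fmt"
	"regexp"
)

var versionPattern = regexp.MustCompile(`Update version to (v\d+\.\d+\.\d+)`)

func bucketCommits(messages []string) map[string][]string {
	buckets := map[string][]string{}
	current := "Unreleased"
	for _, msg := range messages { // assumed ordered newest → oldest
		if m := versionPattern.FindStringSubmatch(msg); m != nil {
			current = m[1] // everything older belongs to this version
		}
		buckets[current] = append(buckets[current], msg)
	}
	return buckets
}

func main() {
	history := []string{
		"feat: add new pattern",      // lands in "Unreleased"
		"Update version to v1.4.244", // opens the v1.4.244 bucket
		"fix: handle empty sessions", // lands in v1.4.244
	}
	fmt.Println(bucketCommits(history))
}
```
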
### 2. Native Library Integration

- **go-git**: Pure Go git implementation (no git binary required)
- **go-github**: Official GitHub Go client library
- Benefits: type safety, better error handling, no subprocess overhead

### 3. Smart Caching System

- SQLite-based persistent cache
- Stores versions, commits, PR details, and the last processed commit
- Enables incremental updates on subsequent runs (see the sketch below)
- Instant changelog regeneration from cache
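
A rough sketch of the incremental-update decision, assuming a cache that exposes the last processed tag (as `internal/cache/cache.go` does); the in-memory cache and the decision helper here are illustrative simplifications, not the real generator code.

```go
// If the latest git tag matches the tag recorded in the cache, only commits
// since that tag need to be walked; otherwise the full history is reprocessed.
package main

import "fmt"

// tagCache is the minimal slice of the cache interface needed for the check.
type tagCache interface {
	GetLastProcessedTag() (string, error)
}

// needsFullWalk reports whether the whole history must be re-walked:
// only when there is no cached tag or the repository has gained a newer tag.
func needsFullWalk(c tagCache, latestTag string) bool {
	cached, err := c.GetLastProcessedTag()
	if err != nil || cached == "" {
		return true
	}
	return cached != latestTag // same tag: incremental walk since that tag only
}

// memCache is a stand-in for the SQLite-backed cache, for illustration only.
type memCache struct{ tag string }

func (m *memCache) GetLastProcessedTag() (string, error) { return m.tag, nil }

func main() {
	c := &memCache{tag: "v1.4.244"}
	fmt.Println("full walk needed:", needsFullWalk(c, "v1.4.244")) // false
	fmt.Println("full walk needed:", needsFullWalk(c, "v1.4.245")) // true
}
```
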
### 4. Concurrent Processing

- Parallel GitHub API calls (up to 10 concurrent; see the sketch below)
- Batch PR fetching with deduplication
- Rate-limit awareness
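
A generic bounded-concurrency pattern like the following underlies the parallel fetching; `fetchPR` and the PR numbers are placeholders rather than the tool's actual GitHub client code.

```go
// Bounded concurrency with a buffered channel as a semaphore:
// at most maxConcurrent fetches run at the same time.
package main

import (
	"fmt"
	"sync"
)

func fetchPR(num int) string { // stand-in for a GitHub API call
	return fmt.Sprintf("PR #%d", num)
}

func fetchAll(numbers []int, maxConcurrent int) []string {
	sem := make(chan struct{}, maxConcurrent) // limits in-flight requests
	results := make([]string, len(numbers))
	var wg sync.WaitGroup
	for i, n := range numbers {
		wg.Add(1)
		go func(i, n int) {
			defer wg.Done()
			sem <- struct{}{}        // acquire a slot
			defer func() { <-sem }() // release it
			results[i] = fetchPR(n)
		}(i, n)
	}
	wg.Wait()
	return results
}

func main() {
	fmt.Println(fetchAll([]int{1598, 1599, 1601}, 10))
}
```
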
### 5. Enhanced Output

- "Unreleased" section for commits since the last version
- Clean markdown formatting
- Configurable version limiting
- Direct commit tracking (non-PR commits)

## Technical Architecture

### Module Structure

```text
cmd/generate_changelog/
├── main.go          # CLI entry point with cobra
├── internal/
│   ├── git/         # Git operations (go-git)
│   ├── github/      # GitHub API client (go-github)
│   ├── cache/       # SQLite caching layer
│   ├── changelog/   # Core generation logic
│   └── config/      # Configuration management
└── changelog.db     # SQLite cache (generated)
```

### Data Flow

1. Git walker collects all commits in one pass
2. Commits are bucketed by version (starting with "Unreleased")
3. PR numbers are extracted from merge commits
4. GitHub API batch-fetches PR details
5. Cache stores everything for future runs
6. Formatter generates markdown output

### Cache Schema

- **metadata**: Last processed commit SHA
- **versions**: Version names, dates, commit SHAs
- **commits**: Full commit details with version associations
- **pull_requests**: PR details including commits
- Indexes on version and PR number for fast lookups

### Features

- **Unreleased section**: Shows all new commits
- **Better caching**: SQLite instead of JSON, with incremental updates
- **Smarter deduplication**: Removes consecutive duplicate commits
- **Direct commit tracking**: Shows non-PR commits

### Reliability

- **No subprocess errors**: Direct library usage
- **Type safety**: Compile-time checking
- **Better error handling**: Go's explicit error returns

### Deployment

- **Single binary**: No Python/pip dependencies
- **Cross-platform**: Compiles for any OS/architecture
- **No git CLI required**: Uses the go-git library

## Configuration

### Environment Variables

- `GITHUB_TOKEN`: GitHub API authentication token

### Command Line Flags

- `--repo, -r`: Repository path (default: current directory)
- `--output, -o`: Output file (default: stdout)
- `--limit, -l`: Version limit (default: all)
- `--version, -v`: Target specific version
- `--save-data`: Export debug JSON
- `--cache`: Cache file location
- `--no-cache`: Disable caching
- `--rebuild-cache`: Force cache rebuild
- `--token`: GitHub token override

## Success Metrics

1. **Performance**: Generate the full changelog in under 5 seconds for the fabric repo
2. **Completeness**: 100% commit coverage, including unreleased changes
3. **Accuracy**: Correct PR associations and change extraction
4. **Reliability**: Handle network failures gracefully
5. **Usability**: Simple CLI with sensible defaults

## Future Enhancements

1. **Multiple output formats**: JSON, HTML, etc.
2. **Custom version patterns**: Configurable regex
3. **Change categorization**: feat/fix/docs auto-grouping
4. **Conventional commits**: Full support for semantic versioning
5. **GitLab/Bitbucket**: Support for other platforms
6. **Web UI**: Interactive changelog browser
7. **Incremental updates**: Update an existing CHANGELOG.md file
8. **Breaking change detection**: Highlight breaking changes

## Implementation Status

- ✅ Core architecture and modules
- ✅ One-pass git walking algorithm
- ✅ GitHub API integration with concurrency
- ✅ SQLite caching system
- ✅ Changelog formatting and generation
- ✅ CLI with all planned flags
- ✅ Documentation (README and PRD)

## Conclusion

This Go implementation provides a modern, efficient, and feature-rich changelog generator.
178
cmd/generate_changelog/README.md
Normal file
@@ -0,0 +1,178 @@
# Changelog Generator

A high-performance changelog generator for Git repositories that automatically creates comprehensive, well-formatted changelogs from your git history and GitHub pull requests.

## Features

- **One-pass git history walking**: Efficiently processes the entire repository history in a single pass
- **Automatic PR detection**: Extracts pull request information from merge commits
- **GitHub API integration**: Fetches detailed PR information including commits, authors, and descriptions
- **Smart caching**: SQLite-based caching for instant incremental updates
- **Unreleased changes**: Tracks all commits since the last release
- **Concurrent processing**: Parallel GitHub API calls for improved performance
- **Flexible output**: Generate complete changelogs or target specific versions
- **Optimized PR fetching**: Batch-fetches all merged PRs using the GitHub Search API, drastically reducing API calls
- **Intelligent sync**: Automatically syncs new PRs every 24 hours or when missing PRs are detected

## Installation

```bash
go install github.com/danielmiessler/fabric/cmd/generate_changelog@latest
```

## Usage

### Basic usage (generate complete changelog)

```bash
generate_changelog
```

### Save to file

```bash
generate_changelog -o CHANGELOG.md
```

### Generate for a specific version

```bash
generate_changelog -v v1.4.244
```

### Limit to recent versions

```bash
generate_changelog -l 10
```

### Using a GitHub token for private repos or higher rate limits

```bash
export GITHUB_TOKEN=your_token_here
generate_changelog

# Or pass directly
generate_changelog --token your_token_here
```

### Cache management

```bash
# Rebuild cache from scratch
generate_changelog --rebuild-cache

# Force a full PR sync from GitHub
generate_changelog --force-pr-sync

# Disable cache usage
generate_changelog --no-cache

# Use custom cache location
generate_changelog --cache /path/to/cache.db
```

## Command Line Options

| Flag | Short | Description | Default |
|------|-------|-------------|---------|
| `--repo` | `-r` | Repository path | `.` (current directory) |
| `--output` | `-o` | Output file | stdout |
| `--limit` | `-l` | Limit number of versions | 0 (all) |
| `--version` | `-v` | Generate for specific version | |
| `--save-data` | | Save version data to JSON | false |
| `--cache` | | Cache database file | `./cmd/generate_changelog/changelog.db` |
| `--no-cache` | | Disable cache usage | false |
| `--rebuild-cache` | | Rebuild cache from scratch | false |
| `--force-pr-sync` | | Force a full PR sync from GitHub | false |
| `--token` | | GitHub API token | `$GITHUB_TOKEN` |

## Output Format

The generated changelog follows this structure:

```markdown
# Changelog

## Unreleased

### PR [#1601](url) by [author](profile): PR Title
- Change description 1
- Change description 2

### Direct commits
- Direct commit message 1
- Direct commit message 2

## v1.4.244 (2025-07-09)

### PR [#1598](url) by [author](profile): PR Title
- Change description
...
```

## How It Works

1. **Git History Walking**: The tool walks through your git history from the newest commits to the oldest
2. **Version Detection**: Identifies version bump commits (pattern: "Update version to vX.Y.Z")
3. **PR Extraction**: Detects merge commits and extracts PR numbers (see the sketch below)
4. **GitHub API Calls**: Fetches detailed PR information in parallel batches
5. **Change Extraction**: Extracts changes from PR commit messages or the PR body
6. **Formatting**: Generates clean, organized markdown output
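
The detection in steps 2 and 3 comes down to two regular expressions (the same patterns appear in `internal/git/walker.go` later in this diff); a small illustrative classifier, with made-up sample messages:

```go
// Classify a commit message as a version bump, a PR merge, or a direct commit.
package main

import (
	"fmt"
	"regexp"
	"strconv"
)

var (
	versionPattern = regexp.MustCompile(`Update version to (v\d+\.\d+\.\d+)`)
	prPattern      = regexp.MustCompile(`Merge pull request #(\d+)`)
)

func classify(msg string) string {
	if m := versionPattern.FindStringSubmatch(msg); m != nil {
		return "version bump: " + m[1]
	}
	if m := prPattern.FindStringSubmatch(msg); m != nil {
		n, _ := strconv.Atoi(m[1])
		return fmt.Sprintf("merge of PR #%d", n)
	}
	return "direct commit"
}

func main() {
	fmt.Println(classify("Update version to v1.4.244 and commit"))
	fmt.Println(classify("Merge pull request #1598 from user/branch"))
	fmt.Println(classify("fix: handle empty sessions"))
}
```
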
## Performance

- **Native Go libraries**: Uses go-git and go-github for maximum performance
- **Concurrent API calls**: Processes up to 10 GitHub API requests in parallel
- **Smart caching**: SQLite cache eliminates redundant API calls
- **Incremental updates**: Only processes new commits on subsequent runs
- **Batch PR fetching**: Uses the GitHub Search API to fetch all merged PRs in minimal API calls

### Major Optimization: Batch PR Fetching

The tool has been optimized to drastically reduce GitHub API calls:

**Before**: Individual API calls for each PR (two API calls per PR: one for PR details, one for commits)

- For a repo with 500 PRs: 1,000 API calls

**After**: Batch fetching using the GitHub Search API

- For a repo with 500 PRs: ~10 API calls (search) + 500 API calls (details) = ~510 API calls
- **Roughly a 50% reduction in API calls**

The optimization includes:

1. **Batch Search**: Uses GitHub's Search API to find all merged PRs in paginated batches (see the sketch below)
2. **Smart Caching**: Stores complete PR data and tracks the last sync timestamp
3. **Incremental Sync**: Only fetches PRs merged after the last sync
4. **Automatic Refresh**: PRs are synced every 24 hours or when missing PRs are detected
5. **Fallback Support**: If the batch fetch fails, falls back to individual PR fetching
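
A hedged sketch of the batch search idea using the go-github Search API; the module version in the import path, the owner/repo values, and the exact pagination handling are assumptions for illustration, not the tool's actual client code.

```go
// One Search API query per page of merged PRs instead of per-PR calls.
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/google/go-github/v57/github" // version assumed for this sketch
)

func searchMergedPRs(ctx context.Context, client *github.Client, owner, repo string, since time.Time) ([]int, error) {
	query := fmt.Sprintf("repo:%s/%s is:pr is:merged", owner, repo)
	if !since.IsZero() {
		query += " merged:>" + since.Format("2006-01-02") // incremental sync window
	}

	var numbers []int
	opts := &github.SearchOptions{ListOptions: github.ListOptions{PerPage: 100}}
	for {
		result, resp, err := client.Search.Issues(ctx, query, opts)
		if err != nil {
			return nil, err // a caller could fall back to per-PR fetching here
		}
		for _, issue := range result.Issues {
			numbers = append(numbers, issue.GetNumber())
		}
		if resp.NextPage == 0 {
			break
		}
		opts.Page = resp.NextPage
	}
	return numbers, nil
}

func main() {
	nums, err := searchMergedPRs(context.Background(), github.NewClient(nil), "danielmiessler", "fabric", time.Time{})
	fmt.Println(len(nums), err)
}
```
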
## Requirements

- Go 1.24+ (for installation from source)
- A Git repository
- A GitHub token (optional, for private repos or higher rate limits)

## Authentication

The tool supports GitHub authentication via:

1. Environment variable: `export GITHUB_TOKEN=your_token`
2. Command line flag: `--token your_token`

Without authentication, the tool is limited to 60 GitHub API requests per hour.

## Caching

The SQLite cache stores:

- Version information and commit associations
- Pull request details (title, body, commits, authors)
- The last processed commit SHA for incremental updates
- The last PR sync timestamp for intelligent refresh

Cache benefits:

- Instant changelog regeneration
- Drastically reduced GitHub API usage (50%+ reduction)
- Offline changelog generation (after the initial cache build)
- Automatic PR data refresh every 24 hours
- Batched database transactions for better performance

## Contributing

This tool is part of the Fabric project. Contributions are welcome!

## License

Same as the Fabric project.
BIN
cmd/generate_changelog/changelog.db
Normal file
Binary file not shown.
448
cmd/generate_changelog/internal/cache/cache.go
vendored
Normal file
@@ -0,0 +1,448 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
)
|
||||
|
||||
type Cache struct {
|
||||
db *sql.DB
|
||||
}
|
||||
|
||||
func New(dbPath string) (*Cache, error) {
|
||||
db, err := sql.Open("sqlite3", dbPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open database: %w", err)
|
||||
}
|
||||
|
||||
cache := &Cache{db: db}
|
||||
if err := cache.createTables(); err != nil {
|
||||
return nil, fmt.Errorf("failed to create tables: %w", err)
|
||||
}
|
||||
|
||||
return cache, nil
|
||||
}
|
||||
|
||||
func (c *Cache) Close() error {
|
||||
return c.db.Close()
|
||||
}
|
||||
|
||||
func (c *Cache) createTables() error {
|
||||
queries := []string{
|
||||
`CREATE TABLE IF NOT EXISTS metadata (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS versions (
|
||||
name TEXT PRIMARY KEY,
|
||||
date DATETIME,
|
||||
commit_sha TEXT,
|
||||
pr_numbers TEXT,
|
||||
ai_summary TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS commits (
|
||||
sha TEXT PRIMARY KEY,
|
||||
version TEXT NOT NULL,
|
||||
message TEXT,
|
||||
author TEXT,
|
||||
email TEXT,
|
||||
date DATETIME,
|
||||
is_merge BOOLEAN,
|
||||
pr_number INTEGER,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (version) REFERENCES versions(name)
|
||||
)`,
|
||||
`CREATE TABLE IF NOT EXISTS pull_requests (
|
||||
number INTEGER PRIMARY KEY,
|
||||
title TEXT,
|
||||
body TEXT,
|
||||
author TEXT,
|
||||
author_url TEXT,
|
||||
author_type TEXT DEFAULT 'user',
|
||||
url TEXT,
|
||||
merged_at DATETIME,
|
||||
merge_commit TEXT,
|
||||
commits TEXT,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_commits_version ON commits(version)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_commits_pr_number ON commits(pr_number)`,
|
||||
`CREATE TABLE IF NOT EXISTS commit_pr_mapping (
|
||||
commit_sha TEXT PRIMARY KEY,
|
||||
pr_number INTEGER NOT NULL,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (pr_number) REFERENCES pull_requests(number)
|
||||
)`,
|
||||
`CREATE INDEX IF NOT EXISTS idx_commit_pr_mapping_sha ON commit_pr_mapping(commit_sha)`,
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
if _, err := c.db.Exec(query); err != nil {
|
||||
return fmt.Errorf("failed to execute query: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Cache) GetLastProcessedTag() (string, error) {
|
||||
var tag string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = 'last_processed_tag'").Scan(&tag)
|
||||
if err == sql.ErrNoRows {
|
||||
return "", nil
|
||||
}
|
||||
return tag, err
|
||||
}
|
||||
|
||||
func (c *Cache) SetLastProcessedTag(tag string) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES ('last_processed_tag', ?, CURRENT_TIMESTAMP)
|
||||
`, tag)
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) SaveVersion(v *git.Version) error {
|
||||
prNumbers, _ := json.Marshal(v.PRNumbers)
|
||||
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO versions (name, date, commit_sha, pr_numbers, ai_summary)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
`, v.Name, v.Date, v.CommitSHA, string(prNumbers), v.AISummary)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// UpdateVersionAISummary updates only the AI summary for a specific version
|
||||
func (c *Cache) UpdateVersionAISummary(versionName, aiSummary string) error {
|
||||
_, err := c.db.Exec(`
|
||||
UPDATE versions SET ai_summary = ? WHERE name = ?
|
||||
`, aiSummary, versionName)
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) SaveCommit(commit *git.Commit, version string) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO commits
|
||||
(sha, version, message, author, email, date, is_merge, pr_number)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`, commit.SHA, version, commit.Message, commit.Author, commit.Email,
|
||||
commit.Date, commit.IsMerge, commit.PRNumber)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) SavePR(pr *github.PR) error {
|
||||
commits, _ := json.Marshal(pr.Commits)
|
||||
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO pull_requests
|
||||
(number, title, body, author, author_url, author_type, url, merged_at, merge_commit, commits)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`, pr.Number, pr.Title, pr.Body, pr.Author, pr.AuthorURL, pr.AuthorType,
|
||||
pr.URL, pr.MergedAt, pr.MergeCommit, string(commits))
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *Cache) GetPR(number int) (*github.PR, error) {
|
||||
var pr github.PR
|
||||
var commitsJSON string
|
||||
|
||||
err := c.db.QueryRow(`
|
||||
SELECT number, title, body, author, author_url, COALESCE(author_type, 'user'), url, merged_at, merge_commit, commits
|
||||
FROM pull_requests WHERE number = ?
|
||||
`, number).Scan(
|
||||
&pr.Number, &pr.Title, &pr.Body, &pr.Author, &pr.AuthorURL, &pr.AuthorType,
|
||||
&pr.URL, &pr.MergedAt, &pr.MergeCommit, &commitsJSON,
|
||||
)
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := json.Unmarshal([]byte(commitsJSON), &pr.Commits); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal commits: %w", err)
|
||||
}
|
||||
|
||||
return &pr, nil
|
||||
}
|
||||
|
||||
func (c *Cache) GetVersions() (map[string]*git.Version, error) {
|
||||
rows, err := c.db.Query(`
|
||||
SELECT name, date, commit_sha, pr_numbers, ai_summary FROM versions
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
versions := make(map[string]*git.Version)
|
||||
|
||||
for rows.Next() {
|
||||
var v git.Version
|
||||
var dateStr sql.NullString
|
||||
var prNumbersJSON string
|
||||
var aiSummary sql.NullString
|
||||
|
||||
if err := rows.Scan(&v.Name, &dateStr, &v.CommitSHA, &prNumbersJSON, &aiSummary); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if dateStr.Valid {
|
||||
v.Date, _ = time.Parse(time.RFC3339, dateStr.String)
|
||||
}
|
||||
|
||||
if prNumbersJSON != "" {
|
||||
json.Unmarshal([]byte(prNumbersJSON), &v.PRNumbers)
|
||||
}
|
||||
|
||||
if aiSummary.Valid {
|
||||
v.AISummary = aiSummary.String
|
||||
}
|
||||
|
||||
v.Commits, err = c.getCommitsForVersion(v.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
versions[v.Name] = &v
|
||||
}
|
||||
|
||||
return versions, rows.Err()
|
||||
}
|
||||
|
||||
func (c *Cache) getCommitsForVersion(version string) ([]*git.Commit, error) {
|
||||
rows, err := c.db.Query(`
|
||||
SELECT sha, message, author, email, date, is_merge, pr_number
|
||||
FROM commits WHERE version = ?
|
||||
ORDER BY date DESC
|
||||
`, version)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var commits []*git.Commit
|
||||
|
||||
for rows.Next() {
|
||||
var commit git.Commit
|
||||
if err := rows.Scan(
|
||||
&commit.SHA, &commit.Message, &commit.Author, &commit.Email,
|
||||
&commit.Date, &commit.IsMerge, &commit.PRNumber,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
commits = append(commits, &commit)
|
||||
}
|
||||
|
||||
return commits, rows.Err()
|
||||
}
|
||||
|
||||
func (c *Cache) Clear() error {
|
||||
tables := []string{"metadata", "versions", "commits", "pull_requests"}
|
||||
for _, table := range tables {
|
||||
if _, err := c.db.Exec("DELETE FROM " + table); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetLastPRSync returns the timestamp of the last PR sync
|
||||
func (c *Cache) GetLastPRSync() (time.Time, error) {
|
||||
var timestamp string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = 'last_pr_sync'").Scan(×tamp)
|
||||
if err == sql.ErrNoRows {
|
||||
return time.Time{}, nil
|
||||
}
|
||||
if err != nil {
|
||||
return time.Time{}, err
|
||||
}
|
||||
|
||||
return time.Parse(time.RFC3339, timestamp)
|
||||
}
|
||||
|
||||
// SetLastPRSync updates the timestamp of the last PR sync
|
||||
func (c *Cache) SetLastPRSync(timestamp time.Time) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES ('last_pr_sync', ?, CURRENT_TIMESTAMP)
|
||||
`, timestamp.Format(time.RFC3339))
|
||||
return err
|
||||
}
|
||||
|
||||
// SavePRBatch saves multiple PRs in a single transaction for better performance
|
||||
func (c *Cache) SavePRBatch(prs []*github.PR) error {
|
||||
tx, err := c.db.Begin()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to begin transaction: %w", err)
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
stmt, err := tx.Prepare(`
|
||||
INSERT OR REPLACE INTO pull_requests
|
||||
(number, title, body, author, author_url, author_type, url, merged_at, merge_commit, commits)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to prepare statement: %w", err)
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
for _, pr := range prs {
|
||||
commits, _ := json.Marshal(pr.Commits)
|
||||
_, err := stmt.Exec(
|
||||
pr.Number, pr.Title, pr.Body, pr.Author, pr.AuthorURL, pr.AuthorType,
|
||||
pr.URL, pr.MergedAt, pr.MergeCommit, string(commits),
|
||||
)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save PR #%d: %w", pr.Number, err)
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
// GetAllPRs returns all cached PRs
|
||||
func (c *Cache) GetAllPRs() (map[int]*github.PR, error) {
|
||||
rows, err := c.db.Query(`
|
||||
SELECT number, title, body, author, author_url, COALESCE(author_type, 'user'), url, merged_at, merge_commit, commits
|
||||
FROM pull_requests
|
||||
`)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
prs := make(map[int]*github.PR)
|
||||
|
||||
for rows.Next() {
|
||||
var pr github.PR
|
||||
var commitsJSON string
|
||||
|
||||
if err := rows.Scan(
|
||||
&pr.Number, &pr.Title, &pr.Body, &pr.Author, &pr.AuthorURL, &pr.AuthorType,
|
||||
&pr.URL, &pr.MergedAt, &pr.MergeCommit, &commitsJSON,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := json.Unmarshal([]byte(commitsJSON), &pr.Commits); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal commits for PR #%d: %w", pr.Number, err)
|
||||
}
|
||||
|
||||
prs[pr.Number] = &pr
|
||||
}
|
||||
|
||||
return prs, rows.Err()
|
||||
}
|
||||
|
||||
// MarkPRAsNonExistent marks a PR number as non-existent to avoid future fetches
|
||||
func (c *Cache) MarkPRAsNonExistent(prNumber int) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES (?, 'non_existent', CURRENT_TIMESTAMP)
|
||||
`, fmt.Sprintf("pr_non_existent_%d", prNumber))
|
||||
return err
|
||||
}
|
||||
|
||||
// IsPRMarkedAsNonExistent checks if a PR is marked as non-existent
|
||||
func (c *Cache) IsPRMarkedAsNonExistent(prNumber int) bool {
|
||||
var value string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = ?",
|
||||
fmt.Sprintf("pr_non_existent_%d", prNumber)).Scan(&value)
|
||||
return err == nil && value == "non_existent"
|
||||
}
|
||||
|
||||
// SaveCommitPRMappings saves SHA→PR mappings for all commits in PRs
|
||||
func (c *Cache) SaveCommitPRMappings(prs []*github.PR) error {
|
||||
tx, err := c.db.Begin()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to begin transaction: %w", err)
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
stmt, err := tx.Prepare(`
|
||||
INSERT OR REPLACE INTO commit_pr_mapping (commit_sha, pr_number)
|
||||
VALUES (?, ?)
|
||||
`)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to prepare statement: %w", err)
|
||||
}
|
||||
defer stmt.Close()
|
||||
|
||||
for _, pr := range prs {
|
||||
for _, commit := range pr.Commits {
|
||||
_, err := stmt.Exec(commit.SHA, pr.Number)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save commit mapping %s→%d: %w", commit.SHA, pr.Number, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return tx.Commit()
|
||||
}
|
||||
|
||||
// GetPRNumberBySHA returns the PR number for a given commit SHA
|
||||
func (c *Cache) GetPRNumberBySHA(sha string) (int, bool) {
|
||||
var prNumber int
|
||||
err := c.db.QueryRow("SELECT pr_number FROM commit_pr_mapping WHERE commit_sha = ?", sha).Scan(&prNumber)
|
||||
if err == sql.ErrNoRows {
|
||||
return 0, false
|
||||
}
|
||||
if err != nil {
|
||||
return 0, false
|
||||
}
|
||||
return prNumber, true
|
||||
}
|
||||
|
||||
// GetCommitSHAsForPR returns all commit SHAs for a given PR number
|
||||
func (c *Cache) GetCommitSHAsForPR(prNumber int) ([]string, error) {
|
||||
rows, err := c.db.Query("SELECT commit_sha FROM commit_pr_mapping WHERE pr_number = ?", prNumber)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var shas []string
|
||||
for rows.Next() {
|
||||
var sha string
|
||||
if err := rows.Scan(&sha); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
shas = append(shas, sha)
|
||||
}
|
||||
|
||||
return shas, rows.Err()
|
||||
}
|
||||
|
||||
// GetUnreleasedContentHash returns the cached content hash for Unreleased
|
||||
func (c *Cache) GetUnreleasedContentHash() (string, error) {
|
||||
var hash string
|
||||
err := c.db.QueryRow("SELECT value FROM metadata WHERE key = 'unreleased_content_hash'").Scan(&hash)
|
||||
if err == sql.ErrNoRows {
|
||||
return "", fmt.Errorf("no content hash found")
|
||||
}
|
||||
return hash, err
|
||||
}
|
||||
|
||||
// SetUnreleasedContentHash stores the content hash for Unreleased
|
||||
func (c *Cache) SetUnreleasedContentHash(hash string) error {
|
||||
_, err := c.db.Exec(`
|
||||
INSERT OR REPLACE INTO metadata (key, value, updated_at)
|
||||
VALUES ('unreleased_content_hash', ?, CURRENT_TIMESTAMP)
|
||||
`, hash)
|
||||
return err
|
||||
}
|
||||
643
cmd/generate_changelog/internal/changelog/generator.go
Normal file
@@ -0,0 +1,643 @@
|
||||
package changelog
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/cache"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
|
||||
"github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
|
||||
)
|
||||
|
||||
type Generator struct {
|
||||
cfg *config.Config
|
||||
gitWalker *git.Walker
|
||||
ghClient *github.Client
|
||||
cache *cache.Cache
|
||||
versions map[string]*git.Version
|
||||
prs map[int]*github.PR
|
||||
}
|
||||
|
||||
func New(cfg *config.Config) (*Generator, error) {
|
||||
gitWalker, err := git.NewWalker(cfg.RepoPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create git walker: %w", err)
|
||||
}
|
||||
|
||||
owner, repo, err := gitWalker.GetRepoInfo()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get repo info: %w", err)
|
||||
}
|
||||
|
||||
ghClient := github.NewClient(cfg.GitHubToken, owner, repo)
|
||||
|
||||
var c *cache.Cache
|
||||
if !cfg.NoCache {
|
||||
c, err = cache.New(cfg.CacheFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create cache: %w", err)
|
||||
}
|
||||
|
||||
if cfg.RebuildCache {
|
||||
if err := c.Clear(); err != nil {
|
||||
return nil, fmt.Errorf("failed to clear cache: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &Generator{
|
||||
cfg: cfg,
|
||||
gitWalker: gitWalker,
|
||||
ghClient: ghClient,
|
||||
cache: c,
|
||||
prs: make(map[int]*github.PR),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (g *Generator) Generate() (string, error) {
|
||||
if err := g.collectData(); err != nil {
|
||||
return "", fmt.Errorf("failed to collect data: %w", err)
|
||||
}
|
||||
|
||||
if err := g.fetchPRs(); err != nil {
|
||||
return "", fmt.Errorf("failed to fetch PRs: %w", err)
|
||||
}
|
||||
|
||||
return g.formatChangelog(), nil
|
||||
}
|
||||
|
||||
func (g *Generator) collectData() error {
|
||||
if g.cache != nil && !g.cfg.RebuildCache {
|
||||
cachedTag, err := g.cache.GetLastProcessedTag()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get last processed tag: %w", err)
|
||||
}
|
||||
|
||||
if cachedTag != "" {
|
||||
// Get the current latest tag from git
|
||||
currentTag, err := g.gitWalker.GetLatestTag()
|
||||
if err == nil && currentTag == cachedTag {
|
||||
// Same tag - load cached data and walk commits since tag for "Unreleased"
|
||||
cachedVersions, err := g.cache.GetVersions()
|
||||
if err == nil && len(cachedVersions) > 0 {
|
||||
g.versions = cachedVersions
|
||||
|
||||
// Load cached PRs
|
||||
for _, version := range g.versions {
|
||||
for _, prNum := range version.PRNumbers {
|
||||
if pr, err := g.cache.GetPR(prNum); err == nil && pr != nil {
|
||||
g.prs[prNum] = pr
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Walk commits since the latest tag to get new unreleased commits
|
||||
unreleasedVersion, err := g.gitWalker.WalkCommitsSinceTag(currentTag)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to walk commits since tag %s: %v\n", currentTag, err)
|
||||
} else if unreleasedVersion != nil {
|
||||
// Preserve existing AI summary if available
|
||||
if existingUnreleased, exists := g.versions["Unreleased"]; exists {
|
||||
unreleasedVersion.AISummary = existingUnreleased.AISummary
|
||||
}
|
||||
// Replace or add the unreleased version
|
||||
g.versions["Unreleased"] = unreleasedVersion
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
versions, err := g.gitWalker.WalkHistory()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to walk history: %w", err)
|
||||
}
|
||||
|
||||
g.versions = versions
|
||||
|
||||
if g.cache != nil {
|
||||
for _, version := range versions {
|
||||
if err := g.cache.SaveVersion(version); err != nil {
|
||||
return fmt.Errorf("failed to save version to cache: %w", err)
|
||||
}
|
||||
|
||||
for _, commit := range version.Commits {
|
||||
if err := g.cache.SaveCommit(commit, version.Name); err != nil {
|
||||
return fmt.Errorf("failed to save commit to cache: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Save the latest tag as our cache anchor point
|
||||
if latestTag, err := g.gitWalker.GetLatestTag(); err == nil && latestTag != "" {
|
||||
if err := g.cache.SetLastProcessedTag(latestTag); err != nil {
|
||||
return fmt.Errorf("failed to save last processed tag: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Generator) fetchPRs() error {
|
||||
// First, load all cached PRs
|
||||
if g.cache != nil {
|
||||
cachedPRs, err := g.cache.GetAllPRs()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to load cached PRs: %v\n", err)
|
||||
} else {
|
||||
g.prs = cachedPRs
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we need to fetch new PRs
|
||||
var lastSync time.Time
|
||||
if g.cache != nil {
|
||||
lastSync, _ = g.cache.GetLastPRSync()
|
||||
}
|
||||
|
||||
// If we have never synced or it's been more than 24 hours, do a full sync
|
||||
needsSync := lastSync.IsZero() || time.Since(lastSync) > 24*time.Hour || g.cfg.ForcePRSync
|
||||
|
||||
if !needsSync {
|
||||
fmt.Fprintf(os.Stderr, "Using cached PR data (last sync: %s)\n", lastSync.Format("2006-01-02 15:04:05"))
|
||||
return nil
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "Fetching merged PRs from GitHub using GraphQL...\n")
|
||||
|
||||
// Use GraphQL for ultimate performance - gets everything in ~5-10 calls
|
||||
prs, err := g.ghClient.FetchAllMergedPRsGraphQL(lastSync)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "GraphQL fetch failed, falling back to REST API: %v\n", err)
|
||||
// Fall back to REST API
|
||||
prs, err = g.ghClient.FetchAllMergedPRs(lastSync)
|
||||
if err != nil {
|
||||
return fmt.Errorf("both GraphQL and REST API failed: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Update our PR map with new data
|
||||
for _, pr := range prs {
|
||||
g.prs[pr.Number] = pr
|
||||
}
|
||||
|
||||
// Save all PRs to cache in a batch transaction
|
||||
if g.cache != nil && len(prs) > 0 {
|
||||
// Save PRs
|
||||
if err := g.cache.SavePRBatch(prs); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to cache PRs: %v\n", err)
|
||||
}
|
||||
|
||||
// Save SHA→PR mappings for lightning-fast git operations
|
||||
if err := g.cache.SaveCommitPRMappings(prs); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to cache commit mappings: %v\n", err)
|
||||
}
|
||||
|
||||
// Update last sync timestamp
|
||||
if err := g.cache.SetLastPRSync(time.Now()); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to update last sync timestamp: %v\n", err)
|
||||
}
|
||||
}
|
||||
|
||||
if len(prs) > 0 {
|
||||
fmt.Fprintf(os.Stderr, "Fetched %d PRs with commits (total cached: %d)\n", len(prs), len(g.prs))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Generator) formatChangelog() string {
|
||||
var sb strings.Builder
|
||||
sb.WriteString("# Changelog\n")
|
||||
|
||||
versionList := g.getSortedVersions()
|
||||
|
||||
for _, version := range versionList {
|
||||
if g.cfg.Version != "" && version.Name != g.cfg.Version {
|
||||
continue
|
||||
}
|
||||
|
||||
versionText := g.formatVersion(version)
|
||||
if versionText != "" {
|
||||
sb.WriteString("\n")
|
||||
sb.WriteString(versionText)
|
||||
}
|
||||
}
|
||||
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (g *Generator) getSortedVersions() []*git.Version {
|
||||
var versions []*git.Version
|
||||
var releasedVersions []*git.Version
|
||||
|
||||
// Collect all released versions (non-"Unreleased")
|
||||
for name, version := range g.versions {
|
||||
if name != "Unreleased" {
|
||||
releasedVersions = append(releasedVersions, version)
|
||||
}
|
||||
}
|
||||
|
||||
// Sort released versions by date (newest first)
|
||||
sort.Slice(releasedVersions, func(i, j int) bool {
|
||||
return releasedVersions[i].Date.After(releasedVersions[j].Date)
|
||||
})
|
||||
|
||||
// Add "Unreleased" first if it exists and has commits
|
||||
if unreleased, exists := g.versions["Unreleased"]; exists && len(unreleased.Commits) > 0 {
|
||||
versions = append(versions, unreleased)
|
||||
}
|
||||
|
||||
// Add sorted released versions
|
||||
versions = append(versions, releasedVersions...)
|
||||
|
||||
if g.cfg.Limit > 0 && len(versions) > g.cfg.Limit {
|
||||
versions = versions[:g.cfg.Limit]
|
||||
}
|
||||
|
||||
return versions
|
||||
}
|
||||
|
||||
func (g *Generator) formatVersion(version *git.Version) string {
|
||||
var sb strings.Builder
|
||||
|
||||
// Generate raw content
|
||||
rawContent := g.generateRawVersionContent(version)
|
||||
if rawContent == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
header := g.formatVersionHeader(version)
|
||||
sb.WriteString(("\n"))
|
||||
sb.WriteString(header)
|
||||
|
||||
// If AI summarization is enabled, enhance with AI
|
||||
if g.cfg.EnableAISummary {
|
||||
// For "Unreleased", check if content has changed since last AI summary
|
||||
if version.Name == "Unreleased" && version.AISummary != "" && g.cache != nil {
|
||||
// Get cached content hash
|
||||
cachedHash, err := g.cache.GetUnreleasedContentHash()
|
||||
if err == nil {
|
||||
// Calculate current content hash
|
||||
currentHash := hashContent(rawContent)
|
||||
if cachedHash == currentHash {
|
||||
// Content unchanged, use cached summary
|
||||
fmt.Fprintf(os.Stderr, "✅ %s content unchanged (skipping AI)\n", version.Name)
|
||||
sb.WriteString(version.AISummary)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if version.Name != "Unreleased" && version.AISummary != "" {
|
||||
fmt.Fprintf(os.Stderr, "✅ %s already summarized (skipping)\n", version.Name)
|
||||
sb.WriteString(version.AISummary)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "🤖 AI summarizing %s...", version.Name)
|
||||
|
||||
aiSummary, err := SummarizeVersionContent(rawContent)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, " Failed: %v\n", err)
|
||||
sb.WriteString((rawContent))
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
if checkForAIError(aiSummary) {
|
||||
fmt.Fprintf(os.Stderr, " AI error detected, using raw content instead\n")
|
||||
sb.WriteString(rawContent)
|
||||
fmt.Fprintf(os.Stderr, "Raw Content was: (%d bytes) %s \n", len(rawContent), rawContent)
|
||||
fmt.Fprintf(os.Stderr, "AI Summary was: (%d bytes) %s\n", len(aiSummary), aiSummary)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, " Done!\n")
|
||||
aiSummary = strings.TrimSpace(aiSummary)
|
||||
|
||||
// Cache the AI summary and content hash
|
||||
version.AISummary = aiSummary
|
||||
if g.cache != nil {
|
||||
if err := g.cache.UpdateVersionAISummary(version.Name, aiSummary); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to cache AI summary: %v\n", err)
|
||||
}
|
||||
// Cache content hash for "Unreleased" to detect changes
|
||||
if version.Name == "Unreleased" {
|
||||
if err := g.cache.SetUnreleasedContentHash(hashContent(rawContent)); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Warning: Failed to cache content hash: %v\n", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sb.WriteString(aiSummary)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
|
||||
sb.WriteString(rawContent)
|
||||
return fixMarkdown(sb.String())
|
||||
}
|
||||
|
||||
func checkForAIError(summary string) bool {
|
||||
// Check for common AI error patterns
|
||||
errorPatterns := []string{
|
||||
"I don't see any", "please provide",
|
||||
"content you've provided appears to be incomplete",
|
||||
}
|
||||
|
||||
for _, pattern := range errorPatterns {
|
||||
if strings.Contains(summary, pattern) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// formatVersionHeader formats just the version header (## ...)
|
||||
func (g *Generator) formatVersionHeader(version *git.Version) string {
|
||||
if version.Name == "Unreleased" {
|
||||
return "## Unreleased\n\n"
|
||||
}
|
||||
return fmt.Sprintf("\n## %s (%s)\n\n", version.Name, version.Date.Format("2006-01-02"))
|
||||
}
|
||||
|
||||
// generateRawVersionContent generates the raw content (PRs + commits) for a version
|
||||
func (g *Generator) generateRawVersionContent(version *git.Version) string {
|
||||
var sb strings.Builder
|
||||
|
||||
// Build a set of commit SHAs that are part of fetched PRs
|
||||
prCommitSHAs := make(map[string]bool)
|
||||
for _, prNum := range version.PRNumbers {
|
||||
if pr, exists := g.prs[prNum]; exists {
|
||||
for _, prCommit := range pr.Commits {
|
||||
prCommitSHAs[prCommit.SHA] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
prCommits := make(map[int][]*git.Commit)
|
||||
directCommits := []*git.Commit{}
|
||||
|
||||
for _, commit := range version.Commits {
|
||||
// Skip version bump commits from output
|
||||
if commit.IsVersion {
|
||||
continue
|
||||
}
|
||||
|
||||
// If this commit is part of a fetched PR, don't include it in direct commits
|
||||
if prCommitSHAs[commit.SHA] {
|
||||
continue
|
||||
}
|
||||
|
||||
if commit.PRNumber > 0 {
|
||||
prCommits[commit.PRNumber] = append(prCommits[commit.PRNumber], commit)
|
||||
} else {
|
||||
directCommits = append(directCommits, commit)
|
||||
}
|
||||
}
|
||||
|
||||
// There are occasionally no PRs or direct commits other than version bumps, so we handle that gracefully
|
||||
if len(prCommits) == 0 && len(directCommits) == 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
prependNewline := ""
|
||||
for _, prNum := range version.PRNumbers {
|
||||
if pr, exists := g.prs[prNum]; exists {
|
||||
sb.WriteString(prependNewline)
|
||||
sb.WriteString(g.formatPR(pr))
|
||||
prependNewline = "\n"
|
||||
}
|
||||
}
|
||||
|
||||
if len(directCommits) > 0 {
|
||||
// Sort direct commits by date (newest first) for consistent ordering
|
||||
sort.Slice(directCommits, func(i, j int) bool {
|
||||
return directCommits[i].Date.After(directCommits[j].Date)
|
||||
})
|
||||
|
||||
sb.WriteString(prependNewline + "### Direct commits\n\n")
|
||||
for _, commit := range directCommits {
|
||||
message := g.formatCommitMessage(strings.TrimSpace(commit.Message))
|
||||
if message != "" && !g.isDuplicateMessage(message, directCommits) {
|
||||
sb.WriteString(fmt.Sprintf("- %s\n", message))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fixMarkdown(
|
||||
strings.ReplaceAll(sb.String(), "\n-\n", "\n"), // Remove empty list items
|
||||
)
|
||||
}
|
||||
|
||||
func fixMarkdown(content string) string {
|
||||
|
||||
// Fix MD032/blank-around-lists: Lists should be surrounded by blank lines
|
||||
lines := strings.Split(content, "\n")
|
||||
inList := false
|
||||
preListNewline := false
|
||||
for i := range lines {
|
||||
line := strings.TrimSpace(lines[i])
|
||||
if strings.HasPrefix(line, "- ") || strings.HasPrefix(line, "* ") {
|
||||
if !inList {
|
||||
inList = true
|
||||
// Ensure there's a blank line before the list starts
|
||||
if !preListNewline && i > 0 && lines[i-1] != "" {
|
||||
line = "\n" + line
|
||||
preListNewline = true
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if inList {
|
||||
inList = false
|
||||
preListNewline = false
|
||||
}
|
||||
}
|
||||
lines[i] = strings.TrimRight(line, " \t")
|
||||
}
|
||||
|
||||
fixedContent := strings.TrimSpace(strings.Join(lines, "\n"))
|
||||
|
||||
return fixedContent + "\n"
|
||||
}
|
||||
|
||||
func (g *Generator) formatPR(pr *github.PR) string {
|
||||
var sb strings.Builder
|
||||
|
||||
pr.Title = strings.TrimRight(strings.TrimSpace(pr.Title), ".")
|
||||
|
||||
// Add type indicator for non-users
|
||||
authorName := pr.Author
|
||||
switch pr.AuthorType {
|
||||
case "bot":
|
||||
authorName += "[bot]"
|
||||
case "organization":
|
||||
authorName += "[org]"
|
||||
}
|
||||
|
||||
sb.WriteString(fmt.Sprintf("### PR [#%d](%s) by [%s](%s): %s\n\n",
|
||||
pr.Number, pr.URL, authorName, pr.AuthorURL, strings.TrimSpace(pr.Title)))
|
||||
|
||||
changes := g.extractChanges(pr)
|
||||
for _, change := range changes {
|
||||
if change != "" {
|
||||
sb.WriteString(fmt.Sprintf("- %s\n", change))
|
||||
}
|
||||
}
|
||||
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (g *Generator) extractChanges(pr *github.PR) []string {
|
||||
var changes []string
|
||||
seen := make(map[string]bool)
|
||||
|
||||
for _, commit := range pr.Commits {
|
||||
message := g.formatCommitMessage(commit.Message)
|
||||
if message != "" && !seen[message] {
|
||||
seen[message] = true
|
||||
changes = append(changes, message)
|
||||
}
|
||||
}
|
||||
|
||||
if len(changes) == 0 && pr.Body != "" {
|
||||
lines := strings.Split(pr.Body, "\n")
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if strings.HasPrefix(line, "- ") || strings.HasPrefix(line, "* ") {
|
||||
change := strings.TrimPrefix(strings.TrimPrefix(line, "- "), "* ")
|
||||
if change != "" {
|
||||
changes = append(changes, change)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return changes
|
||||
}
|
||||
|
||||
func normalizeLineEndings(content string) string {
|
||||
return strings.ReplaceAll(content, "\r\n", "\n")
|
||||
}
|
||||
|
||||
func (g *Generator) formatCommitMessage(message string) string {
|
||||
prefixes := []string{"fix:", "feat:", "docs:", "style:", "refactor:",
|
||||
"test:", "chore:", "perf:", "ci:", "build:", "revert:", "# docs:"}
|
||||
strings_to_remove := []string{
|
||||
"### CHANGES\n", "## CHANGES\n", "# CHANGES\n",
|
||||
"...\n", "---\n", "## Changes\n", "## Change",
|
||||
"Update version to v..1 and commit\n",
|
||||
"# What this Pull Request (PR) does\n",
|
||||
"# Conflicts:",
|
||||
}
|
||||
|
||||
message = normalizeLineEndings(message)
|
||||
// No hard tabs
|
||||
message = strings.ReplaceAll(message, "\t", " ")
|
||||
|
||||
for _, prefix := range prefixes {
|
||||
if strings.HasPrefix(strings.ToLower(message), prefix) {
|
||||
message = strings.TrimSpace(message[len(prefix):])
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if len(message) > 0 {
|
||||
message = strings.ToUpper(message[:1]) + message[1:]
|
||||
}
|
||||
|
||||
for _, str := range strings_to_remove {
|
||||
if strings.Contains(message, str) {
|
||||
message = strings.ReplaceAll(message, str, "")
|
||||
}
|
||||
}
|
||||
|
||||
message = fixFormatting(message)
|
||||
|
||||
return message
|
||||
}
|
||||
|
||||
func fixFormatting(message string) string {
|
||||
// Turn "*"" lists into "-" lists"
|
||||
message = strings.ReplaceAll(message, "* ", "- ")
|
||||
// Remove extra spaces around dashes
|
||||
message = strings.ReplaceAll(message, "- ", "- ")
|
||||
message = strings.ReplaceAll(message, "- ", "- ")
|
||||
// turn bare URL into <URL>
|
||||
if strings.Contains(message, "http://") || strings.Contains(message, "https://") {
|
||||
// Use regex to wrap bare URLs with angle brackets
|
||||
urlRegex := regexp.MustCompile(`\b(https?://[^\s<>]+)`)
|
||||
message = urlRegex.ReplaceAllString(message, "<$1>")
|
||||
}
|
||||
|
||||
// Replace "## LINKS\n" with "- "
|
||||
message = strings.ReplaceAll(message, "## LINKS\n", "- ")
|
||||
// Dependabot messages: "- [Commits]" should become "\n- [Commits]"
|
||||
message = strings.TrimSpace(message)
|
||||
// Turn multiple newlines into a single newline
|
||||
message = strings.TrimSpace(strings.ReplaceAll(message, "\n\n", "\n"))
|
||||
// Fix inline trailing spaces
|
||||
message = strings.ReplaceAll(message, " \n", "\n")
|
||||
// Fix weird indent before list,
|
||||
message = strings.ReplaceAll(message, "\n - ", "\n- ")
|
||||
|
||||
// blanks-around-lists MD032 fix
|
||||
// Use regex to ensure blank line before list items that don't already have one
|
||||
listRegex := regexp.MustCompile(`(?m)([^\n-].*[^:\n])\n([-*] .*)`)
|
||||
message = listRegex.ReplaceAllString(message, "$1\n\n$2")
|
||||
|
||||
// Change random first-level "#" to 4th level "####"
|
||||
// This is a hack to fix spurious first-level headings that are not actual headings
|
||||
// but rather just comments or notes in the commit message.
|
||||
message = strings.ReplaceAll(message, "# ", "\n#### ")
|
||||
message = strings.ReplaceAll(message, "\n\n\n", "\n\n")
|
||||
|
||||
// Wrap any non-wrapped Emails with angle brackets
|
||||
emailRegex := regexp.MustCompile(`([a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,})`)
|
||||
message = emailRegex.ReplaceAllString(message, "<$1>")
|
||||
|
||||
// Wrap any non-wrapped URLs with angle brackets
|
||||
urlRegex := regexp.MustCompile(`(https?://[^\s<]+)`)
|
||||
message = urlRegex.ReplaceAllString(message, "<$1>")
|
||||
|
||||
message = strings.ReplaceAll(message, "<<", "<")
|
||||
message = strings.ReplaceAll(message, ">>", ">")
|
||||
|
||||
// Fix some spurious Issue/PR links at the beginning of a commit message line
|
||||
prOrIssueLinkRegex := regexp.MustCompile("\n" + `(#\d+)`)
|
||||
message = prOrIssueLinkRegex.ReplaceAllString(message, " $1")
|
||||
|
||||
// Remove leading/trailing whitespace
|
||||
message = strings.TrimSpace(message)
|
||||
return message
|
||||
}
|
||||
|
||||
func (g *Generator) isDuplicateMessage(message string, commits []*git.Commit) bool {
|
||||
if message == "." || strings.ToLower(message) == "fix" {
|
||||
count := 0
|
||||
for _, commit := range commits {
|
||||
formatted := g.formatCommitMessage(commit.Message)
|
||||
if formatted == message {
|
||||
count++
|
||||
if count > 1 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// hashContent generates a SHA256 hash of the content for change detection
|
||||
func hashContent(content string) string {
|
||||
hash := sha256.Sum256([]byte(content))
|
||||
return fmt.Sprintf("%x", hash)
|
||||
}
|
||||
58
cmd/generate_changelog/internal/changelog/summarize.go
Normal file
@@ -0,0 +1,58 @@
package changelog

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

const DefaultSummarizeModel = "claude-sonnet-4-20250514"
const MinContentLength = 256 // Minimum content length to consider for summarization

// getSummarizeModel returns the model to use for AI summarization
func getSummarizeModel() string {
	if model := os.Getenv("FABRIC_CHANGELOG_SUMMARIZE_MODEL"); model != "" {
		return model
	}
	return DefaultSummarizeModel
}

// SummarizeVersionContent takes raw version content and returns AI-enhanced summary
func SummarizeVersionContent(content string) (string, error) {
	if strings.TrimSpace(content) == "" {
		return "", fmt.Errorf("no content to summarize")
	}
	if len(content) < MinContentLength {
		// If content is too brief, return it as is
		return content, nil
	}

	model := getSummarizeModel()

	prompt := `Summarize the changes extracted from Git commit logs in a concise, professional way.
Pay particular attention to the following rules:
- Preserve the PR headers verbatim to your summary.
- I REPEAT: Do not change the PR headers in any way. They contain links to the PRs and Author Profiles.
- Use bullet points for lists and key changes (rendered using "-")
- Focus on the main changes and improvements.
- Avoid unnecessary details or preamble.
- Keep it under 800 characters.
- Be brief. List only the 5 most important changes along with the PR information which should be kept intact.
- If the content is too brief or you do not see any PR headers, return the content as is.`

	cmd := exec.Command("fabric", "-m", model, prompt)
	cmd.Stdin = strings.NewReader(content)

	output, err := cmd.Output()
	if err != nil {
		return "", fmt.Errorf("fabric command failed: %w", err)
	}

	summary := strings.TrimSpace(string(output))
	if summary == "" {
		return "", fmt.Errorf("fabric returned empty summary")
	}

	return summary, nil
}
15
cmd/generate_changelog/internal/config/config.go
Normal file
@@ -0,0 +1,15 @@
package config

type Config struct {
	RepoPath        string
	OutputFile      string
	Limit           int
	Version         string
	SaveData        bool
	CacheFile       string
	NoCache         bool
	RebuildCache    bool
	GitHubToken     string
	ForcePRSync     bool
	EnableAISummary bool
}
26 cmd/generate_changelog/internal/git/types.go Normal file
@@ -0,0 +1,26 @@
package git

import (
    "time"
)

type Commit struct {
    SHA       string
    Message   string
    Author    string
    Email     string
    Date      time.Time
    IsMerge   bool
    PRNumber  int
    IsVersion bool
    Version   string
}

type Version struct {
    Name      string
    Date      time.Time
    CommitSHA string
    Commits   []*Commit
    PRNumbers []int
    AISummary string
}
295 cmd/generate_changelog/internal/git/walker.go Normal file
@@ -0,0 +1,295 @@
package git

import (
    "fmt"
    "regexp"
    "strconv"
    "strings"

    "github.com/go-git/go-git/v5"
    "github.com/go-git/go-git/v5/plumbing"
    "github.com/go-git/go-git/v5/plumbing/object"
)

var (
    versionPattern = regexp.MustCompile(`Update version to (v\d+\.\d+\.\d+)`)
    prPattern      = regexp.MustCompile(`Merge pull request #(\d+)`)
)

type Walker struct {
    repo *git.Repository
}

func NewWalker(repoPath string) (*Walker, error) {
    repo, err := git.PlainOpen(repoPath)
    if err != nil {
        return nil, fmt.Errorf("failed to open repository: %w", err)
    }

    return &Walker{repo: repo}, nil
}

// GetLatestTag returns the name of the most recent tag by committer date
func (w *Walker) GetLatestTag() (string, error) {
    tagRefs, err := w.repo.Tags()
    if err != nil {
        return "", err
    }

    var latestTagCommit *object.Commit
    var latestTagName string

    err = tagRefs.ForEach(func(tagRef *plumbing.Reference) error {
        revision := plumbing.Revision(tagRef.Name().String())
        tagCommitHash, err := w.repo.ResolveRevision(revision)
        if err != nil {
            return err
        }

        commit, err := w.repo.CommitObject(*tagCommitHash)
        if err != nil {
            return err
        }

        if latestTagCommit == nil {
            latestTagCommit = commit
            latestTagName = tagRef.Name().Short() // Get short name like "v1.4.245"
        }

        if commit.Committer.When.After(latestTagCommit.Committer.When) {
            latestTagCommit = commit
            latestTagName = tagRef.Name().Short()
        }

        return nil
    })
    if err != nil {
        return "", err
    }

    return latestTagName, nil
}

// WalkCommitsSinceTag walks commits from the specified tag to HEAD and returns only "Unreleased" version
func (w *Walker) WalkCommitsSinceTag(tagName string) (*Version, error) {
    // Get the tag reference
    tagRef, err := w.repo.Tag(tagName)
    if err != nil {
        return nil, fmt.Errorf("failed to find tag %s: %w", tagName, err)
    }

    // Get the commit that the tag points to
    tagCommit, err := w.repo.CommitObject(tagRef.Hash())
    if err != nil {
        return nil, fmt.Errorf("failed to get tag commit: %w", err)
    }

    // Get HEAD
    headRef, err := w.repo.Head()
    if err != nil {
        return nil, fmt.Errorf("failed to get HEAD: %w", err)
    }

    // Walk from HEAD back to the tag commit (exclusive)
    commitIter, err := w.repo.Log(&git.LogOptions{
        From:  headRef.Hash(),
        Order: git.LogOrderCommitterTime,
    })
    if err != nil {
        return nil, fmt.Errorf("failed to get commit log: %w", err)
    }

    version := &Version{
        Name:    "Unreleased",
        Commits: []*Commit{},
    }

    prNumbers := []int{}

    err = commitIter.ForEach(func(c *object.Commit) error {
        // Stop when we reach the tag commit (don't include it)
        if c.Hash == tagCommit.Hash {
            return fmt.Errorf("reached tag commit") // Use error to break out of iteration
        }

        commit := &Commit{
            SHA:     c.Hash.String(),
            Message: strings.TrimSpace(c.Message),
            Date:    c.Committer.When,
        }

        // Check for version patterns
        if versionMatch := versionPattern.FindStringSubmatch(commit.Message); versionMatch != nil {
            commit.IsVersion = true
        }

        // Check for PR merge patterns
        if prMatch := prPattern.FindStringSubmatch(commit.Message); prMatch != nil {
            if prNumber, err := strconv.Atoi(prMatch[1]); err == nil {
                commit.PRNumber = prNumber
                prNumbers = append(prNumbers, prNumber)
            }
        }

        version.Commits = append(version.Commits, commit)
        return nil
    })

    // Ignore the "reached tag commit" error - it's expected
    if err != nil && !strings.Contains(err.Error(), "reached tag commit") {
        return nil, fmt.Errorf("failed to walk commits: %w", err)
    }

    // Remove duplicates from prNumbers and set them
    prNumbersMap := make(map[int]bool)
    for _, prNum := range prNumbers {
        prNumbersMap[prNum] = true
    }

    version.PRNumbers = make([]int, 0, len(prNumbersMap))
    for prNum := range prNumbersMap {
        version.PRNumbers = append(version.PRNumbers, prNum)
    }

    return version, nil
}

func (w *Walker) WalkHistory() (map[string]*Version, error) {
    ref, err := w.repo.Head()
    if err != nil {
        return nil, fmt.Errorf("failed to get HEAD: %w", err)
    }

    commitIter, err := w.repo.Log(&git.LogOptions{
        From:  ref.Hash(),
        Order: git.LogOrderCommitterTime,
    })
    if err != nil {
        return nil, fmt.Errorf("failed to get commit log: %w", err)
    }

    versions := make(map[string]*Version)
    currentVersion := "Unreleased"
    versions[currentVersion] = &Version{
        Name:    currentVersion,
        Commits: []*Commit{},
    }

    prNumbers := make(map[string][]int)

    err = commitIter.ForEach(func(c *object.Commit) error {
        // c.Message = Summarize(c.Message)
        commit := &Commit{
            SHA:     c.Hash.String(),
            Message: strings.TrimSpace(c.Message),
            Author:  c.Author.Name,
            Email:   c.Author.Email,
            Date:    c.Author.When,
            IsMerge: len(c.ParentHashes) > 1,
        }

        if matches := versionPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
            commit.IsVersion = true
            commit.Version = matches[1]
            currentVersion = commit.Version

            if _, exists := versions[currentVersion]; !exists {
                versions[currentVersion] = &Version{
                    Name:      currentVersion,
                    Date:      commit.Date,
                    CommitSHA: commit.SHA,
                    Commits:   []*Commit{},
                }
            }
            return nil
        }

        if matches := prPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
            prNumber := 0
            fmt.Sscanf(matches[1], "%d", &prNumber)
            commit.PRNumber = prNumber

            prNumbers[currentVersion] = append(prNumbers[currentVersion], prNumber)
        }

        versions[currentVersion].Commits = append(versions[currentVersion].Commits, commit)

        return nil
    })

    if err != nil {
        return nil, fmt.Errorf("failed to walk commits: %w", err)
    }

    for version, prs := range prNumbers {
        versions[version].PRNumbers = dedupInts(prs)
    }

    return versions, nil
}

func (w *Walker) GetRepoInfo() (owner string, name string, err error) {
    remotes, err := w.repo.Remotes()
    if err != nil {
        return "", "", fmt.Errorf("failed to get remotes: %w", err)
    }

    // First try upstream (preferred for forks)
    for _, remote := range remotes {
        if remote.Config().Name == "upstream" {
            urls := remote.Config().URLs
            if len(urls) > 0 {
                owner, name = parseGitHubURL(urls[0])
                if owner != "" && name != "" {
                    return owner, name, nil
                }
            }
        }
    }

    // Then try origin
    for _, remote := range remotes {
        if remote.Config().Name == "origin" {
            urls := remote.Config().URLs
            if len(urls) > 0 {
                owner, name = parseGitHubURL(urls[0])
                if owner != "" && name != "" {
                    return owner, name, nil
                }
            }
        }
    }

    return "danielmiessler", "fabric", nil
}

func parseGitHubURL(url string) (owner, repo string) {
    patterns := []string{
        `github\.com[:/]([^/]+)/([^/.]+)`,
        `github\.com[:/]([^/]+)/([^/]+)\.git$`,
    }

    for _, pattern := range patterns {
        re := regexp.MustCompile(pattern)
        matches := re.FindStringSubmatch(url)
        if len(matches) > 2 {
            return matches[1], matches[2]
        }
    }

    return "", ""
}

func dedupInts(ints []int) []int {
    seen := make(map[int]bool)
    result := []int{}

    for _, i := range ints {
        if !seen[i] {
            seen[i] = true
            result = append(result, i)
        }
    }

    return result
}
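A minimal sketch of how the walker is used end to end, assuming the caller lives under cmd/generate_changelog (the package is internal): open the repository, find the newest tag, then collect the unreleased commits above it.

package main

import (
    "fmt"
    "log"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/git"
)

func main() {
    w, err := git.NewWalker(".") // repository in the current directory
    if err != nil {
        log.Fatal(err)
    }

    tag, err := w.GetLatestTag() // e.g. "v1.4.245"
    if err != nil {
        log.Fatal(err)
    }

    unreleased, err := w.WalkCommitsSinceTag(tag) // everything between the tag and HEAD
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("%s: %d commits, %d PRs since %s\n",
        unreleased.Name, len(unreleased.Commits), len(unreleased.PRNumbers), tag)
}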
354 cmd/generate_changelog/internal/github/client.go Normal file
@@ -0,0 +1,354 @@
package github

import (
    "context"
    "fmt"
    "net/http"
    "os"
    "strings"
    "sync"
    "time"

    "github.com/google/go-github/v66/github"
    "github.com/hasura/go-graphql-client"
    "golang.org/x/oauth2"
)

type Client struct {
    client        *github.Client
    graphqlClient *graphql.Client
    owner         string
    repo          string
    token         string
}

func NewClient(token, owner, repo string) *Client {
    var githubClient *github.Client
    var httpClient *http.Client
    var gqlClient *graphql.Client

    if token != "" {
        ts := oauth2.StaticTokenSource(
            &oauth2.Token{AccessToken: token},
        )
        httpClient = oauth2.NewClient(context.Background(), ts)
        githubClient = github.NewClient(httpClient)
        gqlClient = graphql.NewClient("https://api.github.com/graphql", httpClient)
    } else {
        httpClient = http.DefaultClient
        githubClient = github.NewClient(nil)
        gqlClient = graphql.NewClient("https://api.github.com/graphql", httpClient)
    }

    return &Client{
        client:        githubClient,
        graphqlClient: gqlClient,
        owner:         owner,
        repo:          repo,
        token:         token,
    }
}

func (c *Client) FetchPRs(prNumbers []int) ([]*PR, error) {
    if len(prNumbers) == 0 {
        return []*PR{}, nil
    }

    ctx := context.Background()
    prs := make([]*PR, 0, len(prNumbers))
    prsChan := make(chan *PR, len(prNumbers))
    errChan := make(chan error, len(prNumbers))

    var wg sync.WaitGroup
    semaphore := make(chan struct{}, 10)

    for _, prNumber := range prNumbers {
        wg.Add(1)
        go func(num int) {
            defer wg.Done()

            semaphore <- struct{}{}
            defer func() { <-semaphore }()

            pr, err := c.fetchSinglePR(ctx, num)
            if err != nil {
                errChan <- fmt.Errorf("failed to fetch PR #%d: %w", num, err)
                return
            }
            prsChan <- pr
        }(prNumber)
    }

    go func() {
        wg.Wait()
        close(prsChan)
        close(errChan)
    }()

    var errors []error
    for pr := range prsChan {
        prs = append(prs, pr)
    }
    for err := range errChan {
        errors = append(errors, err)
    }

    if len(errors) > 0 {
        return prs, fmt.Errorf("some PRs failed to fetch: %v", errors)
    }

    return prs, nil
}

func (c *Client) fetchSinglePR(ctx context.Context, prNumber int) (*PR, error) {
    pr, _, err := c.client.PullRequests.Get(ctx, c.owner, c.repo, prNumber)
    if err != nil {
        return nil, err
    }

    commits, _, err := c.client.PullRequests.ListCommits(ctx, c.owner, c.repo, prNumber, nil)
    if err != nil {
        return nil, fmt.Errorf("failed to fetch commits: %w", err)
    }

    result := &PR{
        Number:  prNumber,
        Title:   getString(pr.Title),
        Body:    getString(pr.Body),
        URL:     getString(pr.HTMLURL),
        Commits: make([]PRCommit, 0, len(commits)),
    }

    if pr.MergedAt != nil {
        result.MergedAt = pr.MergedAt.Time
    }

    if pr.User != nil {
        result.Author = getString(pr.User.Login)
        result.AuthorURL = getString(pr.User.HTMLURL)
        userType := getString(pr.User.Type) // GitHub API returns "User", "Organization", or "Bot"

        // Convert GitHub API type to lowercase
        switch userType {
        case "User":
            result.AuthorType = "user"
        case "Organization":
            result.AuthorType = "organization"
        case "Bot":
            result.AuthorType = "bot"
        default:
            result.AuthorType = "user" // Default fallback
        }
    }

    if pr.MergeCommitSHA != nil {
        result.MergeCommit = *pr.MergeCommitSHA
    }

    for _, commit := range commits {
        if commit.Commit != nil {
            prCommit := PRCommit{
                SHA:     getString(commit.SHA),
                Message: strings.TrimSpace(getString(commit.Commit.Message)),
            }
            if commit.Commit.Author != nil {
                prCommit.Author = getString(commit.Commit.Author.Name)
            }
            result.Commits = append(result.Commits, prCommit)
        }
    }

    return result, nil
}

func getString(s *string) string {
    if s == nil {
        return ""
    }
    return *s
}

// FetchAllMergedPRs fetches all merged PRs using GitHub's search API
// This is much more efficient than fetching PRs individually
func (c *Client) FetchAllMergedPRs(since time.Time) ([]*PR, error) {
    ctx := context.Background()
    var allPRs []*PR

    // Build search query for merged PRs
    query := fmt.Sprintf("repo:%s/%s is:pr is:merged", c.owner, c.repo)
    if !since.IsZero() {
        query += fmt.Sprintf(" merged:>=%s", since.Format("2006-01-02"))
    }

    opts := &github.SearchOptions{
        Sort:  "created",
        Order: "desc",
        ListOptions: github.ListOptions{
            PerPage: 100, // Maximum allowed
        },
    }

    for {
        result, resp, err := c.client.Search.Issues(ctx, query, opts)
        if err != nil {
            return allPRs, fmt.Errorf("failed to search PRs: %w", err)
        }

        // Process PRs in parallel
        prsChan := make(chan *PR, len(result.Issues))
        errChan := make(chan error, len(result.Issues))
        var wg sync.WaitGroup
        semaphore := make(chan struct{}, 10) // Limit concurrent requests

        for _, issue := range result.Issues {
            if issue.PullRequestLinks == nil {
                continue // Not a PR
            }

            wg.Add(1)
            go func(prNumber int) {
                defer wg.Done()

                semaphore <- struct{}{}
                defer func() { <-semaphore }()

                pr, err := c.fetchSinglePR(ctx, prNumber)
                if err != nil {
                    errChan <- fmt.Errorf("failed to fetch PR #%d: %w", prNumber, err)
                    return
                }
                prsChan <- pr
            }(*issue.Number)
        }

        go func() {
            wg.Wait()
            close(prsChan)
            close(errChan)
        }()

        // Collect results
        for pr := range prsChan {
            allPRs = append(allPRs, pr)
        }

        // Check for errors
        for err := range errChan {
            // Log error but continue processing
            fmt.Fprintf(os.Stderr, "Warning: %v\n", err)
        }

        if resp.NextPage == 0 {
            break
        }
        opts.Page = resp.NextPage
    }

    return allPRs, nil
}

// FetchAllMergedPRsGraphQL fetches all merged PRs with their commits using GraphQL
// This is the ultimate optimization - gets everything in ~5-10 API calls
func (c *Client) FetchAllMergedPRsGraphQL(since time.Time) ([]*PR, error) {
    ctx := context.Background()
    var allPRs []*PR
    var after *string
    totalFetched := 0

    for {
        // Prepare variables
        variables := map[string]interface{}{
            "owner": graphql.String(c.owner),
            "repo":  graphql.String(c.repo),
            "after": (*graphql.String)(after),
        }

        // Execute GraphQL query
        var query PullRequestsQuery
        err := c.graphqlClient.Query(ctx, &query, variables)
        if err != nil {
            return allPRs, fmt.Errorf("GraphQL query failed: %w", err)
        }

        prs := query.Repository.PullRequests.Nodes
        fmt.Fprintf(os.Stderr, "Fetched %d PRs via GraphQL (page %d)\n", len(prs), (totalFetched/100)+1)

        // Convert GraphQL PRs to our PR struct
        for _, gqlPR := range prs {
            // If we have a since filter, stop when we reach older PRs
            if !since.IsZero() && gqlPR.MergedAt.Before(since) {
                fmt.Fprintf(os.Stderr, "Reached PRs older than %s, stopping\n", since.Format("2006-01-02"))
                return allPRs, nil
            }

            pr := &PR{
                Number:   gqlPR.Number,
                Title:    gqlPR.Title,
                Body:     gqlPR.Body,
                URL:      gqlPR.URL,
                MergedAt: gqlPR.MergedAt,
                Commits:  make([]PRCommit, 0, len(gqlPR.Commits.Nodes)),
            }

            // Handle author - check if it's nil first
            if gqlPR.Author != nil {
                pr.Author = gqlPR.Author.Login
                pr.AuthorURL = gqlPR.Author.URL

                switch gqlPR.Author.Typename {
                case "Bot":
                    pr.AuthorType = "bot"
                case "Organization":
                    pr.AuthorType = "organization"
                case "User":
                    pr.AuthorType = "user"
                default:
                    pr.AuthorType = "user" // fallback
                    if gqlPR.Author.Typename != "" {
                        fmt.Fprintf(os.Stderr, "PR #%d: Unknown author typename '%s'\n", gqlPR.Number, gqlPR.Author.Typename)
                    }
                }
            } else {
                // Author is nil - try to fetch from REST API as fallback
                fmt.Fprintf(os.Stderr, "PR #%d: Author is nil in GraphQL response, fetching from REST API\n", gqlPR.Number)

                // Fetch this specific PR from REST API
                restPR, err := c.fetchSinglePR(ctx, gqlPR.Number)
                if err == nil && restPR != nil && restPR.Author != "" {
                    pr.Author = restPR.Author
                    pr.AuthorURL = restPR.AuthorURL
                    pr.AuthorType = restPR.AuthorType
                } else {
                    // Fallback if REST API also fails
                    pr.Author = "[unknown]"
                    pr.AuthorURL = ""
                    pr.AuthorType = "user"
                }
            }

            // Convert commits
            for _, commitNode := range gqlPR.Commits.Nodes {
                commit := PRCommit{
                    SHA:     commitNode.Commit.OID,
                    Message: strings.TrimSpace(commitNode.Commit.Message),
                    Author:  commitNode.Commit.Author.Name,
                }
                pr.Commits = append(pr.Commits, commit)
            }

            allPRs = append(allPRs, pr)
        }

        totalFetched += len(prs)

        // Check if we need to fetch more pages
        if !query.Repository.PullRequests.PageInfo.HasNextPage {
            break
        }

        after = &query.Repository.PullRequests.PageInfo.EndCursor
    }

    fmt.Fprintf(os.Stderr, "Total PRs fetched via GraphQL: %d\n", len(allPRs))
    return allPRs, nil
}
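A minimal sketch of the GraphQL bulk fetch, again assuming a caller under cmd/generate_changelog since the package is internal. A GITHUB_TOKEN is effectively required on this path, because the unauthenticated client cannot query GitHub's GraphQL endpoint; the one-month cutoff is illustrative.

package main

import (
    "fmt"
    "log"
    "os"
    "time"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/github"
)

func main() {
    client := github.NewClient(os.Getenv("GITHUB_TOKEN"), "danielmiessler", "fabric")

    since := time.Now().AddDate(0, -1, 0) // PRs merged within roughly the last month
    prs, err := client.FetchAllMergedPRsGraphQL(since)
    if err != nil {
        log.Fatal(err)
    }
    for _, pr := range prs {
        fmt.Printf("#%d %s (%s)\n", pr.Number, pr.Title, pr.Author)
    }
}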
57 cmd/generate_changelog/internal/github/types.go Normal file
@@ -0,0 +1,57 @@
package github

import "time"

type PR struct {
    Number      int
    Title       string
    Body        string
    Author      string
    AuthorURL   string
    AuthorType  string // "user", "organization", or "bot"
    URL         string
    MergedAt    time.Time
    Commits     []PRCommit
    MergeCommit string
}

type PRCommit struct {
    SHA     string
    Message string
    Author  string
}

// GraphQL query structures for hasura client
type PullRequestsQuery struct {
    Repository struct {
        PullRequests struct {
            PageInfo struct {
                HasNextPage bool
                EndCursor   string
            }
            Nodes []struct {
                Number   int
                Title    string
                Body     string
                URL      string
                MergedAt time.Time
                Author   *struct {
                    Typename string `graphql:"__typename"`
                    Login    string `graphql:"login"`
                    URL      string `graphql:"url"`
                }
                Commits struct {
                    Nodes []struct {
                        Commit struct {
                            OID     string `graphql:"oid"`
                            Message string
                            Author  struct {
                                Name string
                            }
                        }
                    }
                } `graphql:"commits(first: 250)"`
            }
        } `graphql:"pullRequests(first: 100, after: $after, states: MERGED, orderBy: {field: UPDATED_AT, direction: DESC})"`
    } `graphql:"repository(owner: $owner, name: $repo)"`
}
84 cmd/generate_changelog/main.go Normal file
@@ -0,0 +1,84 @@
package main

import (
    "fmt"
    "os"
    "path/filepath"

    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/changelog"
    "github.com/danielmiessler/fabric/cmd/generate_changelog/internal/config"
    "github.com/joho/godotenv"
    "github.com/spf13/cobra"
)

var (
    cfg = &config.Config{}
)

var rootCmd = &cobra.Command{
    Use:   "generate_changelog",
    Short: "Generate changelog from git history and GitHub PRs",
    Long: `A high-performance changelog generator that walks git history,
collects version information and pull requests, and generates a
comprehensive changelog in markdown format.`,
    RunE: run,
}

func init() {
    rootCmd.Flags().StringVarP(&cfg.RepoPath, "repo", "r", ".", "Repository path")
    rootCmd.Flags().StringVarP(&cfg.OutputFile, "output", "o", "", "Output file (default: stdout)")
    rootCmd.Flags().IntVarP(&cfg.Limit, "limit", "l", 0, "Limit number of versions (0 = all)")
    rootCmd.Flags().StringVarP(&cfg.Version, "version", "v", "", "Generate changelog for specific version")
    rootCmd.Flags().BoolVar(&cfg.SaveData, "save-data", false, "Save version data to JSON for debugging")
    rootCmd.Flags().StringVar(&cfg.CacheFile, "cache", "./cmd/generate_changelog/changelog.db", "Cache database file")
    rootCmd.Flags().BoolVar(&cfg.NoCache, "no-cache", false, "Disable cache usage")
    rootCmd.Flags().BoolVar(&cfg.RebuildCache, "rebuild-cache", false, "Rebuild cache from scratch")
    rootCmd.Flags().StringVar(&cfg.GitHubToken, "token", "", "GitHub API token (or set GITHUB_TOKEN env var)")
    rootCmd.Flags().BoolVar(&cfg.ForcePRSync, "force-pr-sync", false, "Force a full PR sync from GitHub (ignores cache age)")
    rootCmd.Flags().BoolVar(&cfg.EnableAISummary, "ai-summarize", false, "Generate AI-enhanced summaries using Fabric")
}

func run(cmd *cobra.Command, args []string) error {
    if cfg.GitHubToken == "" {
        cfg.GitHubToken = os.Getenv("GITHUB_TOKEN")
    }

    generator, err := changelog.New(cfg)
    if err != nil {
        return fmt.Errorf("failed to create changelog generator: %w", err)
    }

    output, err := generator.Generate()
    if err != nil {
        return fmt.Errorf("failed to generate changelog: %w", err)
    }

    if cfg.OutputFile != "" {
        if err := os.WriteFile(cfg.OutputFile, []byte(output), 0644); err != nil {
            return fmt.Errorf("failed to write output file: %w", err)
        }
        fmt.Printf("Changelog written to %s\n", cfg.OutputFile)
    } else {
        fmt.Print(output)
    }

    return nil
}

func main() {
    // Load .env file from the same directory as the binary
    if exePath, err := os.Executable(); err == nil {
        envPath := filepath.Join(filepath.Dir(exePath), ".env")
        if _, err := os.Stat(envPath); err == nil {
            // .env file exists, load it
            if err := godotenv.Load(envPath); err != nil {
                fmt.Fprintf(os.Stderr, "Warning: Failed to load .env file: %v\n", err)
            }
        }
    }

    if err := rootCmd.Execute(); err != nil {
        fmt.Fprintf(os.Stderr, "Error: %v\n", err)
        os.Exit(1)
    }
}
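Typical invocations follow directly from the flags defined above (output file names here are illustrative): from the repository root, running generate_changelog --limit 10 -o CHANGELOG.md writes the ten most recent versions to a file instead of stdout, and adding --ai-summarize routes each version's content through the Fabric summarizer. A GitHub token is taken from --token or the GITHUB_TOKEN environment variable, and a .env file placed next to the built binary is loaded automatically at startup.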
17 go.mod
@@ -16,17 +16,21 @@ require (
	github.com/go-git/go-git/v5 v5.16.2
	github.com/go-shiori/go-readability v0.0.0-20250217085726-9f5bf5ca7612
	github.com/google/generative-ai-go v0.20.1
	github.com/google/go-github/v66 v66.0.0
	github.com/hasura/go-graphql-client v0.14.4
	github.com/jessevdk/go-flags v1.6.1
	github.com/joho/godotenv v1.5.1
	github.com/mattn/go-sqlite3 v1.14.28
	github.com/ollama/ollama v0.9.0
	github.com/openai/openai-go v1.8.2
	github.com/otiai10/copy v1.14.1
	github.com/pkg/errors v0.9.1
	github.com/samber/lo v1.50.0
	github.com/sgaunet/perplexity-go/v2 v2.8.0
	github.com/spf13/cobra v1.9.1
	github.com/stretchr/testify v1.10.0
	golang.org/x/oauth2 v0.30.0
	golang.org/x/text v0.26.0
	golang.org/x/text v0.27.0
	google.golang.org/api v0.236.0
	gopkg.in/yaml.v3 v3.0.1
)
@@ -59,6 +63,7 @@ require (
	github.com/bytedance/sonic/loader v0.2.4 // indirect
	github.com/cloudflare/circl v1.6.1 // indirect
	github.com/cloudwego/base64x v0.1.5 // indirect
	github.com/coder/websocket v1.8.13 // indirect
	github.com/cyphar/filepath-securejoin v0.4.1 // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/emirpasic/gods v1.18.1 // indirect
@@ -75,10 +80,12 @@ require (
	github.com/goccy/go-json v0.10.5 // indirect
	github.com/gogs/chardet v0.0.0-20211120154057-b7413eaefb8f // indirect
	github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
	github.com/google/go-querystring v1.1.0 // indirect
	github.com/google/s2a-go v0.1.9 // indirect
	github.com/google/uuid v1.6.0 // indirect
	github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
	github.com/googleapis/gax-go/v2 v2.14.2 // indirect
	github.com/inconshreveable/mousetrap v1.1.0 // indirect
	github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
	github.com/json-iterator/go v1.1.12 // indirect
	github.com/kevinburke/ssh_config v1.2.0 // indirect
@@ -89,10 +96,11 @@ require (
	github.com/modern-go/reflect2 v1.0.2 // indirect
	github.com/otiai10/mint v1.6.3 // indirect
	github.com/pelletier/go-toml/v2 v2.2.4 // indirect
	github.com/pjbgf/sha1cd v0.3.2 // indirect
	github.com/pjbgf/sha1cd v0.4.0 // indirect
	github.com/pmezard/go-difflib v1.0.0 // indirect
	github.com/sergi/go-diff v1.4.0 // indirect
	github.com/skeema/knownhosts v1.3.1 // indirect
	github.com/spf13/pflag v1.0.6 // indirect
	github.com/tidwall/gjson v1.18.0 // indirect
	github.com/tidwall/match v1.1.1 // indirect
	github.com/tidwall/pretty v1.2.1 // indirect
@@ -108,9 +116,10 @@ require (
	go.opentelemetry.io/otel/trace v1.36.0 // indirect
	golang.org/x/arch v0.18.0 // indirect
	golang.org/x/crypto v0.39.0 // indirect
	golang.org/x/exp v0.0.0-20250531010427-b6e5de432a8b // indirect
	golang.org/x/net v0.41.0 // indirect
	golang.org/x/sync v0.15.0 // indirect
	golang.org/x/sys v0.33.0 // indirect
	golang.org/x/sync v0.16.0 // indirect
	golang.org/x/sys v0.34.0 // indirect
	golang.org/x/time v0.12.0 // indirect
	google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
	google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
40
go.sum
40
go.sum
@@ -71,6 +71,9 @@ github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZ
|
||||
github.com/cloudwego/base64x v0.1.5 h1:XPciSp1xaq2VCSt6lF0phncD4koWyULpl5bUxbfCyP4=
|
||||
github.com/cloudwego/base64x v0.1.5/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
|
||||
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
|
||||
github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE=
|
||||
github.com/coder/websocket v1.8.13/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/cyphar/filepath-securejoin v0.4.1 h1:JyxxyPEaktOD+GAnqIqTf9A8tHyAG22rowi7HkoSU1s=
|
||||
github.com/cyphar/filepath-securejoin v0.4.1/go.mod h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@@ -125,9 +128,14 @@ github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek
|
||||
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
|
||||
github.com/google/generative-ai-go v0.20.1 h1:6dEIujpgN2V0PgLhr6c/M1ynRdc7ARtiIDPFzj45uNQ=
|
||||
github.com/google/generative-ai-go v0.20.1/go.mod h1:TjOnZJmZKzarWbjUJgy+r3Ee7HGBRVLhOIgupnwR4Bg=
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/go-github/v66 v66.0.0 h1:ADJsaXj9UotwdgK8/iFZtv7MLc8E8WBl62WLd/D/9+M=
|
||||
github.com/google/go-github/v66 v66.0.0/go.mod h1:+4SO9Zkuyf8ytMj0csN1NR/5OTR+MfqPp8P8dVlcvY4=
|
||||
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
|
||||
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/s2a-go v0.1.9 h1:LGD7gtMgezd8a/Xak7mEWL0PjoTQFvpRudN895yqKW0=
|
||||
github.com/google/s2a-go v0.1.9/go.mod h1:YA0Ei2ZQL3acow2O62kdp9UlnvMmU7kA6Eutn0dXayM=
|
||||
@@ -137,6 +145,10 @@ github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA=
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0=
|
||||
github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w=
|
||||
github.com/hasura/go-graphql-client v0.14.4 h1:bYU7/+V50T2YBGdNQXt6l4f2cMZPECPUd8cyCR+ixtw=
|
||||
github.com/hasura/go-graphql-client v0.14.4/go.mod h1:jfSZtBER3or+88Q9vFhWHiFMPppfYILRyl+0zsgPIIw=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
|
||||
github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4=
|
||||
@@ -163,6 +175,8 @@ github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjS
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.10/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
|
||||
github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEum7A=
|
||||
github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
@@ -180,8 +194,8 @@ github.com/otiai10/mint v1.6.3 h1:87qsV/aw1F5as1eH1zS/yqHY85ANKVMgkDrf9rcxbQs=
|
||||
github.com/otiai10/mint v1.6.3/go.mod h1:MJm72SBthJjz8qhefc4z1PYEieWmy8Bku7CjcAqyUSM=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4=
|
||||
github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
|
||||
github.com/pjbgf/sha1cd v0.4.0 h1:NXzbL1RvjTUi6kgYZCX3fPwwl27Q1LJndxtUDVfJGRY=
|
||||
github.com/pjbgf/sha1cd v0.4.0/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
@@ -189,6 +203,7 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN
|
||||
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/samber/lo v1.50.0 h1:XrG0xOeHs+4FQ8gJR97zDz5uOFMW7OwFWiFVzqopKgY=
|
||||
github.com/samber/lo v1.50.0/go.mod h1:RjZyNk6WSnUFRKK6EyOhsRJMqft3G+pg7dCWHQCWvsc=
|
||||
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
|
||||
@@ -199,6 +214,10 @@ github.com/sgaunet/perplexity-go/v2 v2.8.0/go.mod h1:MSks4RNuivCi0GqJyylhFdgSJFV
|
||||
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
|
||||
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
|
||||
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
|
||||
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
|
||||
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
|
||||
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
@@ -255,8 +274,8 @@ golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM=
|
||||
golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U=
|
||||
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa h1:t2QcU6V556bFjYgu4L6C+6VrCPyJZ+eyRsABUPs1mz4=
|
||||
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
|
||||
golang.org/x/exp v0.0.0-20250531010427-b6e5de432a8b h1:QoALfVG9rhQ/M7vYDScfPdWjGL9dlsVVM5VGh7aKoAA=
|
||||
golang.org/x/exp v0.0.0-20250531010427-b6e5de432a8b/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
@@ -283,8 +302,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
|
||||
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -301,8 +320,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
|
||||
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
@@ -324,8 +343,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
|
||||
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
|
||||
golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4=
|
||||
golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU=
|
||||
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
|
||||
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
@@ -335,6 +354,7 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/api v0.236.0 h1:CAiEiDVtO4D/Qja2IA9VzlFrgPnK3XVMmRoJZlSWbc0=
|
||||
google.golang.org/api v0.236.0/go.mod h1:X1WF9CU2oTc+Jml1tiIxGmWFK/UZezdqEu09gcxZAj4=
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78=
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"time"
|
||||
@@ -71,7 +72,7 @@ func (t *OAuthTransport) getValidToken(tokenIdentifier string) (string, error) {
|
||||
}
|
||||
// If no token exists, run OAuth flow
|
||||
if token == nil {
|
||||
fmt.Println("No OAuth token found, initiating authentication...")
|
||||
fmt.Fprintln(os.Stderr, "No OAuth token found, initiating authentication...")
|
||||
newAccessToken, err := RunOAuthFlow(tokenIdentifier)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to authenticate: %w", err)
|
||||
@@ -81,11 +82,11 @@ func (t *OAuthTransport) getValidToken(tokenIdentifier string) (string, error) {
|
||||
|
||||
// Check if token needs refresh (5 minute buffer)
|
||||
if token.IsExpired(5) {
|
||||
fmt.Println("OAuth token expired, refreshing...")
|
||||
fmt.Fprintln(os.Stderr, "OAuth token expired, refreshing...")
|
||||
newAccessToken, err := RefreshToken(tokenIdentifier)
|
||||
if err != nil {
|
||||
// If refresh fails, try re-authentication
|
||||
fmt.Println("Token refresh failed, re-authenticating...")
|
||||
fmt.Fprintln(os.Stderr, "Token refresh failed, re-authenticating...")
|
||||
newAccessToken, err = RunOAuthFlow(tokenIdentifier)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to refresh or re-authenticate: %w", err)
|
||||
@@ -137,13 +138,13 @@ func RunOAuthFlow(tokenIdentifier string) (token string, err error) {
|
||||
if err == nil && existingToken != nil {
|
||||
// If token exists but is expired, try refreshing first
|
||||
if existingToken.IsExpired(5) {
|
||||
fmt.Println("Found expired OAuth token, attempting refresh...")
|
||||
fmt.Fprintln(os.Stderr, "Found expired OAuth token, attempting refresh...")
|
||||
refreshedToken, refreshErr := RefreshToken(tokenIdentifier)
|
||||
if refreshErr == nil {
|
||||
fmt.Println("Token refresh successful")
|
||||
fmt.Fprintln(os.Stderr, "Token refresh successful")
|
||||
return refreshedToken, nil
|
||||
}
|
||||
fmt.Printf("Token refresh failed (%v), proceeding with full OAuth flow...\n", refreshErr)
|
||||
fmt.Fprintf(os.Stderr, "Token refresh failed (%v), proceeding with full OAuth flow...\n", refreshErr)
|
||||
} else {
|
||||
// Token exists and is still valid
|
||||
return existingToken.AccessToken, nil
|
||||
@@ -170,10 +171,10 @@ func RunOAuthFlow(tokenIdentifier string) (token string, err error) {
|
||||
oauth2.SetAuthURLParam("state", verifier),
|
||||
)
|
||||
|
||||
fmt.Println("Open the following URL in your browser. Fabric would like to authorize:")
|
||||
fmt.Println(authURL)
|
||||
fmt.Fprintln(os.Stderr, "Open the following URL in your browser. Fabric would like to authorize:")
|
||||
fmt.Fprintln(os.Stderr, authURL)
|
||||
openBrowser(authURL)
|
||||
fmt.Print("Paste the authorization code here: ")
|
||||
fmt.Fprint(os.Stderr, "Paste the authorization code here: ")
|
||||
var code string
|
||||
fmt.Scanln(&code)
|
||||
parts := strings.SplitN(code, "#", 2)
|
||||
|
||||