Mirror of https://github.com/danielmiessler/Fabric.git, synced 2026-01-09 22:38:10 -05:00
Compare commits
25 Commits
85780fee76
497b1ed682
135433b749
f185dedb37
c74a157dcf
91a336e870
5212fbcc37
6d8eb3d2b9
d3bba5d026
699762b694
f2a6f1bd98
3176adf59b
7e29966622
0af0ab683d
e72e67de71
414b6174e7
f63e0dfc05
4ef8578e47
12ee690ae4
cc378be485
06fc8d8732
9e4ed8ecb3
c369425708
cf074d3411
47f75237ff
@@ -7,6 +7,8 @@ on:
    paths-ignore:
      - "data/patterns/**"
      - "**/*.md"
+     - "data/strategies/**"
+     - "cmd/generate_changelog/*.db"

permissions:
  contents: write # Ensure the workflow has write permissions
1 .vscode/settings.json (vendored)

@@ -99,6 +99,7 @@
        "seaborn",
        "semgrep",
        "sess",
+       "storer",
        "Streamlit",
        "stretchr",
        "talkpanel",
471 CHANGELOG.md (file diff suppressed because it is too large)
@@ -1,3 +1,3 @@
package main

-var version = "v1.4.246"
+var version = "v1.4.251"
@@ -11,8 +11,12 @@ A high-performance changelog generator for Git repositories that automatically c

- **Unreleased changes**: Tracks all commits since the last release
- **Concurrent processing**: Parallel GitHub API calls for improved performance
- **Flexible output**: Generate complete changelogs or target specific versions
-- **Optimized PR fetching**: Batch fetches all merged PRs using GitHub Search API (drastically reduces API calls)
+- **GraphQL optimization**: Ultra-fast PR fetching using GitHub GraphQL API (~5-10 calls vs 1000s)
- **Intelligent sync**: Automatically syncs new PRs every 24 hours or when missing PRs are detected
- **AI-powered summaries**: Optional Fabric integration for enhanced changelog summaries
- **Advanced caching**: Content-based change detection for AI summaries with hash comparison
- **Author type detection**: Distinguishes between users, bots, and organizations
- **Lightning-fast incremental updates**: SHA→PR mapping for instant git operations

## Installation
@@ -23,26 +27,31 @@ go install github.com/danielmiessler/fabric/cmd/generate_changelog@latest

## Usage

### Basic usage (generate complete changelog)

```bash
generate_changelog
```

### Save to file

```bash
generate_changelog -o CHANGELOG.md
```

### Generate for a specific version

```bash
generate_changelog -v v1.4.244
```

### Limit to recent versions

```bash
generate_changelog -l 10
```

### Using a GitHub token for private repos or higher rate limits

```bash
export GITHUB_TOKEN=your_token_here
generate_changelog
@@ -51,7 +60,18 @@ generate_changelog
generate_changelog --token your_token_here
```

### AI-enhanced summaries

```bash
# Enable AI summaries using Fabric
generate_changelog --ai-summarize

# Use a custom model for AI summaries
FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-opus-4 generate_changelog --ai-summarize
```

### Cache management

```bash
# Rebuild the cache from scratch
generate_changelog --rebuild-cache
@@ -80,6 +100,7 @@ generate_changelog --cache /path/to/cache.db
| `--rebuild-cache` | | Rebuild cache from scratch | false |
+| `--force-pr-sync` | | Force a full PR sync from GitHub | false |
| `--token` | | GitHub API token | `$GITHUB_TOKEN` |
| `--ai-summarize` | | Generate AI-enhanced summaries using Fabric | false |

## Output Format
@@ -120,54 +141,118 @@ The generated changelog follows this structure:

- **Concurrent API calls**: Processes up to 10 GitHub API requests in parallel
- **Smart caching**: SQLite cache eliminates redundant API calls
- **Incremental updates**: Only processes new commits on subsequent runs
- **Batch PR fetching**: Uses GitHub Search API to fetch all merged PRs in minimal API calls
- **GraphQL optimization**: Uses GitHub GraphQL API to fetch all PR data in ~5-10 calls
- **AI-powered summaries**: Optional Fabric integration with intelligent caching
- **Content-based change detection**: AI summaries only regenerated when content changes
- **Lightning-fast git operations**: SHA→PR mapping stored in database for instant lookups

-### Major Optimization: Batch PR Fetching
+### Major Optimization: GraphQL + Advanced Caching

-The tool has been optimized to drastically reduce GitHub API calls:
+The tool has been optimized to drastically reduce GitHub API calls and improve performance:

-**Previous approach**: Individual API calls for each PR (2 API calls per PR)
+**Before**: Individual API calls for each PR (2 API calls per PR - one for PR details, one for commits)

- For a repo with 500 PRs: 1,000 API calls

-**After**: Batch fetching using GitHub Search API
-
-- For a repo with 500 PRs: ~10 API calls (search) + 500 API calls (details) = ~510 API calls
-- **50% reduction in API calls!**
+**Current approach**: GraphQL batch fetching with intelligent caching
+
+- For a repo with 500 PRs: ~5-10 GraphQL calls (initial fetch) + 0 calls (subsequent runs with cache)
+- **99%+ reduction in API calls after initial run!**

The optimization includes (see the sketch after this list):

-1. **Batch Search**: Uses GitHub's Search API to find all merged PRs in paginated batches
-2. **Smart Caching**: Stores complete PR data and tracks last sync timestamp
-3. **Incremental Sync**: Only fetches PRs merged after the last sync
+1. **GraphQL Batch Fetch**: Uses GitHub's GraphQL API to fetch all merged PRs with commits in minimal calls
+2. **Smart Caching**: Stores complete PR data, commits, and SHA mappings in SQLite
+3. **Incremental Sync**: Only fetches PRs merged after the last sync timestamp
4. **Automatic Refresh**: PRs are synced every 24 hours or when missing PRs are detected
-5. **Fallback Support**: If batch fetch fails, falls back to individual PR fetching
+5. **AI Summary Caching**: Content-based change detection prevents unnecessary AI regeneration
+6. **Fallback Support**: If GraphQL fails, falls back to REST API batch fetching
+7. **Lightning Git Operations**: Pre-computed SHA→PR mappings for instant commit association
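To make the GraphQL batch fetch concrete, here is a minimal sketch of one paginated search call using only the Go standard library. The query shape (GitHub's `search` connection filtered with `is:pr is:merged`) follows GitHub's public GraphQL API, but the function name, the selected fields, and the error handling are illustrative assumptions, not the tool's actual implementation.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"os"
)

// fetchMergedPRPage issues one paginated GraphQL search for merged PRs.
// Each page returns up to 100 PRs, so ~500 PRs need only ~5 calls.
func fetchMergedPRPage(owner, repo, cursor string) (*http.Response, error) {
	query := `query($q: String!, $after: String) {
	  search(query: $q, type: ISSUE, first: 100, after: $after) {
	    pageInfo { hasNextPage endCursor }
	    nodes { ... on PullRequest { number title mergedAt mergeCommit { oid } } }
	  }
	}`
	vars := map[string]any{"q": fmt.Sprintf("repo:%s/%s is:pr is:merged", owner, repo)}
	if cursor != "" {
		vars["after"] = cursor // omit on the first page so GraphQL sees null
	}
	body, err := json.Marshal(map[string]any{"query": query, "variables": vars})
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest("POST", "https://api.github.com/graphql", bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", "Bearer "+os.Getenv("GITHUB_TOKEN"))
	req.Header.Set("Content-Type", "application/json")
	return http.DefaultClient.Do(req)
}
```

Looping until `pageInfo.hasNextPage` is false yields every merged PR in a handful of requests, which is where the ~5-10 call figure comes from.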
## Requirements

- Go 1.24+ (for installation from source)
- Git repository
- GitHub token (optional, for private repos or higher rate limits)
- Fabric CLI (optional, for AI-enhanced summaries)

## Authentication

The tool supports GitHub authentication via:

1. Environment variable: `export GITHUB_TOKEN=your_token`
2. Command line flag: `--token your_token`
3. `.env` file in the same directory as the binary

### Environment File Support

Create a `.env` file next to the `generate_changelog` binary:

```bash
GITHUB_TOKEN=your_github_token_here
FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-sonnet-4-20250514
```

The tool automatically loads `.env` files for convenient configuration management.
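As a sketch of how "load a `.env` next to the binary" can work in Go, assuming the popular `github.com/joho/godotenv` package; the tool's actual loader may differ.

```go
package main

import (
	"os"
	"path/filepath"

	"github.com/joho/godotenv"
)

// loadDotEnv loads KEY=value pairs from a .env file sitting in the
// same directory as the running binary, if one exists.
func loadDotEnv() {
	exe, err := os.Executable()
	if err != nil {
		return // fall back to the process environment
	}
	envPath := filepath.Join(filepath.Dir(exe), ".env")
	if _, err := os.Stat(envPath); err == nil {
		_ = godotenv.Load(envPath) // existing env vars win; errors are non-fatal
	}
}
```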
Without authentication, the tool is limited to 60 GitHub API requests per hour.
## Caching

The SQLite cache stores:

- Version information and commit associations
- Pull request details (title, body, commits, authors)
- Last processed commit SHA for incremental updates
- Last PR sync timestamp for intelligent refresh
- AI summaries with content-based change detection
- SHA→PR mappings for lightning-fast git operations

Cache benefits (a sketch of the SHA→PR table follows this list):

- Instant changelog regeneration
-- Drastically reduced GitHub API usage (50%+ reduction)
+- Drastically reduced GitHub API usage (99%+ reduction after initial run)
- Offline changelog generation (after initial cache build)
- Automatic PR data refresh every 24 hours
- Batch database transactions for better performance
- Content-aware AI summary regeneration
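Here is a minimal sketch of the SHA→PR lookup table idea in SQLite from Go; the table and column names are hypothetical, and the real cache schema may differ.

```go
package main

import (
	"database/sql"

	_ "github.com/mattn/go-sqlite3" // SQLite driver
)

// openSHAPRCache opens (or creates) a tiny SHA→PR lookup table.
// Once populated, associating a commit with its PR is a single
// indexed read instead of a GitHub API call.
func openSHAPRCache(path string) (*sql.DB, error) {
	db, err := sql.Open("sqlite3", path)
	if err != nil {
		return nil, err
	}
	_, err = db.Exec(`CREATE TABLE IF NOT EXISTS commit_pr_map (
		sha TEXT PRIMARY KEY,
		pr_number INTEGER NOT NULL
	)`)
	return db, err
}

// prForSHA returns the PR number for a commit SHA, or 0 if unmapped.
func prForSHA(db *sql.DB, sha string) (int, error) {
	var pr int
	err := db.QueryRow(`SELECT pr_number FROM commit_pr_map WHERE sha = ?`, sha).Scan(&pr)
	if err == sql.ErrNoRows {
		return 0, nil
	}
	return pr, err
}
```

Because the mapping is a single indexed read, associating commits with PRs needs no network round trip at all.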
## AI-Enhanced Summaries

The tool can generate AI-powered summaries using Fabric for more polished, professional changelogs:

```bash
# Enable AI summarization
generate_changelog --ai-summarize

# Custom model (default: claude-sonnet-4-20250514)
FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-opus-4 generate_changelog --ai-summarize
```

### AI Summary Features

- **Content-based change detection**: AI summaries are only regenerated when version content changes
- **Intelligent caching**: Preserves existing summaries and only processes changed versions
- **Content hash comparison**: Uses SHA256 hashing to detect when "Unreleased" content changes
- **Automatic fallback**: Falls back to raw content if AI processing fails
- **Error detection**: Identifies and handles AI processing errors gracefully
- **Minimum content filtering**: Skips AI processing for very brief content (< 256 characters)

### AI Model Configuration

Set the model via environment variable:

```bash
export FABRIC_CHANGELOG_SUMMARIZE_MODEL=claude-opus-4
# or
export FABRIC_CHANGELOG_SUMMARIZE_MODEL=gpt-4
```

AI summaries are cached and only regenerated when (see the sketch after this list):

- Version content changes (detected via hash comparison)
- No existing AI summary exists for the version
- Force rebuild is requested
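A minimal sketch of that regeneration check; `needsResummarize` and its parameters are hypothetical names, but the SHA256 comparison mirrors the behavior described above.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
)

// contentHash returns a stable fingerprint of a version's raw changelog text.
func contentHash(content string) string {
	sum := sha256.Sum256([]byte(content))
	return hex.EncodeToString(sum[:])
}

// needsResummarize reports whether the AI summary must be regenerated:
// the content changed, no summary exists yet, or a rebuild was forced.
func needsResummarize(content, cachedHash, cachedSummary string, forceRebuild bool) bool {
	return forceRebuild || cachedSummary == "" || contentHash(content) != cachedHash
}
```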
## Contributing

@@ -175,4 +260,4 @@ This tool is part of the Fabric project. Contributions are welcome!

## License

-Same as the Fabric project.
+The MIT License. Same as the Fabric project.
Binary file not shown.
@@ -82,8 +82,8 @@ func (g *Generator) collectData() error {
	if cachedTag != "" {
		// Get the current latest tag from git
		currentTag, err := g.gitWalker.GetLatestTag()
-		if err == nil && currentTag == cachedTag {
-			// Same tag - load cached data and walk commits since tag for "Unreleased"
+		if err == nil {
+			// Load cached data - we can use it even if there are new tags
			cachedVersions, err := g.cache.GetVersions()
			if err == nil && len(cachedVersions) > 0 {
				g.versions = cachedVersions
@@ -97,7 +97,30 @@ func (g *Generator) collectData() error {
			}
		}

-		// Walk commits since the latest tag to get new unreleased commits
+		// If we have new tags since cache, process the new versions only
+		if currentTag != cachedTag {
+			fmt.Fprintf(os.Stderr, "Processing new versions since %s...\n", cachedTag)
+			newVersions, err := g.gitWalker.WalkHistorySinceTag(cachedTag)
+			if err != nil {
+				fmt.Fprintf(os.Stderr, "Warning: Failed to walk history since tag %s: %v\n", cachedTag, err)
+			} else {
+				// Merge new versions into cached versions (only add if not already cached)
+				for name, version := range newVersions {
+					if name != "Unreleased" { // Handle Unreleased separately
+						if existingVersion, exists := g.versions[name]; !exists {
+							g.versions[name] = version
+						} else {
+							// Update existing version with new PR numbers if they're missing
+							if len(existingVersion.PRNumbers) == 0 && len(version.PRNumbers) > 0 {
+								existingVersion.PRNumbers = version.PRNumbers
+							}
+						}
+					}
+				}
+			}
+		}
+
+		// Always update Unreleased section with latest commits
		unreleasedVersion, err := g.gitWalker.WalkCommitsSinceTag(currentTag)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Warning: Failed to walk commits since tag %s: %v\n", currentTag, err)
@@ -110,6 +133,29 @@ func (g *Generator) collectData() error {
				g.versions["Unreleased"] = unreleasedVersion
			}

+			// Save any new versions to cache (after potential AI processing)
+			if currentTag != cachedTag {
+				for _, version := range g.versions {
+					// Skip versions that were already cached and Unreleased
+					if version.Name != "Unreleased" {
+						if err := g.cache.SaveVersion(version); err != nil {
+							fmt.Fprintf(os.Stderr, "Warning: Failed to save version to cache: %v\n", err)
+						}
+
+						for _, commit := range version.Commits {
+							if err := g.cache.SaveCommit(commit, version.Name); err != nil {
+								fmt.Fprintf(os.Stderr, "Warning: Failed to save commit to cache: %v\n", err)
+							}
+						}
+					}
+				}
+
+				// Update the last processed tag
+				if err := g.cache.SetLastProcessedTag(currentTag); err != nil {
+					fmt.Fprintf(os.Stderr, "Warning: Failed to update last processed tag: %v\n", err)
+				}
+			}
+
			return nil
		}
	}
@@ -164,8 +210,26 @@ func (g *Generator) fetchPRs() error {
		lastSync, _ = g.cache.GetLastPRSync()
	}

+	// Check if we need to sync for missing PRs
+	missingPRs := false
+	for _, version := range g.versions {
+		for _, prNum := range version.PRNumbers {
+			if _, exists := g.prs[prNum]; !exists {
+				missingPRs = true
+				break
+			}
+		}
+		if missingPRs {
+			break
+		}
+	}
+
+	if missingPRs {
+		fmt.Fprintf(os.Stderr, "Full sync triggered due to missing PRs in cache.\n")
+	}
	// If we have never synced or it's been more than 24 hours, do a full sync
-	needsSync := lastSync.IsZero() || time.Since(lastSync) > 24*time.Hour || g.cfg.ForcePRSync
+	// Also sync if we have versions with PR numbers that aren't cached
+	needsSync := lastSync.IsZero() || time.Since(lastSync) > 24*time.Hour || g.cfg.ForcePRSync || missingPRs

	if !needsSync {
		fmt.Fprintf(os.Stderr, "Using cached PR data (last sync: %s)\n", lastSync.Format("2006-01-02 15:04:05"))
@@ -298,6 +362,7 @@ func (g *Generator) formatVersion(version *git.Version) string {
		}
	}

+	// For released versions, if we have cached AI summary, use it!
	if version.Name != "Unreleased" && version.AISummary != "" {
		fmt.Fprintf(os.Stderr, "✅ %s already summarized (skipping)\n", version.Name)
		sb.WriteString(version.AISummary)
@@ -529,8 +594,6 @@ func normalizeLineEndings(content string) string {
}

func (g *Generator) formatCommitMessage(message string) string {
-	prefixes := []string{"fix:", "feat:", "docs:", "style:", "refactor:",
-		"test:", "chore:", "perf:", "ci:", "build:", "revert:", "# docs:"}
	strings_to_remove := []string{
		"### CHANGES\n", "## CHANGES\n", "# CHANGES\n",
		"...\n", "---\n", "## Changes\n", "## Change",
@@ -543,13 +606,6 @@ func (g *Generator) formatCommitMessage(message string) string {
	// No hard tabs
	message = strings.ReplaceAll(message, "\t", " ")

-	for _, prefix := range prefixes {
-		if strings.HasPrefix(strings.ToLower(message), prefix) {
-			message = strings.TrimSpace(message[len(prefix):])
-			break
-		}
-	}
-
	if len(message) > 0 {
		message = strings.ToUpper(message[:1]) + message[1:]
	}
@@ -10,6 +10,38 @@ import (
const DefaultSummarizeModel = "claude-sonnet-4-20250514"
+const MinContentLength = 256 // Minimum content length to consider for summarization
+
+const prompt = `# ROLE
+You are an expert Technical Writer specializing in creating clear, concise,
+and professional release notes from raw Git commit logs.
+
+# TASK
+Your goal is to transform a provided block of Git commit logs into a clean,
+human-readable changelog summary. You will identify the most important changes,
+format them as a bulleted list, and preserve the associated Pull Request (PR)
+information.
+
+# INSTRUCTIONS:
+Follow these steps in order:
+1. Deeply analyze the input. You will be given a block of text containing PR
+   information and commit log messages. Carefully read through the logs
+   to identify individual commits and their descriptions.
+2. Identify Key Changes: Focus on commits that represent significant changes,
+   such as new features ("feat"), bug fixes ("fix"), performance improvements ("perf"),
+   or breaking changes ("BREAKING CHANGE").
+3. Select the Top 5: From the identified key changes, select a maximum of the five
+   (5) most impactful ones to include in the summary.
+   If there are five or fewer total changes, include all of them.
+4. Format the Output:
+   - Where you see a PR header, include the PR header verbatim. NO CHANGES.
+     **This is a critical rule: Do not modify the PR header, as it contains
+     important links.** What follows the PR header are the related changes.
+   - Do not add any additional text or preamble. Begin directly with the output.
+   - Use bullet points for each key change, starting each point with a hyphen ("-").
+   - Ensure that the summary is concise and focused on the main changes.
+   - The summary should be in American English (en-US), using proper grammar and punctuation.
+5. If the content is too brief or you do not see any PR headers, return the content as is.
+`

// getSummarizeModel returns the model to use for AI summarization
func getSummarizeModel() string {
	if model := os.Getenv("FABRIC_CHANGELOG_SUMMARIZE_MODEL"); model != "" {
@@ -30,17 +62,6 @@ func SummarizeVersionContent(content string) (string, error) {

	model := getSummarizeModel()

-	prompt := `Summarize the changes extracted from Git commit logs in a concise, professional way.
-Pay particular attention to the following rules:
-- Preserve the PR headers verbatim to your summary.
-- I REPEAT: Do not change the PR headers in any way. They contain links to the PRs and Author Profiles.
-- Use bullet points for lists and key changes (rendered using "-")
-- Focus on the main changes and improvements.
-- Avoid unnecessary details or preamble.
-- Keep it under 800 characters.
-- Be brief. List only the 5 most important changes along with the PR information which should be kept intact.
-- If the content is too brief or you do not see any PR headers, return the content as is.`
-
	cmd := exec.Command("fabric", "-m", model, prompt)
	cmd.Stdin = strings.NewReader(content)
@@ -5,10 +5,12 @@ import (
	"regexp"
	"strconv"
	"strings"
+	"time"

	"github.com/go-git/go-git/v5"
	"github.com/go-git/go-git/v5/plumbing"
	"github.com/go-git/go-git/v5/plumbing/object"
+	"github.com/go-git/go-git/v5/plumbing/storer"
)

var (
@@ -280,6 +282,111 @@ func parseGitHubURL(url string) (owner, repo string) {
	return "", ""
}

+// WalkHistorySinceTag walks git history from HEAD down to (but not including) the specified tag
+// and returns any version commits found along the way
+func (w *Walker) WalkHistorySinceTag(sinceTag string) (map[string]*Version, error) {
+	// Get the commit SHA for the sinceTag
+	tagRef, err := w.repo.Tag(sinceTag)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get tag %s: %w", sinceTag, err)
+	}
+
+	tagCommit, err := w.repo.CommitObject(tagRef.Hash())
+	if err != nil {
+		return nil, fmt.Errorf("failed to get commit for tag %s: %w", sinceTag, err)
+	}
+
+	// Get HEAD reference
+	ref, err := w.repo.Head()
+	if err != nil {
+		return nil, fmt.Errorf("failed to get HEAD: %w", err)
+	}
+
+	// Walk from HEAD down to the tag commit (excluding it)
+	commitIter, err := w.repo.Log(&git.LogOptions{
+		From:  ref.Hash(),
+		Order: git.LogOrderCommitterTime,
+	})
+	if err != nil {
+		return nil, fmt.Errorf("failed to create commit iterator: %w", err)
+	}
+	defer commitIter.Close()
+
+	versions := make(map[string]*Version)
+	currentVersion := "Unreleased"
+	prNumbers := make(map[string][]int)
+
+	err = commitIter.ForEach(func(c *object.Commit) error {
+		// Stop iteration when the hash of the current commit matches the hash of the specified sinceTag commit
+		if c.Hash == tagCommit.Hash {
+			return storer.ErrStop
+		}
+
+		commit := &Commit{
+			SHA:     c.Hash.String(),
+			Message: strings.TrimSpace(c.Message),
+			Author:  c.Author.Name,
+			Email:   c.Author.Email,
+			Date:    c.Author.When,
+			IsMerge: len(c.ParentHashes) > 1,
+		}
+
+		// Check for version pattern
+		if matches := versionPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
+			commit.IsVersion = true
+			commit.Version = matches[1]
+			currentVersion = commit.Version
+
+			if _, exists := versions[currentVersion]; !exists {
+				versions[currentVersion] = &Version{
+					Name:      currentVersion,
+					Date:      commit.Date,
+					CommitSHA: commit.SHA,
+					Commits:   []*Commit{},
+				}
+			}
+			return nil
+		}
+
+		// Check for PR merge pattern
+		if matches := prPattern.FindStringSubmatch(commit.Message); len(matches) > 1 {
+			prNumber, err := strconv.Atoi(matches[1])
+			if err != nil {
+				// Handle parsing error (e.g., log it or skip processing)
+				return fmt.Errorf("failed to parse PR number: %v", err)
+			}
+			commit.PRNumber = prNumber
+
+			prNumbers[currentVersion] = append(prNumbers[currentVersion], prNumber)
+		}
+
+		// Add commit to current version
+		if _, exists := versions[currentVersion]; !exists {
+			versions[currentVersion] = &Version{
+				Name:      currentVersion,
+				Date:      time.Time{}, // Zero value, will be set by version commit
+				CommitSHA: "",
+				Commits:   []*Commit{},
+			}
+		}
+
+		versions[currentVersion].Commits = append(versions[currentVersion].Commits, commit)
+		return nil
+	})
+
+	// Handle the stop condition - storer.ErrStop is expected
+	if err == storer.ErrStop {
+		err = nil
+	}
+
+	// Assign collected PR numbers to each version
+	for version, prs := range prNumbers {
+		versions[version].PRNumbers = dedupInts(prs)
+	}
+
+	return versions, err
+}
+
+func dedupInts(ints []int) []int {
+	seen := make(map[int]bool)
+	result := []int{}
@@ -259,8 +259,24 @@ func (o *PluginRegistry) GetModels() (ret *ai.VendorsModels, err error) {
func (o *PluginRegistry) Configure() (err error) {
	o.ConfigureVendors()
	_ = o.Defaults.Configure()
	if err := o.CustomPatterns.Configure(); err != nil {
		return fmt.Errorf("error configuring CustomPatterns: %w", err)
	}
	_ = o.PatternsLoader.Configure()

	// Refresh the database custom patterns directory after custom patterns plugin is configured
	customPatternsDir := os.Getenv("CUSTOM_PATTERNS_DIRECTORY")
	if customPatternsDir != "" {
		// Expand home directory if needed
		if strings.HasPrefix(customPatternsDir, "~/") {
			if homeDir, err := os.UserHomeDir(); err == nil {
				customPatternsDir = filepath.Join(homeDir, customPatternsDir[2:])
			}
		}
		o.Db.Patterns.CustomPatternsDir = customPatternsDir
		o.PatternsLoader.Patterns.CustomPatternsDir = customPatternsDir
	}

	// YouTube and Jina are not mandatory, so ignore not configured error
	_ = o.YouTube.Configure()
	_ = o.Jina.Configure()
@@ -86,9 +86,10 @@ func (o *Session) String() (ret string) {
		ret += fmt.Sprintf("\n--- \n[%v]\n%v", message.Role, message.Content)
		if message.MultiContent != nil {
			for _, part := range message.MultiContent {
-				if part.Type == chat.ChatMessagePartTypeImageURL {
+				switch part.Type {
+				case chat.ChatMessagePartTypeImageURL:
					ret += fmt.Sprintf("\n%v: %v", part.Type, *part.ImageURL)
-				} else if part.Type == chat.ChatMessagePartTypeText {
+				case chat.ChatMessagePartTypeText:
					ret += fmt.Sprintf("\n%v: %v", part.Type, part.Text)
				}
			}
@@ -4,6 +4,8 @@ import (
	"fmt"
	"os"
	"path/filepath"
+	"sort"
+	"strings"

	"github.com/danielmiessler/fabric/internal/plugins"
	"github.com/danielmiessler/fabric/internal/plugins/db/fsdb"
@@ -107,6 +109,12 @@ func (o *PatternsLoader) PopulateDB() (err error) {
	}

	fmt.Printf("✅ Successfully downloaded and installed patterns to %s\n", o.Patterns.Dir)

+	// Create the unique patterns file after patterns are successfully moved
+	if err = o.createUniquePatternsFile(); err != nil {
+		return fmt.Errorf("failed to create unique patterns file: %w", err)
+	}
+
	return
}
@@ -301,3 +309,60 @@ func (o *PatternsLoader) countPatternsInDirectory(dir string) (int, error) {

	return patternCount, nil
}

+// createUniquePatternsFile creates the unique_patterns.txt file with all pattern names
+func (o *PatternsLoader) createUniquePatternsFile() (err error) {
+	// Read patterns from the main patterns directory
+	entries, err := os.ReadDir(o.Patterns.Dir)
+	if err != nil {
+		return fmt.Errorf("failed to read patterns directory: %w", err)
+	}
+
+	patternNamesMap := make(map[string]bool) // Use map to avoid duplicates
+
+	// Add patterns from main directory
+	for _, entry := range entries {
+		if entry.IsDir() {
+			patternNamesMap[entry.Name()] = true
+		}
+	}
+
+	// Add patterns from custom patterns directory if it exists
+	if o.Patterns.CustomPatternsDir != "" {
+		if customEntries, customErr := os.ReadDir(o.Patterns.CustomPatternsDir); customErr == nil {
+			for _, entry := range customEntries {
+				if entry.IsDir() {
+					patternNamesMap[entry.Name()] = true
+				}
+			}
+			fmt.Fprintf(os.Stderr, "📂 Also included patterns from custom directory: %s\n", o.Patterns.CustomPatternsDir)
+		} else {
+			fmt.Fprintf(os.Stderr, "Warning: Could not read custom patterns directory %s: %v\n", o.Patterns.CustomPatternsDir, customErr)
+		}
+	}
+
+	if len(patternNamesMap) == 0 {
+		if o.Patterns.CustomPatternsDir != "" {
+			return fmt.Errorf("no patterns found in directories %s and %s", o.Patterns.Dir, o.Patterns.CustomPatternsDir)
+		}
+		return fmt.Errorf("no patterns found in directory %s", o.Patterns.Dir)
+	}
+
+	// Convert map to sorted slice
+	var patternNames []string
+	for name := range patternNamesMap {
+		patternNames = append(patternNames, name)
+	}
+
+	// Sort patterns alphabetically for consistent output
+	sort.Strings(patternNames)
+
+	// Join pattern names with newlines
+	content := strings.Join(patternNames, "\n") + "\n"
+	if err = os.WriteFile(o.Patterns.UniquePatternsFilePath, []byte(content), 0644); err != nil {
+		return fmt.Errorf("failed to write unique patterns file: %w", err)
+	}
+
+	fmt.Printf("📝 Created unique patterns file with %d patterns\n", len(patternNames))
+	return nil
+}
@@ -1 +1 @@
-"1.4.246"
+"1.4.251"