Compare commits

...

20 Commits

Author SHA1 Message Date
github-actions[bot]
126a9ff406 Update version to v1.4.217 and commit 2025-06-26 23:09:56 +00:00
Kayvan Sylvan
e906425138 Merge pull request #1546 from ksylvan/0626-fix-yt-in-web-interface
New YouTube Transcript Endpoint Added to REST API
2025-06-26 16:08:23 -07:00
Daniel Miessler
df4a560302 Add extract_mcp_servers pattern
New pattern to extract mentions of MCP (Model Context Protocol) servers from content. Identifies server names, features, capabilities, and usage examples.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-06-26 11:39:21 -07:00
Kayvan Sylvan
34cf669bd4 chore: fix endpoint calls from frontend 2025-06-26 01:37:53 -07:00
Kayvan Sylvan
0dbe1bbb4e feat: add dedicated YouTube transcript API endpoint
## CHANGES

- Add new YouTube handler for transcript requests
- Create `/youtube/transcript` POST endpoint route
- Add request/response types for YouTube API
- Support language and timestamp options
- Update frontend to use new endpoint
- Remove chat endpoint dependency for transcripts
- Validate video vs playlist URLs properly
2025-06-26 01:21:27 -07:00
github-actions[bot]
e29ed908e6 Update version to v1.4.216 and commit 2025-06-26 06:52:16 +00:00
Kayvan Sylvan
3d049a435a Merge pull request #1545 from ksylvan/0625-fix-attachments-used-with-patterns
Update Message Handling for Attachments and Multi-Modal content
2025-06-25 23:50:43 -07:00
Kayvan Sylvan
1a335b3fb9 refactor(ai): unify assistant and user message formatting in dryrun
### CHANGES

- Unify assistant and user message formatting logic.
- Use `formatMultiContentMessage` for assistant role messages.
- Improve dryrun support for multi-part message content.
2025-06-25 23:49:23 -07:00
Kayvan Sylvan
e2430b6c75 fix: correctly combine text and attachments in raw mode sessions
### CHANGES

- Combine user text and attachments into MultiContent.
- Preserve existing non-text parts like images.
- Use standard content field for text-only messages.
2025-06-25 23:28:12 -07:00
Kayvan Sylvan
2497f10eca feat: add MultiContent support to chat message construction in raw mode 2025-06-25 23:18:56 -07:00
Kayvan Sylvan
f62d2198f9 refactor: extract message and option formatting logic into reusable methods
## CHANGES

- Extract multi-content message formatting to dedicated method
- Create formatMessages method for all message types
- Add formatOptions method for chat options display
- Replace inline formatting with strings.Builder usage
- Reduce code duplication between Send and SendStream
- Improve code organization and maintainability
2025-06-25 22:08:26 -07:00
Kayvan Sylvan
816e4072f4 fix(chatter): prevent duplicate user message when applying patterns
### CHANGES

*   Prevent adding user message twice when using patterns.
*   Ensure multi-part content is always included in session.
2025-06-25 21:43:46 -07:00
Kayvan Sylvan
85ee6196bd chore: fix formatting. 2025-06-25 18:31:46 -07:00
Kayvan Sylvan
e15645c1bc chore: clean up comments in chatter.go for clarity 2025-06-25 17:15:13 -07:00
Kayvan Sylvan
fada6bb044 chore: simplify user message appending logic in BuildSession
### CHANGES
- Remove conditional check for pattern name in message appending.
- Always append user message if it exists in request.
2025-06-25 17:12:48 -07:00
Kayvan Sylvan
4ad14bb752 feat: enhance dryrun client to display multi-content user messages
### CHANGES

- Handle multi-content messages for the user role.
- Display image URLs from user messages in output.
- Update both `Send` and `SendStream` methods.
- Retain existing behavior for simple text messages.
2025-06-25 17:08:30 -07:00
Kayvan Sylvan
97fc9b0d58 feat: allow combining user messages and attachments with patterns
- Allow user messages and attachments with patterns.
- Append user message to session regardless of pattern.
- Refactor chat request builder for improved clarity.
2025-06-25 16:24:47 -07:00
github-actions[bot]
ad0df37d10 Update version to v1.4.215 and commit 2025-06-25 11:07:45 +00:00
Kayvan Sylvan
666302c3c1 Merge pull request #1543 from ksylvan/0625-fix-pattern-descriptions-json
fix: Revert multiline tags in generated json files
2025-06-25 04:06:12 -07:00
Kayvan Sylvan
71e20cf251 chore: reformat pattern_descriptions.json to improve readability
### CHANGES

- Reformat JSON `tags` array to display on new lines.
- Update `write_essay` pattern description for clarity.
- Apply consistent formatting to both data files.
2025-06-25 03:55:00 -07:00
11 changed files with 2111 additions and 515 deletions

File diff suppressed because it is too large

View File

@@ -279,14 +279,7 @@ func (o *Flags) BuildChatRequest(Meta string) (ret *common.ChatRequest, err erro
}
var message *goopenai.ChatCompletionMessage
if len(o.Attachments) == 0 {
if o.Message != "" {
message = &goopenai.ChatCompletionMessage{
Role: goopenai.ChatMessageRoleUser,
Content: strings.TrimSpace(o.Message),
}
}
} else {
if len(o.Attachments) > 0 {
message = &goopenai.ChatCompletionMessage{
Role: goopenai.ChatMessageRoleUser,
}
@@ -323,7 +316,13 @@ func (o *Flags) BuildChatRequest(Meta string) (ret *common.ChatRequest, err erro
},
})
}
} else if o.Message != "" {
message = &goopenai.ChatCompletionMessage{
Role: goopenai.ChatMessageRoleUser,
Content: strings.TrimSpace(o.Message),
}
}
ret.Message = message
if o.Language != "" {
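The two hunks above invert the original branching in `BuildChatRequest`: the attachment case is now checked first, and a plain text message becomes the trailing `else if`. Below is a standalone sketch of the resulting shape, using the same go-openai types; the function name and the demo `main` are illustrative, and the attachment-to-`ChatMessagePart` conversion (plus how the text is later merged with attachments in chatter.go) is elided.

```go
package main

import (
	"fmt"
	"strings"

	goopenai "github.com/sashabaranov/go-openai"
)

// buildUserMessage is an illustrative stand-in for the branching in
// BuildChatRequest after this change: attachments take the MultiContent path,
// a bare text message uses the plain Content field, and neither yields nil.
func buildUserMessage(text string, attachmentParts []goopenai.ChatMessagePart) *goopenai.ChatCompletionMessage {
	if len(attachmentParts) > 0 {
		msg := &goopenai.ChatCompletionMessage{Role: goopenai.ChatMessageRoleUser}
		// The real code appends each attachment as a ChatMessagePart here
		// (the user text is merged in downstream, in chatter.go).
		msg.MultiContent = append(msg.MultiContent, attachmentParts...)
		return msg
	}
	if text != "" {
		return &goopenai.ChatCompletionMessage{
			Role:    goopenai.ChatMessageRoleUser,
			Content: strings.TrimSpace(text),
		}
	}
	return nil
}

func main() {
	msg := buildUserMessage("  summarize this  ", nil)
	fmt.Println(msg.Content) // "summarize this"
}
```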

View File

@@ -30,11 +30,11 @@ type Chatter struct {
strategy string
}
// Send processes a chat request and applies any file changes if using the create_coding_feature pattern
// Send processes a chat request and applies file changes for create_coding_feature pattern
func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (session *fsdb.Session, err error) {
modelToUse := opts.Model
if modelToUse == "" {
modelToUse = o.model // Default to the model set in the Chatter struct
modelToUse = o.model
}
if o.vendor.NeedsRawMode(modelToUse) {
opts.Raw = true
@@ -89,18 +89,15 @@ func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (s
return
}
// Process file changes if using the create_coding_feature pattern
// Process file changes for create_coding_feature pattern
if request.PatternName == "create_coding_feature" {
// Look for file changes in the response
summary, fileChanges, parseErr := common.ParseFileChanges(message)
if parseErr != nil {
fmt.Printf("Warning: Failed to parse file changes: %v\n", parseErr)
} else if len(fileChanges) > 0 {
// Get the project root - use the current directory
projectRoot, err := os.Getwd()
if err != nil {
fmt.Printf("Warning: Failed to get current directory: %v\n", err)
// Continue without applying changes
} else {
if applyErr := common.ApplyFileChanges(projectRoot, fileChanges); applyErr != nil {
fmt.Printf("Warning: Failed to apply file changes: %v\n", applyErr)
@@ -122,7 +119,6 @@ func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (s
}
func (o *Chatter) BuildSession(request *common.ChatRequest, raw bool) (session *fsdb.Session, err error) {
// If a session name is provided, retrieve it from the database
if request.SessionName != "" {
var sess *fsdb.Session
if sess, err = o.db.Sessions.Get(request.SessionName); err != nil {
@@ -149,9 +145,9 @@ func (o *Chatter) BuildSession(request *common.ChatRequest, raw bool) (session *
contextContent = ctx.Content
}
// Process any template variables in the message content (user input)
// Process template variables in message content
// Double curly braces {{variable}} indicate template substitution
// Ensure we have a message before processing, other wise we'll get an error when we pass to pattern.go
// Ensure we have a message before processing
if request.Message == nil {
request.Message = &goopenai.ChatCompletionMessage{
Role: goopenai.ChatMessageRoleUser,
@@ -168,19 +164,19 @@ func (o *Chatter) BuildSession(request *common.ChatRequest, raw bool) (session *
}
var patternContent string
inputUsed := false
if request.PatternName != "" {
pattern, err := o.db.Patterns.GetApplyVariables(request.PatternName, request.PatternVariables, request.Message.Content)
// pattern will now contain user input, and all variables will be resolved, or errored
if err != nil {
return nil, fmt.Errorf("could not get pattern %s: %v", request.PatternName, err)
}
patternContent = pattern.Pattern
inputUsed = true
}
systemMessage := strings.TrimSpace(contextContent) + strings.TrimSpace(patternContent)
// Apply strategy if specified
if request.StrategyName != "" {
strategy, err := strategy.LoadStrategy(request.StrategyName)
if err != nil {
@@ -199,33 +195,51 @@ func (o *Chatter) BuildSession(request *common.ChatRequest, raw bool) (session *
}
if raw {
// In raw mode, we want to avoid duplicating the input that's already in the pattern
var finalContent string
if systemMessage != "" {
// If we have a pattern, it already includes the user input
if request.PatternName != "" {
finalContent = systemMessage
} else {
// No pattern, combine system message with user input
finalContent = fmt.Sprintf("%s\n\n%s", systemMessage, request.Message.Content)
}
request.Message = &goopenai.ChatCompletionMessage{
Role: goopenai.ChatMessageRoleUser,
Content: finalContent,
// Handle MultiContent properly in raw mode
if len(request.Message.MultiContent) > 0 {
// When we have attachments, add the text as a text part in MultiContent
newMultiContent := []goopenai.ChatMessagePart{
{
Type: goopenai.ChatMessagePartTypeText,
Text: finalContent,
},
}
// Add existing non-text parts (like images)
for _, part := range request.Message.MultiContent {
if part.Type != goopenai.ChatMessagePartTypeText {
newMultiContent = append(newMultiContent, part)
}
}
request.Message = &goopenai.ChatCompletionMessage{
Role: goopenai.ChatMessageRoleUser,
MultiContent: newMultiContent,
}
} else {
// No attachments, use regular Content field
request.Message = &goopenai.ChatCompletionMessage{
Role: goopenai.ChatMessageRoleUser,
Content: finalContent,
}
}
}
// After this, if request.Message is not nil, append it
if request.Message != nil {
session.Append(request.Message)
}
} else { // Not raw mode
} else {
if systemMessage != "" {
session.Append(&goopenai.ChatCompletionMessage{Role: goopenai.ChatMessageRoleSystem, Content: systemMessage})
}
// If a pattern was used (request.PatternName != ""), its output (systemMessage)
// already incorporates the user input (request.Message.Content via GetApplyVariables).
// So, we only append the direct user message if NO pattern was used.
if request.PatternName == "" && request.Message != nil {
// If multi-part content, it is in the user message, and should be added.
// Otherwise, we should only add it if we have not already used it in the systemMessage.
if len(request.Message.MultiContent) > 0 || (request.Message != nil && !inputUsed) {
session.Append(request.Message)
}
}
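In raw mode the branch above folds the pattern/system text and the user input into one string, then rebuilds the message: if the original user message carried `MultiContent` (attachments), the combined text is inserted as the leading text part and existing non-text parts such as image URLs are carried over; otherwise the plain `Content` field is used. A minimal standalone sketch of that merge with the same go-openai types; the helper name and demo values are illustrative, not the repository's API.

```go
package main

import (
	"fmt"

	goopenai "github.com/sashabaranov/go-openai"
)

// mergeRawContent mirrors, in simplified form, the raw-mode merge shown above:
// the combined text becomes the first MultiContent part, existing non-text
// parts (e.g. image URLs) are preserved, and superseded text parts are dropped.
func mergeRawContent(finalContent string, original *goopenai.ChatCompletionMessage) *goopenai.ChatCompletionMessage {
	if len(original.MultiContent) == 0 {
		// No attachments: a plain Content field is enough.
		return &goopenai.ChatCompletionMessage{
			Role:    goopenai.ChatMessageRoleUser,
			Content: finalContent,
		}
	}
	parts := []goopenai.ChatMessagePart{{
		Type: goopenai.ChatMessagePartTypeText,
		Text: finalContent,
	}}
	for _, part := range original.MultiContent {
		if part.Type != goopenai.ChatMessagePartTypeText {
			parts = append(parts, part)
		}
	}
	return &goopenai.ChatCompletionMessage{
		Role:         goopenai.ChatMessageRoleUser,
		MultiContent: parts,
	}
}

func main() {
	orig := &goopenai.ChatCompletionMessage{
		Role: goopenai.ChatMessageRoleUser,
		MultiContent: []goopenai.ChatMessagePart{
			{Type: goopenai.ChatMessagePartTypeText, Text: "old text"},
			{Type: goopenai.ChatMessagePartTypeImageURL, ImageURL: &goopenai.ChatMessageImageURL{URL: "https://example.com/cat.png"}},
		},
	}
	merged := mergeRawContent("system prompt\n\nuser input", orig)
	fmt.Println(len(merged.MultiContent)) // 2: new text part + preserved image part
}
```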

View File

@@ -1 +1 @@
"1.4.214"
"1.4.217"

View File

@@ -0,0 +1,64 @@
# IDENTITY and PURPOSE
You are an expert at analyzing content related to MCP (Model Context Protocol) servers. You excel at identifying and extracting mentions of MCP servers, their features, capabilities, integrations, and usage patterns.
Take a step back and think step-by-step about how to achieve the best results for extracting MCP server information.
# STEPS
- Read and analyze the entire content carefully
- Identify all mentions of MCP servers, including:
- Specific MCP server names
- Server capabilities and features
- Integration details
- Configuration examples
- Use cases and applications
- Installation or setup instructions
- API endpoints or methods exposed
- Any limitations or requirements
# OUTPUT SECTIONS
- Output a summary of all MCP servers mentioned with the following sections:
## SERVERS FOUND
- List each MCP server found with a 15-word description
- Include the server name and its primary purpose
- Use bullet points for each server
## SERVER DETAILS
For each server found, provide:
- **Server Name**: The official name
- **Purpose**: Main functionality in 25 words or less
- **Key Features**: Up to 5 main features as bullet points
- **Integration**: How it integrates with systems (if mentioned)
- **Configuration**: Any configuration details mentioned
- **Requirements**: Dependencies or requirements (if specified)
## USAGE EXAMPLES
- Extract any code snippets or usage examples
- Include configuration files or setup instructions
- Present each example with context
## INSIGHTS
- Provide 3-5 insights about the MCP servers mentioned
- Focus on patterns, trends, or notable characteristics
- Each insight should be a 20-word bullet point
# OUTPUT INSTRUCTIONS
- Output in clean, readable Markdown
- Use proper heading hierarchy
- Include code blocks with appropriate language tags
- Do not include warnings or notes about the content
- If no MCP servers are found, simply state "No MCP servers mentioned in the content"
- Ensure all server names are accurately captured
- Preserve technical details and specifications
# INPUT:
INPUT:

View File

@@ -4,6 +4,7 @@ import (
"bytes"
"context"
"fmt"
"strings"
goopenai "github.com/sashabaranov/go-openai"
@@ -23,62 +24,77 @@ func (c *Client) ListModels() ([]string, error) {
return []string{"dry-run-model"}, nil
}
func (c *Client) SendStream(msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions, channel chan string) error {
output := "Dry run: Would send the following request:\n\n"
func (c *Client) formatMultiContentMessage(msg *goopenai.ChatCompletionMessage) string {
var builder strings.Builder
if len(msg.MultiContent) > 0 {
builder.WriteString(fmt.Sprintf("%s:\n", msg.Role))
for _, part := range msg.MultiContent {
builder.WriteString(fmt.Sprintf(" - Type: %s\n", part.Type))
if part.Type == goopenai.ChatMessagePartTypeImageURL {
builder.WriteString(fmt.Sprintf(" Image URL: %s\n", part.ImageURL.URL))
} else {
builder.WriteString(fmt.Sprintf(" Text: %s\n", part.Text))
}
}
builder.WriteString("\n")
} else {
builder.WriteString(fmt.Sprintf("%s:\n%s\n\n", msg.Role, msg.Content))
}
return builder.String()
}
func (c *Client) formatMessages(msgs []*goopenai.ChatCompletionMessage) string {
var builder strings.Builder
for _, msg := range msgs {
switch msg.Role {
case goopenai.ChatMessageRoleSystem:
output += fmt.Sprintf("System:\n%s\n\n", msg.Content)
builder.WriteString(fmt.Sprintf("System:\n%s\n\n", msg.Content))
case goopenai.ChatMessageRoleAssistant:
output += fmt.Sprintf("Assistant:\n%s\n\n", msg.Content)
builder.WriteString(c.formatMultiContentMessage(msg))
case goopenai.ChatMessageRoleUser:
output += fmt.Sprintf("User:\n%s\n\n", msg.Content)
builder.WriteString(c.formatMultiContentMessage(msg))
default:
output += fmt.Sprintf("%s:\n%s\n\n", msg.Role, msg.Content)
builder.WriteString(fmt.Sprintf("%s:\n%s\n\n", msg.Role, msg.Content))
}
}
output += "Options:\n"
output += fmt.Sprintf("Model: %s\n", opts.Model)
output += fmt.Sprintf("Temperature: %f\n", opts.Temperature)
output += fmt.Sprintf("TopP: %f\n", opts.TopP)
output += fmt.Sprintf("PresencePenalty: %f\n", opts.PresencePenalty)
output += fmt.Sprintf("FrequencyPenalty: %f\n", opts.FrequencyPenalty)
return builder.String()
}
func (c *Client) formatOptions(opts *common.ChatOptions) string {
var builder strings.Builder
builder.WriteString("Options:\n")
builder.WriteString(fmt.Sprintf("Model: %s\n", opts.Model))
builder.WriteString(fmt.Sprintf("Temperature: %f\n", opts.Temperature))
builder.WriteString(fmt.Sprintf("TopP: %f\n", opts.TopP))
builder.WriteString(fmt.Sprintf("PresencePenalty: %f\n", opts.PresencePenalty))
builder.WriteString(fmt.Sprintf("FrequencyPenalty: %f\n", opts.FrequencyPenalty))
if opts.ModelContextLength != 0 {
output += fmt.Sprintf("ModelContextLength: %d\n", opts.ModelContextLength)
builder.WriteString(fmt.Sprintf("ModelContextLength: %d\n", opts.ModelContextLength))
}
channel <- output
return builder.String()
}
func (c *Client) SendStream(msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions, channel chan string) error {
var builder strings.Builder
builder.WriteString("Dry run: Would send the following request:\n\n")
builder.WriteString(c.formatMessages(msgs))
builder.WriteString(c.formatOptions(opts))
channel <- builder.String()
close(channel)
return nil
}
func (c *Client) Send(_ context.Context, msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions) (string, error) {
fmt.Println("Dry run: Would send the following request:")
for _, msg := range msgs {
switch msg.Role {
case goopenai.ChatMessageRoleSystem:
fmt.Printf("System:\n%s\n\n", msg.Content)
case goopenai.ChatMessageRoleAssistant:
fmt.Printf("Assistant:\n%s\n\n", msg.Content)
case goopenai.ChatMessageRoleUser:
fmt.Printf("User:\n%s\n\n", msg.Content)
default:
fmt.Printf("%s:\n%s\n\n", msg.Role, msg.Content)
}
}
fmt.Println("Options:")
fmt.Printf("Model: %s\n", opts.Model)
fmt.Printf("Temperature: %f\n", opts.Temperature)
fmt.Printf("TopP: %f\n", opts.TopP)
fmt.Printf("PresencePenalty: %f\n", opts.PresencePenalty)
fmt.Printf("FrequencyPenalty: %f\n", opts.FrequencyPenalty)
if opts.ModelContextLength != 0 {
fmt.Printf("ModelContextLength: %d\n", opts.ModelContextLength)
}
fmt.Print(c.formatMessages(msgs))
fmt.Print(c.formatOptions(opts))
return "", nil
}
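The refactor above routes both user and assistant messages through `formatMultiContentMessage`, so a dry run now lists each part of a multi-modal message instead of printing an empty `Content`. A self-contained illustration of the same formatting approach (a local stand-in, not the package's exported API), with the expected output shown in comments:

```go
package main

import (
	"fmt"
	"strings"

	goopenai "github.com/sashabaranov/go-openai"
)

// formatMessage is a standalone stand-in for the formatting above: text parts
// print their text, image parts print their URL, and plain-Content messages
// fall back to role + content.
func formatMessage(msg *goopenai.ChatCompletionMessage) string {
	var b strings.Builder
	if len(msg.MultiContent) == 0 {
		fmt.Fprintf(&b, "%s:\n%s\n\n", msg.Role, msg.Content)
		return b.String()
	}
	fmt.Fprintf(&b, "%s:\n", msg.Role)
	for _, part := range msg.MultiContent {
		fmt.Fprintf(&b, "  - Type: %s\n", part.Type)
		if part.Type == goopenai.ChatMessagePartTypeImageURL {
			fmt.Fprintf(&b, "    Image URL: %s\n", part.ImageURL.URL)
		} else {
			fmt.Fprintf(&b, "    Text: %s\n", part.Text)
		}
	}
	b.WriteString("\n")
	return b.String()
}

func main() {
	msg := &goopenai.ChatCompletionMessage{
		Role: goopenai.ChatMessageRoleUser,
		MultiContent: []goopenai.ChatMessagePart{
			{Type: goopenai.ChatMessagePartTypeText, Text: "what is in this picture?"},
			{Type: goopenai.ChatMessagePartTypeImageURL, ImageURL: &goopenai.ChatMessageImageURL{URL: "https://example.com/cat.png"}},
		},
	}
	fmt.Print(formatMessage(msg))
	// user:
	//   - Type: text
	//     Text: what is in this picture?
	//   - Type: image_url
	//     Image URL: https://example.com/cat.png
}
```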

View File

@@ -26,6 +26,7 @@ func Serve(registry *core.PluginRegistry, address string, apiKey string) (err er
NewContextsHandler(r, fabricDb.Contexts)
NewSessionsHandler(r, fabricDb.Sessions)
NewChatHandler(r, registry, fabricDb)
NewYouTubeHandler(r, registry)
NewConfigHandler(r, fabricDb)
NewModelsHandler(r, registry.VendorManager)
NewStrategiesHandler(r)

restapi/youtube.go (new file, 70 lines)
View File

@@ -0,0 +1,70 @@
package restapi
import (
"net/http"
"github.com/danielmiessler/fabric/core"
"github.com/danielmiessler/fabric/plugins/tools/youtube"
"github.com/gin-gonic/gin"
)
type YouTubeHandler struct {
yt *youtube.YouTube
}
type YouTubeRequest struct {
URL string `json:"url"`
Language string `json:"language"`
Timestamps bool `json:"timestamps"`
}
type YouTubeResponse struct {
Transcript string `json:"transcript"`
Title string `json:"title"`
}
func NewYouTubeHandler(r *gin.Engine, registry *core.PluginRegistry) *YouTubeHandler {
handler := &YouTubeHandler{yt: registry.YouTube}
r.POST("/youtube/transcript", handler.Transcript)
return handler
}
func (h *YouTubeHandler) Transcript(c *gin.Context) {
var req YouTubeRequest
if err := c.BindJSON(&req); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"})
return
}
if req.URL == "" {
c.JSON(http.StatusBadRequest, gin.H{"error": "url is required"})
return
}
language := req.Language
if language == "" {
language = "en"
}
var videoID, playlistID string
var err error
if videoID, playlistID, err = h.yt.GetVideoOrPlaylistId(req.URL); err != nil {
c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
return
}
if videoID == "" && playlistID != "" {
c.JSON(http.StatusBadRequest, gin.H{"error": "URL is a playlist, not a video"})
return
}
var transcript string
if req.Timestamps {
transcript, err = h.yt.GrabTranscriptWithTimestamps(videoID, language)
} else {
transcript, err = h.yt.GrabTranscript(videoID, language)
}
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
return
}
c.JSON(http.StatusOK, YouTubeResponse{Transcript: transcript, Title: videoID})
}
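A minimal Go client for the new route, using the request and response field names defined above. The base URL and port are assumptions (they depend on the address the REST server is started with), and any API-key authentication the server may enforce is omitted.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Field names match YouTubeRequest above.
	reqBody, _ := json.Marshal(map[string]any{
		"url":        "https://www.youtube.com/watch?v=dQw4w9WgXcQ",
		"language":   "en",
		"timestamps": false,
	})

	// Assumed base URL; adjust to wherever the fabric REST API is serving.
	resp, err := http.Post("http://localhost:8080/youtube/transcript", "application/json", bytes.NewReader(reqBody))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// Field names match YouTubeResponse above.
	var out struct {
		Transcript string `json:"transcript"`
		Title      string `json:"title"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.Title) // the handler above currently returns the video ID here
	fmt.Println(len(out.Transcript), "characters of transcript")
}
```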

View File

@@ -1,3 +1,3 @@
package main
var version = "v1.4.214"
var version = "v1.4.217"

View File

@@ -1,5 +1,5 @@
import { get } from 'svelte/store';
import { languageStore } from '$lib/store/language-store';
import { get } from 'svelte/store';
export interface TranscriptResponse {
transcript: string;
@@ -18,18 +18,18 @@ export async function getTranscript(url: string): Promise<TranscriptResponse> {
console.log('\n=== YouTube Transcript Service Start ===');
console.log('1. Request details:', {
url,
endpoint: '/chat',
endpoint: '/api/youtube/transcript',
method: 'POST',
isYouTubeURL: url.includes('youtube.com') || url.includes('youtu.be'),
originalLanguage
});
const response = await fetch('/chat', {
const response = await fetch('/api/youtube/transcript', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
body: JSON.stringify({
url,
language: originalLanguage // Pass original language to server
})

File diff suppressed because it is too large