Mirror of https://github.com/danielmiessler/Fabric.git, synced 2026-01-09 22:38:10 -05:00

Compare commits

20 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 126a9ff406 | |
| | e906425138 | |
| | df4a560302 | |
| | 34cf669bd4 | |
| | 0dbe1bbb4e | |
| | e29ed908e6 | |
| | 3d049a435a | |
| | 1a335b3fb9 | |
| | e2430b6c75 | |
| | 2497f10eca | |
| | f62d2198f9 | |
| | 816e4072f4 | |
| | 85ee6196bd | |
| | e15645c1bc | |
| | fada6bb044 | |
| | 4ad14bb752 | |
| | 97fc9b0d58 | |
| | ad0df37d10 | |
| | 666302c3c1 | |
| | 71e20cf251 | |
File diff suppressed because it is too large
cli/flags.go (15 changed lines)

```diff
@@ -279,14 +279,7 @@ func (o *Flags) BuildChatRequest(Meta string) (ret *common.ChatRequest, err error) {
 	}
 
 	var message *goopenai.ChatCompletionMessage
-	if len(o.Attachments) == 0 {
-		if o.Message != "" {
-			message = &goopenai.ChatCompletionMessage{
-				Role:    goopenai.ChatMessageRoleUser,
-				Content: strings.TrimSpace(o.Message),
-			}
-		}
-	} else {
+	if len(o.Attachments) > 0 {
 		message = &goopenai.ChatCompletionMessage{
 			Role: goopenai.ChatMessageRoleUser,
 		}
@@ -323,7 +316,13 @@ func (o *Flags) BuildChatRequest(Meta string) (ret *common.ChatRequest, err error) {
 				},
 			})
 		}
+	} else if o.Message != "" {
+		message = &goopenai.ChatCompletionMessage{
+			Role:    goopenai.ChatMessageRoleUser,
+			Content: strings.TrimSpace(o.Message),
+		}
+	}
 
 	ret.Message = message
 
 	if o.Language != "" {
```
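Taken together, the two hunks above collapse BuildChatRequest's message construction into a single branch on whether attachments are present. A minimal standalone sketch of that control flow follows; buildMessage is a hypothetical stand-in for the flag handling, not Fabric's API, and only the go-openai types and the TrimSpace behaviour are taken from the diff:

```go
package main

import (
	"fmt"
	"strings"

	goopenai "github.com/sashabaranov/go-openai"
)

// buildMessage mirrors the simplified branching shown above: attachments (if any)
// produce a user message that will later receive MultiContent parts; otherwise a
// trimmed text message is built; otherwise no message at all.
func buildMessage(text string, attachments []string) *goopenai.ChatCompletionMessage {
	if len(attachments) > 0 {
		// Attachment parts are appended to MultiContent later in the real function.
		return &goopenai.ChatCompletionMessage{Role: goopenai.ChatMessageRoleUser}
	} else if text != "" {
		return &goopenai.ChatCompletionMessage{
			Role:    goopenai.ChatMessageRoleUser,
			Content: strings.TrimSpace(text),
		}
	}
	return nil
}

func main() {
	fmt.Println(buildMessage("  summarize this  ", nil).Content) // prints "summarize this"
}
```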
```diff
@@ -30,11 +30,11 @@ type Chatter struct {
 	strategy string
 }
 
-// Send processes a chat request and applies any file changes if using the create_coding_feature pattern
+// Send processes a chat request and applies file changes for create_coding_feature pattern
 func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (session *fsdb.Session, err error) {
 	modelToUse := opts.Model
 	if modelToUse == "" {
-		modelToUse = o.model // Default to the model set in the Chatter struct
+		modelToUse = o.model
 	}
 	if o.vendor.NeedsRawMode(modelToUse) {
 		opts.Raw = true
@@ -89,18 +89,15 @@ func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (session *fsdb.Session, err error) {
 		return
 	}
 
-	// Process file changes if using the create_coding_feature pattern
+	// Process file changes for create_coding_feature pattern
 	if request.PatternName == "create_coding_feature" {
-		// Look for file changes in the response
 		summary, fileChanges, parseErr := common.ParseFileChanges(message)
 		if parseErr != nil {
 			fmt.Printf("Warning: Failed to parse file changes: %v\n", parseErr)
 		} else if len(fileChanges) > 0 {
-			// Get the project root - use the current directory
 			projectRoot, err := os.Getwd()
 			if err != nil {
 				fmt.Printf("Warning: Failed to get current directory: %v\n", err)
-				// Continue without applying changes
 			} else {
 				if applyErr := common.ApplyFileChanges(projectRoot, fileChanges); applyErr != nil {
 					fmt.Printf("Warning: Failed to apply file changes: %v\n", applyErr)
@@ -122,7 +119,6 @@ func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (session *fsdb.Session, err error) {
 }
 
 func (o *Chatter) BuildSession(request *common.ChatRequest, raw bool) (session *fsdb.Session, err error) {
-	// If a session name is provided, retrieve it from the database
 	if request.SessionName != "" {
 		var sess *fsdb.Session
 		if sess, err = o.db.Sessions.Get(request.SessionName); err != nil {
@@ -149,9 +145,9 @@ func (o *Chatter) BuildSession(request *common.ChatRequest, raw bool) (session *fsdb.Session, err error) {
 		contextContent = ctx.Content
 	}
 
-	// Process any template variables in the message content (user input)
+	// Process template variables in message content
 	// Double curly braces {{variable}} indicate template substitution
-	// Ensure we have a message before processing, other wise we'll get an error when we pass to pattern.go
+	// Ensure we have a message before processing
 	if request.Message == nil {
 		request.Message = &goopenai.ChatCompletionMessage{
 			Role: goopenai.ChatMessageRoleUser,
@@ -168,19 +164,19 @@ func (o *Chatter) BuildSession(request *common.ChatRequest, raw bool) (session *fsdb.Session, err error) {
 	}
 
 	var patternContent string
+	inputUsed := false
 	if request.PatternName != "" {
 		pattern, err := o.db.Patterns.GetApplyVariables(request.PatternName, request.PatternVariables, request.Message.Content)
-		// pattern will now contain user input, and all variables will be resolved, or errored
-
 		if err != nil {
 			return nil, fmt.Errorf("could not get pattern %s: %v", request.PatternName, err)
 		}
 		patternContent = pattern.Pattern
+		inputUsed = true
 	}
 
 	systemMessage := strings.TrimSpace(contextContent) + strings.TrimSpace(patternContent)
 
 	// Apply strategy if specified
 	if request.StrategyName != "" {
 		strategy, err := strategy.LoadStrategy(request.StrategyName)
 		if err != nil {
@@ -199,33 +195,51 @@ func (o *Chatter) BuildSession(request *common.ChatRequest, raw bool) (session *fsdb.Session, err error) {
 	if raw {
 		// In raw mode, we want to avoid duplicating the input that's already in the pattern
 		var finalContent string
 		if systemMessage != "" {
 			// If we have a pattern, it already includes the user input
 			if request.PatternName != "" {
 				finalContent = systemMessage
 			} else {
 				// No pattern, combine system message with user input
 				finalContent = fmt.Sprintf("%s\n\n%s", systemMessage, request.Message.Content)
 			}
-			request.Message = &goopenai.ChatCompletionMessage{
-				Role:    goopenai.ChatMessageRoleUser,
-				Content: finalContent,
+
+			// Handle MultiContent properly in raw mode
+			if len(request.Message.MultiContent) > 0 {
+				// When we have attachments, add the text as a text part in MultiContent
+				newMultiContent := []goopenai.ChatMessagePart{
+					{
+						Type: goopenai.ChatMessagePartTypeText,
+						Text: finalContent,
+					},
+				}
+				// Add existing non-text parts (like images)
+				for _, part := range request.Message.MultiContent {
+					if part.Type != goopenai.ChatMessagePartTypeText {
+						newMultiContent = append(newMultiContent, part)
+					}
+				}
+				request.Message = &goopenai.ChatCompletionMessage{
+					Role:         goopenai.ChatMessageRoleUser,
+					MultiContent: newMultiContent,
+				}
+			} else {
+				// No attachments, use regular Content field
+				request.Message = &goopenai.ChatCompletionMessage{
+					Role:    goopenai.ChatMessageRoleUser,
+					Content: finalContent,
+				}
+			}
 		}
 		// After this, if request.Message is not nil, append it
 		if request.Message != nil {
 			session.Append(request.Message)
 		}
-	} else { // Not raw mode
+	} else {
 		if systemMessage != "" {
 			session.Append(&goopenai.ChatCompletionMessage{Role: goopenai.ChatMessageRoleSystem, Content: systemMessage})
 		}
 		// If a pattern was used (request.PatternName != ""), its output (systemMessage)
 		// already incorporates the user input (request.Message.Content via GetApplyVariables).
 		// So, we only append the direct user message if NO pattern was used.
 		if request.PatternName == "" && request.Message != nil {
+			// If multi-part content, it is in the user message, and should be added.
+			// Otherwise, we should only add it if we have not already used it in the systemMessage.
+			if len(request.Message.MultiContent) > 0 || (request.Message != nil && !inputUsed) {
 				session.Append(request.Message)
 			}
 		}
```
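The raw-mode portion of this change rebuilds MultiContent so the combined prompt text becomes the first part and any existing non-text parts (such as image URLs) are carried over. Below is a self-contained sketch of that rebuild, assuming only the go-openai types shown in the diff; rebuildRawMessage is an illustrative name, not Fabric code:

```go
package main

import (
	"fmt"

	goopenai "github.com/sashabaranov/go-openai"
)

// rebuildRawMessage puts the combined prompt text first, then keeps any existing
// non-text parts so attachments survive the raw-mode rewrite.
func rebuildRawMessage(finalContent string, existing []goopenai.ChatMessagePart) *goopenai.ChatCompletionMessage {
	parts := []goopenai.ChatMessagePart{{
		Type: goopenai.ChatMessagePartTypeText,
		Text: finalContent,
	}}
	for _, part := range existing {
		if part.Type != goopenai.ChatMessagePartTypeText {
			parts = append(parts, part)
		}
	}
	return &goopenai.ChatCompletionMessage{
		Role:         goopenai.ChatMessageRoleUser,
		MultiContent: parts,
	}
}

func main() {
	img := goopenai.ChatMessagePart{
		Type:     goopenai.ChatMessagePartTypeImageURL,
		ImageURL: &goopenai.ChatMessageImageURL{URL: "https://example.com/diagram.png"}, // placeholder URL
	}
	msg := rebuildRawMessage("system + user prompt", []goopenai.ChatMessagePart{img})
	fmt.Println(len(msg.MultiContent)) // 2: the text part followed by the image part
}
```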
```diff
@@ -1 +1 @@
-"1.4.214"
+"1.4.217"
```
patterns/extract_mcp_servers/system.md (new file, 64 lines)

```markdown
# IDENTITY and PURPOSE

You are an expert at analyzing content related to MCP (Model Context Protocol) servers. You excel at identifying and extracting mentions of MCP servers, their features, capabilities, integrations, and usage patterns.

Take a step back and think step-by-step about how to achieve the best results for extracting MCP server information.

# STEPS

- Read and analyze the entire content carefully
- Identify all mentions of MCP servers, including:
  - Specific MCP server names
  - Server capabilities and features
  - Integration details
  - Configuration examples
  - Use cases and applications
  - Installation or setup instructions
  - API endpoints or methods exposed
  - Any limitations or requirements

# OUTPUT SECTIONS

- Output a summary of all MCP servers mentioned with the following sections:

## SERVERS FOUND

- List each MCP server found with a 15-word description
- Include the server name and its primary purpose
- Use bullet points for each server

## SERVER DETAILS

For each server found, provide:

- **Server Name**: The official name
- **Purpose**: Main functionality in 25 words or less
- **Key Features**: Up to 5 main features as bullet points
- **Integration**: How it integrates with systems (if mentioned)
- **Configuration**: Any configuration details mentioned
- **Requirements**: Dependencies or requirements (if specified)

## USAGE EXAMPLES

- Extract any code snippets or usage examples
- Include configuration files or setup instructions
- Present each example with context

## INSIGHTS

- Provide 3-5 insights about the MCP servers mentioned
- Focus on patterns, trends, or notable characteristics
- Each insight should be a 20-word bullet point

# OUTPUT INSTRUCTIONS

- Output in clean, readable Markdown
- Use proper heading hierarchy
- Include code blocks with appropriate language tags
- Do not include warnings or notes about the content
- If no MCP servers are found, simply state "No MCP servers mentioned in the content"
- Ensure all server names are accurately captured
- Preserve technical details and specifications

# INPUT:

INPUT:
```
```diff
@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"context"
 	"fmt"
+	"strings"
 
 	goopenai "github.com/sashabaranov/go-openai"
@@ -23,62 +24,77 @@ func (c *Client) ListModels() ([]string, error) {
 	return []string{"dry-run-model"}, nil
 }
 
-func (c *Client) SendStream(msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions, channel chan string) error {
-	output := "Dry run: Would send the following request:\n\n"
+func (c *Client) formatMultiContentMessage(msg *goopenai.ChatCompletionMessage) string {
+	var builder strings.Builder
+
+	if len(msg.MultiContent) > 0 {
+		builder.WriteString(fmt.Sprintf("%s:\n", msg.Role))
+		for _, part := range msg.MultiContent {
+			builder.WriteString(fmt.Sprintf(" - Type: %s\n", part.Type))
+			if part.Type == goopenai.ChatMessagePartTypeImageURL {
+				builder.WriteString(fmt.Sprintf(" Image URL: %s\n", part.ImageURL.URL))
+			} else {
+				builder.WriteString(fmt.Sprintf(" Text: %s\n", part.Text))
+			}
+		}
+		builder.WriteString("\n")
+	} else {
+		builder.WriteString(fmt.Sprintf("%s:\n%s\n\n", msg.Role, msg.Content))
+	}
+
+	return builder.String()
+}
+
+func (c *Client) formatMessages(msgs []*goopenai.ChatCompletionMessage) string {
+	var builder strings.Builder
+
 	for _, msg := range msgs {
 		switch msg.Role {
 		case goopenai.ChatMessageRoleSystem:
-			output += fmt.Sprintf("System:\n%s\n\n", msg.Content)
+			builder.WriteString(fmt.Sprintf("System:\n%s\n\n", msg.Content))
 		case goopenai.ChatMessageRoleAssistant:
-			output += fmt.Sprintf("Assistant:\n%s\n\n", msg.Content)
+			builder.WriteString(c.formatMultiContentMessage(msg))
 		case goopenai.ChatMessageRoleUser:
-			output += fmt.Sprintf("User:\n%s\n\n", msg.Content)
+			builder.WriteString(c.formatMultiContentMessage(msg))
 		default:
-			output += fmt.Sprintf("%s:\n%s\n\n", msg.Role, msg.Content)
+			builder.WriteString(fmt.Sprintf("%s:\n%s\n\n", msg.Role, msg.Content))
 		}
 	}
 
-	output += "Options:\n"
-	output += fmt.Sprintf("Model: %s\n", opts.Model)
-	output += fmt.Sprintf("Temperature: %f\n", opts.Temperature)
-	output += fmt.Sprintf("TopP: %f\n", opts.TopP)
-	output += fmt.Sprintf("PresencePenalty: %f\n", opts.PresencePenalty)
-	output += fmt.Sprintf("FrequencyPenalty: %f\n", opts.FrequencyPenalty)
+	return builder.String()
+}
+
+func (c *Client) formatOptions(opts *common.ChatOptions) string {
+	var builder strings.Builder
+
+	builder.WriteString("Options:\n")
+	builder.WriteString(fmt.Sprintf("Model: %s\n", opts.Model))
+	builder.WriteString(fmt.Sprintf("Temperature: %f\n", opts.Temperature))
+	builder.WriteString(fmt.Sprintf("TopP: %f\n", opts.TopP))
+	builder.WriteString(fmt.Sprintf("PresencePenalty: %f\n", opts.PresencePenalty))
+	builder.WriteString(fmt.Sprintf("FrequencyPenalty: %f\n", opts.FrequencyPenalty))
 	if opts.ModelContextLength != 0 {
-		output += fmt.Sprintf("ModelContextLength: %d\n", opts.ModelContextLength)
+		builder.WriteString(fmt.Sprintf("ModelContextLength: %d\n", opts.ModelContextLength))
 	}
 
-	channel <- output
+	return builder.String()
+}
+
+func (c *Client) SendStream(msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions, channel chan string) error {
+	var builder strings.Builder
+	builder.WriteString("Dry run: Would send the following request:\n\n")
+	builder.WriteString(c.formatMessages(msgs))
+	builder.WriteString(c.formatOptions(opts))
+
+	channel <- builder.String()
 	close(channel)
 	return nil
 }
 
 func (c *Client) Send(_ context.Context, msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions) (string, error) {
 	fmt.Println("Dry run: Would send the following request:")
 
-	for _, msg := range msgs {
-		switch msg.Role {
-		case goopenai.ChatMessageRoleSystem:
-			fmt.Printf("System:\n%s\n\n", msg.Content)
-		case goopenai.ChatMessageRoleAssistant:
-			fmt.Printf("Assistant:\n%s\n\n", msg.Content)
-		case goopenai.ChatMessageRoleUser:
-			fmt.Printf("User:\n%s\n\n", msg.Content)
-		default:
-			fmt.Printf("%s:\n%s\n\n", msg.Role, msg.Content)
-		}
-	}
-
-	fmt.Println("Options:")
-	fmt.Printf("Model: %s\n", opts.Model)
-	fmt.Printf("Temperature: %f\n", opts.Temperature)
-	fmt.Printf("TopP: %f\n", opts.TopP)
-	fmt.Printf("PresencePenalty: %f\n", opts.PresencePenalty)
-	fmt.Printf("FrequencyPenalty: %f\n", opts.FrequencyPenalty)
-	if opts.ModelContextLength != 0 {
-		fmt.Printf("ModelContextLength: %d\n", opts.ModelContextLength)
-	}
+	fmt.Print(c.formatMessages(msgs))
+	fmt.Print(c.formatOptions(opts))
 
 	return "", nil
 }
```
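The dry-run client now assembles its report with strings.Builder helpers and pushes a single string through the channel before closing it. A hedged usage sketch follows; the import paths for the dryrun and common packages and the NewClient constructor are assumptions, while the SendStream signature and the ChatOptions fields are taken from the diff:

```go
package main

import (
	"fmt"

	"github.com/danielmiessler/fabric/common"            // assumed import path for ChatOptions
	"github.com/danielmiessler/fabric/plugins/ai/dryrun" // assumed import path for the dry-run client
	goopenai "github.com/sashabaranov/go-openai"
)

func main() {
	client := dryrun.NewClient() // assumed constructor name
	msgs := []*goopenai.ChatCompletionMessage{
		{Role: goopenai.ChatMessageRoleSystem, Content: "You are terse."},
		{Role: goopenai.ChatMessageRoleUser, Content: "Say hi."},
	}
	opts := &common.ChatOptions{Model: "dry-run-model", Temperature: 0.7}

	// SendStream writes one formatted dry-run report to the channel and closes it.
	ch := make(chan string)
	go func() {
		if err := client.SendStream(msgs, opts, ch); err != nil {
			fmt.Println("error:", err)
		}
	}()
	for chunk := range ch {
		fmt.Print(chunk)
	}
}
```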
```diff
@@ -26,6 +26,7 @@ func Serve(registry *core.PluginRegistry, address string, apiKey string) (err error) {
 	NewContextsHandler(r, fabricDb.Contexts)
 	NewSessionsHandler(r, fabricDb.Sessions)
 	NewChatHandler(r, registry, fabricDb)
+	NewYouTubeHandler(r, registry)
 	NewConfigHandler(r, fabricDb)
 	NewModelsHandler(r, registry.VendorManager)
 	NewStrategiesHandler(r)
```
restapi/youtube.go (new file, 70 lines)

```go
package restapi

import (
	"net/http"

	"github.com/danielmiessler/fabric/core"
	"github.com/danielmiessler/fabric/plugins/tools/youtube"
	"github.com/gin-gonic/gin"
)

type YouTubeHandler struct {
	yt *youtube.YouTube
}

type YouTubeRequest struct {
	URL        string `json:"url"`
	Language   string `json:"language"`
	Timestamps bool   `json:"timestamps"`
}

type YouTubeResponse struct {
	Transcript string `json:"transcript"`
	Title      string `json:"title"`
}

func NewYouTubeHandler(r *gin.Engine, registry *core.PluginRegistry) *YouTubeHandler {
	handler := &YouTubeHandler{yt: registry.YouTube}
	r.POST("/youtube/transcript", handler.Transcript)
	return handler
}

func (h *YouTubeHandler) Transcript(c *gin.Context) {
	var req YouTubeRequest
	if err := c.BindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request"})
		return
	}
	if req.URL == "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "url is required"})
		return
	}
	language := req.Language
	if language == "" {
		language = "en"
	}

	var videoID, playlistID string
	var err error
	if videoID, playlistID, err = h.yt.GetVideoOrPlaylistId(req.URL); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
		return
	}
	if videoID == "" && playlistID != "" {
		c.JSON(http.StatusBadRequest, gin.H{"error": "URL is a playlist, not a video"})
		return
	}

	var transcript string
	if req.Timestamps {
		transcript, err = h.yt.GrabTranscriptWithTimestamps(videoID, language)
	} else {
		transcript, err = h.yt.GrabTranscript(videoID, language)
	}
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusOK, YouTubeResponse{Transcript: transcript, Title: videoID})
}
```
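A small client sketch for the new endpoint: the request and response fields mirror YouTubeRequest and YouTubeResponse above, while the host, port, and example video URL are placeholders for whatever your Fabric API server actually listens on (add any API-key header your deployment requires):

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Body fields match the YouTubeRequest JSON tags shown above.
	reqBody, err := json.Marshal(struct {
		URL        string `json:"url"`
		Language   string `json:"language"`
		Timestamps bool   `json:"timestamps"`
	}{
		URL:      "https://www.youtube.com/watch?v=dQw4w9WgXcQ", // placeholder video URL
		Language: "en",
	})
	if err != nil {
		panic(err)
	}

	// Assumed local address; the route itself comes from NewYouTubeHandler above.
	resp, err := http.Post("http://localhost:8080/youtube/transcript", "application/json", bytes.NewReader(reqBody))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out struct {
		Transcript string `json:"transcript"`
		Title      string `json:"title"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.Title, len(out.Transcript))
}
```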
```diff
@@ -1,3 +1,3 @@
 package main
 
-var version = "v1.4.214"
+var version = "v1.4.217"
```
```diff
@@ -1,5 +1,5 @@
-import { get } from 'svelte/store';
 import { languageStore } from '$lib/store/language-store';
+import { get } from 'svelte/store';
 
 export interface TranscriptResponse {
 	transcript: string;
@@ -18,18 +18,18 @@ export async function getTranscript(url: string): Promise<TranscriptResponse> {
 	console.log('\n=== YouTube Transcript Service Start ===');
 	console.log('1. Request details:', {
 		url,
-		endpoint: '/chat',
+		endpoint: '/api/youtube/transcript',
 		method: 'POST',
 		isYouTubeURL: url.includes('youtube.com') || url.includes('youtu.be'),
 		originalLanguage
 	});
 
-	const response = await fetch('/chat', {
+	const response = await fetch('/api/youtube/transcript', {
 		method: 'POST',
 		headers: {
 			'Content-Type': 'application/json',
 		},
 		body: JSON.stringify({
 			url,
 			language: originalLanguage // Pass original language to server
 		})
```
File diff suppressed because it is too large