Compare commits

...

12 Commits

Author SHA1 Message Date
github-actions[bot]
104513f72b Update version to v1.4.125 and commit 2024-12-22 14:53:17 +00:00
Eugen Eisler
e434999802 Merge pull request #1222 from wmahfoudh/fix-cross-filesystem-move
Fix cross-filesystem file move in to_pdf plugin (issue 1221)
2024-12-22 15:52:13 +01:00
Walid
fce06b5294 Fix cross-filesystem file move in to_pdf plugin (issue 1221) 2024-12-22 13:58:45 +04:00
github-actions[bot]
6d00405eb6 Update version to v1.4.124 and commit 2024-12-21 14:01:47 +00:00
Eugen Eisler
65285fdef0 Merge pull request #1215 from infosecwatchman/main
Add Endpoints to facilitate Ollama based chats
2024-12-21 15:00:52 +01:00
Eugen Eisler
89edd7152a Merge pull request #1214 from iliajie/fix/patterns-translate
Fix the typo in the sentence
2024-12-21 14:59:48 +01:00
Eugen Eisler
5527dc8db5 Merge pull request #1213 from AnirudhG07/main
Spelling Fixes
2024-12-21 14:59:21 +01:00
InfosecWatchman
103388ecec Add Endpoints to facilitate Ollama based chats
Add Endpoints to facilitate Ollama based chats.

Built to use with Open WebUI
2024-12-19 16:14:51 -05:00
Ilia Ross
53ea7ab126 Fix the typo in the sentence 2024-12-19 12:26:44 +02:00
AnirudhG07
b008d17b6e Spelling fixes in create_quiz pattern 2024-12-19 13:52:25 +05:30
AnirudhG07
2ba294f4d6 Spelling fix in README 2024-12-19 13:50:06 +05:30
AnirudhG07
a7ed257fe3 Spelling fixes in patterns 2024-12-19 13:38:37 +05:30
11 changed files with 324 additions and 13 deletions

View File

@@ -68,7 +68,7 @@
> [!NOTE]
> November 8, 2024
>
-> - **Multimodal Support**: You can now us `-a` (attachment) for Multimodal submissions to OpenAI models that support it. Example: `fabric -a https://path/to/image "Give me a description of this image."`
+> - **Multimodal Support**: You can now use `-a` (attachment) for Multimodal submissions to OpenAI models that support it. Example: `fabric -a https://path/to/image "Give me a description of this image."`
## What and why

View File

@@ -56,6 +56,12 @@ func Cli(version string) (err error) {
return
}
+if currentFlags.ServeOllama {
+registry.ConfigureVendors()
+err = restapi.ServeOllama(registry, currentFlags.ServeAddress, version)
+return
+}
if currentFlags.UpdatePatterns {
err = registry.PatternsLoader.PopulateDB()
return

View File

@@ -61,6 +61,7 @@ type Flags struct {
InputHasVars bool `long:"input-has-vars" description:"Apply variables to user input"`
DryRun bool `long:"dry-run" description:"Show what would be sent to the model without actually sending it"`
Serve bool `long:"serve" description:"Serve the Fabric Rest API"`
+ServeOllama bool `long:"serveOllama" description:"Serve the Fabric Rest API with ollama endpoints"`
ServeAddress string `long:"address" description:"The address to bind the REST API" default:":8080"`
Config string `long:"config" description:"Path to YAML config file"`
Version bool `long:"version" description:"Print current version"`
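
Together with the cli.go dispatch above, this flag switches fabric's REST server into Ollama-compatibility mode. A minimal invocation sketch (the address shown is simply the flag's documented default):

```bash
# Serve the Fabric REST API plus the Ollama-compatible endpoints
fabric --serveOllama --address :8080
```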

View File

@@ -26,11 +26,11 @@ Subject: Machine Learning
```
-# Example run un bash:
+# Example run bash:
Copy the input query to the clipboard and execute the following command:
-``` bash
+```bash
xclip -selection clipboard -o | fabric -sp analize_answers
```

View File

@@ -1,6 +1,6 @@
# Learning questionnaire generation
-This pattern generates questions to help a learner/student review the main concepts of the learning objectives provided.
+This pattern generates questions to help a learner/student review the main concepts of the learning objectives provided.
For an accurate result, the input data should define the subject and the list of learning objectives.
@@ -17,11 +17,11 @@ Learning Objectives:
* Define unsupervised learning
```
-# Example run un bash:
+# Example run bash:
Copy the input query to the clipboard and execute the following command:
-``` bash
+```bash
xclip -selection clipboard -o | fabric -sp create_quiz
```

View File

@@ -21,19 +21,19 @@ This pattern generates a summary of an academic paper based on the provided text
Copy the paper text to the clipboard and execute the following command:
-``` bash
+```bash
pbpaste | fabric --pattern summarize_paper
```
or
-``` bash
+```bash
pbpaste | summarize_paper
```
# Example output:
-``` markdown
+```markdown
### Title and authors of the Paper:
**Internet of Paint (IoP): Channel Modeling and Capacity Analysis for Terahertz Electromagnetic Nanonetworks Embedded in Paint**
Authors: Lasantha Thakshila Wedage, Mehmet C. Vuran, Bernard Butler, Yevgeni Koucheryavy, Sasitharan Balasubramaniam

View File

@@ -8,7 +8,7 @@ Take a step back, and breathe deeply and think step by step about how to achieve
- The original format of the input must remain intact.
-- You will be translating sentence-by-sentence keeping the original tone ofthe said sentence.
+- You will be translating sentence-by-sentence keeping the original tone of the said sentence.
- You will not manipulate the wording to change the meaning.

View File

@@ -1 +1 @@
"1.4.123"
"1.4.125"

View File

@@ -76,12 +76,19 @@ func main() {
}
// Move the output PDF to the current directory
-err = os.Rename(pdfPath, outputFile)
+err = copyFile(pdfPath, outputFile)
if err != nil {
fmt.Fprintf(os.Stderr, "Error moving output file: %v\n", err)
os.Exit(1)
}
+// Remove the original file after copying
+err = os.Remove(pdfPath)
+if err != nil {
+fmt.Fprintf(os.Stderr, "Error cleaning up temporary file: %v\n", err)
+os.Exit(1)
+}
// Clean up temporary files
cleanupTempFiles(tmpDir)
@@ -103,3 +110,25 @@ func cleanupTempFiles(dir string) {
}
}
}
+// Copy a file from source src to destination dst
+func copyFile(src, dst string) error {
+sourceFile, err := os.Open(src)
+if err != nil {
+return err
+}
+defer sourceFile.Close()
+destFile, err := os.Create(dst)
+if err != nil {
+return err
+}
+defer destFile.Close()
+_, err = io.Copy(destFile, sourceFile)
+if err != nil {
+return err
+}
+return destFile.Sync()
+}
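
Background on the fix: os.Rename cannot move a file between filesystems; on Linux the underlying rename(2) fails with EXDEV when the temp directory and the destination sit on different mounts, which is why the plugin now copies and then removes. A hypothetical rename-first variant (a sketch under that assumption, not the PR's code) would keep the cheap atomic path whenever both locations share a filesystem:

```go
package main

import (
	"fmt"
	"io"
	"os"
)

// moveFile tries the cheap, atomic os.Rename first and falls back to
// copy-then-remove when the rename fails (e.g. with EXDEV across mounts).
func moveFile(src, dst string) error {
	if err := os.Rename(src, dst); err == nil {
		return nil
	}
	in, err := os.Open(src)
	if err != nil {
		return err
	}
	defer in.Close()
	out, err := os.Create(dst)
	if err != nil {
		return err
	}
	defer out.Close()
	if _, err := io.Copy(out, in); err != nil {
		return err
	}
	if err := out.Sync(); err != nil { // flush to disk before deleting the source
		return err
	}
	return os.Remove(src)
}

func main() {
	// Illustrative paths only.
	if err := moveFile("/tmp/fabric-output.pdf", "./output.pdf"); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```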

restapi/ollama.go Normal file (+275 lines)
View File

@@ -0,0 +1,275 @@
package restapi
import (
"bytes"
"context"
"encoding/json"
"fmt"
"github.com/danielmiessler/fabric/core"
"github.com/gin-gonic/gin"
"io"
"log"
"net/http"
"strings"
"time"
)
type OllamaModel struct {
Models []Model `json:"models"`
}
type Model struct {
Details ModelDetails `json:"details"`
Digest string `json:"digest"`
Model string `json:"model"`
ModifiedAt string `json:"modified_at"`
Name string `json:"name"`
Size int64 `json:"size"`
}
type ModelDetails struct {
Families []string `json:"families"`
Family string `json:"family"`
Format string `json:"format"`
ParameterSize string `json:"parameter_size"`
ParentModel string `json:"parent_model"`
QuantizationLevel string `json:"quantization_level"`
}
type APIConvert struct {
registry *core.PluginRegistry
r *gin.Engine
addr *string
}
type OllamaRequestBody struct {
Messages []OllamaMessage `json:"messages"`
Model string `json:"model"`
Options struct {
} `json:"options"`
Stream bool `json:"stream"`
}
type OllamaMessage struct {
Content string `json:"content"`
Role string `json:"role"`
}
type OllamaResponse struct {
Model string `json:"model"`
CreatedAt string `json:"created_at"`
Message struct {
Role string `json:"role"`
Content string `json:"content"`
} `json:"message"`
DoneReason string `json:"done_reason,omitempty"`
Done bool `json:"done"`
TotalDuration int64 `json:"total_duration,omitempty"`
LoadDuration int `json:"load_duration,omitempty"`
PromptEvalCount int `json:"prompt_eval_count,omitempty"`
PromptEvalDuration int `json:"prompt_eval_duration,omitempty"`
EvalCount int `json:"eval_count,omitempty"`
EvalDuration int64 `json:"eval_duration,omitempty"`
}
type FabricResponseFormat struct {
Type string `json:"type"`
Format string `json:"format"`
Content string `json:"content"`
}
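// ServeOllama wires up the standard fabric REST handlers and then adds
// Ollama-compatible endpoints (/api/tags, /api/version, /api/chat) so that
// Ollama clients such as Open WebUI can talk to fabric directly.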
func ServeOllama(registry *core.PluginRegistry, address string, version string) (err error) {
r := gin.New()
// Middleware
r.Use(gin.Logger())
r.Use(gin.Recovery())
// Register routes
fabricDb := registry.Db
NewPatternsHandler(r, fabricDb.Patterns)
NewContextsHandler(r, fabricDb.Contexts)
NewSessionsHandler(r, fabricDb.Sessions)
NewChatHandler(r, registry, fabricDb)
NewConfigHandler(r, fabricDb)
NewModelsHandler(r, registry.VendorManager)
typeConversion := APIConvert{
registry: registry,
r: r,
addr: &address,
}
// Ollama Endpoints
r.GET("/api/tags", typeConversion.ollamaTags)
r.GET("/api/version", func(c *gin.Context) {
c.Data(200, "application/json", []byte(fmt.Sprintf("{\"version\": \"%s\"}", version)))
return
})
r.POST("/api/chat", typeConversion.ollamaChat)
// Start server
err = r.Run(address)
if err != nil {
return err
}
return
}
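// ollamaTags advertises every stored fabric pattern as an Ollama "model"
// named <pattern>:latest, which is how clients listing /api/tags discover
// the patterns they can chat with.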
func (f APIConvert) ollamaTags(c *gin.Context) {
patterns, err := f.registry.Db.Patterns.GetNames()
if err != nil {
c.JSON(http.StatusInternalServerError, gin.H{"error": err})
return
}
var response OllamaModel
for _, pattern := range patterns {
today := time.Now().Format(time.RFC3339Nano) // modified_at timestamp in RFC 3339 form
details := ModelDetails{
Families: []string{"fabric"},
Family: "fabric",
Format: "custom",
ParameterSize: "42.0B",
ParentModel: "",
QuantizationLevel: "",
}
response.Models = append(response.Models, Model{
Details: details,
Digest: "365c0bd3c000a25d28ddbf732fe1c6add414de7275464c4e4d1c3b5fcb5d8ad1",
Model: fmt.Sprintf("%s:latest", pattern),
ModifiedAt: today,
Name: fmt.Sprintf("%s:latest", pattern),
Size: 0,
})
}
c.JSON(200, response)
}
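// ollamaChat translates an Ollama chat request into a fabric /chat call
// against this same server, then re-emits the reply as the stream of
// newline-delimited JSON objects an Ollama client expects.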
func (f APIConvert) ollamaChat(c *gin.Context) {
body, err := io.ReadAll(c.Request.Body)
if err != nil {
log.Printf("Error reading body: %v", err)
c.JSON(http.StatusInternalServerError, gin.H{"error": "testing endpoint"})
return
}
var prompt OllamaRequestBody
err = json.Unmarshal(body, &prompt)
if err != nil {
log.Printf("Error unmarshalling body: %v", err)
c.JSON(http.StatusInternalServerError, gin.H{"error": "testing endpoint"})
return
}
now := time.Now()
var chat ChatRequest
if len(prompt.Messages) == 1 {
chat.Prompts = []PromptRequest{{
UserInput: prompt.Messages[0].Content,
Vendor: "",
Model: "",
ContextName: "",
PatternName: strings.Split(prompt.Model, ":")[0],
}}
} else if len(prompt.Messages) > 1 {
var content string
for _, msg := range prompt.Messages {
content = fmt.Sprintf("%s%s:%s\n", content, msg.Role, msg.Content)
}
chat.Prompts = []PromptRequest{{
UserInput: content,
Vendor: "",
Model: "",
ContextName: "",
PatternName: strings.Split(prompt.Model, ":")[0],
}}
}
fabricChatReq, err := json.Marshal(chat)
if err != nil {
log.Printf("Error marshalling body: %v", err)
c.JSON(http.StatusInternalServerError, gin.H{"error": err})
return
}
ctx := context.Background()
var req *http.Request
if strings.Contains(*f.addr, "http") {
req, err = http.NewRequest("POST", fmt.Sprintf("%s/chat", *f.addr), bytes.NewBuffer(fabricChatReq))
} else {
req, err = http.NewRequest("POST", fmt.Sprintf("http://127.0.0.1%s/chat", *f.addr), bytes.NewBuffer(fabricChatReq))
}
if err != nil {
log.Fatal(err)
}
req = req.WithContext(ctx)
fabricRes, err := http.DefaultClient.Do(req)
if err != nil {
log.Printf("Error getting /chat body: %v", err)
c.JSON(http.StatusInternalServerError, gin.H{"error": err})
return
}
body, err = io.ReadAll(fabricRes.Body)
if err != nil {
log.Printf("Error reading body: %v", err)
c.JSON(http.StatusInternalServerError, gin.H{"error": "testing endpoint"})
return
}
var forwardedResponse OllamaResponse
var forwardedResponses []OllamaResponse
var fabricResponse FabricResponseFormat
err = json.Unmarshal([]byte(strings.Split(strings.Split(string(body), "\n")[0], "data: ")[1]), &fabricResponse)
if err != nil {
log.Printf("Error unmarshalling body: %v", err)
c.JSON(http.StatusInternalServerError, gin.H{"error": "testing endpoint"})
return
}
for _, word := range strings.Split(fabricResponse.Content, " ") {
forwardedResponse = OllamaResponse{
Model: "",
CreatedAt: "",
Message: struct {
Role string `json:"role"`
Content string `json:"content"`
}(struct {
Role string
Content string
}{Content: fmt.Sprintf("%s ", word), Role: "assistant"}),
Done: false,
}
forwardedResponses = append(forwardedResponses, forwardedResponse)
}
forwardedResponse.Model = prompt.Model
forwardedResponse.CreatedAt = time.Now().UTC().Format("2006-01-02T15:04:05.999999999Z")
forwardedResponse.Message.Role = "assistant"
forwardedResponse.Message.Content = ""
forwardedResponse.DoneReason = "stop"
forwardedResponse.Done = true
forwardedResponse.TotalDuration = time.Since(now).Nanoseconds()
forwardedResponse.LoadDuration = int(time.Since(now).Nanoseconds())
forwardedResponse.PromptEvalCount = 42
forwardedResponse.PromptEvalDuration = int(time.Since(now).Nanoseconds())
forwardedResponse.EvalCount = 420
forwardedResponse.EvalDuration = time.Since(now).Nanoseconds()
forwardedResponses = append(forwardedResponses, forwardedResponse)
var res []byte
for _, response := range forwardedResponses {
marshalled, err := json.Marshal(response)
if err != nil {
log.Printf("Error marshalling body: %v", err)
c.JSON(http.StatusInternalServerError, gin.H{"error": err})
return
}
for _, bytein := range marshalled {
res = append(res, bytein)
}
for _, bytebreak := range []byte("\n") {
res = append(res, bytebreak)
}
}
c.Data(200, "application/json", res)
//c.JSON(200, forwardedResponse)
return
}
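
The new endpoints can be smoke-tested with curl; a hypothetical session, assuming fabric was started with --serveOllama on the default :8080 and that a pattern named summarize exists:

```bash
# List every fabric pattern as an Ollama "model"
curl http://127.0.0.1:8080/api/tags

# Chat through a pattern; the pattern name is parsed from the model field
curl http://127.0.0.1:8080/api/chat \
  -H 'Content-Type: application/json' \
  -d '{"model":"summarize:latest","messages":[{"role":"user","content":"Hello"}],"stream":false}'
```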

View File

@@ -1,3 +1,3 @@
package main
var version = "v1.4.123"
var version = "v1.4.125"