mirror of https://github.com/danielmiessler/Fabric.git
feat: implement print session and context
README.md (73 changed lines)
@@ -212,41 +212,44 @@ Usage:
fabric [OPTIONS]

Application Options:
  -p, --pattern= Choose a pattern
  -v, --variable= Values for pattern variables, e.g. -v=$name:John -v=$age:30
  -C, --context= Choose a context
  --session= Choose a session
  -S, --setup Run setup for all reconfigurable parts of fabric
  --setup-skip-patterns Run setup for all reconfigurable parts of fabric, but skip update fabric patterns.
  --setup-vendor= Run Setup for specific vendor, one of Ollama, OpenAI, Anthropic, Azure, Gemini, Groq, Mistral, OpenRouter, SiliconCloud. E.g. fabric --setup-vendor=OpenAI
  -t, --temperature= Set temperature (default: 0.7)
  -T, --topp= Set top P (default: 0.9)
  -s, --stream Stream
  -P, --presencepenalty= Set presence penalty (default: 0.0)
  -r, --raw Use the defaults of the model without sending chat options (like temperature etc.) and use the user role instead of the system role for patterns
  -F, --frequencypenalty= Set frequency penalty (default: 0.0)
  -l, --listpatterns List all patterns
  -L, --listmodels List all available models
  -x, --listcontexts List all contexts
  -X, --listsessions List all sessions
  -U, --updatepatterns Update patterns
  -c, --copy Copy to clipboard
  -m, --model= Choose model
  -o, --output= Output to file
  -n, --latest= Number of latest patterns to list (default: 0)
  -d, --changeDefaultModel Change default model
  -y, --youtube= YouTube video "URL" to grab transcript, comments from it and send to chat
  --transcript Grab transcript from YouTube video and send to chat (it used per default).
  --comments Grab comments from YouTube video and send to chat
  -g, --language= Specify the Language Code for the chat, e.g. -g=en -g=zh
  -u, --scrape_url= Scrape website URL to markdown using Jina AI
  -q, --scrape_question= Search question using Jina AI
  -e, --seed= Seed to be used for LMM generation
  -w, --wipecontext= Wipe context
  -W, --wipesession= Wipe session
  --readability Convert HTML input into a clean, readable view
  --dry-run Show what would be sent to the model without actually sending it
  --version Print current version
  -p, --pattern= Choose a pattern
  -v, --variable= Values for pattern variables, e.g. -v=$name:John -v=$age:30
  -C, --context= Choose a context
  --session= Choose a session
  -S, --setup Run setup for all reconfigurable parts of fabric
  --setup-skip-patterns Run Setup for specific vendor, one of Ollama, OpenAI, Anthropic, Azure, Gemini, Groq, Mistral, OpenRouter, SiliconCloud. E.g. fabric --setup-vendor=OpenAI
  --setup-vendor= Run Setup for specific vendor. E.g. --setup-vendor=OpenAI
  -t, --temperature= Set temperature (default: 0.7)
  -T, --topp= Set top P (default: 0.9)
  -s, --stream Stream
  -P, --presencepenalty= Set presence penalty (default: 0.0)
  -r, --raw Use the defaults of the model without sending chat options (like temperature etc.) and use the user role instead of the system role for patterns.
  -F, --frequencypenalty= Set frequency penalty (default: 0.0)
  -l, --listpatterns List all patterns
  -L, --listmodels List all available models
  -x, --listcontexts List all contexts
  -X, --listsessions List all sessions
  -U, --updatepatterns Update patterns
  -c, --copy Copy to clipboard
  -m, --model= Choose model
  -o, --output= Output to file
  --output-prompt Output used prompt before the result
  -n, --latest= Number of latest patterns to list (default: 0)
  -d, --changeDefaultModel Change default model
  -y, --youtube= YouTube video "URL" to grab transcript, comments from it and send to chat
  --transcript Grab transcript from YouTube video and send to chat (it used per default).
  --comments Grab comments from YouTube video and send to chat
  -g, --language= Specify the Language Code for the chat, e.g. -g=en -g=zh
  -u, --scrape_url= Scrape website URL to markdown using Jina AI
  -q, --scrape_question= Search question using Jina AI
  -e, --seed= Seed to be used for LMM generation
  -w, --wipecontext= Wipe context
  -W, --wipesession= Wipe session
  --printcontext= Print context
  --printsession= Print session
  --readability Convert HTML input into a clean, readable view
  --dry-run Show what would be sent to the model without actually sending it
  --version Print current version

Help Options:
  -h, --help Show this help message
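Taken together, the help-text changes surface the three new capabilities of this commit: --printcontext= and --printsession= dump a stored context or session to stdout, and --output-prompt makes -o/--output write the prompt that was actually sent ahead of the model's answer (see the cli.go hunks below). For example, with hypothetical names, fabric --printsession=meeting_notes would print the saved session, and fabric -p summarize --output-prompt -o result.md would save the rendered prompt together with the reply.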
cli/cli.go (59 changed lines)
@@ -13,7 +13,7 @@ import (
)

// Cli Controls the cli. It takes in the flags and runs the appropriate functions
func Cli(version string) (message string, err error) {
func Cli(version string) (err error) {
    var currentFlags *Flags
    if currentFlags, err = Init(); err != nil {
        return

@@ -55,7 +55,6 @@ func Cli(version string) (message string, err error) {
        }
    }

    // if the update patterns flag is set, run the update patterns function
    if currentFlags.UpdatePatterns {
        err = fabric.PopulateDB()
        return

@@ -66,7 +65,6 @@ func Cli(version string) (message string, err error) {
        return
    }

    // if the latest patterns flag is set, run the latest patterns function
    if currentFlags.LatestPatterns != "0" {
        var parsedToInt int
        if parsedToInt, err = strconv.Atoi(currentFlags.LatestPatterns); err != nil {

@@ -79,42 +77,46 @@ func Cli(version string) (message string, err error) {
        return
    }

    // if the list patterns flag is set, run the list all patterns function
    if currentFlags.ListPatterns {
        err = fabricDb.Patterns.ListNames()
        return
    }

    // if the list all models flag is set, run the list all models function
    if currentFlags.ListAllModels {
        fabric.GetModels().Print()
        return
    }

    // if the list all contexts flag is set, run the list all contexts function
    if currentFlags.ListAllContexts {
        err = fabricDb.Contexts.ListNames()
        return
    }

    // if the list all sessions flag is set, run the list all sessions function
    if currentFlags.ListAllSessions {
        err = fabricDb.Sessions.ListNames()
        return
    }

    // if the wipe context flag is set, run the wipe context function
    if currentFlags.WipeContext != "" {
        err = fabricDb.Contexts.Delete(currentFlags.WipeContext)
        return
    }

    // if the wipe session flag is set, run the wipe session function
    if currentFlags.WipeSession != "" {
        err = fabricDb.Sessions.Delete(currentFlags.WipeSession)
        return
    }

    if currentFlags.PrintSession != "" {
        err = fabricDb.Sessions.PrintSession(currentFlags.PrintSession)
        return
    }

    if currentFlags.PrintContext != "" {
        err = fabricDb.Contexts.PrintContext(currentFlags.PrintContext)
        return
    }

    if currentFlags.HtmlReadability {
        if msg, cleanErr := converter.HtmlReadability(currentFlags.Message); cleanErr != nil {
            fmt.Println("use original input, because can't apply html readability", err)

@@ -151,8 +153,6 @@ func Cli(version string) (message string, err error) {
            return
        }

        // fmt.Println(transcript)

        currentFlags.AppendMessage(transcript)
    }

@@ -164,12 +164,10 @@ func Cli(version string) (message string, err error) {

        commentsString := strings.Join(comments, "\n")

        // fmt.Println(commentsString)

        currentFlags.AppendMessage(commentsString)
    }

    if currentFlags.Pattern == "" {
        if !currentFlags.IsChatRequest() {
            // if the pattern flag is not set, we wanted only to grab the transcript or comments
            fmt.Println(currentFlags.Message)
            return

@@ -179,27 +177,25 @@ func Cli(version string) (message string, err error) {
    if (currentFlags.ScrapeURL != "" || currentFlags.ScrapeQuestion != "") && fabric.Jina.IsConfigured() {
        // Check if the scrape_url flag is set and call ScrapeURL
        if currentFlags.ScrapeURL != "" {
            if message, err = fabric.Jina.ScrapeURL(currentFlags.ScrapeURL); err != nil {
            var website string
            if website, err = fabric.Jina.ScrapeURL(currentFlags.ScrapeURL); err != nil {
                return
            }

            //fmt.Println(message)

            currentFlags.AppendMessage(message)
            currentFlags.AppendMessage(website)
        }

        // Check if the scrape_question flag is set and call ScrapeQuestion
        if currentFlags.ScrapeQuestion != "" {
            if message, err = fabric.Jina.ScrapeQuestion(currentFlags.ScrapeQuestion); err != nil {
            var website string
            if website, err = fabric.Jina.ScrapeQuestion(currentFlags.ScrapeQuestion); err != nil {
                return
            }

            //fmt.Println(message)

            currentFlags.AppendMessage(message)
            currentFlags.AppendMessage(website)
        }

        if currentFlags.Pattern == "" {
            if !currentFlags.IsChatRequest() {
                // if the pattern flag is not set, we wanted only to grab the url or get the answer to the question
                fmt.Println(currentFlags.Message)
                return

@@ -211,24 +207,33 @@ func Cli(version string) (message string, err error) {
        return
    }

    if message, err = chatter.Send(currentFlags.BuildChatRequest(), currentFlags.BuildChatOptions()); err != nil {
    var session *db.Session
    if session, err = chatter.Send(currentFlags.BuildChatRequest(), currentFlags.BuildChatOptions()); err != nil {
        return
    }

    result := session.GetLastMessage().Content

    if !currentFlags.Stream {
        fmt.Println(message)
        // print the result if it was not streamed already
        fmt.Println(result)
    }

    // if the copy flag is set, copy the message to the clipboard
    if currentFlags.Copy {
        if err = fabric.CopyToClipboard(message); err != nil {
        if err = fabric.CopyToClipboard(result); err != nil {
            return
        }
    }

    // if the output flag is set, create an output file
    if currentFlags.Output != "" {
        err = fabric.CreateOutputFile(message, currentFlags.Output)
        if currentFlags.OutputPrompt {
            sessionAsString := session.String()
            err = fabric.CreateOutputFile(sessionAsString, currentFlags.Output)
        } else {
            err = fabric.CreateOutputFile(result, currentFlags.Output)
        }
    }
    return
}
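The tail of Cli now derives everything from the returned session: the assistant's last message is the printable result, and --output-prompt decides whether the output file gets only that result or the whole rendered exchange. A condensed sketch of that decision as a standalone helper; the helper name is made up, while db.Session, fabric.CreateOutputFile and the two session methods are the ones from this commit:

    // writeResult mirrors the output branch at the end of Cli: with
    // --output-prompt the whole rendered session (prompt plus the assistant
    // reply) is written, otherwise only the reply itself.
    func writeResult(session *db.Session, outputPrompt bool, path string) error {
        if outputPrompt {
            return fabric.CreateOutputFile(session.String(), path)
        }
        return fabric.CreateOutputFile(session.GetLastMessage().Content, path)
    }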
@@ -15,10 +15,9 @@ func TestCli(t *testing.T) {
    defer func() { os.Args = originalArgs }()

    os.Args = []string{os.Args[0]}
    message, err := Cli("test")
    err := Cli("test")
    assert.Error(t, err)
    assert.Equal(t, core.NoSessionPatternUserMessages, err.Error())
    assert.Empty(t, message)
}

func TestSetup(t *testing.T) {
@@ -36,6 +36,7 @@ type Flags struct {
    Copy bool `short:"c" long:"copy" description:"Copy to clipboard"`
    Model string `short:"m" long:"model" description:"Choose model"`
    Output string `short:"o" long:"output" description:"Output to file" default:""`
    OutputPrompt bool `long:"output-prompt" description:"Output used prompt before the result"`
    LatestPatterns string `short:"n" long:"latest" description:"Number of latest patterns to list" default:"0"`
    ChangeDefaultModel bool `short:"d" long:"changeDefaultModel" description:"Change default model"`
    YouTube string `short:"y" long:"youtube" description:"YouTube video \"URL\" to grab transcript, comments from it and send to chat"`

@@ -47,6 +48,8 @@ type Flags struct {
    Seed int `short:"e" long:"seed" description:"Seed to be used for LMM generation"`
    WipeContext string `short:"w" long:"wipecontext" description:"Wipe context"`
    WipeSession string `short:"W" long:"wipesession" description:"Wipe session"`
    PrintContext string `long:"printcontext" description:"Print context"`
    PrintSession string `long:"printsession" description:"Print session"`
    HtmlReadability bool `long:"readability" description:"Convert HTML input into a clean, readable view"`
    DryRun bool `long:"dry-run" description:"Show what would be sent to the model without actually sending it"`
    Version bool `long:"version" description:"Print current version"`

@@ -135,3 +138,8 @@ func (o *Flags) AppendMessage(message string) {
    }
    return
}

func (o *Flags) IsChatRequest() (ret bool) {
    ret = o.Message != "" || o.Session != ""
    return
}
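The new fields use the same short/long/description struct-tag style as the existing options, which matches the jessevdk/go-flags convention, so the added flags are long-only (no short form). A self-contained sketch of how such tags parse, assuming go-flags is the parser; the reduced struct and the argument values are illustrative only:

    package main

    import (
        "fmt"

        "github.com/jessevdk/go-flags"
    )

    // printFlags is a cut-down stand-in for the fields added to Flags here.
    type printFlags struct {
        PrintContext string `long:"printcontext" description:"Print context"`
        PrintSession string `long:"printsession" description:"Print session"`
        OutputPrompt bool   `long:"output-prompt" description:"Output used prompt before the result"`
    }

    func main() {
        var f printFlags
        // e.g. fabric --printsession=meeting --output-prompt
        if _, err := flags.ParseArgs(&f, []string{"--printsession=meeting", "--output-prompt"}); err != nil {
            panic(err)
        }
        fmt.Println(f.PrintSession, f.PrintContext == "", f.OutputPrompt) // meeting true true
    }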
@@ -19,13 +19,12 @@ type Chatter struct {
    vendor vendors.Vendor
}

func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (message string, err error) {
func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (session *db.Session, err error) {
    var chatRequest *Chat
    if chatRequest, err = o.NewChat(request); err != nil {
        return
    }

    var session *db.Session
    if session, err = chatRequest.BuildChatSession(opts.Raw); err != nil {
        return
    }

@@ -34,6 +33,8 @@ func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (m
        opts.Model = o.model
    }

    message := ""

    if o.Stream {
        channel := make(chan string)
        go func() {

@@ -52,9 +53,15 @@ func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (m
        }
    }

    if chatRequest.Session != nil && message != "" {
        chatRequest.Session.Append(&common.Message{Role: goopenai.ChatMessageRoleAssistant, Content: message})
        err = o.db.Sessions.SaveSession(chatRequest.Session)
    if message == "" {
        session = nil
        err = fmt.Errorf("empty response")
    }

    session.Append(&common.Message{Role: goopenai.ChatMessageRoleAssistant, Content: message})

    if chatRequest.Session != nil {
        err = o.db.Sessions.SaveSession(session)
    }
    return
}
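The reshaped Send is the core of the change: instead of returning the reply as a bare string, it builds the chat session, collects the vendor output (streamed or not) into message, reports an "empty response" error when nothing came back, appends the reply to the session as an assistant message, and persists the session through SaveSession when the underlying request carries a named session (chatRequest.Session != nil). Callers such as Cli then read the reply back with session.GetLastMessage().Content, as shown in the cli.go hunk above, so a later --session or --printsession run picks up the saved conversation.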
@@ -1,5 +1,7 @@
package db

import "fmt"

type Contexts struct {
    *Storage
}

@@ -15,6 +17,15 @@ func (o *Contexts) GetContext(name string) (ret *Context, err error) {
    return
}

func (o *Contexts) PrintContext(name string) (err error) {
    var context *Context
    if context, err = o.GetContext(name); err != nil {
        return
    }
    fmt.Println(context.Content)
    return
}

type Context struct {
    Name string
    Content string
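PrintContext is a thin wrapper: it loads the named context through GetContext and writes its raw Content to stdout, which is what fabric --printcontext=<name> reaches via the cli.go branch above. The only other change to this file is the fmt import the new function needs.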
@@ -20,6 +20,16 @@ func (o *Sessions) GetOrCreateSession(name string) (session *Session, err error)
    return
}

func (o *Sessions) PrintSession(name string) (err error) {
    if o.Exists(name) {
        var session Session
        if err = o.LoadAsJson(name, &session.Messages); err == nil {
            fmt.Println(session)
        }
    }
    return
}

func (o *Sessions) SaveSession(session *Session) (err error) {
    return o.SaveAsJson(session.Name, session.Messages)
}

@@ -36,3 +46,17 @@ func (o *Session) IsEmpty() bool {
func (o *Session) Append(messages ...*common.Message) {
    o.Messages = append(o.Messages, messages...)
}

func (o *Session) GetLastMessage() (ret *common.Message) {
    if len(o.Messages) > 0 {
        ret = o.Messages[len(o.Messages)-1]
    }
    return
}

func (o *Session) String() (ret string) {
    for _, message := range o.Messages {
        ret += fmt.Sprintf("[%v] >\n\n%v\n\n", message.Role, message.Content)
    }
    return
}
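Session.String is what --output-prompt ultimately writes: every stored message rendered as a role-tagged block, with the prompt first and the assistant reply last. A self-contained sketch of the same formatting, with the types reduced to the fields the methods touch and the sample messages made up:

    package main

    import "fmt"

    // Message and Session are cut down to what String() and GetLastMessage() need.
    type Message struct {
        Role    string
        Content string
    }

    type Session struct {
        Name     string
        Messages []*Message
    }

    func (o *Session) GetLastMessage() *Message {
        if len(o.Messages) > 0 {
            return o.Messages[len(o.Messages)-1]
        }
        return nil
    }

    func (o *Session) String() (ret string) {
        for _, message := range o.Messages {
            ret += fmt.Sprintf("[%v] >\n\n%v\n\n", message.Role, message.Content)
        }
        return
    }

    func main() {
        s := &Session{Name: "demo", Messages: []*Message{
            {Role: "system", Content: "You extract the key ideas."},
            {Role: "user", Content: "Summarize this article ..."},
            {Role: "assistant", Content: "The key ideas are ..."},
        }}
        fmt.Print(s.String())                   // three "[role] >" blocks, prompt first
        fmt.Println(s.GetLastMessage().Content) // "The key ideas are ..."
    }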