Compare commits

4 Commits

Author SHA1 Message Date
github-actions[bot]
36eb321059 Update version to v1.4.192 and commit 2025-05-23 05:44:31 +00:00
Eugen Eisler
47bf9600d6 Merge pull request #1480 from ksylvan/0522-auto-raw-mode-for-some-models
Automatic setting of "raw mode" for some models
2025-05-23 07:43:04 +02:00
Kayvan Sylvan
be674841e7 feat: add automatic raw mode detection for specific AI models
## CHANGES

- Add model-specific raw mode detection logic
- Check Ollama llama2/llama3 models for raw mode
- Check OpenAI o1/o3/o4 models for raw mode
- Use model from options or default chatter
- Auto-enable raw mode when vendor requires it
- Import strings package for prefix matching
2025-05-22 17:04:11 -07:00
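
The vendor-side check described in the commit above is plain prefix matching on the model name. A minimal, runnable sketch of that logic (the standalone needsRawMode helper and the sample model names are illustrative, not part of this diff):

package main

import (
    "fmt"
    "strings"
)

// needsRawMode reports whether a model name starts with any of the given
// prefixes -- the same test the vendor clients in the diffs below perform.
func needsRawMode(modelName string, prefixes []string) bool {
    for _, prefix := range prefixes {
        if strings.HasPrefix(modelName, prefix) {
            return true
        }
    }
    return false
}

func main() {
    openaiPrefixes := []string{"o1", "o3", "o4"}
    fmt.Println(needsRawMode("o1-preview", openaiPrefixes)) // true
    fmt.Println(needsRawMode("gpt-4o", openaiPrefixes))     // false: contains "4o" but starts with "gpt"
}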
Kayvan Sylvan
39a8b67438 feat: add NeedsRawMode method to AI vendor interface
## CHANGES

- Add NeedsRawMode to Vendor interface
- Implement NeedsRawMode in all AI clients
- Return false for all implementations
- Support model-specific raw mode detection
- Enable future raw mode requirements
2025-05-22 16:41:12 -07:00
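
The commit above is a conservative interface extension: every existing client satisfies the new method with a stub returning false, and only vendors with real raw-mode requirements override it. A condensed sketch of the shape (the interface is abbreviated to the new method, and stubClient is a hypothetical stand-in for the clients updated in the diffs below):

package ai

// Vendor is shown here with only the newly added method; the full
// interface appears in the last diff of this compare.
type Vendor interface {
    NeedsRawMode(modelName string) bool
}

// stubClient stands in for a vendor client with no raw-mode requirements.
type stubClient struct{}

// NeedsRawMode returns false so the client compiles against the extended
// interface while changing no behavior.
func (stubClient) NeedsRawMode(modelName string) bool { return false }

// Compile-time check that stubClient implements the abbreviated Vendor.
var _ Vendor = stubClient{}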
12 changed files with 62 additions and 2 deletions

View File

@@ -32,6 +32,13 @@ type Chatter struct {
 
 // Send processes a chat request and applies any file changes if using the create_coding_feature pattern
 func (o *Chatter) Send(request *common.ChatRequest, opts *common.ChatOptions) (session *fsdb.Session, err error) {
+	modelToUse := opts.Model
+	if modelToUse == "" {
+		modelToUse = o.model // Default to the model set in the Chatter struct
+	}
+	if o.vendor.NeedsRawMode(modelToUse) {
+		opts.Raw = true
+	}
 	if session, err = o.BuildSession(request, opts.Raw); err != nil {
 		return
 	}

View File

@@ -1 +1 @@
-"1.4.191"
+"1.4.192"

View File

@@ -205,3 +205,7 @@ func (an *Client) toMessages(msgs []*goopenai.ChatCompletionMessage) (ret []anth
 	return anthropicMessages
 }
+
+func (an *Client) NeedsRawMode(modelName string) bool {
+	return false
+}

View File

@@ -41,3 +41,7 @@ func (oi *Client) ListModels() (ret []string, err error) {
 	ret = oi.apiDeployments
 	return
 }
+
+func (oi *Client) NeedsRawMode(modelName string) bool {
+	return false
+}

View File

@@ -90,3 +90,7 @@ func (c *Client) Setup() error {
 func (c *Client) SetupFillEnvFileContent(_ *bytes.Buffer) {
 	// No environment variables needed for dry run
 }
+
+func (c *Client) NeedsRawMode(modelName string) bool {
+	return false
+}

View File

@@ -43,3 +43,7 @@ func (oi *Client) ListModels() (ret []string, err error) {
 	ret = oi.apiModels
 	return
 }
+
+func (oi *Client) NeedsRawMode(modelName string) bool {
+	return false
+}

View File

@@ -143,6 +143,10 @@ func (o *Client) extractText(response *genai.GenerateContentResponse) (ret strin
 	return
 }
 
+func (o *Client) NeedsRawMode(modelName string) bool {
+	return false
+}
+
 func toMessages(msgs []*goopenai.ChatCompletionMessage) (systemInstruction *genai.Content, messages []genai.Part) {
 	if len(msgs) >= 2 {
 		systemInstruction = &genai.Content{

View File

@@ -345,3 +345,7 @@ func (c *Client) GetEmbeddings(ctx context.Context, input string, opts *common.C
 	embeddings = result.Data[0].Embedding
 	return
 }
+
+func (c *Client) NeedsRawMode(modelName string) bool {
+	return false
+}

View File

@@ -5,6 +5,7 @@ import (
 	"fmt"
 	"net/http"
 	"net/url"
+	"strings"
 	"time"
 
 	ollamaapi "github.com/ollama/ollama/api"
@@ -138,3 +139,16 @@ func (o *Client) createChatRequest(msgs []*goopenai.ChatCompletionMessage, opts
 	}
 	return
 }
+
+func (o *Client) NeedsRawMode(modelName string) bool {
+	ollamaPrefixes := []string{
+		"llama3",
+		"llama2",
+	}
+	for _, prefix := range ollamaPrefixes {
+		if strings.HasPrefix(modelName, prefix) {
+			return true
+		}
+	}
+	return false
+}

View File

@@ -123,6 +123,20 @@ func (o *Client) Send(ctx context.Context, msgs []*goopenai.ChatCompletionMessage
 	return
 }
 
+func (o *Client) NeedsRawMode(modelName string) bool {
+	openaiModelsPrefixes := []string{
+		"o1",
+		"o3",
+		"o4",
+	}
+	for _, prefix := range openaiModelsPrefixes {
+		if strings.HasPrefix(modelName, prefix) {
+			return true
+		}
+	}
+	return false
+}
+
 func (o *Client) buildChatCompletionRequest(
 	inputMsgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions,
 ) (ret goopenai.ChatCompletionRequest) {

View File

@@ -14,4 +14,5 @@ type Vendor interface {
 	ListModels() ([]string, error)
 	SendStream([]*goopenai.ChatCompletionMessage, *common.ChatOptions, chan string) error
 	Send(context.Context, []*goopenai.ChatCompletionMessage, *common.ChatOptions) (string, error)
+	NeedsRawMode(modelName string) bool
 }
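
Not part of the diff, but a common Go idiom when extending an interface like this is a compile-time assertion in each client package, so a vendor that misses the new method breaks the build instead of failing at runtime. A sketch (the package name and import path are assumptions):

package openai

import "example.com/project/plugins/ai" // hypothetical path to the package declaring Vendor

// Compile-time check: *Client must implement every Vendor method,
// including the newly added NeedsRawMode.
var _ ai.Vendor = (*Client)(nil)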

View File

@@ -1,3 +1,3 @@
 package main
 
-var version = "v1.4.191"
+var version = "v1.4.192"