Mirror of https://github.com/danielmiessler/Fabric.git, synced 2026-01-10 14:58:02 -05:00.
Compare commits
17 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2ad454b6dc | ||
|
|
c0ea25f816 | ||
|
|
87796d4fa9 | ||
|
|
e1945a0b62 | ||
|
|
ecac2b4c34 | ||
|
|
7ed4de269e | ||
|
|
6bd305906d | ||
|
|
6aeca6e4da | ||
|
|
b34f249e24 | ||
|
|
b187a80275 | ||
|
|
a6fc54a991 | ||
|
|
b9f4b9837a | ||
|
|
2bedf35957 | ||
|
|
b9df64a0d8 | ||
|
|
6b07b33ff2 | ||
|
|
ff245edd51 | ||
|
|
2e0a4da876 |
10
README.md
10
README.md
@@ -3,7 +3,7 @@ Fabric is graciously supported by…
|
||||
|
||||
[](https://warp.dev/fabric)
|
||||
|
||||
<img src="./images/fabric-logo-gif.gif" alt="fabriclogo" width="400" height="400"/>
|
||||
<img src="./docs/images/fabric-logo-gif.gif" alt="fabriclogo" width="400" height="400"/>
|
||||
|
||||
# `fabric`
|
||||
|
||||
@@ -29,7 +29,7 @@ Fabric is graciously supported by…
|
||||
[Helper Apps](#helper-apps) •
|
||||
[Meta](#meta)
|
||||
|
||||

|
||||

|
||||
|
||||
</div>
|
||||
|
||||
@@ -628,7 +628,7 @@ Now let's look at some things you can do with Fabric.
|
||||
<br />
|
||||
<br />
|
||||
|
||||
If you're not looking to do anything fancy, and you just want a lot of great prompts, you can navigate to the [`/patterns`](https://github.com/danielmiessler/fabric/tree/main/patterns) directory and start exploring!
|
||||
If you're not looking to do anything fancy, and you just want a lot of great prompts, you can navigate to the [`/patterns`](https://github.com/danielmiessler/fabric/tree/main/data/patterns) directory and start exploring!
|
||||
|
||||
We hope that if you used nothing else from Fabric, the Patterns by themselves will make the project useful.
|
||||
|
||||
@@ -644,7 +644,7 @@ be used in addition to the basic patterns.
|
||||
See the [Thinking Faster by Writing Less](https://arxiv.org/pdf/2502.18600) paper and
|
||||
the [Thought Generation section of Learn Prompting](https://learnprompting.org/docs/advanced/thought_generation/introduction) for examples of prompt strategies.
|
||||
|
||||
Each strategy is available as a small `json` file in the [`/strategies`](https://github.com/danielmiessler/fabric/tree/main/strategies) directory.
|
||||
Each strategy is available as a small `json` file in the [`/strategies`](https://github.com/danielmiessler/fabric/tree/main/data/strategies) directory.
|
||||
|
||||
The prompt modification of the strategy is applied to the system prompt and passed on to the
|
||||
LLM in the chat session.
|
||||
@@ -736,7 +736,7 @@ Make sure you have a LaTeX distribution (like TeX Live or MiKTeX) installed on y
|
||||
It generates a `json` representation of a directory of code that can be fed into an AI model
|
||||
with instructions to create a new feature or edit the code in a specified way.
|
||||
|
||||
See [the Create Coding Feature Pattern README](./patterns/create_coding_feature/README.md) for details.
|
||||
See [the Create Coding Feature Pattern README](./data/patterns/create_coding_feature/README.md) for details.
|
||||
|
||||
Install it first using:
|
||||
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
package main
|
||||
|
||||
var version = "v1.4.244"
|
||||
var version = "v1.4.245"
|
||||
|
||||
@@ -7,7 +7,7 @@ Generate code changes to an existing coding project using AI.
|
||||
After installing the `code_helper` binary:
|
||||
|
||||
```bash
|
||||
go install github.com/danielmiessler/fabric/plugins/tools/code_helper@latest
|
||||
go install github.com/danielmiessler/fabric/cmd/code_helper@latest
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
105
internal/plugins/ai/openai_compatible/direct_models_call.go
Normal file
105
internal/plugins/ai/openai_compatible/direct_models_call.go
Normal file
@@ -0,0 +1,105 @@
|
||||
package openai_compatible
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Model represents a single model entry returned by a provider's
// /models endpoint. Only the identifier is decoded; all other fields
// in the provider's response are ignored.
type Model struct {
	ID string `json:"id"`
}

// errorResponseLimit defines the maximum length of error response bodies
// for truncation before they are embedded in error messages.
const errorResponseLimit = 1024 // Limit for error response body size
|
||||
|
||||
// DirectlyGetModels is used to fetch models directly from the API
|
||||
// when the standard OpenAI SDK method fails due to a nonstandard format.
|
||||
// This is useful for providers like Together that return a direct array of models.
|
||||
func (c *Client) DirectlyGetModels(ctx context.Context) ([]string, error) {
|
||||
if ctx == nil {
|
||||
ctx = context.Background()
|
||||
}
|
||||
baseURL := c.ApiBaseURL.Value
|
||||
if baseURL == "" {
|
||||
return nil, fmt.Errorf("API base URL not configured for provider %s", c.GetName())
|
||||
}
|
||||
|
||||
// Build the /models endpoint URL
|
||||
fullURL, err := url.JoinPath(baseURL, "models")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create models URL: %w", err)
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", fullURL, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", c.ApiKey.Value))
|
||||
req.Header.Set("Accept", "application/json")
|
||||
|
||||
// TODO: Consider reusing a single http.Client instance (e.g., as a field on Client) instead of allocating a new one for each request.
|
||||
|
||||
client := &http.Client{
|
||||
Timeout: 10 * time.Second,
|
||||
}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
// Read the response body for debugging
|
||||
bodyBytes, _ := io.ReadAll(resp.Body)
|
||||
bodyString := string(bodyBytes)
|
||||
if len(bodyString) > errorResponseLimit { // Truncate if too large
|
||||
bodyString = bodyString[:errorResponseLimit] + "..."
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected status code: %d from provider %s, response body: %s",
|
||||
resp.StatusCode, c.GetName(), bodyString)
|
||||
}
|
||||
|
||||
// Read the response body once
|
||||
bodyBytes, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Try to parse as an object with data field (OpenAI format)
|
||||
var openAIFormat struct {
|
||||
Data []Model `json:"data"`
|
||||
}
|
||||
// Try to parse as a direct array (Together format)
|
||||
var directArray []Model
|
||||
|
||||
if err := json.Unmarshal(bodyBytes, &openAIFormat); err == nil && len(openAIFormat.Data) > 0 {
|
||||
return extractModelIDs(openAIFormat.Data), nil
|
||||
}
|
||||
|
||||
if err := json.Unmarshal(bodyBytes, &directArray); err == nil && len(directArray) > 0 {
|
||||
return extractModelIDs(directArray), nil
|
||||
}
|
||||
|
||||
var truncatedBody string
|
||||
if len(bodyBytes) > errorResponseLimit {
|
||||
truncatedBody = string(bodyBytes[:errorResponseLimit]) + "..."
|
||||
} else {
|
||||
truncatedBody = string(bodyBytes)
|
||||
}
|
||||
return nil, fmt.Errorf("unable to parse models response; raw response: %s", truncatedBody)
|
||||
}
|
||||
|
||||
func extractModelIDs(models []Model) []string {
|
||||
modelIDs := make([]string, 0, len(models))
|
||||
for _, model := range models {
|
||||
modelIDs = append(modelIDs, model.ID)
|
||||
}
|
||||
return modelIDs
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package openai_compatible
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
@@ -31,6 +32,19 @@ func NewClient(providerConfig ProviderConfig) *Client {
|
||||
return client
|
||||
}
|
||||
|
||||
// ListModels overrides the default ListModels to handle different response formats
|
||||
func (c *Client) ListModels() ([]string, error) {
|
||||
// First try the standard OpenAI SDK approach
|
||||
models, err := c.Client.ListModels()
|
||||
if err == nil && len(models) > 0 { // only return if OpenAI SDK returns models
|
||||
return models, nil
|
||||
}
|
||||
|
||||
// TODO: Handle context properly in Fabric by accepting and propagating a context.Context
|
||||
// instead of creating a new one here.
|
||||
return c.DirectlyGetModels(context.Background())
|
||||
}
|
||||
|
||||
// ProviderMap is a map of provider name to ProviderConfig for O(1) lookup
|
||||
var ProviderMap = map[string]ProviderConfig{
|
||||
"AIML": {
|
||||
@@ -83,6 +97,11 @@ var ProviderMap = map[string]ProviderConfig{
|
||||
BaseURL: "https://api.siliconflow.cn/v1",
|
||||
ImplementsResponses: false,
|
||||
},
|
||||
"Together": {
|
||||
Name: "Together",
|
||||
BaseURL: "https://api.together.xyz/v1",
|
||||
ImplementsResponses: false,
|
||||
},
|
||||
}
|
||||
|
||||
// GetProviderByName returns the provider configuration for a given name with O(1) lookup
|
||||
|
||||
@@ -1 +1 @@
|
||||
"1.4.244"
|
||||
"1.4.245"
|
||||
|
||||
Reference in New Issue
Block a user