mirror of https://github.com/danielmiessler/Fabric.git (synced 2026-01-11 07:18:03 -05:00)

Compare commits

18 commits:

- 70fccaf2fb
- 9a71f7c96d
- 5da3db383d
- 19438cbd20
- a0b71ee365
- 034513ece5
- 0affb9bab1
- 3305df8fb2
- 892c229076
- 599c5f2b9f
- 19e5d8dbe0
- b772127738
- 5dd61abe2a
- f45e140126
- 752a66cb48
- da28d91d65
- 5a66ca1c5a
- 98f3da610b

README.md (65 changed lines)
@@ -13,9 +13,11 @@ Fabric is graciously supported by…

[](https://opensource.org/licenses/MIT)

<div align="center">
<p class="align center">
<h4><code>fabric</code> is an open-source framework for augmenting humans using AI.</h4>
</p>
</div>

[Updates](#updates) •
[What and Why](#what-and-why) •

@@ -32,6 +34,30 @@ Fabric is graciously supported by…

</div>

## What and why

Since the start of modern AI in late 2022 we've seen an **_extraordinary_** number of AI applications for accomplishing tasks. There are thousands of websites, chatbots, mobile apps, and other interfaces for using all the different AI out there.

It's all really exciting and powerful, but _it's not easy to integrate this functionality into our lives._

<p class="align center">
<h4>In other words, AI doesn't have a capabilities problem—it has an <em>integration</em> problem.</h4>
</p>

**Fabric was created to address this by creating and organizing the fundamental units of AI—the prompts themselves!**

Fabric organizes prompts by real-world task, allowing people to create, collect, and organize their most important AI solutions in a single place for use in their favorite tools. And if you're command-line focused, you can use Fabric itself as the interface!

## Intro videos

Keep in mind that many of these were recorded when Fabric was Python-based, so remember to use the current [install instructions](#installation) below.

- [Network Chuck](https://www.youtube.com/watch?v=UbDyjIIGaxQ)
- [David Bombal](https://www.youtube.com/watch?v=vF-MQmVxnCs)
- [My Own Intro to the Tool](https://www.youtube.com/watch?v=wPEyyigh10g)
- [More Fabric YouTube Videos](https://www.youtube.com/results?search_query=fabric+ai)

## Navigation

- [`fabric`](#fabric)
@@ -87,34 +113,21 @@ Fabric is graciously supported by…

## Updates

> [!NOTE]
>
> June 17, 2025
>
> - Fabric now supports Perplexity AI. Configure it by using `fabric -S` to add your Perplexity AI API Key,
>   and then try:
>
>   ```bash
>   fabric -m sonar-pro "What is the latest world news?"
>   ```
>
> June 11, 2025
>
> - Fabric's YouTube transcription now needs `yt-dlp` to be installed. Make sure to install the latest
>   version (2025.06.09 as of this note). The YouTube API key is only needed for comments (the `--comments` flag)
>   and metadata extraction (the `--metadata` flag).
>
> May 22, 2025
>
> - Fabric now supports Anthropic's Claude 4. Read the [blog post from Anthropic](https://www.anthropic.com/news/claude-4).

## What and why

Since the start of 2023 and GenAI we've seen a massive number of AI applications for accomplishing tasks. It's powerful, but _it's not easy to integrate this functionality into our lives._

<div align="center">
<h4>In other words, AI doesn't have a capabilities problem—it has an <em>integration</em> problem.</h4>
</div>

Fabric was created to address this by enabling everyone to granularly apply AI to everyday challenges.

## Intro videos

Keep in mind that many of these were recorded when Fabric was Python-based, so remember to use the current [install instructions](#installation) below.

- [Network Chuck](https://www.youtube.com/watch?v=UbDyjIIGaxQ)
- [David Bombal](https://www.youtube.com/watch?v=vF-MQmVxnCs)
- [My Own Intro to the Tool](https://www.youtube.com/watch?v=wPEyyigh10g)
- [More Fabric YouTube Videos](https://www.youtube.com/results?search_query=fabric+ai)

> - Fabric's YouTube transcription now needs `yt-dlp` to be installed. Make sure to install the latest
>   version (2025.06.09 as of this note). The YouTube API key is only needed for comments (the `--comments` flag)
>   and metadata extraction (the `--metadata` flag).

## Philosophy

@@ -25,6 +25,7 @@ type ChatOptions struct {
    Raw                bool
    Seed               int
    ModelContextLength int
    MaxTokens          int
}

// NormalizeMessages removes empty messages and ensures the messages follow the user-assistant-user order
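
The hunk above adds a `MaxTokens` field to `ChatOptions`, giving callers a per-request cap on completion length. As a rough illustration of how such an option typically flows from the options struct into a provider request, here is a minimal, self-contained Go sketch; the `chatOptions` type and `buildRequestParams` helper are simplified stand-ins for this note, not Fabric's actual types:

```go
package main

import "fmt"

// chatOptions mirrors a small subset of Fabric's ChatOptions, for illustration only.
type chatOptions struct {
	Model     string
	MaxTokens int
}

// buildRequestParams shows the usual pattern: only forward max_tokens
// to the provider when the caller actually set a positive value.
func buildRequestParams(opts chatOptions) map[string]any {
	params := map[string]any{"model": opts.Model}
	if opts.MaxTokens > 0 {
		params["max_tokens"] = opts.MaxTokens
	}
	return params
}

func main() {
	fmt.Println(buildRequestParams(chatOptions{Model: "sonar-pro", MaxTokens: 1024}))
	fmt.Println(buildRequestParams(chatOptions{Model: "sonar-pro"})) // MaxTokens omitted, provider default applies
}
```

Providers that receive no `max_tokens` entry fall back to their own defaults, which matches how the Perplexity client later in this changeset only sets the option when `opts.MaxTokens > 0`.
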
@@ -12,6 +12,7 @@ import (

    "github.com/danielmiessler/fabric/plugins/ai/bedrock"
    "github.com/danielmiessler/fabric/plugins/ai/exolab"
    "github.com/danielmiessler/fabric/plugins/ai/perplexity" // Added Perplexity plugin
    "github.com/danielmiessler/fabric/plugins/strategy"

    "github.com/samber/lo"

@@ -41,7 +42,9 @@ import (
// potential authentication source exists so we can safely initialize the
// Bedrock client without causing the AWS SDK to search for credentials.
func hasAWSCredentials() bool {
    if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
    if os.Getenv("AWS_PROFILE") != "" ||
        os.Getenv("AWS_ROLE_SESSION_NAME") != "" ||
        (os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "") {
        return true
    }

@@ -91,6 +94,7 @@ func NewPluginRegistry(db *fsdb.Db) (ret *PluginRegistry, err error) {
        anthropic.NewClient(),
        lmstudio.NewClient(),
        exolab.NewClient(),
        perplexity.NewClient(), // Added Perplexity client
    )

    if hasAWSCredentials() {
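
The widened `hasAWSCredentials` check above now treats `AWS_PROFILE`, `AWS_ROLE_SESSION_NAME`, or a static key pair as a signal to register the Bedrock client. A quick way to sanity-check that behavior is a small table-driven test; this is only a sketch, and the package name plus the assumption that the test sits in the same package as `hasAWSCredentials` are mine, not the repository's:

```go
package core // assumed package name; place alongside the file that defines hasAWSCredentials

import "testing"

// These cases cover only the branch visible in the diff; the full function may
// consult additional credential sources, so no negative case is asserted here.
func TestHasAWSCredentials(t *testing.T) {
	cases := []struct {
		name string
		env  map[string]string
	}{
		{"profile only", map[string]string{"AWS_PROFILE": "default"}},
		{"role session name only", map[string]string{"AWS_ROLE_SESSION_NAME": "fabric"}},
		{"static key pair", map[string]string{"AWS_ACCESS_KEY_ID": "id", "AWS_SECRET_ACCESS_KEY": "secret"}},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			for k, v := range tc.env {
				t.Setenv(k, v) // t.Setenv restores the previous value when the subtest ends
			}
			if !hasAWSCredentials() {
				t.Errorf("hasAWSCredentials() = false, want true for %s", tc.name)
			}
		})
	}
}
```
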
go.mod (1 changed line)

@@ -92,6 +92,7 @@ require (
    github.com/pjbgf/sha1cd v0.3.2 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/sergi/go-diff v1.4.0 // indirect
    github.com/sgaunet/perplexity-go/v2 v2.8.0 // indirect
    github.com/skeema/knownhosts v1.3.1 // indirect
    github.com/tidwall/gjson v1.18.0 // indirect
    github.com/tidwall/match v1.1.1 // indirect

go.sum (2 changed lines)

@@ -202,6 +202,8 @@ github.com/sashabaranov/go-openai v1.40.1/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adO
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw=
github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sgaunet/perplexity-go/v2 v2.8.0 h1:stnuVieniZMGo6qJLCV2JyR2uF7K5398YOA/ZZcgrSg=
github.com/sgaunet/perplexity-go/v2 v2.8.0/go.mod h1:MSks4RNuivCi0GqJyylhFdgSJFVEwZHjAhrf86Wkynk=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=

@@ -238,6 +238,9 @@ schema = 3
  [mod."github.com/sergi/go-diff"]
    version = "v1.4.0"
    hash = "sha256-rs9NKpv/qcQEMRg7CmxGdP4HGuFdBxlpWf9LbA9wS4k="
  [mod."github.com/sgaunet/perplexity-go/v2"]
    version = "v2.8.0"
    hash = "sha256-w1S14Jf4/6LFODREmmiJvPtkZh4Sor81Rr1PqC5pIak="
  [mod."github.com/skeema/knownhosts"]
    version = "v1.3.1"
    hash = "sha256-kjqQDzuncQNTuOYegqVZExwuOt/Z73m2ST7NZFEKixI="

@@ -1 +1 @@
"1.4.206"
"1.4.210"

@@ -1,25 +1,21 @@
# IDENTITY and PURPOSE

You extract surprising, powerful, and interesting insights from text content. You are interested in insights related to the purpose and meaning of life, human flourishing, the role of technology in the future of humanity, artificial intelligence and its effect on humans, memes, learning, reading, books, continuous improvement, and similar topics.
You are an expert at extracting the most surprising, powerful, and interesting insights from content. You are interested in insights related to the purpose and meaning of life, human flourishing, the role of technology in the future of humanity, artificial intelligence and its effect on humans, memes, learning, reading, books, continuous improvement, and similar topics.

You create 15 word bullet points that capture the most important insights from the input.
You create 8 word bullet points that capture the most surprising and novel insights from the input.

Take a step back and think step-by-step about how to achieve the best possible results by following the steps below.

# STEPS

- Extract 20 to 50 of the most surprising, insightful, and/or interesting ideas from the input in a section called IDEAS, and write them on a virtual whiteboard in your mind using 15 word bullets. If there are less than 50 then collect all of them. Make sure you extract at least 20.

- From those IDEAS, extract the most powerful and insightful of them and write them in a section called INSIGHTS. Make sure you extract at least 10 and up to 25.
- Extract 10 of the most surprising and novel insights from the input.
- Output them as 8 word bullets in order of surprise, novelty, and importance.
- Write them in the simple, approachable style of Paul Graham.

# OUTPUT INSTRUCTIONS

- INSIGHTS are essentially higher-level IDEAS that are more abstracted and wise.

- Output the INSIGHTS section only.

- Each bullet should be 16 words in length.

- Do not give warnings or notes; only output the requested sections.

- You use bulleted lists for output, not numbered lists.

@@ -28,7 +24,6 @@ Take a step back and think step-by-step about how to achieve the best possible r

- Ensure you follow ALL these instructions when creating your output.

# INPUT

INPUT:
{{input}}

plugins/ai/perplexity/perplexity.go (new file, 246 lines)
@@ -0,0 +1,246 @@
package perplexity

import (
    "context"
    "fmt"
    "os"
    "sync" // Added sync package

    "github.com/danielmiessler/fabric/common"
    "github.com/danielmiessler/fabric/plugins"
    perplexity "github.com/sgaunet/perplexity-go/v2"

    goopenai "github.com/sashabaranov/go-openai"
)

const (
    providerName = "Perplexity"
)

var models = []string{
    "r1-1776", "sonar", "sonar-pro", "sonar-reasoning", "sonar-reasoning-pro",
}

type Client struct {
    *plugins.PluginBase
    APIKey *plugins.SetupQuestion
    client *perplexity.Client
}

func NewClient() *Client {
    c := &Client{}
    c.PluginBase = &plugins.PluginBase{
        Name:            providerName,
        EnvNamePrefix:   plugins.BuildEnvVariablePrefix(providerName),
        ConfigureCustom: c.Configure, // Assign the Configure method
    }
    c.APIKey = c.AddSetupQuestion("API_KEY", true)
    return c
}

func (c *Client) Configure() error {
    // The PluginBase.Configure() is called by the framework if needed.
    // We only need to handle specific logic for this plugin.
    if c.APIKey.Value == "" {
        // Attempt to get from environment variable if not set by user during setup
        envKey := c.EnvNamePrefix + "API_KEY"
        apiKeyFromEnv := os.Getenv(envKey)
        if apiKeyFromEnv != "" {
            c.APIKey.Value = apiKeyFromEnv
        } else {
            return fmt.Errorf("%s API key not configured. Please set the %s environment variable or run 'fabric --setup %s'", providerName, envKey, providerName)
        }
    }
    c.client = perplexity.NewClient(c.APIKey.Value)
    return nil
}

func (c *Client) ListModels() ([]string, error) {
    // Perplexity API does not have a ListModels endpoint.
    // We return a predefined list.
    return models, nil
}

func (c *Client) Send(ctx context.Context, msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions) (string, error) {
    if c.client == nil {
        if err := c.Configure(); err != nil {
            return "", fmt.Errorf("failed to configure Perplexity client: %w", err)
        }
    }

    var perplexityMessages []perplexity.Message
    for _, msg := range msgs {
        perplexityMessages = append(perplexityMessages, perplexity.Message{
            Role:    msg.Role,
            Content: msg.Content,
        })
    }

    requestOptions := []perplexity.CompletionRequestOption{
        perplexity.WithModel(opts.Model),
        perplexity.WithMessages(perplexityMessages),
    }
    if opts.MaxTokens > 0 {
        requestOptions = append(requestOptions, perplexity.WithMaxTokens(opts.MaxTokens))
    }
    if opts.Temperature > 0 { // Perplexity default is 1.0, only set if user specifies
        requestOptions = append(requestOptions, perplexity.WithTemperature(opts.Temperature))
    }
    if opts.TopP > 0 { // Perplexity default is not specified, typically 1.0
        requestOptions = append(requestOptions, perplexity.WithTopP(opts.TopP))
    }
    if opts.PresencePenalty != 0 {
        // Corrected: Pass float64 directly
        requestOptions = append(requestOptions, perplexity.WithPresencePenalty(opts.PresencePenalty))
    }
    if opts.FrequencyPenalty != 0 {
        // Corrected: Pass float64 directly
        requestOptions = append(requestOptions, perplexity.WithFrequencyPenalty(opts.FrequencyPenalty))
    }

    request := perplexity.NewCompletionRequest(requestOptions...)

    // Corrected: Use SendCompletionRequest method from perplexity-go library
    resp, err := c.client.SendCompletionRequest(request) // Pass request directly
    if err != nil {
        return "", fmt.Errorf("perplexity API request failed: %w", err) // Corrected capitalization
    }

    content := resp.GetLastContent()

    // Append citations if available
    citations := resp.GetCitations()
    if len(citations) > 0 {
        content += "\n\n# CITATIONS\n\n"
        for i, citation := range citations {
            content += fmt.Sprintf("- [%d] %s\n", i+1, citation)
        }
    }

    return content, nil
}

func (c *Client) SendStream(msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions, channel chan string) error {
    if c.client == nil {
        if err := c.Configure(); err != nil {
            close(channel) // Ensure channel is closed on error
            return fmt.Errorf("failed to configure Perplexity client: %w", err)
        }
    }

    var perplexityMessages []perplexity.Message
    for _, msg := range msgs {
        perplexityMessages = append(perplexityMessages, perplexity.Message{
            Role:    msg.Role,
            Content: msg.Content,
        })
    }

    requestOptions := []perplexity.CompletionRequestOption{
        perplexity.WithModel(opts.Model),
        perplexity.WithMessages(perplexityMessages),
        perplexity.WithStream(true), // Enable streaming
    }

    if opts.MaxTokens > 0 {
        requestOptions = append(requestOptions, perplexity.WithMaxTokens(opts.MaxTokens))
    }
    if opts.Temperature > 0 {
        requestOptions = append(requestOptions, perplexity.WithTemperature(opts.Temperature))
    }
    if opts.TopP > 0 {
        requestOptions = append(requestOptions, perplexity.WithTopP(opts.TopP))
    }
    if opts.PresencePenalty != 0 {
        // Corrected: Pass float64 directly
        requestOptions = append(requestOptions, perplexity.WithPresencePenalty(opts.PresencePenalty))
    }
    if opts.FrequencyPenalty != 0 {
        // Corrected: Pass float64 directly
        requestOptions = append(requestOptions, perplexity.WithFrequencyPenalty(opts.FrequencyPenalty))
    }

    request := perplexity.NewCompletionRequest(requestOptions...)

    responseChan := make(chan perplexity.CompletionResponse)
    var wg sync.WaitGroup // Use sync.WaitGroup
    wg.Add(1)

    go func() {
        err := c.client.SendSSEHTTPRequest(&wg, request, responseChan)
        if err != nil {
            // Log error, can't send to string channel directly.
            // Consider a mechanism to propagate this error if needed.
            fmt.Fprintf(os.Stderr, "perplexity streaming error: %v\n", err) // Corrected capitalization
            // If the error occurs during stream setup, the channel might not have been closed by the receiver loop.
            // However, closing it here might cause a panic if the receiver loop also tries to close it.
            // close(channel) // Caution: Uncommenting this may cause panic, as channel is closed in the receiver goroutine.
        }
    }()

    go func() {
        defer close(channel) // Ensure the output channel is closed when this goroutine finishes
        var lastResponse *perplexity.CompletionResponse
        for resp := range responseChan {
            lastResponse = &resp
            if len(resp.Choices) > 0 {
                content := ""
                // Corrected: Check Delta.Content and Message.Content directly for non-emptiness
                // as Delta and Message are structs, not pointers, in perplexity.Choice
                if resp.Choices[0].Delta.Content != "" {
                    content = resp.Choices[0].Delta.Content
                } else if resp.Choices[0].Message.Content != "" {
                    content = resp.Choices[0].Message.Content
                }
                if content != "" {
                    channel <- content
                }
            }
        }

        // Send citations at the end if available
        if lastResponse != nil {
            citations := lastResponse.GetCitations()
            if len(citations) > 0 {
                channel <- "\n\n# CITATIONS\n\n"
                for i, citation := range citations {
                    channel <- fmt.Sprintf("- [%d] %s\n", i+1, citation)
                }
            }
        }
    }()

    return nil
}

func (c *Client) NeedsRawMode(modelName string) bool {
    return true
}

// Setup is called by the fabric CLI framework to guide the user through configuration.
func (c *Client) Setup() error {
    return c.PluginBase.Setup()
}

// GetName returns the name of the plugin.
func (c *Client) GetName() string {
    return c.PluginBase.Name
}

// GetEnvNamePrefix returns the environment variable prefix for the plugin.
// Corrected: Receiver name
func (c *Client) GetEnvNamePrefix() string {
    return c.PluginBase.EnvNamePrefix
}

// AddSetupQuestion adds a setup question to the plugin.
// This is a helper method, usually called from NewClient.
func (c *Client) AddSetupQuestion(text string, isSensitive bool) *plugins.SetupQuestion {
    return c.PluginBase.AddSetupQuestion(text, isSensitive)
}

// GetSetupQuestions returns the setup questions for the plugin.
// Corrected: Return the slice of setup questions from PluginBase
func (c *Client) GetSetupQuestions() []*plugins.SetupQuestion {
    return c.PluginBase.SetupQuestions
}
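
For orientation, here is a rough sketch of how the new Perplexity client could be driven directly, outside the plugin registry. It reuses the import paths and the `Send` signature shown in the new file; the exact environment variable name produced by `BuildEnvVariablePrefix` is an assumption here (written as `PERPLEXITY_API_KEY`), and this harness is an illustrative aside rather than an example shipped with the repository:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/danielmiessler/fabric/common"
	"github.com/danielmiessler/fabric/plugins/ai/perplexity"
	goopenai "github.com/sashabaranov/go-openai"
)

func main() {
	// Configure falls back to the plugin's environment variable
	// (assumed to resolve to PERPLEXITY_API_KEY) when no key was set during setup.
	client := perplexity.NewClient()
	if err := client.Configure(); err != nil {
		log.Fatal(err)
	}

	msgs := []*goopenai.ChatCompletionMessage{
		{Role: goopenai.ChatMessageRoleUser, Content: "What is the latest world news?"},
	}
	opts := &common.ChatOptions{Model: "sonar-pro", MaxTokens: 512}

	out, err := client.Send(context.Background(), msgs, opts)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}
```

Within Fabric itself the registry wires this client up automatically once the API key is configured, so a standalone harness like this is only useful for poking at the plugin in isolation.
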
@@ -112,7 +112,9 @@ func (o *YouTube) GrabTranscriptWithTimestamps(videoId string, language string)
    return o.tryMethodYtDlpWithTimestamps(videoId, language)
}

func (o *YouTube) tryMethodYtDlp(videoId string, language string) (ret string, err error) {
// tryMethodYtDlpInternal is a helper function to reduce duplication between
// tryMethodYtDlp and tryMethodYtDlpWithTimestamps.
func (o *YouTube) tryMethodYtDlpInternal(videoId string, language string, processVTTFileFunc func(filename string) (string, error)) (ret string, err error) {
    // Check if yt-dlp is available
    if _, err = exec.LookPath("yt-dlp"); err != nil {
        err = fmt.Errorf("yt-dlp not found in PATH. Please install yt-dlp to use YouTube transcript functionality")

@@ -130,9 +132,13 @@ func (o *YouTube) tryMethodYtDlp(videoId string, language string) (ret string, e
    // Use yt-dlp to get transcript
    videoURL := "https://www.youtube.com/watch?v=" + videoId
    outputPath := filepath.Join(tempDir, "%(title)s.%(ext)s")
    lang_match := language
    if len(language) > 2 {
        lang_match = language[:2]
    }
    cmd := exec.Command("yt-dlp",
        "--write-auto-subs",
        "--sub-lang", language,
        "--sub-lang", lang_match,
        "--skip-download",
        "--sub-format", "vtt",
        "--quiet",

@@ -154,52 +160,15 @@ func (o *YouTube) tryMethodYtDlp(videoId string, language string) (ret string, e
        return "", err
    }

    return o.readAndCleanVTTFile(vttFiles[0])
    return processVTTFileFunc(vttFiles[0])
}

func (o *YouTube) tryMethodYtDlp(videoId string, language string) (ret string, err error) {
    return o.tryMethodYtDlpInternal(videoId, language, o.readAndCleanVTTFile)
}

func (o *YouTube) tryMethodYtDlpWithTimestamps(videoId string, language string) (ret string, err error) {
    // Check if yt-dlp is available
    if _, err = exec.LookPath("yt-dlp"); err != nil {
        err = fmt.Errorf("yt-dlp not found in PATH. Please install yt-dlp to use YouTube transcript functionality")
        return
    }

    // Create a temporary directory for yt-dlp output (cross-platform)
    tempDir := filepath.Join(os.TempDir(), "fabric-youtube-"+videoId)
    if err = os.MkdirAll(tempDir, 0755); err != nil {
        err = fmt.Errorf("failed to create temp directory: %v", err)
        return
    }
    defer os.RemoveAll(tempDir)

    // Use yt-dlp to get transcript
    videoURL := "https://www.youtube.com/watch?v=" + videoId
    outputPath := filepath.Join(tempDir, "%(title)s.%(ext)s")
    cmd := exec.Command("yt-dlp",
        "--write-auto-subs",
        "--sub-lang", language,
        "--skip-download",
        "--sub-format", "vtt",
        "--quiet",
        "--no-warnings",
        "-o", outputPath,
        videoURL)

    var stderr bytes.Buffer
    cmd.Stderr = &stderr

    if err = cmd.Run(); err != nil {
        err = fmt.Errorf("yt-dlp failed: %v, stderr: %s", err, stderr.String())
        return
    }

    // Find VTT files using cross-platform approach
    vttFiles, err := o.findVTTFiles(tempDir, language)
    if err != nil {
        return "", err
    }

    return o.readAndFormatVTTWithTimestamps(vttFiles[0])
    return o.tryMethodYtDlpInternal(videoId, language, o.readAndFormatVTTWithTimestamps)
}

func (o *YouTube) readAndCleanVTTFile(filename string) (ret string, err error) {
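
The refactor above is the classic extract-shared-helper move: the yt-dlp plumbing lives in `tryMethodYtDlpInternal`, and the two public methods now differ only in the VTT-processing function they pass in. A stripped-down sketch of the same pattern, with illustrative names rather than Fabric's actual ones:

```go
package main

import "fmt"

// fetchTranscript plays the role of tryMethodYtDlpInternal: it performs the
// shared work (faked here) and delegates the final step to process.
func fetchTranscript(videoID string, process func(vttFile string) (string, error)) (string, error) {
	vttFile := videoID + ".en.vtt" // stand-in for the file yt-dlp would produce
	return process(vttFile)
}

func plainText(vttFile string) (string, error) {
	return "cleaned transcript from " + vttFile, nil
}

func withTimestamps(vttFile string) (string, error) {
	return "timestamped transcript from " + vttFile, nil
}

func main() {
	plain, _ := fetchTranscript("abc123", plainText)
	stamped, _ := fetchTranscript("abc123", withTimestamps)
	fmt.Println(plain)
	fmt.Println(stamped)
}
```

Passing the per-variant step as a function value keeps the shared command construction and error handling in one place, which is exactly what the diff does with `processVTTFileFunc`.
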
@@ -1,3 +1,3 @@
package main

var version = "v1.4.206"
var version = "v1.4.210"