Mirror of https://github.com/danielmiessler/Fabric.git, synced 2026-01-11 07:18:03 -05:00
Compare commits
33 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 70fccaf2fb |  |
|  | 9a71f7c96d |  |
|  | 5da3db383d |  |
|  | 19438cbd20 |  |
|  | a0b71ee365 |  |
|  | 034513ece5 |  |
|  | 0affb9bab1 |  |
|  | 3305df8fb2 |  |
|  | 892c229076 |  |
|  | 599c5f2b9f |  |
|  | 19e5d8dbe0 |  |
|  | b772127738 |  |
|  | 5dd61abe2a |  |
|  | f45e140126 |  |
|  | 752a66cb48 |  |
|  | da28d91d65 |  |
|  | 5a66ca1c5a |  |
|  | 98f3da610b |  |
|  | 73ce92ccd9 |  |
|  | 7f3f1d641f |  |
|  | 44b5c46beb |  |
|  | 8d37c9d6b9 |  |
|  | 1138d0b60e |  |
|  | b78217088d |  |
|  | 76b889733d |  |
|  | 3911fd9f5d |  |
|  | b06e29f8a8 |  |
|  | 11a7e542e1 |  |
|  | 6681078259 |  |
|  | be1edf7b1d |  |
|  | 8ce748a1b1 |  |
|  | 96070f6f39 |  |
|  | ca3e89a889 |  |
```
@@ -11,6 +11,10 @@ on:
permissions:
  contents: write # Ensure the workflow has write permissions

concurrency:
  group: version-update
  cancel-in-progress: false

jobs:
  update-version:
    if: github.event_name == 'push' && github.ref == 'refs/heads/main'
@@ -30,6 +34,11 @@ jobs:
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

      - name: Pull latest main and tags
        run: |
          git pull --rebase origin main
          git fetch --tags

      - name: Get the latest tag
        id: get_latest_tag
        run: |
```
README.md (65)
@@ -13,9 +13,11 @@ Fabric is graciously supported by…
[![Static Badge](https://img.shields.io/badge/mission-human%20flourishing%20via%20AI%20augmentation-purple)](https://github.com/danielmiessler/fabric/graphs/contributors)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)

<div align="center">
<p class="align center">
<h4><code>fabric</code> is an open-source framework for augmenting humans using AI.</h4>
</p>
</div>

[Updates](#updates) •
[What and Why](#what-and-why) •
@@ -32,6 +34,30 @@ Fabric is graciously supported by…

</div>

## What and why

Since the start of modern AI in late 2022 we've seen an **_extraordinary_** number of AI applications for accomplishing tasks. There are thousands of websites, chatbots, mobile apps, and other interfaces for using all the different AI out there.

It's all really exciting and powerful, but _it's not easy to integrate this functionality into our lives._

<p class="align center">
<h4>In other words, AI doesn't have a capabilities problem—it has an <em>integration</em> problem.</h4>
</p>

**Fabric was created to address this by creating and organizing the fundamental units of AI—the prompts themselves!**

Fabric organizes prompts by real-world task, allowing people to create, collect, and organize their most important AI solutions in a single place for use in their favorite tools. And if you're command-line focused, you can use Fabric itself as the interface!

## Intro videos

Keep in mind that many of these were recorded when Fabric was Python-based, so remember to use the current [install instructions](#installation) below.

- [Network Chuck](https://www.youtube.com/watch?v=UbDyjIIGaxQ)
- [David Bombal](https://www.youtube.com/watch?v=vF-MQmVxnCs)
- [My Own Intro to the Tool](https://www.youtube.com/watch?v=wPEyyigh10g)
- [More Fabric YouTube Videos](https://www.youtube.com/results?search_query=fabric+ai)

## Navigation

- [`fabric`](#fabric)
@@ -87,34 +113,21 @@ Fabric is graciously supported by…
## Updates

> [!NOTE]
>
> June 17, 2025
>
> - Fabric now supports Perplexity AI. Configure it by using `fabric -S` to add your Perplexity AI API Key,
>   and then try:
>
> ```bash
> fabric -m sonar-pro "What is the latest world news?"
> ```
>
> June 11, 2025
>
> - Fabric's YouTube transcription now needs `yt-dlp` to be installed. Make sure to install the latest
>   version (2025.06.09 as of this note). The YouTube API key is only needed for comments (the `--comments` flag)
>   and metadata extraction (the `--metadata` flag).
>
> May 22, 2025
>
> - Fabric now supports Anthropic's Claude 4. Read the [blog post from Anthropic](https://www.anthropic.com/news/claude-4).

## What and why

Since the start of 2023 and GenAI we've seen a massive number of AI applications for accomplishing tasks. It's powerful, but _it's not easy to integrate this functionality into our lives._

<div align="center">
<h4>In other words, AI doesn't have a capabilities problem—it has an <em>integration</em> problem.</h4>
</div>

Fabric was created to address this by enabling everyone to granularly apply AI to everyday challenges.

## Intro videos

Keep in mind that many of these were recorded when Fabric was Python-based, so remember to use the current [install instructions](#installation) below.

- [Network Chuck](https://www.youtube.com/watch?v=UbDyjIIGaxQ)
- [David Bombal](https://www.youtube.com/watch?v=vF-MQmVxnCs)
- [My Own Intro to the Tool](https://www.youtube.com/watch?v=wPEyyigh10g)
- [More Fabric YouTube Videos](https://www.youtube.com/results?search_query=fabric+ai)
> - Fabric's YouTube transcription now needs `yt-dlp` to be installed. Make sure to install the latest
>   version (2025.06.09 as of this note). The YouTube API key is only needed for comments (the `--comments` flag)
>   and metadata extraction (the `--metadata` flag).

## Philosophy
```
@@ -25,6 +25,7 @@ type ChatOptions struct {
    Raw                bool
    Seed               int
    ModelContextLength int
    MaxTokens          int
}

// NormalizeMessages remove empty messages and ensure messages order user-assist-user
```
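The body of NormalizeMessages is not part of this diff, so the following is only a hedged sketch of what the comment describes, assuming the goopenai.ChatCompletionMessage type used elsewhere in these changes: drop empty messages and fold consecutive same-role turns together so the sequence alternates user/assistant.

```go
// Hedged sketch only; the real NormalizeMessages implementation is not shown in this diff.
package common

import goopenai "github.com/sashabaranov/go-openai"

// normalizeMessagesSketch drops empty messages and merges consecutive messages
// from the same role so the result alternates user -> assistant -> user.
func normalizeMessagesSketch(msgs []*goopenai.ChatCompletionMessage) (ret []*goopenai.ChatCompletionMessage) {
    for _, msg := range msgs {
        if msg == nil || msg.Content == "" {
            continue // skip empty messages
        }
        if len(ret) > 0 && ret[len(ret)-1].Role == msg.Role {
            // Two adjacent messages share a role; fold them together instead of
            // breaking the expected alternation.
            ret[len(ret)-1].Content += "\n" + msg.Content
            continue
        }
        ret = append(ret, msg)
    }
    return
}
```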
```
@@ -12,6 +12,7 @@ import (

    "github.com/danielmiessler/fabric/plugins/ai/bedrock"
    "github.com/danielmiessler/fabric/plugins/ai/exolab"
    "github.com/danielmiessler/fabric/plugins/ai/perplexity" // Added Perplexity plugin
    "github.com/danielmiessler/fabric/plugins/strategy"

    "github.com/samber/lo"
@@ -35,6 +36,32 @@ import (
    "github.com/danielmiessler/fabric/plugins/tools/youtube"
)

// hasAWSCredentials checks if any AWS credentials are present either in the
// environment variables or in the default/shared credentials file. It doesn't
// attempt to verify the validity of the credentials, but simply ensures that a
// potential authentication source exists so we can safely initialize the
// Bedrock client without causing the AWS SDK to search for credentials.
func hasAWSCredentials() bool {
    if os.Getenv("AWS_PROFILE") != "" ||
        os.Getenv("AWS_ROLE_SESSION_NAME") != "" ||
        (os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "") {
        return true
    }

    credFile := os.Getenv("AWS_SHARED_CREDENTIALS_FILE")
    if credFile == "" {
        if home, err := os.UserHomeDir(); err == nil {
            credFile = filepath.Join(home, ".aws", "credentials")
        }
    }
    if credFile != "" {
        if _, err := os.Stat(credFile); err == nil {
            return true
        }
    }
    return false
}

func NewPluginRegistry(db *fsdb.Db) (ret *PluginRegistry, err error) {
    ret = &PluginRegistry{
        Db: db,
@@ -67,9 +94,13 @@ func NewPluginRegistry(db *fsdb.Db) (ret *PluginRegistry, err error) {
        anthropic.NewClient(),
        lmstudio.NewClient(),
        exolab.NewClient(),
        bedrock.NewClient(),
        perplexity.NewClient(), // Added Perplexity client
    )

    if hasAWSCredentials() {
        vendors = append(vendors, bedrock.NewClient())
    }

    // Add all OpenAI-compatible providers
    for providerName := range openai_compatible.ProviderMap {
        provider, _ := openai_compatible.GetProviderByName(providerName)
```
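A quick way to sanity-check the credential gating above is a small test sketch (illustrative only and not part of the commit; the package name and values are assumptions): it sets the environment variables hasAWSCredentials inspects and expects the check to pass, which is the condition under which the Bedrock vendor is now registered.

```go
// Illustrative test sketch, not part of the commit; the package name is assumed.
package core

import "testing"

func TestHasAWSCredentialsFromEnv(t *testing.T) {
    // hasAWSCredentials also accepts AWS_PROFILE, AWS_ROLE_SESSION_NAME, or an
    // existing shared credentials file; this covers the access-key pair case.
    t.Setenv("AWS_ACCESS_KEY_ID", "example-key")        // hypothetical value
    t.Setenv("AWS_SECRET_ACCESS_KEY", "example-secret") // hypothetical value
    if !hasAWSCredentials() {
        t.Fatal("expected AWS credentials to be detected from the environment")
    }
}
```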
go.mod (8)
```
@@ -41,13 +41,14 @@ require (
    github.com/ProtonMail/go-crypto v1.3.0 // indirect
    github.com/andybalholm/cascadia v1.3.3 // indirect
    github.com/araddon/dateparse v0.0.0-20210429162001-6b43995a97de // indirect
    github.com/aws/aws-sdk-go-v2 v1.36.3 // indirect
    github.com/aws/aws-sdk-go-v2 v1.36.4 // indirect
    github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 // indirect
    github.com/aws/aws-sdk-go-v2/credentials v1.17.27 // indirect
    github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11 // indirect
    github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 // indirect
    github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 // indirect
    github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.35 // indirect
    github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.35 // indirect
    github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
    github.com/aws/aws-sdk-go-v2/service/bedrock v1.34.1 // indirect
    github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 // indirect
    github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.17 // indirect
    github.com/aws/aws-sdk-go-v2/service/sso v1.22.4 // indirect
@@ -91,6 +92,7 @@ require (
    github.com/pjbgf/sha1cd v0.3.2 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/sergi/go-diff v1.4.0 // indirect
    github.com/sgaunet/perplexity-go/v2 v2.8.0 // indirect
    github.com/skeema/knownhosts v1.3.1 // indirect
    github.com/tidwall/gjson v1.18.0 // indirect
    github.com/tidwall/match v1.1.1 // indirect
```
go.sum (10)
```
@@ -33,6 +33,8 @@ github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aws/aws-sdk-go-v2 v1.36.3 h1:mJoei2CxPutQVxaATCzDUjcZEjVRdpsiiXi2o38yqWM=
github.com/aws/aws-sdk-go-v2 v1.36.3/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg=
github.com/aws/aws-sdk-go-v2 v1.36.4 h1:GySzjhVvx0ERP6eyfAbAuAXLtAda5TEy19E5q5W8I9E=
github.com/aws/aws-sdk-go-v2 v1.36.4/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10 h1:zAybnyUQXIZ5mok5Jqwlf58/TFE7uvd3IAsa1aF9cXs=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.10/go.mod h1:qqvMj6gHLR/EXWZw4ZbqlPbQUyenf4h82UQUlKc+l14=
github.com/aws/aws-sdk-go-v2/config v1.27.27 h1:HdqgGt1OAP0HkEDDShEl0oSYa9ZZBSOmKpdpsDMdO90=
@@ -43,10 +45,16 @@ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11 h1:KreluoV8FZDEtI6Co2xuNk
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.11/go.mod h1:SeSUYBLsMYFoRvHE0Tjvn7kbxaUhl75CJi1sbfhMxkU=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 h1:ZK5jHhnrioRkUNOc+hOgQKlUL5JeC3S6JgLxtQ+Rm0Q=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34/go.mod h1:p4VfIceZokChbA9FzMbRGz5OV+lekcVtHlPKEO0gSZY=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.35 h1:o1v1VFfPcDVlK3ll1L5xHsaQAFdNtZ5GXnNR7SwueC4=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.35/go.mod h1:rZUQNYMNG+8uZxz9FOerQJ+FceCiodXvixpeRtdESrU=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 h1:SZwFm17ZUNNg5Np0ioo/gq8Mn6u9w19Mri8DnJ15Jf0=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34/go.mod h1:dFZsC0BLo346mvKQLWmoJxT+Sjp+qcVR1tRVHQGOH9Q=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.35 h1:R5b82ubO2NntENm3SAm0ADME+H630HomNJdgv+yZ3xw=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.35/go.mod h1:FuA+nmgMRfkzVKYDNEqQadvEMxtxl9+RLT9ribCwEMs=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 h1:hT8rVHwugYE2lEfdFE0QWVo81lF7jMrYJVDWI+f+VxU=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0/go.mod h1:8tu/lYfQfFe6IGnaOdrpVgEL2IrrDOf6/m9RQum4NkY=
github.com/aws/aws-sdk-go-v2/service/bedrock v1.34.1 h1:sD4KqDKG8aOaMWaWTMB8l8VnLa/Di7XHb0Uf4plrndA=
github.com/aws/aws-sdk-go-v2/service/bedrock v1.34.1/go.mod h1:lrn8DOVFYFeaUZKxJ95T5eGDBjnhffgGz68Wq2sfBbA=
github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.30.0 h1:eMOwQ8ZZK+76+08RfxeaGUtRFN6wxmD1rvqovc2kq2w=
github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.30.0/go.mod h1:0b5Rq7rUvSQFYHI1UO0zFTV/S6j6DUyuykXA80C+YOI=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.3 h1:dT3MqvGhSoaIhRseqw2I0yH81l7wiR2vjs57O51EAm8=
@@ -194,6 +202,8 @@ github.com/sashabaranov/go-openai v1.40.1/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adO
github.com/scylladb/termtables v0.0.0-20191203121021-c4c0b6d42ff4/go.mod h1:C1a7PQSMz9NShzorzCiG2fk9+xuCgLkPeCvMHYR2OWg=
github.com/sergi/go-diff v1.4.0 h1:n/SP9D5ad1fORl+llWyN+D6qoUETXNZARKjyY2/KVCw=
github.com/sergi/go-diff v1.4.0/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sgaunet/perplexity-go/v2 v2.8.0 h1:stnuVieniZMGo6qJLCV2JyR2uF7K5398YOA/ZZcgrSg=
github.com/sgaunet/perplexity-go/v2 v2.8.0/go.mod h1:MSks4RNuivCi0GqJyylhFdgSJFVEwZHjAhrf86Wkynk=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/skeema/knownhosts v1.3.1 h1:X2osQ+RAjK76shCbvhHHHVl3ZlgDm8apHEHFqRjnBY8=
github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQNxPwTcfiY=
```
```
@@ -44,8 +44,8 @@ schema = 3
  version = "v0.1.4"
  hash = "sha256-ZZ7U5X0gWOu8zcjZcWbcpzGOGdycwq0TjTFh/eZHjXk="
[mod."github.com/aws/aws-sdk-go-v2"]
  version = "v1.36.3"
  hash = "sha256-vPTkqBoyjitwpN8cgkjjJZkOJdQgqounK/5WgBcDcdM="
  version = "v1.36.4"
  hash = "sha256-Cpdphp8FQUbQlhAYvtPKDh1oZc84+/0bzLlx8CM1/BM="
[mod."github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream"]
  version = "v1.6.10"
  hash = "sha256-9+ZMhWxtsm7ZtZCjBV5PZkOR5rt3bCOznuv45Iwf55c="
@@ -59,14 +59,17 @@ schema = 3
  version = "v1.16.11"
  hash = "sha256-uedtRd/SIcFJlYZg1jtJdIJViZq1Poks9/J2Bm9/Ehw="
[mod."github.com/aws/aws-sdk-go-v2/internal/configsources"]
  version = "v1.3.34"
  hash = "sha256-PrqDvN7iVniP3+XnXdg3yLgUS3BeIB1Z6hi9/dQXdMs="
  version = "v1.3.35"
  hash = "sha256-AyQ+eJvyhahypIAqPScdkn44MYwBcr9iyrMC1BRSeZI="
[mod."github.com/aws/aws-sdk-go-v2/internal/endpoints/v2"]
  version = "v2.6.34"
  hash = "sha256-GTxKcV6XujgDA08vXTiBpCCC+frj9b6pV6ACdXh9NGQ="
  version = "v2.6.35"
  hash = "sha256-c8K+Nk5XrFMWaaxVsyhKgyJBZhs3Hkhjr/dIDXWZfSQ="
[mod."github.com/aws/aws-sdk-go-v2/internal/ini"]
  version = "v1.8.0"
  hash = "sha256-v76jTAr4rEgS5en49ikLh6nuvclN+VjpOPj83ZQ3sLo="
[mod."github.com/aws/aws-sdk-go-v2/service/bedrock"]
  version = "v1.34.1"
  hash = "sha256-OK7t+ieq4pviCnnhfSytANBF5Lwdz4KxjN10CC5pXyY="
[mod."github.com/aws/aws-sdk-go-v2/service/bedrockruntime"]
  version = "v1.30.0"
  hash = "sha256-MsEQfbqIREtMikRFqBpLCqdAC4gfgPSNbk08k5OJTbo="
@@ -235,6 +238,9 @@ schema = 3
[mod."github.com/sergi/go-diff"]
  version = "v1.4.0"
  hash = "sha256-rs9NKpv/qcQEMRg7CmxGdP4HGuFdBxlpWf9LbA9wS4k="
[mod."github.com/sgaunet/perplexity-go/v2"]
  version = "v2.8.0"
  hash = "sha256-w1S14Jf4/6LFODREmmiJvPtkZh4Sor81Rr1PqC5pIak="
[mod."github.com/skeema/knownhosts"]
  version = "v1.3.1"
  hash = "sha256-kjqQDzuncQNTuOYegqVZExwuOt/Z73m2ST7NZFEKixI="
```
```
@@ -1 +1 @@
"1.4.203"
"1.4.210"
```
@@ -1,29 +0,0 @@
# IDENTITY and PURPOSE

You are a wisdom extraction service for text content. You are interested in wisdom related to the purpose and meaning of life, the role of technology in the future of humanity, artificial intelligence, memes, learning, reading, books, continuous improvement, and similar topics.

Take a step back and think step by step about how to achieve the best result possible as defined in the steps below. You have a lot of freedom to make this work well.

## OUTPUT SECTIONS

1. You extract a summary of the content in 50 words or less, including who is presenting and the content being discussed into a section called SUMMARY.

2. You extract the top 50 ideas from the input in a section called IDEAS:. If there are less than 50 then collect all of them.

3. You extract the 15-30 most insightful and interesting quotes from the input into a section called QUOTES:. Use the exact quote text from the input.

4. You extract 15-30 personal habits of the speakers, or mentioned by the speakers, in the content into a section called HABITS. Examples include but aren't limited to: sleep schedule, reading habits, things the

5. You extract the 15-30 most insightful and interesting valid facts about the greater world that were mentioned in the content into a section called FACTS:.

6. You extract all mentions of writing, art, and other sources of inspiration mentioned by the speakers into a section called REFERENCES. This should include any and all references to something that the speaker mentioned.

7. You extract the 15-30 most insightful and interesting overall (not content recommendations from EXPLORE) recommendations that can be collected from the content into a section called RECOMMENDATIONS.

## OUTPUT INSTRUCTIONS

1. You only output Markdown.
2. Do not give warnings or notes; only output the requested sections.
3. You use numbered lists, not bullets.
4. Do not repeat ideas, quotes, habits, facts, or references.
5. Do not start items with the same opening words.
@@ -1 +0,0 @@
CONTENT:
@@ -1,25 +1,21 @@
# IDENTITY and PURPOSE

You extract surprising, powerful, and interesting insights from text content. You are interested in insights related to the purpose and meaning of life, human flourishing, the role of technology in the future of humanity, artificial intelligence and its effect on humans, memes, learning, reading, books, continuous improvement, and similar topics.
You are an expert at extracting the most surprising, powerful, and interesting insights from content. You are interested in insights related to the purpose and meaning of life, human flourishing, the role of technology in the future of humanity, artificial intelligence and its effect on humans, memes, learning, reading, books, continuous improvement, and similar topics.

You create 15 word bullet points that capture the most important insights from the input.
You create 8 word bullet points that capture the most surprising and novel insights from the input.

Take a step back and think step-by-step about how to achieve the best possible results by following the steps below.

# STEPS

- Extract 20 to 50 of the most surprising, insightful, and/or interesting ideas from the input in a section called IDEAS, and write them on a virtual whiteboard in your mind using 15 word bullets. If there are less than 50 then collect all of them. Make sure you extract at least 20.

- From those IDEAS, extract the most powerful and insightful of them and write them in a section called INSIGHTS. Make sure you extract at least 10 and up to 25.
- Extract 10 of the most surprising and novel insights from the input.
- Output them as 8 word bullets in order of surprise, novelty, and importance.
- Write them in the simple, approachable style of Paul Graham.

# OUTPUT INSTRUCTIONS

- INSIGHTS are essentially higher-level IDEAS that are more abstracted and wise.

- Output the INSIGHTS section only.

- Each bullet should be 16 words in length.

- Do not give warnings or notes; only output the requested sections.

- You use bulleted lists for output, not numbered lists.
@@ -28,7 +24,6 @@ Take a step back and think step-by-step about how to achieve the best possible r

- Ensure you follow ALL these instructions when creating your output.

# INPUT

INPUT:
{{input}}
@@ -1,29 +0,0 @@
# IDENTITY and PURPOSE

You are a wisdom extraction service for text content. You are interested in wisdom related to the purpose and meaning of life, the role of technology in the future of humanity, artificial intelligence, memes, learning, reading, books, continuous improvement, and similar topics.

Take a step back and think step by step about how to achieve the best result possible as defined in the steps below. You have a lot of freedom to make this work well.

## OUTPUT SECTIONS

1. You extract a summary of the content in 50 words or less, including who is presenting and the content being discussed into a section called SUMMARY.

2. You extract the top 50 ideas from the input in a section called IDEAS:. If there are less than 50 then collect all of them.

3. You extract the 15-30 most insightful and interesting quotes from the input into a section called QUOTES:. Use the exact quote text from the input.

4. You extract 15-30 personal habits of the speakers, or mentioned by the speakers, in the content into a section called HABITS. Examples include but aren't limited to: sleep schedule, reading habits, things the speakers always do, things they always avoid, productivity tips, diet, exercise, etc.

5. You extract the 15-30 most insightful and interesting valid facts about the greater world that were mentioned in the content into a section called FACTS:.

6. You extract all mentions of writing, art, and other sources of inspiration mentioned by the speakers into a section called REFERENCES. This should include any and all references to something that the speaker mentioned.

7. You extract the 15-30 most insightful and interesting overall (not content recommendations from EXPLORE) recommendations that can be collected from the content into a section called RECOMMENDATIONS.

## OUTPUT INSTRUCTIONS

1. You only output Markdown.
2. Do not give warnings or notes; only output the requested sections.
3. You use numbered lists, not bullets.
4. Do not repeat ideas, quotes, habits, facts, or references.
5. Do not start items with the same opening words.
@@ -1 +0,0 @@
CONTENT:
@@ -2,13 +2,13 @@

// Who you are

You are a hyper-intelligent AI system with a 4,312 IQ. You convert jacked up HTML to proper markdown using a set of rules.
You are a hyper-intelligent AI system with a 4,312 IQ. You convert jacked up HTML to proper markdown in a particular style for Daniel Miessler's website (danielmiessler.com) using a set of rules.

# GOAL

// What we are trying to achieve

1. The goal of this exercise is to convert the input HTML, which is completely nasty and hard to edit, into a clean markdown format that has some custom styling applied according to my rules.
1. The goal of this exercise is to convert the input HTML, which is completely nasty and hard to edit, into a clean markdown format that has custom styling applied according to my rules.

2. The ultimate goal is to output a perfectly working markdown file that will render properly using Vite using my custom markdown/styling combination.

@@ -32,18 +32,59 @@ You are a hyper-intelligent AI system with a 4,312 IQ. You convert jacked up HTM

Our new markdown / styling uses the following tags for styling:

<callout></callous> for wrapping a callous
### Quotes

Wherever you see regular quotes like "Something in here", use:

<blockquote><cite></cite></blockquote>

Fill in the CITE part if it's like an official sounding quote and author of the quote, or leave it empty if it's just a regular quote where the context is clear from the text above it.

### YouTube Videos

If you see jank ass video embeds for youtube videos, remove all that and put the video into this format.

<div class="video-container">
<iframe src="" frameborder="0" allowfullscreen>VIDEO URL HERE</iframe>
</div>

### Callouts

<callout></callout> for wrapping a callout. This is like a narrator voice, or a piece of wisdom. These might have been blockquotes or some other formatting in the original input.

### Blockquotes
<blockquote><cite></cite>></blockquote> for matching a block quote (note the embedded citation in there where applicable)

### Asides

<aside></aside> These are for little side notes, which go in the left sidebar in the new format.

### Definitions

<definition><source></source></definition> This is for like a new term I'm coming up with.

### Notes

<bottomNote>

1. Note one
2. Note two.
3. Etc.

</bottomNote>

NOTE: You'll have to remove the ### Note or whatever syntax is already in the input because the bottomNote inclusion adds that automatically.

# OUTPUT INSTRUCTIONS

// What the output should look like:

- The output should perfectly preserve the input, only it should look way better once rendered to HTML because it'll be following the new styling.
- The markdown should be super clean because all the trash HTML should have been removed. Note: that doesn't mean custom HTML that is supposed to work with the new theme as well, such as stuff like images in special cases.
- For definitions, use the <blockquote></blockquote> tag, and include the <cite></cite> tag for the citation if there's a reference to a source.

- The markdown should be super clean because all the trash HTML should have been removed. Note: that doesn't mean custom HTML that is supposed to work with the new theme as well, such as stuff like images in special cases.

- Ensure YOU HAVE NOT CHANGED THE INPUT CONTENT—only the formatting. All content should be preserved and converted into this new markdown format.

# INPUT

INPUT:
{{input}}
@@ -1,25 +0,0 @@
# IDENTITY and PURPOSE

You are a summarization system that extracts the most interesting, useful, and surprising aspects of an article.

Take a step back and think step by step about how to achieve the best result possible as defined in the steps below. You have a lot of freedom to make this work well.

## OUTPUT SECTIONS

1. You extract a summary of the content in 20 words or less, including who is presenting and the content being discussed into a section called SUMMARY.

2. You extract the top 20 ideas from the input in a section called IDEAS:.

3. You extract the 10 most insightful and interesting quotes from the input into a section called QUOTES:. Use the exact quote text from the input.

4. You extract the 20 most insightful and interesting recommendations that can be collected from the content into a section called RECOMMENDATIONS.

5. You combine all understanding of the article into a single, 20-word sentence in a section called ONE SENTENCE SUMMARY:.

## OUTPUT INSTRUCTIONS

1. You only output Markdown.
2. Do not give warnings or notes; only output the requested sections.
3. You use numbered lists, not bullets.
4. Do not repeat ideas, or quotes.
5. Do not start items with the same opening words.
@@ -1 +0,0 @@
CONTENT:
```
@@ -15,6 +15,7 @@ import (
    "github.com/aws/aws-sdk-go-v2/aws"
    "github.com/aws/aws-sdk-go-v2/aws/middleware"
    "github.com/aws/aws-sdk-go-v2/config"
    "github.com/aws/aws-sdk-go-v2/service/bedrock"
    "github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
    "github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types"

@@ -24,7 +25,8 @@ import (
// BedrockClient is a plugin to add support for Amazon Bedrock
type BedrockClient struct {
    *plugins.PluginBase
    client *bedrockruntime.Client
    runtimeClient      *bedrockruntime.Client
    controlPlaneClient *bedrock.Client
}

// NewClient returns a new Bedrock plugin client
@@ -39,14 +41,16 @@ func NewClient() (ret *BedrockClient) {
        fmt.Printf("Unable to load AWS Config: %s\n", err)
    }

    client := bedrockruntime.NewFromConfig(cfg)
    runtimeClient := bedrockruntime.NewFromConfig(cfg)
    controlPlaneClient := bedrock.NewFromConfig(cfg)

    ret = &BedrockClient{
        PluginBase: &plugins.PluginBase{
            Name:          vendorName,
            EnvNamePrefix: plugins.BuildEnvVariablePrefix(vendorName),
        },
        client:             client,
        runtimeClient:      runtimeClient,
        controlPlaneClient: controlPlaneClient,
    }

    return
@@ -54,7 +58,32 @@ func NewClient() (ret *BedrockClient) {

// ListModels lists the models available for use with the Bedrock plugin
func (c *BedrockClient) ListModels() ([]string, error) {
    return MODELS, nil
    models := []string{}
    ctx := context.TODO()

    foundationModels, err := c.controlPlaneClient.ListFoundationModels(ctx, &bedrock.ListFoundationModelsInput{})
    if err != nil {
        return nil, err
    }

    for _, model := range foundationModels.ModelSummaries {
        models = append(models, *model.ModelId)
    }

    inferenceProfilesPaginator := bedrock.NewListInferenceProfilesPaginator(c.controlPlaneClient, &bedrock.ListInferenceProfilesInput{})

    for inferenceProfilesPaginator.HasMorePages() {
        inferenceProfiles, err := inferenceProfilesPaginator.NextPage(context.TODO())
        if err != nil {
            return nil, err
        }

        for _, profile := range inferenceProfiles.InferenceProfileSummaries {
            models = append(models, *profile.InferenceProfileId)
        }
    }

    return models, nil
}
```
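With this change, ListModels asks Bedrock itself for the available foundation models and cross-region inference profiles instead of returning the hard-coded MODELS slice (deleted further down). A minimal usage sketch, assuming AWS credentials are already configured so the plugin can load the default AWS config; the error handling here is illustrative:

```go
// Illustrative only: prints the model and inference-profile IDs the plugin exposes.
package main

import (
    "fmt"
    "log"

    "github.com/danielmiessler/fabric/plugins/ai/bedrock"
)

func main() {
    client := bedrock.NewClient()
    models, err := client.ListModels() // foundation models + inference profiles
    if err != nil {
        log.Fatalf("listing Bedrock models: %v", err)
    }
    for _, id := range models {
        fmt.Println(id)
    }
}
```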
```
// SendStream sends the messages to the the Bedrock ConverseStream API
@@ -70,7 +99,7 @@ func (c *BedrockClient) SendStream(msgs []*goopenai.ChatCompletionMessage, opts
        TopP: aws.Float32(float32(opts.TopP))},
    }

    response, err := c.client.ConverseStream(context.TODO(), &converseInput)
    response, err := c.runtimeClient.ConverseStream(context.TODO(), &converseInput)
    if err != nil {
        fmt.Printf("Error conversing with Bedrock: %s\n", err)
        return
@@ -114,7 +143,7 @@ func (c *BedrockClient) Send(ctx context.Context, msgs []*goopenai.ChatCompletio
        ModelId:  aws.String(opts.Model),
        Messages: messages,
    }
    response, err := c.client.Converse(ctx, &converseInput)
    response, err := c.runtimeClient.Converse(ctx, &converseInput)
    if err != nil {
        fmt.Printf("Error conversing with Bedrock: %s\n", err)
        return "", err
@@ -157,152 +186,3 @@ func (c *BedrockClient) toMessages(inputMessages []*goopenai.ChatCompletionMessa

    return
}

var MODELS = []string{
    "amazon.nova-micro-v1:0",
    "amazon.nova-lite-v1:0",
    "amazon.nova-pro-v1:0",
    "amazon.nova-premier-v1:0",

    "amazon.titan-tg1-large",
    "amazon.titan-text-premier-v1:0",

    "amazon.titan-text-lite-v1",
    "amazon.titan-text-express-v1",

    "ai21.jamba-instruct-v1:0",
    "ai21.jamba-1-5-large-v1:0",
    "ai21.jamba-1-5-mini-v1:0",

    "anthropic.claude-instant-v1",
    "anthropic.claude-v2",
    "anthropic.claude-v2:1",
    "anthropic.claude-3-haiku-20240307-v1:0",
    "anthropic.claude-3-sonnet-20240229-v1:0",
    "anthropic.claude-3-opus-20240229-v1:0",
    "anthropic.claude-3-5-haiku-20241022-v1:0",
    "anthropic.claude-3-5-sonnet-20240620-v1:0",
    "anthropic.claude-3-5-sonnet-20241022-v2:0",
    "anthropic.claude-3-7-sonnet-20250219-v1:0",
    "anthropic.claude-sonnet-4-20250514-v1:0",
    "anthropic.claude-opus-4-20250514-v1:0",

    "meta.llama3-8b-instruct-v1:0",
    "meta.llama3-70b-instruct-v1:0",
    "meta.llama3-1-8b-instruct-v1:0",
    "meta.llama3-1-70b-instruct-v1:0",
    "meta.llama3-2-11b-instruct-v1:0",
    "meta.llama3-2-90b-instruct-v1:0",
    "meta.llama3-2-1b-instruct-v1:0",
    "meta.llama3-2-3b-instruct-v1:0",
    "meta.llama3-3-70b-instruct-v1:0",
    "meta.llama4-scout-17b-instruct-v1:0",
    "meta.llama4-maverick-17b-instruct-v1:0",

    "mistral.mistral-7b-instruct-v0:2",
    "mistral.mixtral-8x7b-instruct-v0:1",
    "mistral.mistral-small-2402-v1:0",
    "mistral.mistral-large-2402-v1:0",
    "mistral.pixtral-large-2502-v1:0",

    // Cross Region Inferences Profiles
    // https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html#inference-profiles-support-system
    "us.amazon.nova-lite-v1:0",
    "us.amazon.nova-lite-v1:0",
    "us.amazon.nova-micro-v1:0",
    "us.amazon.nova-micro-v1:0",
    "us.amazon.nova-premier-v1:0",
    "us.amazon.nova-premier-v1:0",
    "us.amazon.nova-pro-v1:0",
    "us.amazon.nova-pro-v1:0",
    "us.anthropic.claude-3-5-haiku-20241022-v1:0",
    "us.anthropic.claude-3-5-haiku-20241022-v1:0",
    "us.anthropic.claude-3-5-sonnet-20240620-v1:0",
    "us.anthropic.claude-3-5-sonnet-20240620-v1:0",
    "us.anthropic.claude-3-5-sonnet-20241022-v2:0",
    "us.anthropic.claude-3-5-sonnet-20241022-v2:0",
    "us.anthropic.claude-3-7-sonnet-20250219-v1:0",
    "us.anthropic.claude-3-7-sonnet-20250219-v1:0",
    "us.anthropic.claude-3-haiku-20240307-v1:0",
    "us.anthropic.claude-3-haiku-20240307-v1:0",
    "us.anthropic.claude-3-opus-20240229-v1:0",
    "us.anthropic.claude-3-opus-20240229-v1:0",
    "us.anthropic.claude-3-sonnet-20240229-v1:0",
    "us.anthropic.claude-3-sonnet-20240229-v1:0",
    "us.anthropic.claude-opus-4-20250514-v1:0",
    "us.anthropic.claude-opus-4-20250514-v1:0",
    "us.anthropic.claude-sonnet-4-20250514-v1:0",
    "us.anthropic.claude-sonnet-4-20250514-v1:0",
    "us.deepseek.r1-v1:0",
    "us.deepseek.r1-v1:0",
    "us.meta.llama3-1-405b-instruct-v1:0",
    "us.meta.llama3-1-405b-instruct-v1:0",
    "us.meta.llama3-1-70b-instruct-v1:0",
    "us.meta.llama3-1-70b-instruct-v1:0",
    "us.meta.llama3-1-8b-instruct-v1:0",
    "us.meta.llama3-1-8b-instruct-v1:0",
    "us.meta.llama3-2-11b-instruct-v1:0",
    "us.meta.llama3-2-11b-instruct-v1:0",
    "us.meta.llama3-2-1b-instruct-v1:0",
    "us.meta.llama3-2-1b-instruct-v1:0",
    "us.meta.llama3-2-3b-instruct-v1:0",
    "us.meta.llama3-2-3b-instruct-v1:0",
    "us.meta.llama3-2-90b-instruct-v1:0",
    "us.meta.llama3-2-90b-instruct-v1:0",
    "us.meta.llama3-3-70b-instruct-v1:0",
    "us.meta.llama3-3-70b-instruct-v1:0",
    "us.meta.llama4-maverick-17b-instruct-v1:0",
    "us.meta.llama4-maverick-17b-instruct-v1:0",
    "us.meta.llama4-scout-17b-instruct-v1:0",
    "us.meta.llama4-scout-17b-instruct-v1:0",
    "us.mistral.pixtral-large-2502-v1:0",
    "us.mistral.pixtral-large-2502-v1:0",
    "us.writer.palmyra-x4-v1:0",
    "us.writer.palmyra-x4-v1:0",
    "us.writer.palmyra-x5-v1:0",
    "us.writer.palmyra-x5-v1:0",
    "us-gov.anthropic.claude-3-5-sonnet-20240620-v1:0",
    "us-gov.anthropic.claude-3-5-sonnet-20240620-v1:0",
    "us-gov.anthropic.claude-3-haiku-20240307-v1:0",
    "us-gov.anthropic.claude-3-haiku-20240307-v1:0",
    "eu.amazon.nova-lite-v1:0",
    "eu.amazon.nova-lite-v1:0",
    "eu.amazon.nova-micro-v1:0",
    "eu.amazon.nova-micro-v1:0",
    "eu.amazon.nova-pro-v1:0",
    "eu.amazon.nova-pro-v1:0",
    "eu.anthropic.claude-3-5-sonnet-20240620-v1:0",
    "eu.anthropic.claude-3-5-sonnet-20240620-v1:0",
    "eu.anthropic.claude-3-7-sonnet-20250219-v1:0",
    "eu.anthropic.claude-3-7-sonnet-20250219-v1:0",
    "eu.anthropic.claude-3-haiku-20240307-v1:0",
    "eu.anthropic.claude-3-haiku-20240307-v1:0",
    "eu.anthropic.claude-3-sonnet-20240229-v1:0",
    "eu.anthropic.claude-3-sonnet-20240229-v1:0",
    "eu.anthropic.claude-sonnet-4-20250514-v1:0",
    "eu.anthropic.claude-sonnet-4-20250514-v1:0",
    "eu.meta.llama3-2-1b-instruct-v1:0",
    "eu.meta.llama3-2-1b-instruct-v1:0",
    "eu.meta.llama3-2-3b-instruct-v1:0",
    "eu.meta.llama3-2-3b-instruct-v1:0",
    "eu.mistral.pixtral-large-2502-v1:0",
    "eu.mistral.pixtral-large-2502-v1:0",
    "apac.amazon.nova-lite-v1:0",
    "apac.amazon.nova-lite-v1:0",
    "apac.amazon.nova-micro-v1:0",
    "apac.amazon.nova-micro-v1:0",
    "apac.amazon.nova-pro-v1:0",
    "apac.amazon.nova-pro-v1:0",
    "apac.anthropic.claude-3-5-sonnet-20240620-v1:0",
    "apac.anthropic.claude-3-5-sonnet-20240620-v1:0",
    "apac.anthropic.claude-3-5-sonnet-20241022-v2:0",
    "apac.anthropic.claude-3-5-sonnet-20241022-v2:0",
    "apac.anthropic.claude-3-7-sonnet-20250219-v1:0",
    "apac.anthropic.claude-3-7-sonnet-20250219-v1:0",
    "apac.anthropic.claude-3-haiku-20240307-v1:0",
    "apac.anthropic.claude-3-haiku-20240307-v1:0",
    "apac.anthropic.claude-3-sonnet-20240229-v1:0",
    "apac.anthropic.claude-3-sonnet-20240229-v1:0",
    "apac.anthropic.claude-sonnet-4-20250514-v1:0",
    "apac.anthropic.claude-sonnet-4-20250514-v1:0",
}
```
plugins/ai/perplexity/perplexity.go (new file, 246 lines)
@@ -0,0 +1,246 @@
```go
package perplexity

import (
    "context"
    "fmt"
    "os"
    "sync" // Added sync package

    "github.com/danielmiessler/fabric/common"
    "github.com/danielmiessler/fabric/plugins"
    perplexity "github.com/sgaunet/perplexity-go/v2"

    goopenai "github.com/sashabaranov/go-openai"
)

const (
    providerName = "Perplexity"
)

var models = []string{
    "r1-1776", "sonar", "sonar-pro", "sonar-reasoning", "sonar-reasoning-pro",
}

type Client struct {
    *plugins.PluginBase
    APIKey *plugins.SetupQuestion
    client *perplexity.Client
}

func NewClient() *Client {
    c := &Client{}
    c.PluginBase = &plugins.PluginBase{
        Name:            providerName,
        EnvNamePrefix:   plugins.BuildEnvVariablePrefix(providerName),
        ConfigureCustom: c.Configure, // Assign the Configure method
    }
    c.APIKey = c.AddSetupQuestion("API_KEY", true)
    return c
}

func (c *Client) Configure() error {
    // The PluginBase.Configure() is called by the framework if needed.
    // We only need to handle specific logic for this plugin.
    if c.APIKey.Value == "" {
        // Attempt to get from environment variable if not set by user during setup
        envKey := c.EnvNamePrefix + "API_KEY"
        apiKeyFromEnv := os.Getenv(envKey)
        if apiKeyFromEnv != "" {
            c.APIKey.Value = apiKeyFromEnv
        } else {
            return fmt.Errorf("%s API key not configured. Please set the %s environment variable or run 'fabric --setup %s'", providerName, envKey, providerName)
        }
    }
    c.client = perplexity.NewClient(c.APIKey.Value)
    return nil
}

func (c *Client) ListModels() ([]string, error) {
    // Perplexity API does not have a ListModels endpoint.
    // We return a predefined list.
    return models, nil
}

func (c *Client) Send(ctx context.Context, msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions) (string, error) {
    if c.client == nil {
        if err := c.Configure(); err != nil {
            return "", fmt.Errorf("failed to configure Perplexity client: %w", err)
        }
    }

    var perplexityMessages []perplexity.Message
    for _, msg := range msgs {
        perplexityMessages = append(perplexityMessages, perplexity.Message{
            Role:    msg.Role,
            Content: msg.Content,
        })
    }

    requestOptions := []perplexity.CompletionRequestOption{
        perplexity.WithModel(opts.Model),
        perplexity.WithMessages(perplexityMessages),
    }
    if opts.MaxTokens > 0 {
        requestOptions = append(requestOptions, perplexity.WithMaxTokens(opts.MaxTokens))
    }
    if opts.Temperature > 0 { // Perplexity default is 1.0, only set if user specifies
        requestOptions = append(requestOptions, perplexity.WithTemperature(opts.Temperature))
    }
    if opts.TopP > 0 { // Perplexity default is not specified, typically 1.0
        requestOptions = append(requestOptions, perplexity.WithTopP(opts.TopP))
    }
    if opts.PresencePenalty != 0 {
        // Corrected: Pass float64 directly
        requestOptions = append(requestOptions, perplexity.WithPresencePenalty(opts.PresencePenalty))
    }
    if opts.FrequencyPenalty != 0 {
        // Corrected: Pass float64 directly
        requestOptions = append(requestOptions, perplexity.WithFrequencyPenalty(opts.FrequencyPenalty))
    }

    request := perplexity.NewCompletionRequest(requestOptions...)

    // Corrected: Use SendCompletionRequest method from perplexity-go library
    resp, err := c.client.SendCompletionRequest(request) // Pass request directly
    if err != nil {
        return "", fmt.Errorf("perplexity API request failed: %w", err) // Corrected capitalization
    }

    content := resp.GetLastContent()

    // Append citations if available
    citations := resp.GetCitations()
    if len(citations) > 0 {
        content += "\n\n# CITATIONS\n\n"
        for i, citation := range citations {
            content += fmt.Sprintf("- [%d] %s\n", i+1, citation)
        }
    }

    return content, nil
}

func (c *Client) SendStream(msgs []*goopenai.ChatCompletionMessage, opts *common.ChatOptions, channel chan string) error {
    if c.client == nil {
        if err := c.Configure(); err != nil {
            close(channel) // Ensure channel is closed on error
            return fmt.Errorf("failed to configure Perplexity client: %w", err)
        }
    }

    var perplexityMessages []perplexity.Message
    for _, msg := range msgs {
        perplexityMessages = append(perplexityMessages, perplexity.Message{
            Role:    msg.Role,
            Content: msg.Content,
        })
    }

    requestOptions := []perplexity.CompletionRequestOption{
        perplexity.WithModel(opts.Model),
        perplexity.WithMessages(perplexityMessages),
        perplexity.WithStream(true), // Enable streaming
    }

    if opts.MaxTokens > 0 {
        requestOptions = append(requestOptions, perplexity.WithMaxTokens(opts.MaxTokens))
    }
    if opts.Temperature > 0 {
        requestOptions = append(requestOptions, perplexity.WithTemperature(opts.Temperature))
    }
    if opts.TopP > 0 {
        requestOptions = append(requestOptions, perplexity.WithTopP(opts.TopP))
    }
    if opts.PresencePenalty != 0 {
        // Corrected: Pass float64 directly
        requestOptions = append(requestOptions, perplexity.WithPresencePenalty(opts.PresencePenalty))
    }
    if opts.FrequencyPenalty != 0 {
        // Corrected: Pass float64 directly
        requestOptions = append(requestOptions, perplexity.WithFrequencyPenalty(opts.FrequencyPenalty))
    }

    request := perplexity.NewCompletionRequest(requestOptions...)

    responseChan := make(chan perplexity.CompletionResponse)
    var wg sync.WaitGroup // Use sync.WaitGroup
    wg.Add(1)

    go func() {
        err := c.client.SendSSEHTTPRequest(&wg, request, responseChan)
        if err != nil {
            // Log error, can't send to string channel directly.
            // Consider a mechanism to propagate this error if needed.
            fmt.Fprintf(os.Stderr, "perplexity streaming error: %v\\n", err) // Corrected capitalization
            // If the error occurs during stream setup, the channel might not have been closed by the receiver loop.
            // However, closing it here might cause a panic if the receiver loop also tries to close it.
            // close(channel) // Caution: Uncommenting this may cause panic, as channel is closed in the receiver goroutine.
        }
    }()

    go func() {
        defer close(channel) // Ensure the output channel is closed when this goroutine finishes
        var lastResponse *perplexity.CompletionResponse
        for resp := range responseChan {
            lastResponse = &resp
            if len(resp.Choices) > 0 {
                content := ""
                // Corrected: Check Delta.Content and Message.Content directly for non-emptiness
                // as Delta and Message are structs, not pointers, in perplexity.Choice
                if resp.Choices[0].Delta.Content != "" {
                    content = resp.Choices[0].Delta.Content
                } else if resp.Choices[0].Message.Content != "" {
                    content = resp.Choices[0].Message.Content
                }
                if content != "" {
                    channel <- content
                }
            }
        }

        // Send citations at the end if available
        if lastResponse != nil {
            citations := lastResponse.GetCitations()
            if len(citations) > 0 {
                channel <- "\n\n# CITATIONS\n\n"
                for i, citation := range citations {
                    channel <- fmt.Sprintf("- [%d] %s\n", i+1, citation)
                }
            }
        }
    }()

    return nil
}

func (c *Client) NeedsRawMode(modelName string) bool {
    return true
}

// Setup is called by the fabric CLI framework to guide the user through configuration.
func (c *Client) Setup() error {
    return c.PluginBase.Setup()
}

// GetName returns the name of the plugin.
func (c *Client) GetName() string {
    return c.PluginBase.Name
}

// GetEnvNamePrefix returns the environment variable prefix for the plugin.
// Corrected: Receiver name
func (c *Client) GetEnvNamePrefix() string {
    return c.PluginBase.EnvNamePrefix
}

// AddSetupQuestion adds a setup question to the plugin.
// This is a helper method, usually called from NewClient.
func (c *Client) AddSetupQuestion(text string, isSensitive bool) *plugins.SetupQuestion {
    return c.PluginBase.AddSetupQuestion(text, isSensitive)
}

// GetSetupQuestions returns the setup questions for the plugin.
// Corrected: Return the slice of setup questions from PluginBase
func (c *Client) GetSetupQuestions() []*plugins.SetupQuestion {
    return c.PluginBase.SetupQuestions
}
```
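For orientation, a minimal usage sketch of the new Perplexity client (illustrative only; it assumes the plugin's API key environment variable, which under the usual prefix scheme would be PERPLEXITY_API_KEY, is already set):

```go
// Illustrative sketch, not part of the commit.
package main

import (
    "context"
    "fmt"
    "log"

    "github.com/danielmiessler/fabric/common"
    "github.com/danielmiessler/fabric/plugins/ai/perplexity"
    goopenai "github.com/sashabaranov/go-openai"
)

func main() {
    // Configure falls back to the environment variable when the key was not
    // entered during interactive setup.
    client := perplexity.NewClient()
    if err := client.Configure(); err != nil {
        log.Fatal(err)
    }

    msgs := []*goopenai.ChatCompletionMessage{
        {Role: goopenai.ChatMessageRoleUser, Content: "What is the latest world news?"},
    }
    out, err := client.Send(context.Background(), msgs, &common.ChatOptions{Model: "sonar-pro"})
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(out)
}
```

On the CLI the same path is reachable as `fabric -m sonar-pro "What is the latest world news?"` once the key is configured, per the README update above.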
```
@@ -112,7 +112,9 @@ func (o *YouTube) GrabTranscriptWithTimestamps(videoId string, language string)
    return o.tryMethodYtDlpWithTimestamps(videoId, language)
}

func (o *YouTube) tryMethodYtDlp(videoId string, language string) (ret string, err error) {
// tryMethodYtDlpInternal is a helper function to reduce duplication between
// tryMethodYtDlp and tryMethodYtDlpWithTimestamps.
func (o *YouTube) tryMethodYtDlpInternal(videoId string, language string, processVTTFileFunc func(filename string) (string, error)) (ret string, err error) {
    // Check if yt-dlp is available
    if _, err = exec.LookPath("yt-dlp"); err != nil {
        err = fmt.Errorf("yt-dlp not found in PATH. Please install yt-dlp to use YouTube transcript functionality")
@@ -130,9 +132,13 @@ func (o *YouTube) tryMethodYtDlp(videoId string, language string) (ret string, e
    // Use yt-dlp to get transcript
    videoURL := "https://www.youtube.com/watch?v=" + videoId
    outputPath := filepath.Join(tempDir, "%(title)s.%(ext)s")
    lang_match := language
    if len(language) > 2 {
        lang_match = language[:2]
    }
    cmd := exec.Command("yt-dlp",
        "--write-auto-subs",
        "--sub-lang", language,
        "--sub-lang", lang_match,
        "--skip-download",
        "--sub-format", "vtt",
        "--quiet",
@@ -154,52 +160,15 @@ func (o *YouTube) tryMethodYtDlp(videoId string, language string, e
        return "", err
    }

    return o.readAndCleanVTTFile(vttFiles[0])
    return processVTTFileFunc(vttFiles[0])
}

func (o *YouTube) tryMethodYtDlp(videoId string, language string) (ret string, err error) {
    return o.tryMethodYtDlpInternal(videoId, language, o.readAndCleanVTTFile)
}

func (o *YouTube) tryMethodYtDlpWithTimestamps(videoId string, language string) (ret string, err error) {
    // Check if yt-dlp is available
    if _, err = exec.LookPath("yt-dlp"); err != nil {
        err = fmt.Errorf("yt-dlp not found in PATH. Please install yt-dlp to use YouTube transcript functionality")
        return
    }

    // Create a temporary directory for yt-dlp output (cross-platform)
    tempDir := filepath.Join(os.TempDir(), "fabric-youtube-"+videoId)
    if err = os.MkdirAll(tempDir, 0755); err != nil {
        err = fmt.Errorf("failed to create temp directory: %v", err)
        return
    }
    defer os.RemoveAll(tempDir)

    // Use yt-dlp to get transcript
    videoURL := "https://www.youtube.com/watch?v=" + videoId
    outputPath := filepath.Join(tempDir, "%(title)s.%(ext)s")
    cmd := exec.Command("yt-dlp",
        "--write-auto-subs",
        "--sub-lang", language,
        "--skip-download",
        "--sub-format", "vtt",
        "--quiet",
        "--no-warnings",
        "-o", outputPath,
        videoURL)

    var stderr bytes.Buffer
    cmd.Stderr = &stderr

    if err = cmd.Run(); err != nil {
        err = fmt.Errorf("yt-dlp failed: %v, stderr: %s", err, stderr.String())
        return
    }

    // Find VTT files using cross-platform approach
    vttFiles, err := o.findVTTFiles(tempDir, language)
    if err != nil {
        return "", err
    }

    return o.readAndFormatVTTWithTimestamps(vttFiles[0])
    return o.tryMethodYtDlpInternal(videoId, language, o.readAndFormatVTTWithTimestamps)
}

func (o *YouTube) readAndCleanVTTFile(filename string) (ret string, err error) {
```
```
@@ -1,3 +1,3 @@
package main

var version = "v1.4.203"
var version = "v1.4.210"
```