Compare commits

..

1 Commits

Author SHA1 Message Date
jad2121
1201184257 trying something out as a new branch 2024-03-05 21:01:07 -05:00
4 changed files with 52 additions and 233 deletions

View File

@@ -197,8 +197,10 @@ Once you have it all set up, here's how to use it.
`fabric -h`
```bash
fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}] [--output [OUTPUT]] [--stream] [--list] [--update] [--pattern PATTERN] [--setup] [--changeDefaultModel CHANGEDEFAULTMODEL] [--local]
[--claude] [--model MODEL] [--listmodels] [--context]
fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}]
[--output [OUTPUT]] [--stream] [--list] [--update]
[--pattern PATTERN] [--setup] [--local] [--claude]
[--model MODEL] [--listmodels] [--context]
An open source framework for augmenting humans using AI.
@@ -207,23 +209,27 @@ options:
--text TEXT, -t TEXT Text to extract summary from
--copy, -C Copy the response to the clipboard
--agents {trip_planner,ApiKeys}, -a {trip_planner,ApiKeys}
Use an AI agent to help you with a task. Acceptable values are 'trip_planner' or 'ApiKeys'. This option cannot be used with any other flag.
Use an AI agent to help you with a task. Acceptable
values are 'trip_planner' or 'ApiKeys'. This option
cannot be used with any other flag.
--output [OUTPUT], -o [OUTPUT]
Save the response to a file
--stream, -s Use this option if you want to see the results in realtime. NOTE: You will not be able to pipe the output into another command.
--stream, -s Use this option if you want to see the results in
realtime. NOTE: You will not be able to pipe the
output into another command.
--list, -l List available patterns
--update, -u Update patterns
--pattern PATTERN, -p PATTERN
The pattern (prompt) to use
--setup Set up your fabric instance
--changeDefaultModel CHANGEDEFAULTMODEL
Change the default model. Your choice will be saved in ~/.config/fabric/.env). For a list of available models, use the --listmodels flag.
--local, -L Use local LLM. Default is llama2
--claude Use Claude AI
--model MODEL, -m MODEL
Select the model to use (GPT-4 by default for chatGPT and llama2 for Ollama)
Select the model to use (GPT-4 by default for chatGPT
and llama2 for Ollama)
--listmodels List all available models
--context, -c Use Context file (context.md) to add context to your pattern
--context, -c Use Context file (context.md) to add context to your
pattern
```
#### Example commands

View File

@@ -43,8 +43,6 @@ def main():
parser.add_argument(
"--setup", help="Set up your fabric instance", action="store_true"
)
parser.add_argument('--changeDefaultModel',
help="Change the default model. Your choice will be saved in ~/.config/fabric/.env). For a list of available models, use the --listmodels flag.")
parser.add_argument(
'--local', '-L', help="Use local LLM. Default is llama2", action="store_true")
@@ -79,9 +77,6 @@ def main():
Update()
Alias()
sys.exit()
if args.changeDefaultModel:
Setup().default_model(args.changeDefaultModel)
sys.exit()
if args.agents:
# Handle the agents logic
if args.agents == 'trip_planner':
@@ -118,10 +113,7 @@ def main():
print("No patterns found")
sys.exit()
if args.listmodels:
setup = Setup()
allmodels = setup.fetch_available_models()
for model in allmodels:
print(model)
standalone.fetch_available_models()
sys.exit()
if args.text is not None:
text = args.text

View File

@@ -8,7 +8,6 @@ import platform
from dotenv import load_dotenv
import zipfile
import tempfile
import re
import shutil
current_directory = os.path.dirname(os.path.realpath(__file__))
@@ -50,11 +49,11 @@ class Standalone:
self.config_pattern_directory = config_directory
self.pattern = pattern
self.args = args
self.model = args.model
self.claude = claude
try:
self.model = os.environ["DEFAULT_MODEL"]
self.model = os.environ["CUSTOM_MODEL"]
except:
self.model = args.model
if self.local:
if self.args.model == 'gpt-4-turbo-preview':
self.model = 'llama2'
@@ -69,7 +68,7 @@ class Standalone:
async def localStream(self, messages):
from ollama import AsyncClient
async for part in await AsyncClient().chat(model=self.model, messages=messages, stream=True):
async for part in await AsyncClient().chat(model=self.args.model, messages=messages, stream=True):
print(part['message']['content'], end='', flush=True)
async def claudeStream(self, system, user):
@@ -247,8 +246,6 @@ class Standalone:
if "overloaded_error" in str(e):
print(
"Error: Fabric is working fine, but claude is overloaded. Please try again later.")
if "Attempted to call a sync iterator on an async stream" in str(e):
print("Error: There is a problem connecting fabric with your local ollama installation. Please visit https://ollama.com for installation instructions. It is possible that you have chosen the wrong model. Please run fabric --listmodels to see the available models and choose the right one with fabric --model <model> or fabric --changeDefaultModel. If this does not work. Restart your computer (always a good idea) and try again. If you are still having problems, please visit https://ollama.com for installation instructions.")
else:
print(f"Error: {e}")
print(e)
@@ -267,7 +264,6 @@ class Standalone:
"https://api.openai.com/v1/models", headers=headers)
if response.status_code == 200:
print("OpenAI GPT models:\n")
models = response.json().get("data", [])
# Filter only gpt models
gpt_models = [model for model in models if model.get(
@@ -277,13 +273,6 @@ class Standalone:
for model in sorted_gpt_models:
print(model.get("id"))
print("\nLocal Ollama models:")
import ollama
ollamaList = ollama.list()['models']
for model in ollamaList:
print(model['name'].rstrip(":latest"))
print("\nClaude models:")
print("claude-3-opus-20240229")
else:
print(f"Failed to fetch models: HTTP {response.status_code}")
@@ -422,49 +411,6 @@ class Setup:
self.config_directory, "patterns")
os.makedirs(self.pattern_directory, exist_ok=True)
self.env_file = os.path.join(self.config_directory, ".env")
self.gptlist = []
self.fullOllamaList = []
self.claudeList = ['claude-3-opus-20240229']
load_dotenv(self.env_file)
try:
openaiapikey = os.environ["OPENAI_API_KEY"]
self.openaiapi_key = openaiapikey
except KeyError:
print("OPENAI_API_KEY not found in environment variables.")
sys.exit()
self.fetch_available_models()
def fetch_available_models(self):
    """Collect the model ids available to fabric.

    Queries the OpenAI ``/v1/models`` endpoint for GPT models (printing an
    error and exiting on a non-200 response), asks a local ollama install
    for its models (best effort — absence of ollama is not an error), and
    appends the known Claude models.

    Returns:
        list: gpt model ids + local ollama model names + claude model ids.
    """
    headers = {
        "Authorization": f"Bearer {self.openaiapi_key}"
    }
    response = requests.get(
        "https://api.openai.com/v1/models", headers=headers)
    if response.status_code == 200:
        models = response.json().get("data", [])
        # Keep only gpt* models, sorted alphabetically by their id.
        gpt_models = [model for model in models if model.get(
            "id", "").startswith(("gpt"))]
        sorted_gpt_models = sorted(
            gpt_models, key=lambda x: x.get("id"))
        for model in sorted_gpt_models:
            self.gptlist.append(model.get("id"))
    else:
        print(f"Failed to fetch models: HTTP {response.status_code}")
        sys.exit()
    import ollama
    try:
        ollama_models = ollama.list()['models']
        for model in ollama_models:
            name = model['name']
            # BUGFIX: str.rstrip strips a *character set*, not a suffix —
            # "mistral:latest".rstrip(":latest") would yield "mistr".
            # Strip the ":latest" tag only when it is an actual suffix.
            if name.endswith(":latest"):
                name = name[:-len(":latest")]
            self.fullOllamaList.append(name)
    except Exception:
        # No local ollama installation (or it errored) — treat as
        # "no local models" rather than failing the whole listing.
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        self.fullOllamaList = []
    allmodels = self.gptlist + self.fullOllamaList + self.claudeList
    return allmodels
def api_key(self, api_key):
""" Set the OpenAI API key in the environment file.
@@ -515,98 +461,40 @@ class Setup:
f.write(line)
f.write(f"CLAUDE_API_KEY={claude_key}")
elif claude_key:
with open(self.env_file, "w") as f:
f.write(f"CLAUDE_API_KEY={claude_key}")
with open(self.env_file, "r") as r:
lines = r.readlines()
with open(self.env_file, "w") as w:
for line in lines:
if "CLAUDE_API_KEY" not in line:
w.write(line)
w.write(f"CLAUDE_API_KEY={claude_key}")
def update_fabric_command(self, line, model):
    """Rewrite a shell line containing a ``fabric --pattern`` command so its
    backend flag matches *model*.

    Returns the rebuilt command fragment (terminated by the closing quote
    and a newline) when the line matches, otherwise the line untouched.
    """
    command_pattern = re.compile(
        r"(fabric --pattern\s+\S+.*?)( --claude| --local)?'")
    found = command_pattern.search(line)
    if not found:
        return line  # nothing to rewrite on this line
    base_command = found.group(1)
    # Pick the flag that routes to the backend serving this model; an
    # empty suffix drops any stale --claude/--local flag the regex ate.
    if model in self.claudeList:
        suffix = " --claude"
    elif model in self.fullOllamaList:
        suffix = " --local"
    else:
        suffix = ""
    # Re-attach the closing quote that the regex consumed.
    return f"{base_command}{suffix}'\n"
def update_fabric_alias(self, line, model):
    """Rewrite an ``alias fabric='...'`` line so its backend flag matches
    *model*.

    Returns the rebuilt alias (with closing quote and newline) on a match,
    otherwise the original line unchanged.
    """
    alias_pattern = re.compile(
        r"(alias fabric='[^']+?)( --claude| --local)?'")
    found = alias_pattern.search(line)
    if not found:
        return line  # not a fabric alias line; leave it alone
    base_alias = found.group(1)
    # Same flag-selection rule as fabric commands: claude models get
    # --claude, local ollama models get --local, anything else no flag.
    if model in self.claudeList:
        suffix = " --claude"
    elif model in self.fullOllamaList:
        suffix = " --local"
    else:
        suffix = ""
    return f"{base_alias}{suffix}'\n"
def default_model(self, model):
"""Set the default model in the environment file.
def custom_model(self, model):
"""
Set the custom model in the environment file
Args:
model (str): The model to be set.
model (str): The model to be set.
Returns:
None
"""
model = model.strip()
if model:
# Write or update the DEFAULT_MODEL in env_file
if os.path.exists(self.env_file):
with open(self.env_file, "r") as f:
lines = f.readlines()
with open(self.env_file, "w") as f:
found = False
for line in lines:
if line.startswith("DEFAULT_MODEL"):
f.write(f"DEFAULT_MODEL={model}\n")
found = True
else:
f.write(line)
if not found:
f.write(f"DEFAULT_MODEL={model}\n")
else:
with open(self.env_file, "w") as f:
f.write(f"DEFAULT_MODEL={model}\n")
# Compile regular expressions outside of the loop for efficiency
user_home = os.path.expanduser("~")
sh_config = None
# Check for shell configuration files
if os.path.exists(os.path.join(user_home, ".bashrc")):
sh_config = os.path.join(user_home, ".bashrc")
elif os.path.exists(os.path.join(user_home, ".zshrc")):
sh_config = os.path.join(user_home, ".zshrc")
if sh_config:
with open(sh_config, "r") as f:
if os.path.exists(self.env_file) and model:
with open(self.env_file, "r") as f:
lines = f.readlines()
with open(sh_config, "w") as f:
with open(self.env_file, "w") as f:
for line in lines:
modified_line = line
# Update existing fabric commands
if "fabric --pattern" in line:
modified_line = self.update_fabric_command(
modified_line, model)
elif "fabric=" in line:
modified_line = self.update_fabric_alias(
modified_line, model)
f.write(modified_line)
print(f"""Default model changed to {
model}. Please restart your terminal to use it.""")
else:
print("No shell configuration file found.")
if "CUSTOM_MODEL" not in line:
f.write(line)
f.write(f"CUSTOM_MODEL={model}")
elif model:
with open(self.env_file, "r") as r:
lines = r.readlines()
with open(self.env_file, "w") as w:
for line in lines:
if "CUSTOM_MODEL" not in line:
w.write(line)
w.write(f"CUSTOM_MODEL={model}")
def patterns(self):
""" Method to update patterns and exit the system.
@@ -629,11 +517,13 @@ class Setup:
print("Welcome to Fabric. Let's get started.")
apikey = input(
"Please enter your OpenAI API key. If you do not have one or if you have already entered it, press enter.\n")
self.api_key(apikey.strip())
print("Please enter your claude API key. If you do not have one, or if you have already entered it, press enter.\n")
claudekey = input()
self.claude_key(claudekey.strip())
print("Please enter your default model. Press enter to choose the default gpt-4-turbo-preview\n")
self.api_key(apikey)
claudekey = input(
"Please enter your claude API key. If you do not have one, or if you have already entered it, press enter.\n")
self.claude_key(claudekey)
custom_model = input(
"Please enter your custom model. If you do not have one, or if you have already entered it, press enter. If none is entered, it will default to gpt-4-turbo-preview\n")
self.custom_model(custom_model)
self.patterns()

View File

@@ -1,69 +0,0 @@
# IDENTITY and PURPOSE
You are an expert at extracting all of the inspirational, educational quotes and aphorisms from Founders or notable individuals quoted in their biographies, from a given transcript, such as from a podcast, video transcript, essay, or other text.
# Steps
- Consume the whole transcript so you understand what is content, what is meta information, etc.
- Output the following:
## QUOTES
- $Quote1$
- $Quote2$
- $Quote3$
- And so on…
## EXAMPLE OUTPUT
## QUOTES
1. "It sometimes pays an agency to be imaginative and unorthodox in hiring."
2. "The best ideas come as jokes."
3. "If it doesn't sell, it isn't creative."
4. "Hire people who are better than you are, then leave them to get on with it."
5. "Every ad must tell the wholesale story."
6. "Permanent success has rarely been built on frivolity and that people do not buy from clowns."
7. "Superlatives have no place in a serious advertisement."
8. "In general, study the methods of your competitors and do the exact opposite."
9. "The worst fault a salesman can commit is to be a bore."
10. "Quality of salesmanship involves energy, time, and knowledge of the product."
11. "Appeal to the interest of the person you're selling to."
12. "Use social proof and then avoid mentioning your competitors."
13. "Pay peanuts and you get monkeys."
14. "A blind pig can sometimes find truffles, but it helps to know that they're found in oak forests."
15. "Search all the parks in your cities, you'll find no statues of committees."
16. "Hire gentlemen with brains and be one too."
17. "You train an animal. You teach a person."
18. "Ogilvy and Mather does two things. We look after clients and we teach young advertising people."
19. "Always think service first."
20. "Make it memorable."
21. "Unless your advertising contains a big idea, it will pass like a ship in the night."
22. "You cannot bore people into buying your product."
23. "I prefer a posture of confident authority."
24. "You have a first-class mind. Stretch it."
25. "I wrote what I really believe, my last will and testament."
26. "I have outlived all my competitors."
27. "Don't be a dull bore. We can't save souls in an empty church."
28. "Change the name of your agency to Ogilvy."
29. "Decide what you want to be known for and then stick to it."
30. "Any damn fool can put on a deal. But it takes genius, faith, and perseverance to create a brand."
31. "If you ever find a man who is better than you are, hire him."
32. "Raise your sights, blaze new trails, compete with the immortals."
33. "I prefer the discipline of knowledge to the anarchy of ignorance."
34. "Only first-class business and that in a first-class way."
35. "Never run an advertisement you would not want your own family to see."
36. "The line between pride in our work and neurotic obstinacy is a narrow one."
37. "We have a habit of divine discontent with our performance."
38. "We like reports and correspondence to be well written, easy to read and short."
## END EXAMPLE OUTPUT
# OUTPUT INSTRUCTIONS
- List all quotes
- Do not output warnings or notes—just the requested sections
# INPUT:
INPUT: