BillSchumacher
2023-04-15 19:32:05 -05:00
14 changed files with 169 additions and 70 deletions

View File

@@ -139,3 +139,12 @@ USE_BRIAN_TTS=False
ELEVENLABS_API_KEY=your-elevenlabs-api-key
ELEVENLABS_VOICE_1_ID=your-voice-id-1
ELEVENLABS_VOICE_2_ID=your-voice-id-2
+################################################################################
+### TWITTER API
+################################################################################
+
+TW_CONSUMER_KEY=
+TW_CONSUMER_SECRET=
+TW_ACCESS_TOKEN=
+TW_ACCESS_TOKEN_SECRET=
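
Note: these four values are OAuth 1.0a user-context credentials from the Twitter developer portal. The send_tweet() helper added in this commit reads them with os.environ.get(), which silently yields None for unset keys, so a guard along these lines (a sketch, not part of the commit) can fail fast:

import os

REQUIRED_TWITTER_VARS = (
    "TW_CONSUMER_KEY",
    "TW_CONSUMER_SECRET",
    "TW_ACCESS_TOKEN",
    "TW_ACCESS_TOKEN_SECRET",
)

# Collect the names of any credentials that are unset or empty
missing = [name for name in REQUIRED_TWITTER_VARS if not os.environ.get(name)]
if missing:
    print(f"Missing Twitter credentials: {', '.join(missing)}")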

View File

@@ -24,6 +24,7 @@ from autogpt.processing.text import summarize_text
from autogpt.speech import say_text
from autogpt.commands.web_selenium import browse_website
from autogpt.commands.git_operations import clone_repository
+from autogpt.commands.twitter import send_tweet
CFG = Config()
@@ -184,6 +185,8 @@ def execute_command(command_name: str, arguments):
        return read_audio_from_file(arguments["file"])
    elif command_name == "generate_image":
        return generate_image(arguments["prompt"])
+    elif command_name == "send_tweet":
+        return send_tweet(arguments["text"])
    elif command_name == "do_nothing":
        return "No action performed."
    elif command_name == "task_complete":

View File

@@ -0,0 +1,25 @@
+import os
+
+import tweepy
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
+def send_tweet(tweet_text):
+    consumer_key = os.environ.get("TW_CONSUMER_KEY")
+    consumer_secret = os.environ.get("TW_CONSUMER_SECRET")
+    access_token = os.environ.get("TW_ACCESS_TOKEN")
+    access_token_secret = os.environ.get("TW_ACCESS_TOKEN_SECRET")
+    # Authenticate to Twitter via OAuth 1.0a user context
+    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
+    auth.set_access_token(access_token, access_token_secret)
+    # Create API object
+    api = tweepy.API(auth)
+    # Send tweet
+    try:
+        api.update_status(tweet_text)
+        print("Tweet sent successfully!")
+    except tweepy.TweepyException as e:
+        # TweepyException has no .reason attribute, so stringify the error itself
+        print(f"Error sending tweet: {e}")

View File

@@ -0,0 +1,78 @@
"""Web scraping commands using Playwright"""
try:
from playwright.sync_api import sync_playwright
except ImportError:
print(
"Playwright not installed. Please install it with 'pip install playwright' to use."
)
from bs4 import BeautifulSoup
from autogpt.processing.html import extract_hyperlinks, format_hyperlinks
from typing import List, Union
def scrape_text(url: str) -> str:
"""Scrape text from a webpage
Args:
url (str): The URL to scrape text from
Returns:
str: The scraped text
"""
with sync_playwright() as p:
browser = p.chromium.launch()
page = browser.new_page()
try:
page.goto(url)
html_content = page.content()
soup = BeautifulSoup(html_content, "html.parser")
for script in soup(["script", "style"]):
script.extract()
text = soup.get_text()
lines = (line.strip() for line in text.splitlines())
chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
text = "\n".join(chunk for chunk in chunks if chunk)
except Exception as e:
text = f"Error: {str(e)}"
finally:
browser.close()
return text
def scrape_links(url: str) -> Union[str, List[str]]:
"""Scrape links from a webpage
Args:
url (str): The URL to scrape links from
Returns:
Union[str, List[str]]: The scraped links
"""
with sync_playwright() as p:
browser = p.chromium.launch()
page = browser.new_page()
try:
page.goto(url)
html_content = page.content()
soup = BeautifulSoup(html_content, "html.parser")
for script in soup(["script", "style"]):
script.extract()
hyperlinks = extract_hyperlinks(soup, url)
formatted_links = format_hyperlinks(hyperlinks)
except Exception as e:
formatted_links = f"Error: {str(e)}"
finally:
browser.close()
return formatted_links
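
Note: both functions launch a fresh headless Chromium per call and close it in the finally block. A usage sketch, assuming the module lands at autogpt.commands.web_playwright and that Playwright plus its browser build are installed (pip install playwright, then playwright install chromium):

from autogpt.commands.web_playwright import scrape_links, scrape_text

text = scrape_text("https://example.com")
links = scrape_links("https://example.com")
# e.g. ["More information... (https://www.iana.org/domains/example)"]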

View File

@@ -9,6 +9,7 @@ from bs4 import BeautifulSoup
from autogpt.config import Config
from autogpt.memory import get_memory
+from autogpt.processing.html import extract_hyperlinks, format_hyperlinks
CFG = Config()
memory = get_memory(CFG)
@@ -135,37 +136,6 @@ def scrape_text(url: str) -> str:
    return text


-def extract_hyperlinks(soup: BeautifulSoup, base_url: str) -> List[Tuple[str, str]]:
-    """Extract hyperlinks from a BeautifulSoup object
-
-    Args:
-        soup (BeautifulSoup): The BeautifulSoup object
-        base_url (str): The base URL
-
-    Returns:
-        List[Tuple[str, str]]: The extracted hyperlinks
-    """
-    return [
-        (link.text, urljoin(base_url, link["href"]))
-        for link in soup.find_all("a", href=True)
-    ]
-
-
-def format_hyperlinks(hyperlinks: List[Tuple[str, str]]) -> List[str]:
-    """Format hyperlinks into a list of strings
-
-    Args:
-        hyperlinks (List[Tuple[str, str]]): The hyperlinks to format
-
-    Returns:
-        List[str]: The formatted hyperlinks
-    """
-    formatted_links = []
-    for link_text, link_url in hyperlinks:
-        formatted_links.append(f"{link_text} ({link_url})")
-    return formatted_links


def scrape_links(url: str) -> Union[str, List[str]]:
    """Scrape links from a webpage

View File

@@ -1,5 +1,6 @@
"""Selenium web scraping module."""
from selenium import webdriver
+from autogpt.processing.html import extract_hyperlinks, format_hyperlinks
import autogpt.processing.text as summary
from bs4 import BeautifulSoup
from selenium.webdriver.remote.webdriver import WebDriver
@@ -33,7 +34,7 @@ def browse_website(url: str, question: str) -> Tuple[str, WebDriver]:
    driver, text = scrape_text_with_selenium(url)
    add_header(driver)
    summary_text = summary.summarize_text(url, text, question, driver)
-    links = scrape_links_with_selenium(driver)
+    links = scrape_links_with_selenium(driver, url)

    # Limit links to 5
    if len(links) > 5:
@@ -96,7 +97,7 @@ def scrape_text_with_selenium(url: str) -> Tuple[WebDriver, str]:
    return driver, text


-def scrape_links_with_selenium(driver: WebDriver) -> List[str]:
+def scrape_links_with_selenium(driver: WebDriver, url: str) -> List[str]:
    """Scrape links from a website using selenium

    Args:
@@ -111,7 +112,7 @@ def scrape_links_with_selenium(driver: WebDriver) -> List[str]:
    for script in soup(["script", "style"]):
        script.extract()

-    hyperlinks = extract_hyperlinks(soup)
+    hyperlinks = extract_hyperlinks(soup, url)

    return format_hyperlinks(hyperlinks)
@@ -128,30 +129,6 @@ def close_browser(driver: WebDriver) -> None:
    driver.quit()


-def extract_hyperlinks(soup: BeautifulSoup) -> List[Tuple[str, str]]:
-    """Extract hyperlinks from a BeautifulSoup object
-
-    Args:
-        soup (BeautifulSoup): The BeautifulSoup object to extract the hyperlinks from
-
-    Returns:
-        List[Tuple[str, str]]: The hyperlinks extracted from the BeautifulSoup object
-    """
-    return [(link.text, link["href"]) for link in soup.find_all("a", href=True)]
-
-
-def format_hyperlinks(hyperlinks: List[Tuple[str, str]]) -> List[str]:
-    """Format hyperlinks to be displayed to the user
-
-    Args:
-        hyperlinks (List[Tuple[str, str]]): The hyperlinks to format
-
-    Returns:
-        List[str]: The formatted hyperlinks
-    """
-    return [f"{link_text} ({link_url})" for link_text, link_url in hyperlinks]


def add_header(driver: WebDriver) -> None:
    """Add a header to the website

View File

@@ -0,0 +1,32 @@
"""HTML processing functions"""
from requests.compat import urljoin
from typing import List, Tuple
from bs4 import BeautifulSoup
def extract_hyperlinks(soup: BeautifulSoup, base_url: str) -> List[Tuple[str, str]]:
"""Extract hyperlinks from a BeautifulSoup object
Args:
soup (BeautifulSoup): The BeautifulSoup object
base_url (str): The base URL
Returns:
List[Tuple[str, str]]: The extracted hyperlinks
"""
return [
(link.text, urljoin(base_url, link["href"]))
for link in soup.find_all("a", href=True)
]
def format_hyperlinks(hyperlinks: List[Tuple[str, str]]) -> List[str]:
"""Format hyperlinks to be displayed to the user
Args:
hyperlinks (List[Tuple[str, str]]): The hyperlinks to format
Returns:
List[str]: The formatted hyperlinks
"""
return [f"{link_text} ({link_url})" for link_text, link_url in hyperlinks]

View File

@@ -83,6 +83,8 @@ def get_prompt() -> str:
("Execute Python File", "execute_python_file", {"file": "<file>"}),
("Generate Image", "generate_image", {"prompt": "<prompt>"}),
("Convert Audio to text", "read_audio_from_file", {"file": "<file>"}),
("Send Tweet", "send_tweet", {"text": "<text>"}),
]
# Only add shell command to the prompt if the AI is allowed to execute it
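
Note: each tuple is (label, command name, argument template), and get_prompt() flattens them into the command list shown to the model. An illustrative sketch of that flattening (the real formatting lives in the prompt generator; this only shows the shape):

label, name, args = ("Send Tweet", "send_tweet", {"text": "<text>"})
args_string = ", ".join(f'"{key}": "{value}"' for key, value in args.items())
print(f'{label}: "{name}", args: {args_string}')
# Send Tweet: "send_tweet", args: "text": "<text>"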

View File

@@ -25,3 +25,5 @@ black
sourcery
isort
gitpython==3.1.31
+pytest
+pytest-mock
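
Note: pytest and pytest-mock back the test updates below. The recurring change from requests.get to requests.Session.get reflects that the scrapers now issue requests through a shared Session object, which a module-level requests.get patch never intercepts. A minimal sketch of the pattern (hypothetical test name; assumes scrape_links returns format_hyperlinks() output):

from autogpt.commands.web_requests import scrape_links


def test_scrape_links_patches_session(mocker):
    mock_response = mocker.Mock(status_code=200)
    mock_response.text = '<a href="https://example.com">Example</a>'
    # Patch the Session method; patching requests.get would miss the call.
    mocker.patch("requests.Session.get", return_value=mock_response)
    assert scrape_links("https://www.example.com") == ["Example (https://example.com)"]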

View File

@@ -1,5 +1,6 @@
import os
import sys
+import unittest
from autogpt.memory.local import LocalCache

View File

@@ -3,7 +3,7 @@ import subprocess
import sys
import unittest
-from autogpt.file_operations import delete_file, read_file
+from autogpt.commands.file_operations import delete_file, read_file
env_vars = {"MEMORY_BACKEND": "no_memory", "TEMPERATURE": "0"}

View File

@@ -4,7 +4,7 @@
# pip install pytest-mock
import pytest
-from scripts.browse import scrape_links
+from autogpt.commands.web_requests import scrape_links
"""
Code Analysis
@@ -55,7 +55,7 @@ class TestScrapeLinks:
        mock_response.text = (
            "<html><body><a href='https://www.google.com'>Google</a></body></html>"
        )
-        mocker.patch("requests.get", return_value=mock_response)
+        mocker.patch("requests.Session.get", return_value=mock_response)

        # Call the function with a valid URL
        result = scrape_links("https://www.example.com")
@@ -68,7 +68,7 @@ class TestScrapeLinks:
        # Mock the requests.get() function to return an HTTP error response
        mock_response = mocker.Mock()
        mock_response.status_code = 404
-        mocker.patch("requests.get", return_value=mock_response)
+        mocker.patch("requests.Session.get", return_value=mock_response)

        # Call the function with an invalid URL
        result = scrape_links("https://www.invalidurl.com")
@@ -82,7 +82,7 @@ class TestScrapeLinks:
        mock_response = mocker.Mock()
        mock_response.status_code = 200
        mock_response.text = "<html><body><p>No hyperlinks here</p></body></html>"
-        mocker.patch("requests.get", return_value=mock_response)
+        mocker.patch("requests.Session.get", return_value=mock_response)

        # Call the function with a URL containing no hyperlinks
        result = scrape_links("https://www.example.com")
@@ -105,7 +105,7 @@ class TestScrapeLinks:
            </body>
        </html>
        """
-        mocker.patch("requests.get", return_value=mock_response)
+        mocker.patch("requests.Session.get", return_value=mock_response)

        # Call the function being tested
        result = scrape_links("https://www.example.com")

View File

@@ -41,7 +41,7 @@ class TestScrapeText:
        mock_response = mocker.Mock()
        mock_response.status_code = 200
        mock_response.text = f"<html><body><div><p style='color: blue;'>{expected_text}</p></div></body></html>"
-        mocker.patch("requests.get", return_value=mock_response)
+        mocker.patch("requests.Session.get", return_value=mock_response)

        # Call the function with a valid URL and assert that it returns the expected text
        url = "http://www.example.com"
@@ -50,7 +50,7 @@ class TestScrapeText:
    # Tests that the function returns an error message when an invalid or unreachable url is provided.
    def test_invalid_url(self, mocker):
        # Mock the requests.get() method to raise an exception
-        mocker.patch("requests.get", side_effect=requests.exceptions.RequestException)
+        mocker.patch("requests.Session.get", side_effect=requests.exceptions.RequestException)

        # Call the function with an invalid URL and assert that it returns an error message
        url = "http://www.invalidurl.com"
@@ -63,7 +63,7 @@ class TestScrapeText:
        mock_response = mocker.Mock()
        mock_response.status_code = 200
        mock_response.text = "<html><body></body></html>"
-        mocker.patch("requests.get", return_value=mock_response)
+        mocker.patch("requests.Session.get", return_value=mock_response)

        # Call the function with a valid URL and assert that it returns an empty string
        url = "http://www.example.com"
@@ -72,7 +72,7 @@ class TestScrapeText:
    # Tests that the function returns an error message when the response status code is an http error (>=400).
    def test_http_error(self, mocker):
        # Mock the requests.get() method to return a response with a 404 status code
-        mocker.patch("requests.get", return_value=mocker.Mock(status_code=404))
+        mocker.patch("requests.Session.get", return_value=mocker.Mock(status_code=404))

        # Call the function with a URL
        result = scrape_text("https://www.example.com")
@@ -87,7 +87,7 @@ class TestScrapeText:
        mock_response = mocker.Mock()
        mock_response.status_code = 200
        mock_response.text = html
-        mocker.patch("requests.get", return_value=mock_response)
+        mocker.patch("requests.Session.get", return_value=mock_response)

        # Call the function with a URL
        result = scrape_text("https://www.example.com")

View File

@@ -1,5 +1,5 @@
import autogpt.agent.agent_manager as agent_manager
-from autogpt.app import start_agent, list_agents
+from autogpt.app import start_agent, list_agents, execute_command
import unittest
from unittest.mock import patch, MagicMock
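
Note: execute_command is newly imported so the tweet branch can be exercised. A hypothetical test in this file's unittest style (a sketch, not part of the commit) could patch the tweet function where app.py looks it up and assert the routing:

class TestSendTweetDispatch(unittest.TestCase):
    @patch("autogpt.app.send_tweet", return_value=None)
    def test_send_tweet_routes_text(self, mock_send_tweet):
        execute_command("send_tweet", {"text": "hello"})
        mock_send_tweet.assert_called_once_with("hello")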