Basic API and initial work on the bot

This commit is contained in:
Bad_Investment
2021-05-03 14:07:04 -07:00
parent a631f61934
commit 7513b11a04
10 changed files with 155 additions and 0 deletions

2
.gitignore vendored
View File

@@ -1,3 +1,5 @@
.devcontainer
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

55
app.py Normal file
View File

@@ -0,0 +1,55 @@
from fastapi import FastAPI, Request, HTTPException
from fastapi.responses import FileResponse
import asyncio
import yaml
import asyncpraw
from scraper import RedditScraper
from bot import RedditBot
from models.settings import RedditSettings
from asyncpraw.models import MoreComments
app = FastAPI()


@app.on_event('startup')
async def startup_event():
    """Build the Reddit client from settings.yaml and start the bot.

    Reads the `reddit` section of settings.yaml, constructs an authenticated
    asyncpraw client, launches the bot's inbox loop as a background task, and
    stashes the scraper/bot on `app.state` for the request handlers.
    """
    # Context manager so the settings file handle is closed promptly
    # (the original left the file object open).
    with open('settings.yaml', 'r') as f:
        settings = yaml.safe_load(f)
    reddit_settings = RedditSettings.parse_obj(settings['reddit'])
    reddit_client = asyncpraw.Reddit(
        username=reddit_settings.auth.username,
        password=reddit_settings.auth.password.get_secret_value(),
        client_id=reddit_settings.auth.client_id,
        client_secret=reddit_settings.auth.client_secret.get_secret_value(),
        user_agent=reddit_settings.auth.user_agent
    )
    bot = RedditBot(reddit_client)
    # Keep a strong reference to the task: the event loop holds only weak
    # references, so an unreferenced background task may be garbage-collected
    # and silently stop running.
    app.state.bot_task = asyncio.create_task(bot.run())
    app.state.scraper = RedditScraper(reddit_client)
    app.state.bot = bot
@app.get(
    "/scrape/get_usernames_by_submission",
    tags=['scrape']
)
async def get_usernames_by_submission(request: Request, submission_id: str, traverse: bool = False):
    """Return the distinct author names of a submission's comments.

    With `traverse=True` the full comment tree is walked; otherwise only the
    top level is used. Deleted comments (author is None) are skipped.
    """
    scraper = request.app.state.scraper
    try:
        comments = await scraper.get_comments_by_submission_id(submission_id, traverse)
    except Exception as e:
        # todo better exception handling
        raise HTTPException(status_code=500, detail=str(e))
    unique_names = {c.author.name for c in comments if c.author}
    return list(unique_names)
@app.get(
    "/scrape/get_usernames_by_comment",
    tags=['scrape']
)
async def get_usernames_by_comment(request: Request, comment_id: str, traverse: bool = False):
    """Return the distinct author names under a comment (and the comment itself).

    With `traverse=True` all descendants are walked; deleted comments
    (author is None) are skipped.
    """
    scraper = request.app.state.scraper
    try:
        comments = await scraper.get_comments_by_comment_id(comment_id, traverse)
    except Exception as e:
        # todo better exception handling
        raise HTTPException(status_code=500, detail=str(e))
    unique_names = {c.author.name for c in comments if c.author}
    return list(unique_names)

23
bot/__init__.py Normal file
View File

@@ -0,0 +1,23 @@
from asyncpraw import Reddit
from asyncpraw.models import Message
import asyncio
from models.event import Event, AttendanceType
class RedditBot:
    """Watches the authenticated account's inbox and answers ping messages."""

    def __init__(self, client: Reddit):
        self.client = client
        # Placeholder for a handle to the running inbox-stream task.
        self.run_task = None

    async def message_handler(self, message: Message):
        """Reply 'pong' to any message whose body mentions 'ping', then mark it read."""
        print(message)
        body = message.body
        if body and 'ping' in body.lower():
            await message.reply('pong')
        # Mark every handled message read so it is not reprocessed.
        # NOTE(review): diff lost indentation — assuming mark_read is
        # unconditional, not only on the ping branch; confirm against upstream.
        await message.mark_read()

    async def run(self):
        """Consume the inbox stream forever, dispatching each private message."""
        async for item in self.client.inbox.stream():
            if not isinstance(item, Message):
                continue
            asyncio.create_task(self.message_handler(item))

15
models/event.py Normal file
View File

@@ -0,0 +1,15 @@
from pydantic import BaseModel, Field
from uuid import uuid4 as uuid
from datetime import datetime
from enum import Enum
class AttendanceType(Enum):
    """How an event's attendance list is collected."""
    STATIC = "STATIC"  # fixed attendance list
    DYNAMIC = "DYNAMIC"  # will look for new attendance until expiry
class Event(BaseModel):
    """An event whose attendance is tracked by the bot."""
    # NOTE(review): pydantic v1 treats underscore-prefixed names as private
    # attributes, not model fields — confirm `_id` is actually populated and
    # serialized as intended (a Field alias may be needed).
    _id: str = Field(default_factory=lambda: str(uuid()))
    name: str
    description: str = ""
    # Was `attendance_type = AttendanceType`, which set the enum *class*
    # itself as the default value instead of declaring an enum-typed field.
    # Annotate properly and default to STATIC so existing callers that omit
    # the field keep working.
    attendance_type: AttendanceType = AttendanceType.STATIC
    expiry_date: datetime

View File

@@ -0,0 +1 @@
from .reddit import RedditSettings

14
models/settings/reddit.py Normal file
View File

@@ -0,0 +1,14 @@
from pydantic import BaseModel, SecretStr
class AuthSettings(BaseModel):
    """Credentials for Reddit's script-app OAuth flow."""
    username: str
    password: SecretStr  # SecretStr masks the value in repr/logs
    client_id: str
    client_secret: SecretStr
    user_agent: str
    class Config:
        # NOTE(review): env_prefix only affects pydantic's BaseSettings; on a
        # plain BaseModel it is ignored — confirm whether environment-variable
        # loading was intended here.
        env_prefix = 'reddit_auth_'
class RedditSettings(BaseModel):
    """Top-level settings container; mirrors the `reddit:` key in settings.yaml."""
    auth: AuthSettings

5
requirements.txt Normal file
View File

@@ -0,0 +1,5 @@
pydantic==1.8.1
fastapi==0.63.0
uvicorn[standard]
asyncpraw==7.2.0
pyyaml==5.4.1

33
scraper/__init__.py Normal file
View File

@@ -0,0 +1,33 @@
import asyncpraw
import asyncio
from .exceptions import *
class RedditScraper:
    """Fetches comment trees from Reddit with bounded concurrency.

    A semaphore caps the number of scrape operations running against the
    Reddit API at once (default 8).
    """

    def __init__(self, client: asyncpraw.Reddit, concurrency: int = 8):
        self.client = client
        self.concurrency = concurrency
        self.sem = asyncio.Semaphore(concurrency)

    async def get_comments_by_comment_id(self, comment_id: str, traverse: bool = False):
        """Return a comment's replies plus the comment itself, as a list.

        With `traverse=True` all descendant replies are included; otherwise
        only the direct replies. `replace_more(limit=None)` resolves every
        "load more comments" placeholder first.
        """
        async with self.sem:
            comment = await self.client.comment(comment_id)
            await comment.refresh()
            replies = comment.replies
            await replies.replace_more(limit=None)
            comments = await replies.list() if traverse else list(replies)
            comments.append(comment)
            return comments

    async def get_comments_by_submission_id(self, submission_id: str, traverse: bool = False):
        """Return a submission's comments as a list.

        With `traverse=True` the whole tree is flattened; otherwise only the
        top-level comments are returned.
        """
        async with self.sem:
            submission = await self.client.submission(submission_id, lazy=True)
            comments = await submission.comments()
            await comments.replace_more(limit=None)
            if traverse:
                comments = await comments.list()
            else:
                # Consistency fix: get_comments_by_comment_id returns a plain
                # list, but this path previously leaked the CommentForest
                # wrapper. Normalize to a list in both cases.
                comments = list(comments)
            return comments

0
scraper/exceptions.py Normal file
View File

7
settings.yaml Normal file
View File

@@ -0,0 +1,7 @@
reddit:
auth:
username: POAPbot
password:
client_id:
client_secret:
user_agent: "POAPbot by /u/Bad_Investment https://github.com/badinvestment/poap-reddit-bot"