mirror of
https://github.com/reddit-archive/reddit.git
synced 2026-01-22 21:38:11 -05:00
* Discount 1.6.1
* Lines beginning with spaces are considered code. I don't know why markdown.py didn't trigger this.
* tables in markdown: why not?
* validation of resulting HTML via libxml to prevent hax.
* private RSS and JSON feeds
* optional whitelists for subreddits
* Moderator messaging
Additions:
* destination sanitization to cut down on XSRF
* cosmetic fix to spam and reported listing
* make the rss feeds on messages useful
* /admin/errors
* Of the types of listings hitting the precomputers (top/controversy by hour/day/week/month/year), the ones over long periods of time don't change often. So we can try to run them at most once per day, and then merge in the day's listings.
* google analytics
* logging queue
* Created empty commentspanel.xml, errorpage.xml, login.xml
* add subreddit rules/info box to submit page
* add 'via' link on messages in moderator inbox
* add a show=all get parameter to link listings to optionally ignore hiding preferences.
* Raise edited timelimit to three mins
* Remove UI that makes it look like you can edit deleted selftexts
* Make it clearer to admins when a link is deleted
* Fix [S] leak on deleted comments
* Fix /user/[deleted] misrendering
* New house ads system
* updated so that minimalcontrollers actually can use the page cache.
* Added /admin/usage
Bugfixes:
* Reduce the number of results that we request from Solr and simplify that caching a bit
* Require a secret key to shut down app-servers
* Make get_title a little more resilient to malformed documents and slow remote servers
* Cause the SearchBuilder to only byID the results that it's going to render instead of all 1000
* Remove ability for an author to XSS himself
* fix spam listings and an xsrf
* More verbose VDestination
* Fixing the famous ?limit=0.1 error, and one last password-validation one
* distinguish deleted comments' and deleted links' error messages
* Don't allow ridiculously long log lines to widen the page
* Bug with HardCache.add() when existing key is expired
* Add adminbox next to domain
175 lines
5.2 KiB
Python
175 lines
5.2 KiB
Python
#! /usr/bin/python
|
|
|
|
from r2.lib import amqp, emailer
|
|
from pylons import g
|
|
from datetime import datetime
|
|
from md5 import md5
|
|
from random import shuffle, choice
|
|
|
|
import pickle
|
|
|
|
# Load the word list used to invent human-friendly nicknames for
# exception fingerprints (see randword() below).  If the file is
# missing/unreadable we fall back to an empty list, and randword()
# then answers '???'.
# Bugfix: previously used the deprecated `file()` builtin and never
# closed the handle; use open() and close it explicitly (Py2-safe).
try:
    _words_fp = open(g.words_file)
    try:
        words = _words_fp.read().split("\n")
    finally:
        _words_fp.close()
except IOError:
    words = []

# Shuffle once at import time so nickname picks aren't biased by the
# file's ordering.
shuffle(words)
|
|
|
|
def randword():
    """Pick one random nickname word from the module-level list.

    Answers the placeholder '???' when no word list could be loaded
    at import time.
    """
    # An explicit emptiness guard replaces catching the IndexError
    # that random.choice raises on an empty sequence -- equivalent
    # for a list.
    if not words:
        return '???'
    return choice(words)
|
|
|
|
# Name of the AMQP queue this consumer drains (see run() below).
q = 'log_q'
|
|
|
|
def run(limit=100, streamfile=None, verbose=False):
|
|
if streamfile:
|
|
stream_fp = open(streamfile, "a")
|
|
else:
|
|
stream_fp = None
|
|
|
|
def streamlog(msg, important=False):
|
|
if stream_fp:
|
|
stream_fp.write(msg + "\n")
|
|
stream_fp.flush()
|
|
if important:
|
|
print msg
|
|
|
|
def add_timestamps (d):
|
|
d['hms'] = d['time'].strftime("%H:%M:%S")
|
|
|
|
d['occ'] = "<%s:%s, pid=%-5s, %s>" % (d['host'], d['port'], d['pid'],
|
|
d['time'].strftime("%Y-%m-%d %H:%M:%S"))
|
|
|
|
def limited_append(l, item):
|
|
if len(l) >= 25:
|
|
l.pop(12)
|
|
l.append(item)
|
|
|
|
def log_exception(d, daystring):
|
|
exc_desc = d['exception_desc']
|
|
exc_type = d['exception_type']
|
|
|
|
exc_str = "%s: %s" % (exc_type, exc_desc)
|
|
|
|
add_timestamps(d)
|
|
|
|
tb = []
|
|
|
|
key_material = "exc_type"
|
|
pretty_lines = []
|
|
|
|
for tpl in d['traceback']:
|
|
tb.append(tpl)
|
|
filename, lineno, funcname, text = tpl
|
|
key_material += "%s %s " % (filename, funcname)
|
|
pretty_lines.append ("%s:%s: %s()" % (filename, lineno, funcname))
|
|
pretty_lines.append (" %s" % text)
|
|
|
|
fingerprint = md5(key_material).hexdigest()
|
|
|
|
nickname_key = "error_nickname-" + fingerprint
|
|
status_key = "error_status-" + fingerprint
|
|
|
|
nickname = g.hardcache.get(nickname_key)
|
|
|
|
if nickname is None:
|
|
nickname = '"%s" Exception' % randword().capitalize()
|
|
news = ("A new kind of thing just happened! " +
|
|
"I'm going to call it a %s\n\n" % nickname)
|
|
|
|
news += "Where and when: %s\n\n" % d['occ']
|
|
news += "Traceback:\n"
|
|
news += "\n".join(pretty_lines)
|
|
news += exc_str
|
|
news += "\n"
|
|
|
|
emailer.nerds_email(news, "Exception Watcher")
|
|
|
|
g.hardcache.set(nickname_key, nickname, 86400 * 365)
|
|
g.hardcache.set(status_key, "new", 86400)
|
|
|
|
if g.hardcache.get(status_key) == "fixed":
|
|
g.hardcache.set(status_key, "new", 86400)
|
|
news = "This was marked as fixed: %s\n" % nickname
|
|
news += "But it just occurred, so I'm marking it new again."
|
|
emailer.nerds_email(news, "Exception Watcher")
|
|
|
|
err_key = "-".join(["error", daystring, fingerprint])
|
|
|
|
existing = g.hardcache.get(err_key)
|
|
|
|
if not existing:
|
|
existing = dict(exception=exc_str, traceback=tb, occurrences=[])
|
|
|
|
limited_append(existing['occurrences'], d['occ'])
|
|
|
|
g.hardcache.set(err_key, existing, 7 * 86400)
|
|
|
|
streamlog ("%s [X] %-70s" % (d['hms'], nickname), verbose)
|
|
|
|
def log_text(d, daystring):
|
|
add_timestamps(d)
|
|
char = d['level'][0].upper()
|
|
streamlog ("%s [%s] %r" % (d['hms'], char, d['text']), verbose)
|
|
logclass_key = "logclass-" + d['classification']
|
|
|
|
if not g.hardcache.get(logclass_key):
|
|
g.hardcache.set(logclass_key, True, 86400 * 90)
|
|
|
|
if d['level'] != 'debug':
|
|
news = "The code just generated a [%s] message.\n" % \
|
|
d['classification']
|
|
news += "I don't remember ever seeing one of those before.\n"
|
|
news += "\n"
|
|
news += "It happened on: %s\n" % d['occ']
|
|
news += "The log level was: %s\n" % d['level']
|
|
news += "The complete text was:\n"
|
|
news += repr(d['text'])
|
|
emailer.nerds_email (news, "reddit secretary")
|
|
|
|
occ_key = "-".join(["logtext", daystring,
|
|
d['level'], d['classification']])
|
|
|
|
occurrences = g.hardcache.get(occ_key)
|
|
|
|
if occurrences is None:
|
|
occurrences = []
|
|
|
|
d2 = {}
|
|
|
|
d2['occ'] = d['occ']
|
|
d2['text'] = repr(d['text'])
|
|
|
|
limited_append(occurrences, d2)
|
|
g.hardcache.set(occ_key, occurrences, 86400 * 7)
|
|
|
|
def myfunc(msgs, chan):
|
|
daystring = datetime.now(g.display_tz).strftime("%Y/%m/%d")
|
|
|
|
for msg in msgs:
|
|
try:
|
|
d = pickle.loads(msg.body)
|
|
except TypeError:
|
|
streamlog ("wtf is %r" % msg.body, True)
|
|
continue
|
|
|
|
if not 'type' in d:
|
|
streamlog ("wtf is %r" % d, True)
|
|
elif d['type'] == 'exception':
|
|
try:
|
|
log_exception(d, daystring)
|
|
except:
|
|
print "Error in log_exception()"
|
|
elif d['type'] == 'text':
|
|
try:
|
|
log_text(d, daystring)
|
|
except:
|
|
print "Error in log_text()"
|
|
else:
|
|
streamlog ("wtf is %r" % d['type'], True)
|
|
|
|
amqp.handle_items(q, myfunc, limit=limit, drain=False, verbose=verbose)
|
|
|