Mirror of https://github.com/reddit-archive/reddit.git, synced 2026-01-23 22:08:11 -05:00
* Discount 1.6.1
* Lines beginning with spaces are considered code. I don't know why markdown.py didn't trigger this.
* Tables in Markdown: why not?
* Validation of resulting HTML via libxml to prevent hacks.
* private RSS and JSON feeds
* optional whitelists for subreddits
* Moderator messaging
Additions:
* destination sanitization to cut down on XSRF
* cosmetic fix to spam and reported listings
* make the RSS feeds on messages useful
* /admin/errors
* Of the types of listings hitting the precomputers (top/controversy by hour/day/week/month/year), the ones covering long periods rarely change, so we can recompute them at most once per day and then merge in the current day's listings (see the sketch after this list).
* Google Analytics
* logging queue
* Created empty commentspanel.xml, errorpage.xml, login.xml
* add subreddit rules/info box to submit page
* add 'via' link on messages in moderator inbox
* add a show=all GET parameter to link listings to optionally ignore hiding preferences.
* Raise the edit time limit to three minutes
* Remove UI that makes it look like you can edit deleted selftexts
* Make it clearer to admins when a link is deleted
* Fix [S] leak on deleted comments
* Fix /user/[deleted] misrendering
* New house ads system
* updated so that MinimalControllers can actually use the page cache.
* Added /admin/usage
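
That precomputer change amounts to merging a rarely-recomputed long-period listing with a cheap current-day one. A minimal sketch of the idea, assuming listings are held as (score, link_id) pairs; the helper name is hypothetical, not reddit's actual query-cache API:

import heapq

def merged_top_listing(precomputed, todays, limit=100):
    # precomputed: [(score, link_id)] for the long period (week/month/
    # year), recomputed at most once per day
    # todays: [(score, link_id)] for links from the current day
    # nlargest keeps the best 'limit' entries across both lists
    return heapq.nlargest(limit, precomputed + todays)

Only the small current-day listing has to be rebuilt often; the expensive long-period sort is reused between daily runs.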
Bugfixes:
* Reduce the number of results that we request from Solr and simplify that caching a bit
* Require a secret key to shut down app-servers
* Make get_title a little more resilient to malformed documents and slow remote servers
* Cause the SearchBuilder to only byID the results that it's going to render instead of all 1000 (sketched after this list)
* Remove ability for an author to XSS himself
* fix spam listings and an XSRF
* More verbose VDestination
* Fix the famous ?limit=0.1 error, and one last password-validation bug
* distinguish deleted comments' and deleted links' error messages
* Don't allow ridiculously long log lines to widen the page
* Bug with HardCache.add() when existing key is expired
* Add adminbox next to domain
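
The SearchBuilder fix above boils down to slicing before hydrating. A minimal sketch, with hypothetical names rather than the real SearchBuilder internals:

def ids_to_render(result_ids, after_id=None, page_size=25):
    # result_ids: up to ~1000 fullnames returned by the search backend,
    # in rank order. Find the window this page will actually render and
    # return only those IDs for the expensive by-ID lookup.
    start = 0
    if after_id is not None and after_id in result_ids:
        start = result_ids.index(after_id) + 1
    return result_ids[start:start + page_size]

Hydrating only page_size items per request instead of the full result set keeps the by-ID lookups proportional to what's rendered.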
97 lines · 2.7 KiB · Python
#! /usr/bin/python

# Consumer for the 'usage_q' AMQP queue: each message is a pickled dict
# describing one handled request; we aggregate per-action hit counts and
# elapsed time into time-bucketed hardcache counters.

from r2.lib import amqp
from r2.lib.log import log_text
from pylons import g
from datetime import datetime
from time import sleep

import pickle

q = 'usage_q'
tz = g.display_tz

def check_dict(body):
    # unpickle a queue message and make sure it has the fields we need
    d = pickle.loads(body)

    for k in ("start_time", "end_time", "action"):
        if k not in d:
            raise TypeError

    return d

def hund_from_start_and_end(start_time, end_time):
    # elapsed request time in hundredths of a second
    elapsed = end_time - start_time

    hund_sec = int(elapsed.seconds * 100 +
                   elapsed.microseconds / 10000)
    hund_sec = max(hund_sec, 1) # assume every request takes at least 0.01s

    return hund_sec

def trunc_time(time, period):
    # round a timestamp down to the nearest 'period' minutes
    return time.replace(minute = period * (time.minute / period),
                        second = 0,
                        microsecond = 0)

def buckets(time):
    time = time.astimezone(tz)

    # Keep:
    # Daily buckets for eight days
    # 1-hour buckets for 24 hours
    # 5-min buckets for two hours

    # each entry is (expiration in seconds, bucket label)
    return [
        (86400 * 8, time.strftime("%Y/%m/%d_xx:xx")),
        (86400 * 1, time.strftime("%Y/%m/%d_%H:xx")),
        ( 3600 * 2, trunc_time(time, 5).strftime("%Y/%m/%d_%H:%M")),
        ]

def run(limit=1000, verbose=False):
    def myfunc(msgs, chan):
        incrs = {}

        for msg in msgs:
            try:
                d = check_dict(msg.body)
            except TypeError:
                log_text("usage_q error", "wtf is %r" % msg.body, "error")
                continue

            hund_sec = hund_from_start_and_end(d["start_time"],
                                               d["end_time"])

            action = d["action"].replace("-", "_")

            # accumulate [count, elapsed, expiration] per bucket-action key
            for exp_time, bucket in buckets(d["end_time"]):
                k = "%s-%s" % (bucket, action)
                incrs.setdefault(k, [0, 0, exp_time])
                incrs[k][0] += 1
                incrs[k][1] += hund_sec

        for k, (count, elapsed, exp_time) in incrs.iteritems():
            c_key = "profile_count-" + k
            e_key = "profile_elapsed-" + k

            if verbose:
                c_old = g.hardcache.get(c_key)
                e_old = g.hardcache.get(e_key)

            g.hardcache.accrue(c_key, delta=count, time=exp_time)
            g.hardcache.accrue(e_key, delta=elapsed, time=exp_time)

            if verbose:
                c_new = g.hardcache.get(c_key)
                e_new = g.hardcache.get(e_key)

                print "%s: %s -> %s" % (c_key, c_old, c_new)
                print "%s: %s -> %s" % (e_key, e_old, e_new)

        # a small batch means the queue is nearly drained; back off a bit
        if len(msgs) < limit / 2:
            if verbose:
                print "Sleeping..."
            sleep(10)

    amqp.handle_items(q, myfunc, limit=limit, drain=False, verbose=verbose,
                      sleep_time = 30)
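
The producing side of usage_q isn't shown on this page. A hedged sketch of what a producer would look like (report_usage and its signature are assumptions; amqp.add_item is the enqueue helper in r2.lib.amqp):

import pickle
from r2.lib import amqp

def report_usage(action, start_time, end_time):
    # check_dict() in the consumer above requires exactly these three
    # keys, and buckets() calls astimezone() on end_time, so both
    # datetimes must be timezone-aware.
    amqp.add_item('usage_q', pickle.dumps({"start_time": start_time,
                                           "end_time": end_time,
                                           "action": action}))

Tracing one message through the consumer: an action "GET_listing" ending at 14:23 display time on 2009/07/15 accrues to profile_count-2009/07/15_xx:xx-GET_listing (kept eight days), profile_count-2009/07/15_14:xx-GET_listing (one day), and profile_count-2009/07/15_14:20-GET_listing (two hours), plus the matching profile_elapsed-* keys.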