New Features
* Two new 404 aliens
* Enable a comment-page cache when the user is not the author of any of the comments and the page is not a permalink; votes and friend labels are filled in by javascript (sketched just below the changelog)

Additions
* Descriptive, commented example.ini
* Create a "trusted_sponsor" flag; promotions from trusted sponsors are auto-accepted
* Split off the url cache from the permacache, with a major clean-up of app_globals
* Move the is_banned_domain() call into POST_submit() so it can take the sanitized url
* Redirect /r/asdfasdfasdf to the search page, not the create page
* Updates to draw_load.py
* The orangered envelope now goes to unread messages, not all messages
* Remove the "enable whitelisting" checkbox and switch to the new failiens
* Put a verification link on the email prefs page

Bugfixes
* Split off a minimal api controller for onload calls (which don't care about c.site or c.user, making them cheaper)
* Assume admins want to keep their cookies forever, just like everyone else
* /randomrising bugfix
* The JSON error page was rendering as html; it now returns "{ error: ${code} }"
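A minimal sketch of the comment-page cache gate described above; the helper and attribute names here are illustrative, not reddit's actual ones:

    # Sketch only: the page is served from cache when nothing on it is
    # user-specific; votes and friend labels are patched in client-side.
    def comment_page_cacheable(user, comments, is_permalink):
        if is_permalink:
            return False   # permalink pages carry focal-comment state
        if user and any(cm.author_id == user._id for cm in comments):
            return False   # comment authors see edit/delete controls
        return True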
@@ -26,9 +26,9 @@ def draw_load(row_size = 12, width = 200, out_file = "/tmp/load.png"):
     number = (len([x for x in hosts if x.services]) +
               len([x for x in hosts if x.database]) +
-              sum(len(x.queue.queues) for x in hosts if x.queue)) + 3
+              sum(len(x.queue.queues) for x in hosts if x.queue)) + 9

-    im = Image.new("RGB", (width, number * row_size + 2))
+    im = Image.new("RGB", (width, number * row_size + 3))
     draw = ImageDraw.Draw(im)

     def draw_box(label, color, center = False):
         ypos = draw_box.ypos

@@ -49,13 +49,33 @@ def draw_load(row_size = 12, width = 200, out_file = "/tmp/load.png"):
    draw_box(" ==== SERVICES ==== ", "#BBBBBB", center = True)
    for host in hosts:
        if host.services:
            if host.host.startswith('app'):
                draw_box(" %s load: %s" % (host.host, host.load()),
                         get_load_level(host))

    draw_box(" ==== BUTTONS ==== ", "#BBBBBB", center = True)
    for host in hosts:
        if host.host.startswith('button'):
            draw_box(" %s load: %s" % (host.host, host.load()),
                     get_load_level(host))

    draw_box(" ==== SEARCH ==== ", "#BBBBBB", center = True)
    for host in hosts:
        if host.host.startswith('search'):
            draw_box(" %s load: %s" % (host.host, host.load()),
                     get_load_level(host))

    draw_box(" ==== CACHES ==== ", "#BBBBBB", center = True)
    for host in hosts:
        if host.host.startswith('cache') or host.host.startswith('pmc'):
            draw_box(" %s load: %s" % (host.host, host.load()),
                     get_load_level(host))

    draw_box(" ==== QUEUES ==== ", "#BBBBBB", center = True)
    for host in hosts:
        if host.queue:
            draw_box(" %s load: %s" % (host.host, host.load()),
                     get_load_level(host))
            for name, data in host.queue:
                max_len = host.queue.max_length(name)
                draw_box(" %16s: %5s / %5s" % (name, data(), max_len),
r2/example.ini
@@ -4,72 +4,137 @@
# The %(here)s variable will be replaced with the parent directory of this file
#
[DEFAULT]

# -- debug and configuration flags --
# global debug flag -- displays pylons stacktrace rather than 500 page on error when true
debug = true
# enables/disables template caching (for development)
template_debug = true
# use uncompressed static files (out of /static/js and /static/css)
# rather than compressed files out of /static (for development if true)
uncompressedJS = true
translator = true
# enable/disable verbose SQL printing
sqlprinting = false
# enable/disable writing errors as they occur to a rabbit-mq queue
exception_logging = false
# whether to print a "reddit app started" message at start
log_start = true
# enable/disable logging for amqp/rabbitmq
amqp_logging = false
# -- SECRETS! <-- update these first! --
# global secret
SECRET = abcdefghijklmnopqrstuvwxyz0123456789
# secret for making the modhash
MODSECRET = abcdefghijklmnopqrstuvwxyz0123456789
# secret for /prefs/feeds
FEEDSECRET = abcdefghijklmnopqrstuvwxyz0123456789

proxy_addr =
log_path =
# -- important settings --
# the domain that this app serves itself up as
domain = localhost
# if you use www for the old-timey feel, put it here
domain_prefix =
# the user used for "system" operations and messages
system_user = reddit
# list of admin accounts
admins = reddit
# the default subreddit for submissions
default_sr = reddit.com

# time for the page cache (for unlogged in users)
page_cache_time = 90

# default localization for strings (when using python's locale.format)
locale = C
# default site language (two letter character code)
lang = en
# if your webserver is a proxy and on a different instance, use
# X-forwarded-for and set this to the webserver's IP
proxy_addr =
# hash for validating HTTP_TRUE_CLIENT_IP_HASH
ip_hash =
# timezone for storing
timezone = UTC
# timezone for the database
display_timezone = MST
# secret key for accessing /shutdown
shutdown_secret = 12345
# list of servers that the service monitor will care about
monitored_servers = reddit, localhost
# name of the cookie to drop with login information
login_cookie = reddit_session

# set to a path to enable per-request logging
log_path =
# fraction of requests to pass into the queue-based usage sampler
usage_sampling = 0.

# account used for default feedback messaging (can be #subreddit)
admin_message_acct = reddit


# -- caching options --
# data cache (used for caching Thing objects)
num_mc_clients = 5
memcaches = 127.0.0.1:11211
# render caches (the second is "remote" and the local is optional but in the same format)
local_rendercache =
rendercaches = 127.0.0.1:11211
rec_cache = 127.0.0.1:11311
# cache for storing service monitor information
servicecaches = 127.0.0.1:11211

# -- permacache options --
# permacache is memcaches -> cassandra -> memcachedb
# memcaches that sit in front of cassandra
permacache_memcaches = 127.0.0.1:11211
# cassandra hosts. one of these will be chosen at random by pycassa
cassandra_seeds = 127.0.0.1:9160
# memcachedbs
permacaches = 127.0.0.1:11211
cassandra_seeds =

# site tracking urls. All urls are assumed to be to an image unless
# otherwise noted:
tracker_url =
adtracker_url =
adframetracker_url =
# for tracking clicks. Should be the url of a redirector
clicktracker_url =
traffic_url =

# Just a list of words. Used by errlog.py to make up names for new errors.
words_file = /usr/dict/words

# for sponsored links:
payment_domain = https://pay.localhost/
authorizenetname =
authorizenetkey =
authorizenetapi =
min_promote_bid = 20
max_promote_bid = 9999
min_promote_future = 2
# -- url cache options --
url_caches = 127.0.0.1:11211
# cassandra hosts. one of these will be chosen at random by pycassa
url_seeds =


# caches for storing number of times a link is rendered
incr_cache = udp:localhost:11211
incr_caches = localhost:11211
# recommender cache (deprecated)
rec_cache = 127.0.0.1:11211

## -- traffic tracking urls --
# image to render to track pageviews
tracker_url = /static/pixel.png
# images to render to track sponsored links
adtracker_url = /static/pixel.png
# image to render to track the ad frame
adframetracker_url = http://pixel.reddit.com/pixel/of_defenestration.png
# open redirector to bounce clicks off of on sponsored links for tracking
clicktracker_url = /static/pixel.png

# amqp
amqp_host = localhost:5672
-amqp_user = guest
-amqp_pass = guest
+amqp_user = reddit
+amqp_pass = reddit
amqp_virtual_host = /

-databases = main, comment, vote, change, email, authorize, award
## -- database setup --
# list of all databases named in the subsequent table
+databases = main, comment, vote, email, authorize, award, hc

#db name    db host      user, pass
-main_db = newreddit, 127.0.0.1, ri, password
-comment_db = newreddit, 127.0.0.1, ri, password
-comment2_db = newreddit, 127.0.0.1, ri, password
-vote_db = newreddit, 127.0.0.1, ri, password
-change_db = changed, 127.0.0.1, ri, password
-email_db = email, 127.0.0.1, ri, password
-authorize_db = authorize, 127.0.0.1, ri, password
-award_db = award, 127.0.0.1, ri, password
+main_db = reddit, 127.0.0.1, reddit, password
+comment_db = reddit, 127.0.0.1, reddit, password
+comment2_db = reddit, 127.0.0.1, reddit, password
+vote_db = reddit, 127.0.0.1, reddit, password
+email_db = reddit, 127.0.0.1, reddit, password
+authorize_db = reddit, 127.0.0.1, reddit, password
+award_db = reddit, 127.0.0.1, reddit, password
+hc_db = reddit, 127.0.0.1, reddit, password

# this setting will prefix all of the table names
db_app_name = reddit
# are we allowed to create tables?
db_create_tables = True

type_db = main

@@ -95,6 +160,7 @@ db_table_vote_account_comment = relation, account, comment, vote

db_table_inbox_account_comment = relation, account, comment, main
db_table_inbox_account_message = relation, account, message, main
+db_table_moderatorinbox = relation, subreddit, message, main

db_table_report_account_link = relation, account, link, main
db_table_report_account_comment = relation, account, comment, comment

@@ -104,107 +170,36 @@ db_table_report_account_subreddit = relation, account, subreddit, main
db_table_award = thing, award
db_table_trophy = relation, account, award, award

db_table_jury_account_link = relation, account, link, main

db_table_ad = thing, main
db_table_adsr = relation, ad, subreddit, main

disallow_db_writes = False

###
# Other magic settings
###

timezone = UTC
lang = en
monitored_servers = localhost

usage_sampling = 0.0

#query cache settings
num_query_queue_workers = 0
query_queue_worker =
enable_doquery = False
use_query_cache = False
write_query_queue = False

stylesheet = reddit.css
stylesheet_rtl = reddit_rtl.css

allowed_css_linked_domains = my.domain.com, my.otherdomain.com
css_killswitch = False
max_sr_images = 20

takedown_sr = _takedowns
login_cookie = reddit_session
domain = localhost
domain_prefix =
media_domain = localhost
default_sr = localhost
automatic_reddits =

admins =
sponsors =
paid_sponsors =
page_cache_time = 30
static_path = /static/
useragent = Mozilla/5.0 (compatible; bot/1.0; ChangeMe)
allow_shutdown = False
profanity_wordlist =

solr_url =
solr_cache_time = 300

markdown_backend = py

SECRET = abcdefghijklmnopqrstuvwxyz0123456789
MODSECRET = abcdefghijklmnopqrstuvwxyz0123456789
## -- traffic analytics --
# google analytics token
googleanalytics =
# machine to get traffic metrics from
traffic_url = http://localhost:8010/tracker/pickle/
# secret used for talking to the traffic machine
tracking_secret = abcdefghijklmnopqrstuvwxyz0123456789
ip_hash =
S3KEY_ID = ABCDEFGHIJKLMNOP1234
S3SECRET_KEY = aBcDeFgHiJkLmNoPqRsTuVwXyZ1234567890AbCd
s3_thumb_bucket = your.bucket.here
default_thumb = /static/noimage.png

MIN_DOWN_LINK = 0
MIN_UP_KARMA = 0
MIN_DOWN_KARMA = 0
MIN_RATE_LIMIT_KARMA = 0
MIN_RATE_LIMIT_COMMENT_KARMA = 0

WIKI_KARMA = 50
## -- Self-service sponsored link stuff --
# (secure) payment domain
payment_domain = http://pay.localhost/
ad_domain = http://localhost
# authorize.net credentials
authorizenetname =
authorizenetkey =
authorizenetapi = https://api.authorize.net/xml/v1/request.api
min_promote_bid = 20
max_promote_bid = 9999
min_promote_future = 2

# time in days
MODWINDOW = 2
HOT_PAGE_AGE = 1

#
rising_period = 12 hours

# time of ratelimit purgatory (min)
RATELIMIT = 10

QUOTA_THRESHOLD = 0

num_comments = 200
max_comments = 500
num_default_reddits = 10
num_serendipity = 100
sr_dropdown_threshold = 15

smtp_server = localhost
new_link_share_delay = 5 minutes

# email address of the person / people running your site
nerds_email = root@localhost

share_reply = noreply@yourdomain.com

#user-agents to limit
agents =

feedback_email = abuse@localhost
system_user = reddit

# t-shirt stuff
# -- spreadshirt --
spreadshirt_url =
spreadshirt_vendor_id =
spreadshirt_min_font = 42

@@ -212,10 +207,122 @@ spreadshirt_max_width = 620
spreadshirt_test_font =


###
# Other magic settings
###

# list of cnames allowed to render as reddit.com without a frame
authorized_cnames =

# -- query cache settings --
num_query_queue_workers = 5
query_queue_worker = http://cslowe.local:8000
enable_doquery = True
use_query_cache = False
write_query_queue = True

# -- stylesheet editor --
# disable custom stylesheets
css_killswitch = False
# list of "trusted" domains that can be referenced in url()
allowed_css_linked_domains = static.reddit.com, www.reddit.com, reallystatic.reddit.com
# max number of uploaded images per subreddit
max_sr_images = 50
# default stylesheet and its rtl version
stylesheet = reddit.css
stylesheet_rtl = reddit-rtl.css
# location of the static directory
static_path = /static/

# -- translator UI --
# enable/disable access to the translation UI in /admin/i18n
translator = true
# reddit runs the translator UI on a different instance with a
# password to login. These settings are used when generating messages
# to translators to tell them their credentials
translator_username =
translator_password =

# subreddit used for DMCA takedowns
takedown_sr = _takedowns
# png compressor
png_optimizer = /usr/bin/env optipng
# bad words that should be *'d out
profanity_wordlist =
# which markdown backend to use (c = discount, py = markdown.py)
markdown_backend = c

# -- search --
# where is solr?
solr_url =
# how long do we cache search results (in seconds)
solr_cache_time = 300

# Just a list of words. Used by errlog.py to make up names for new errors.
words_file = /usr/dict/words

# -- media stuff --
# user agent for the scraper
useragent = Mozilla/5.0 (compatible; redditbot/1.0; +http://www.reddit.com/feedback)
# your s3 credentials
S3KEY_ID =
S3SECRET_KEY =
# s3 bucket
s3_thumb_bucket = test.your.domain.here
default_thumb = /static/noimage.png
self_thumb = /static/self_default.png
media_domain = localhost

# -- limits --
# rate limiter duration (minutes)
RATELIMIT = 10
# minimum display karma
MIN_UP_KARMA = 1
MIN_RATE_LIMIT_KARMA = 10
MIN_RATE_LIMIT_COMMENT_KARMA = 1
QUOTA_THRESHOLD = 5

# min amount of karma to edit
WIKI_KARMA = 100

# time in days
MODWINDOW = 2
HOT_PAGE_AGE = 1000

# -- display options --
# how long to consider links eligible for the rising page
rising_period = 12 hours
# max number of comments (default)
num_comments = 200
# max number of comments (if show all is selected)
max_comments = 500
# list of reddits to auto-subscribe users to
automatic_reddits =
# cutoff number of reddits to show unsubscribed users
num_default_reddits = 10
# how deep do we go into the top listing when fetching /random
num_serendipity = 250
sr_dropdown_threshold = 15

#user-agents to rate-limit
agents =

# -- email --
# smtp server
smtp_server = localhost
# delay before allowing a link to be shared
new_link_share_delay = 30 seconds
# alerter emails
nerds_email = nerds@reddit.com
# share reply email
share_reply = noreply@reddit.com
# feedback email
feedback_email = reddit@gmail.com

[server:main]
use = egg:Paste#http
host = 0.0.0.0
-port = %(scgi_port)s
+port = %(http_port)s

[app:main]
use = egg:r2
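For reference, a quick way to poke at these values outside the app; reddit itself loads the file through Paste, so this ConfigParser sketch is for inspection only (the comma-splitting mirrors what app_globals does for its tuple_props):

    # Inspection sketch only -- the app loads this file via Paste.
    from ConfigParser import SafeConfigParser

    parser = SafeConfigParser()
    parser.read('r2/example.ini')

    # comma-separated options such as 'databases' become tuples in app_globals
    databases = tuple(x.strip() for x in
                      parser.get('DEFAULT', 'databases').split(','))
    print databases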
@@ -75,7 +75,11 @@ def error_mapper(code, message, environ, global_conf=None, **kw):
        if c.response.headers.has_key('x-sup-id'):
            d['x-sup-id'] = c.response.headers['x-sup-id']

-        url = '/error/document/?%s' % (urllib.urlencode(d))
+        extension = environ.get("extension")
+        if extension:
+            url = '/error/document/.%s?%s' % (extension, urllib.urlencode(d))
+        else:
+            url = '/error/document/?%s' % (urllib.urlencode(d))
        return url

class DebugMiddleware(object):

@@ -163,19 +167,32 @@ class ProfilingMiddleware(DebugMiddleware):
        from pstats import Stats

        tmpfile = tempfile.NamedTemporaryFile()
-        try:
-            file, line = prof_arg.split(':')
-            line, func = line.split('(')
-            func = func.strip(')')
-        except:
-            file = line = func = None
+        file = line = func = None
+        sort_order = 'time'
+        if prof_arg:
+            tokens = prof_arg.split(',')
+        else:
+            tokens = ()
+
+        for token in tokens:
+            if token == "cum":
+                sort_order = "cumulative"
+            elif token == "name":
+                sort_order = "name"
+            else:
+                try:
+                    file, line = prof_arg.split(':')
+                    line, func = line.split('(')
+                    func = func.strip(')')
+                except:
+                    file = line = func = None

        try:
            profile.runctx('execution_func()',
                           globals(), locals(), tmpfile.name)
            out = StringIO()
            stats = Stats(tmpfile.name, stream=out)
-            stats.sort_stats('time', 'calls')
+            stats.sort_stats(sort_order, 'calls')

    def parse_table(t, ncol):
        table = []
@@ -207,6 +207,8 @@ def make_map(global_conf={}, app_conf={}):
    mc('/api/gadget/:type', controller = 'api', action='gadget')
    mc('/api/:action', controller='promote',
       requirements=dict(action="promote|unpromote|edit_promo|link_thumb|freebie|promote_note|update_pay|refund|traffic_viewer|rm_traffic_viewer|edit_campaign|delete_campaign|meta_promo|add_roadblock|rm_roadblock"))
+    mc('/api/:action', controller='apiminimal',
+       requirements=dict(action="onload"))
    mc('/api/:action', controller='api')

    mc("/button_info", controller="api", action="info", limit = 1)
@@ -60,6 +60,7 @@ try:
except ImportError:
    from api import ApiController

+from api import ApiminimalController
from admin import AdminController
from redirect import RedirectController
@@ -71,6 +71,42 @@ def reject_vote(thing):
           (voteword, c.user.name, request.ip, thing.__class__.__name__,
            thing._id36, request.referer), "info")

+class ApiminimalController(RedditController):
+    """
+    Put API calls in here which won't come from logged in users (or
+    don't rely on the user being logged in)
+    """
+
+    @validatedForm(promoted = VByName('ids', thing_cls = Link,
+                                      multiple = True),
+                   sponsorships = VByName('ids', thing_cls = Subreddit,
+                                          multiple = True))
+    def POST_onload(self, form, jquery, promoted, sponsorships, *a, **kw):
+        suffix = ""
+        if not isinstance(c.site, FakeSubreddit):
+            suffix = "-" + c.site.name
+        def add_tracker(dest, where, what):
+            jquery.set_tracker(
+                where,
+                tracking.PromotedLinkInfo.gen_url(fullname=what + suffix,
+                                                  ip = request.ip),
+                tracking.PromotedLinkClickInfo.gen_url(fullname =what + suffix,
+                                                       dest = dest,
+                                                       ip = request.ip)
+                )
+
+        if promoted:
+            # make sure that they are really promoted
+            promoted = [ l for l in promoted if l.promoted ]
+            for l in promoted:
+                add_tracker(l.url, l._fullname, l._fullname)
+
+        if sponsorships:
+            for s in sponsorships:
+                add_tracker(s.sponsorship_url, s._fullname,
+                            "%s_%s" % (s._fullname, s.sponsorship_name))
+
+
class ApiController(RedditController):
    """
    Controller which deals with almost all AJAX site interaction.

@@ -163,8 +199,9 @@ class ApiController(RedditController):
                   kind = VOneOf('kind', ['link', 'self', 'poll']),
                   then = VOneOf('then', ('tb', 'comments'),
                                 default='comments'))
-    def POST_submit(self, form, jquery, url, banmsg, selftext, kind, title,
+    def POST_submit(self, form, jquery, url, selftext, kind, title,
                    save, sr, ip, then):
+        from r2.models.admintools import is_banned_domain

        if isinstance(url, (unicode, str)):
            #backwards compatibility

@@ -202,13 +239,16 @@ class ApiController(RedditController):
            elif form.has_errors("title", errors.NO_TEXT):
                pass

+            banmsg = is_banned_domain(url)
+
+            # Uncomment if we want to let spammers know we're on to them
+            # if banmsg:
+            #     form.set_html(".field-url.BAD_URL", banmsg)
+            #     return
+
        elif kind == 'self' and form.has_errors('text', errors.TOO_LONG):
            pass
-        else:
-            form.has_errors('text', errors.TOO_LONG)
-            banmsg = None

        if form.has_errors("title", errors.TOO_LONG, errors.NO_TEXT):
            pass

@@ -358,6 +398,7 @@ class ApiController(RedditController):
        if email:
            user.email = email

+        user.registration_ip = request.ip
        user.pref_lang = c.lang
        if c.content_langs == 'all':
            user.pref_content_langs = 'all'

@@ -412,24 +453,33 @@ class ApiController(RedditController):
    def POST_unfriend(self, nuser, iuser, container, type):
        """
        Handles removal of a friend (a user-user relation) or removal
-        of a user's priviledges from a subreddit (a user-subreddit
+        of a user's privileges from a subreddit (a user-subreddit
        relation). The user can either be passed in by name (nuser)
-        or buy fullname (iuser). 'container' will either be the
+        or by fullname (iuser). 'container' will either be the
        current user or the subreddit.

        """
+        # The user who made the request must be an admin or a moderator
+        # for the privilege change to succeed.
+
+        victim = iuser or nuser
+
        if (not c.user_is_admin
            and (type in ('moderator','contributor','banned')
                 and not c.site.is_moderator(c.user))):
            abort(403, 'forbidden')
+        if (type == 'moderator' and not
+            (c.user_is_admin or container.can_demod(c.user, victim))):
+            abort(403, 'forbidden')
        # if we are (strictly) unfriending, the container had better
        # be the current user.
        if type == "friend" and container != c.user:
            abort(403, 'forbidden')
        fn = getattr(container, 'remove_' + type)
-        fn(iuser or nuser)
+        fn(victim)

-        if type in ("moderator", "contributor"):
-            Subreddit.special_reddits(iuser or nuser, type, _update=True)
+        if type in ("moderator", "contributor"):
+            Subreddit.special_reddits(victim, type, _update=True)

@@ -1099,7 +1149,7 @@ class ApiController(RedditController):
        kw = dict((k, v) for k, v in kw.iteritems()
                  if k in ('name', 'title', 'domain', 'description', 'over_18',
                           'show_media', 'type', 'lang', "css_on_cname",
-                          'allow_top', 'use_whitelist'))
+                          'allow_top'))

        #if a user is banned, return rate-limit errors
        if c.user._spam:

@@ -1182,8 +1232,9 @@ class ApiController(RedditController):
                why = VSrCanBan('id'),
                thing = VByName('id'))
    def POST_remove(self, why, thing):
-        end_trial(thing, why + "-removed")
-        admintools.spam(thing, False, not c.user_is_admin, c.user.name)
+        if getattr(thing, "promoted", None) is None:
+            end_trial(thing, why + "-removed")
+            admintools.spam(thing, False, not c.user_is_admin, c.user.name)

    @noresponse(VUser(), VModhash(),
                why = VSrCanBan('id'),

@@ -1257,8 +1308,7 @@ class ApiController(RedditController):
            return
        # if the message has a recipient, try validating that
        # destination first (as it is cheaper and more common)
-        if not hasattr(thing, "to_id") or c.user._id == thing.to_id:
-            queries.set_unread(thing, c.user, unread)
+        queries.set_unread(thing, c.user, unread)
        # if the message is for a subreddit, check that next
        if hasattr(thing, "sr_id"):
            sr = thing.subreddit_slow

@@ -1742,36 +1792,6 @@ class ApiController(RedditController):
            c.user.pref_frame_commentspanel = False
            c.user._commit()

-    @validatedForm(promoted = VByName('ids', thing_cls = Link,
-                                      multiple = True),
-                   sponsorships = VByName('ids', thing_cls = Subreddit,
-                                          multiple = True))
-    def POST_onload(self, form, jquery, promoted, sponsorships, *a, **kw):
-        suffix = ""
-        if not isinstance(c.site, FakeSubreddit):
-            suffix = "-" + c.site.name
-        def add_tracker(dest, where, what):
-            jquery.set_tracker(
-                where,
-                tracking.PromotedLinkInfo.gen_url(fullname=what + suffix,
-                                                  ip = request.ip),
-                tracking.PromotedLinkClickInfo.gen_url(fullname =what + suffix,
-                                                       dest = dest,
-                                                       ip = request.ip)
-                )
-
-        if promoted:
-            # make sure that they are really promoted
-            promoted = [ l for l in promoted if l.promoted ]
-            for l in promoted:
-                add_tracker(l.url, l._fullname, l._fullname)
-
-        if sponsorships:
-            for s in sponsorships:
-                add_tracker(s.sponsorship_url, s._fullname,
-                            "%s_%s" % (s._fullname, s.sponsorship_name))
-
    @json_validate(query = VPrintable('query', max_length = 50))
    def POST_search_reddit_names(self, query):
        names = []
@@ -48,6 +48,8 @@ except Exception, e:
    import os
    os._exit(1)

+NUM_FAILIENS = 3
+
redditbroke = \
'''<html>
<head>

@@ -57,7 +59,7 @@ redditbroke = \
    <div style="margin: auto; text-align: center">
      <p>
        <a href="/">
-          <img border="0" src="/static/youbrokeit.png" alt="you broke reddit" />
+          <img border="0" src="/static/youbrokeit%d.png" alt="you broke reddit" />
        </a>
      </p>
      <p>

@@ -65,7 +67,7 @@ redditbroke = \
      </p>
  </body>
</html>
'''

toofast = \
'''<html>

@@ -85,7 +87,7 @@ class ErrorController(RedditController):
    This behaviour can be altered by changing the parameters to the
    ErrorDocuments middleware in your config/middleware.py file.
    """
-    allowed_render_styles = ('html', 'xml', 'js', 'embed', '')
+    allowed_render_styles = ('html', 'xml', 'js', 'embed', '', 'api')
    def __before__(self):
        try:
            c.error_page = True

@@ -150,19 +152,27 @@ class ErrorController(RedditController):
            c.cookies = Cookies()

            code = request.GET.get('code', '')
+            try:
+                code = int(code)
+            except ValueError:
+                code = 404
            srname = request.GET.get('srname', '')
            takedown = request.GET.get('takedown', "")
            if srname:
                c.site = Subreddit._by_name(srname)
            if c.render_style not in self.allowed_render_styles:
-                return str(int(code))
+                c.response.content = str(int(code))
+                return c.response
+            elif c.render_style == "api":
+                c.response.content = "{error: %s}" % code
+                return c.response
            elif takedown and code == '404':
                link = Link._by_fullname(takedown)
                return pages.TakedownPage(link).render()
            elif code == '403':
                return self.send403()
            elif code == '500':
-                return redditbroke % rand_strings.sadmessages
+                return redditbroke % (rand.randint(1,NUM_FAILIENS), rand_strings.sadmessages)
            elif code == '503':
                return self.send503()
            elif code == '304':

@@ -193,7 +203,7 @@ def handle_awful_failure(fail_text):
        import traceback
        g.log.error("FULLPATH: %s" % fail_text)
        g.log.error(traceback.format_exc())
-        return redditbroke % fail_text
+        return redditbroke % (rand.randint(1,NUM_FAILIENS), fail_text)
    except:
        # we are doomed. Admit defeat
        return "This is an error that should never occur. You win."
@@ -185,11 +185,6 @@ class FrontController(RedditController):
        if limit is not None and 0 < limit < g.max_comments:
            num = limit

-        builder = CommentBuilder(article, CommentSortMenu.operator(sort),
-                                 comment, context, **kw)
-        listing = NestedListing(builder, num = num,
-                                parent_name = article._fullname)
-
        displayPane = PaneStack()

        # if permalink page, add that message first to the content

@@ -206,7 +201,8 @@ class FrontController(RedditController):
                                             cloneable = True))

        # finally add the comment listing
-        displayPane.append(listing.listing())
+        displayPane.append(CommentPane(article, CommentSortMenu.operator(sort),
+                                       comment, context, num, **kw))

        loc = None if c.focal_comment or context is not None else 'comments'

@@ -337,8 +333,10 @@ class FrontController(RedditController):
        elif is_moderator and location == 'banned':
            pane = BannedList(editable = is_moderator)
        elif (location == 'contributors' and
-              (c.site.type != 'public' or
-               (c.user_is_loggedin and c.site.use_whitelist and
+              # On public reddits, only moderators can see the whitelist.
+              # On private reddits, all contributors can see each other.
+              (c.site.type != 'public' or
+               (c.user_is_loggedin and
                (c.site.is_moderator(c.user) or c.user_is_admin)))):
            pane = ContributorList(editable = is_moderator)
        elif (location == 'stylesheet'

@@ -855,7 +853,7 @@ class FormsController(RedditController):
        #check like this because c.user_is_admin is still false
        if not c.user.name in g.admins:
            return self.abort404()
-        self.login(c.user, admin = True)
+        self.login(c.user, admin = True, rem = True)
        return self.redirect(dest)

    @validate(VAdmin(),

@@ -864,7 +862,7 @@ class FormsController(RedditController):
        """disable admin interaction with site."""
        if not c.user.name in g.admins:
            return self.abort404()
-        self.login(c.user, admin = False)
+        self.login(c.user, admin = False, rem = True)
        return self.redirect(dest)

    def GET_validuser(self):
@@ -414,7 +414,8 @@ class RandomrisingController(ListingController):
        if isinstance(links, Query):
            links._limit = 200
-            links = [x._fullname for x in links]

+        links = list(links)
        random.shuffle(links)

        return links

@@ -577,7 +578,7 @@ class MessageController(ListingController):
            wouldkeep = item.keep_item(item)
            # don't show user their own unread stuff
            if ((self.where == 'unread' or self.subwhere == 'unread')
-                and item.author_id == c.user._id):
+                and (item.author_id == c.user._id or not item.new)):
                return False
            return wouldkeep
        return keep

@@ -729,7 +730,7 @@ class MyredditsController(ListingController):
    @property
    def menus(self):
        buttons = (NavButton(plurals.subscriber, 'subscriber'),
-                   NavButton(plurals.contributor, 'contributor'),
+                   NavButton(getattr(plurals, "approved submitter"), 'contributor'),
                   NavButton(plurals.moderator, 'moderator'))

        return [NavMenu(buttons, base_path = '/reddits/mine/',
@@ -185,8 +185,7 @@ class PromoteController(ListingController):
            changed = False
            # live items can only be changed by a sponsor, and also
            # pay the cost of de-approving the link
-            trusted = c.user_is_sponsor or \
-                      getattr(c.user, "trusted_sponsor", False)
+            trusted = c.user_is_sponsor or c.user.trusted_sponsor
            if not promote.is_promoted(l) or trusted:
                if title and title != l.title:
                    l.title = title

@@ -198,6 +197,8 @@ class PromoteController(ListingController):
            # only trips if the title and url are changed by a non-sponsor
            if changed and not promote.is_unpaid(l):
                promote.unapprove_promotion(l)
+            if trusted and promote.is_unapproved(l):
+                promote.accept_promotion(l)

            if c.user_is_sponsor:
                l.maximum_clicks = max_clicks

@@ -455,7 +456,8 @@ class PromoteController(ListingController):
        if any(errors.values()):
            return UploadedImage("", "", "upload", errors = errors).render()
        else:
-            if not c.user_is_sponsor and not promote.is_unpaid(link):
+            if (not c.user_is_sponsor and not c.user.trusted_sponsor and
+                not promote.is_unpaid(link)):
                promote.unapprove_promotion(link)
            return UploadedImage(_('saved'), thumbnail_url(link), "",
                                 errors = errors).render()
@@ -237,11 +237,12 @@ def set_subreddit():
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")

+    c.site = Default
    if not sr_name:
        #check for cnames
        sub_domain = request.environ.get('sub_domain')
-        sr = Subreddit._by_domain(sub_domain) if sub_domain else None
-        c.site = sr or Default
+        if sub_domain and not sub_domain.endswith(g.media_domain):
+            c.site = Subreddit._by_domain(sub_domain) or Default
    elif sr_name == 'r':
        #reddits
        c.site = Sub

@@ -258,9 +259,9 @@ def set_subreddit():
        else:
            c.site = Subreddit._by_name(sr_name)
    except NotFound:
        c.site = Default
-        if chksrname(sr_name):
-            redirect_to("/reddits/create?name=%s" % sr_name)
+        sr_name = chksrname(sr_name)
+        if sr_name:
+            redirect_to("/reddits/search?q=%s" % sr_name)
        elif not c.error_page:
            abort(404, "not found")
    #if we didn't find a subreddit, check for a domain listing

@@ -288,7 +289,7 @@ def set_content_type():
    user = valid_feed(request.GET.get("user"),
                      request.GET.get("feed"),
                      request.path)
-    if user:
+    if user and not g.read_only_mode:
        c.user = user
        c.user_is_loggedin = True

@@ -573,6 +574,13 @@ class MinimalController(BaseController):
                             sampling_rate = g.usage_sampling,
                             action = action)

+    def abort404(self):
+        abort(404, "not found")
+
+    def abort403(self):
+        abort(403, "forbidden")
+

class RedditController(MinimalController):

@@ -603,6 +611,8 @@ class RedditController(MinimalController):

        c.response_wrappers = []
        c.firsttime = firsttime()

        (c.user, maybe_admin) = \
            valid_cookie(c.cookies[g.login_cookie].value
                         if g.login_cookie in c.cookies

@@ -709,12 +719,6 @@ class RedditController(MinimalController):
        if modified_since and modified_since >= last_modified:
            abort(304, 'not modified')

-    def abort404(self):
-        abort(404, "not found")
-
-    def abort403(self):
-        abort(403, "forbidden")
-
    def sendpng(self, string):
        c.response_content_type = 'image/png'
        c.response.content = string
@@ -1156,10 +1156,6 @@ class ValidIP(Validator):
            self.set_error(errors.BANNED_IP)
        return request.ip

-class VOkayDomain(Validator):
-    def run(self, url):
-        return is_banned_domain(url)
-
class VDate(Validator):
    """
    Date checker that accepts string inputs in %m/%d/%Y format.
@@ -36,6 +36,7 @@ class Globals(object):

    int_props = ['page_cache_time',
                 'solr_cache_time',
+                'num_mc_clients',
                 'MIN_DOWN_LINK',
                 'MIN_UP_KARMA',
                 'MIN_DOWN_KARMA',

@@ -73,16 +74,17 @@ class Globals(object):
                  'disallow_db_writes',
                  'exception_logging',
                  'amqp_logging',
+                 'read_only_mode',
                  ]

    tuple_props = ['memcaches',
-                  'rec_cache',
                   'rendercaches',
-                  'local_rendercache',
                   'servicecaches',
                   'permacache_memcaches',
                   'cassandra_seeds',
                   'permacaches',
+                  'url_caches',
+                  'url_seeds',
                   'admins',
                   'sponsors',
                   'monitored_servers',

@@ -138,55 +140,30 @@ class Globals(object):

        localcache_cls = (SelfEmptyingCache if self.running_as_script
                          else LocalCache)
-        num_mc_clients = 2# if self.running_as_script else 10
+        num_mc_clients = self.num_mc_clients

-        c_mc = CMemcache(self.memcaches, num_clients = num_mc_clients, legacy=True)
-        rmc = CMemcache(self.rendercaches, num_clients = num_mc_clients,
-                        noreply=True, no_block=True)
-        lrmc = None
-        if self.local_rendercache:
-            lrmc = CMemcache(self.local_rendercache,
-                             num_clients = num_mc_clients,
-                             noreply=True, no_block=True)
-        smc = CMemcache(self.servicecaches, num_clients = num_mc_clients)
-        rec_cache = None # we're not using this for now
+        self.cache_chains = []

-        pmc_chain = (localcache_cls(),)
-        if self.permacache_memcaches:
-            pmc_chain += (CMemcache(self.permacache_memcaches,
-                                    num_clients=num_mc_clients,
-                                    legacy=True),)
-        if self.cassandra_seeds:
-            self.cassandra_seeds = list(self.cassandra_seeds)
-            random.shuffle(self.cassandra_seeds)
-            pmc_chain += (CassandraCache('permacache', 'permacache',
-                                         self.cassandra_seeds),)
-        if self.permacaches:
-            pmc_chain += (CMemcache(self.permacaches,
-                                    num_clients=num_mc_clients,
-                                    legacy=True),)
-        if len(pmc_chain) == 1:
-            print 'Warning: proceding without a permacache'
+        self.permacache = self.init_cass_cache('permacache',
+                                               self.permacache_memcaches,
+                                               self.cassandra_seeds)

-        self.permacache = CassandraCacheChain(pmc_chain, cache_negative_results = True)
+        self.urlcache = self.init_cass_cache('urls',
+                                             self.url_caches,
+                                             self.url_seeds)

        # hardcache is done after the db info is loaded, and then the
        # chains are reset to use the appropriate initial entries

-        self.memcache = c_mc # we'll keep using this one for locks
-        # intermediately
+        self.cache = self.init_memcached(self.memcaches)
+        self.memcache = self.cache.caches[1] # used by lock.py

-        self.cache = MemcacheChain((localcache_cls(), c_mc))
-        if lrmc:
-            self.rendercache = MemcacheChain((localcache_cls(), lrmc, rmc))
-        else:
-            self.rendercache = MemcacheChain((localcache_cls(), rmc))
-        self.servicecache = MemcacheChain((localcache_cls(), smc))
-        self.rec_cache = rec_cache
+        self.rendercache = self.init_memcached(self.rendercaches,
+                                               noreply=True, no_block=True)

+        self.servicecache = self.init_memcached(self.servicecaches)

        self.make_lock = make_lock_factory(self.memcache)
-        cache_chains = [self.cache, self.permacache, self.rendercache,
-                        self.servicecache]

        # set default time zone if one is not set
        tz = global_conf.get('timezone')

@@ -199,15 +176,17 @@ class Globals(object):
        self.dbm = self.load_db_params(global_conf)

        # can't do this until load_db_params() has been called
-        self.hardcache = HardcacheChain((localcache_cls(), c_mc,
+        self.hardcache = HardcacheChain((localcache_cls(),
+                                         self.memcache,
                                         HardCache(self)),
                                        cache_negative_results = True)
-        cache_chains.append(self.hardcache)
+        self.cache_chains.append(self.hardcache)

        # I know this sucks, but we need non-request-threads to be
        # able to reset the caches, so we need them be able to close
        # around 'cache_chains' without being able to call getattr on
        # 'g'
+        cache_chains = self.cache_chains[::]
        def reset_caches():
            for chain in cache_chains:
                chain.reset()

@@ -317,7 +296,37 @@ class Globals(object):
            self.version = self.short_version = '(unknown)'

        if self.log_start:
-            self.log.error("reddit app started %s at %s" % (self.short_version, datetime.now()))
+            self.log.error("reddit app %s:%s started %s at %s" % (self.reddit_host, self.reddit_pid,
+                                                                  self.short_version, datetime.now()))

+    def init_memcached(self, caches, **kw):
+        return self.init_cass_cache(None, caches, None, memcached_kw = kw)
+
+    def init_cass_cache(self, cluster, caches, cassandra_seeds,
+                        memcached_kw = {}, cassandra_kw = {}):
+        localcache_cls = (SelfEmptyingCache if self.running_as_script
+                          else LocalCache)
+
+        pmc_chain = (localcache_cls(),)
+
+        # if caches, append
+        if caches:
+            pmc_chain += (CMemcache(caches, num_clients=self.num_mc_clients,
+                                    **memcached_kw),)
+
+        # if seeds, append
+        if cassandra_seeds:
+            cassandra_seeds = list(cassandra_seeds)
+            random.shuffle(cassandra_seeds)
+            pmc_chain += (CassandraCache(cluster, cluster, cassandra_seeds,
+                                         **cassandra_kw),)
+            mc = CassandraCacheChain(pmc_chain, cache_negative_results = True)
+        else:
+            mc = MemcacheChain(pmc_chain)
+
+        self.cache_chains.append(mc)
+        return mc

    @staticmethod
    def to_bool(x):
@@ -271,7 +271,7 @@ class HardCache(CacheUtils):
        return category, ids

    def set(self, key, val, time=0):
-        if val is NoneResult:
+        if val == NoneResult:
            # NoneResult caching is for other parts of the chain
            return

@@ -296,7 +296,7 @@ class HardCache(CacheUtils):

    def set_multi(self, keys, prefix='', time=0):
        for k,v in keys.iteritems():
-            if v is not NoneResult:
+            if v != NoneResult:
                self.set(prefix+str(k), v, time=time)

    def get(self, key, default=None):

@@ -434,7 +434,7 @@ class CacheChain(CacheUtils, local):
                break # so we don't set caches later in the chain
            d.set(key, val)

-        if self.cache_negative_results and val is NoneResult:
+        if self.cache_negative_results and val == NoneResult:
            return default
        else:
            return val

@@ -475,14 +475,14 @@ class CacheChain(CacheUtils, local):
        need = need - set(r.keys())

        if need and self.cache_negative_results:
-            d = dict( (key,NoneResult) for key in need)
+            d = dict((key, NoneResult) for key in need)
            for c in self.caches:
                c.set_multi(d)

        if self.cache_negative_results:
            filtered_out = {}
            for k,v in out.iteritems():
-                if v is not NoneResult:
+                if v != NoneResult:
                    filtered_out[k] = v
            out = filtered_out
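The is-to-== change above matters because NoneResult travels through memcached: a pickle round-trip hands back a new object, so identity tests stop matching while equality (given a sentinel that defines it) still holds. A stand-in demonstration, not reddit's actual NoneResult class:

    # Stand-in sentinel showing why identity fails after a cache round-trip.
    import pickle

    class Sentinel(object):
        def __eq__(self, other):
            return isinstance(other, Sentinel)
        def __ne__(self, other):
            return not self.__eq__(other)

    NONE_RESULT = Sentinel()
    thawed = pickle.loads(pickle.dumps(NONE_RESULT))
    print thawed is NONE_RESULT   # False: a new instance came back
    print thawed == NONE_RESULT   # True: equality still holds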
@@ -201,8 +201,10 @@ class CachedResults(object):
    def insert(self, items):
        """Inserts the item into the cached data. This only works
        under certain criteria, see can_insert."""
+        self._insert_tuples([self.make_item_tuple(item) for item in tup(items)])

+    def _insert_tuples(self, t):
        self.fetch()
-        t = [ self.make_item_tuple(item) for item in tup(items) ]

        # insert the new items, remove the duplicates (keeping the one
        # being inserted over the stored value if applicable), and

@@ -676,6 +678,8 @@ def new_vote(vote):

    sr.last_valid_vote = datetime.now(g.tz)
    sr._commit()

+    vote._fast_query_timestamp_touch(user)
+
    #must update both because we don't know if it's a changed vote
    if vote._name == '1':

@@ -713,7 +717,7 @@ def set_unread(message, to, unread):
            kw = dict(insert_items = i) if unread else dict(delete_items = i)
            add_queries([get_unread_subreddit_messages(i._thing1)], **kw)
    else:
-        for i in Inbox.set_unread(message, unread):
+        for i in Inbox.set_unread(message, unread, to = to):
            kw = dict(insert_items = i) if unread else dict(delete_items = i)
            if i._name == 'selfreply':
                add_queries([get_unread_selfreply(i._thing1)], **kw)

@@ -992,34 +996,6 @@ def get_likes(user, items):
    for k, v in r.iteritems():
        res[keys[k]] = v

-    # performance hack: if their last vote came in before this thing
-    # was created, they can't possibly have voted on it
-    cantexist = {}
-    for item in items:
-        if (user, item) in res:
-            continue
-
-        last_vote_attr_name = 'last_vote_' + item.__class__.__name__
-        last_vote = getattr(user, last_vote_attr_name, None)
-        if not last_vote:
-            continue
-
-        try:
-            if last_vote < item._date:
-                res[(user, item)] = '0'
-                cantexist[prequeued_vote_key(user, item)] = '0'
-        except TypeError:
-            g.log.error("user %s has a broken %s? (%r)"
-                        % (user._id, last_vote_attr_name, last_vote))
-            # accounts for broken last_vote properties
-            pass
-
-    # this is a bit dodgy, but should save us from having to reload
-    # all of the votes on pages they've already loaded as soon as they
-    # cast a new vote
-    if cantexist:
-        g.cache.set_multi(cantexist)

    # now hit the vote db with the remainder
    likes = Vote.likes(user, [i for i in items if (user, i) not in res])
@@ -26,8 +26,9 @@ from pylons import g
epoch = datetime(1970, 1, 1, tzinfo = g.tz)

def epoch_seconds(date):
-    """Returns the number of seconds from the epoch to date. Should match
-    the number returned by the equivalent function in postgres."""
+    """Returns the number of seconds from the epoch to date. Should
+    match the number returned by the equivalent function in
+    postgres."""
    td = date - epoch
    return td.days * 86400 + td.seconds + (float(td.microseconds) / 1000000)

@@ -35,11 +36,13 @@ def score(ups, downs):
    return ups - downs

def hot(ups, downs, date):
+    return _hot(ups, downs, epoch_seconds(date))
+
+def _hot(ups, downs, date):
    """The hot formula. Should match the equivalent function in postgres."""
    s = score(ups, downs)
    order = log(max(abs(s), 1), 10)
    sign = 1 if s > 0 else -1 if s < 0 else 0
-    seconds = epoch_seconds(date) - 1134028003
+    seconds = date - 1134028003
    return round(order + sign * seconds / 45000, 7)

def controversy(ups, downs):
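The same ranking, assembled into a runnable standalone for anyone who wants to check values by hand (naive-UTC datetimes; 1134028003 is the fixed reference timestamp from the code above):

    # Standalone copy of the hot ranking from the diff above.
    from datetime import datetime
    from math import log

    def epoch_seconds(date):
        td = date - datetime(1970, 1, 1)
        return td.days * 86400 + td.seconds + (float(td.microseconds) / 1000000)

    def hot(ups, downs, date):
        s = ups - downs
        order = log(max(abs(s), 1), 10)
        sign = 1 if s > 0 else -1 if s < 0 else 0
        seconds = epoch_seconds(date) - 1134028003
        return round(order + sign * seconds / 45000, 7)

    print hot(100, 40, datetime(2010, 1, 1))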
@@ -708,9 +708,18 @@ def Relation(type1, type2, denorm1 = None, denorm2 = None):
            self._name = 'un' + self._name

        @classmethod
-        def _fast_query(cls, thing1s, thing2s, name, data=True, eager_load=True):
-            """looks up all the relationships between thing1_ids and thing2_ids
-            and caches them"""
+        def _fast_query_timestamp_touch(cls, thing1):
+            assert thing1._loaded
+            timestamp_dict = getattr(thing1, 'fast_query_timestamp', {}).copy()
+            timestamp_dict[cls._type_name] = datetime.now(g.tz)
+            thing1.fast_query_timestamp = timestamp_dict
+            thing1._commit()
+
+        @classmethod
+        def _fast_query(cls, thing1s, thing2s, name, data=True, eager_load=True,
+                        timestamp_optimize = False):
+            """looks up all the relationships between thing1_ids and
+            thing2_ids and caches them"""
            prefix = thing_prefix(cls.__name__)

            thing1_dict = dict((t._id, t) for t in tup(thing1s))

@@ -721,16 +730,46 @@ def Relation(type1, type2, denorm1 = None, denorm2 = None):

            name = tup(name)

+            def can_skip_lookup(t1, t2, name):
+                # we can't possibly have voted on things that were
+                # created after the last time we voted. for relations
+                # that have an invariant like this we can avoid doing
+                # these lookups as long as the relation takes
+                # responsibility for keeping the timestamp up-to-date
+                thing1 = thing1_dict[t1]
+                thing2 = thing2_dict[t2]
+
+                # check to see if we have the history information
+                if not thing1._loaded:
+                    return False
+                if not hasattr(thing1, 'fast_query_timestamp'):
+                    return False
+
+                last_done = thing1.fast_query_timestamp.get(cls._type_name,None)
+
+                if not last_done:
+                    return False
+
+                if thing2._date > last_done:
+                    return True
+
+                return False
+
            # permute all of the pairs
            pairs = set((x, y, n)
                        for x in thing1_ids
                        for y in thing2_ids
                        for n in name)

            def items_db(pairs):
+                rel_ids = {}
+
+                t1_ids = set()
+                t2_ids = set()
+                names = set()
+                for t1, t2, name in pairs:
+                    if timestamp_optimize and can_skip_lookup(t1, t2, name):
+                        continue
+                    t1_ids.add(t1)
+                    t2_ids.add(t2)
+                    names.add(name)

@@ -741,21 +780,21 @@ def Relation(type1, type2, denorm1 = None, denorm2 = None):
                                   eager_load = eager_load,
                                   data = data)

-                rel_ids = {}
                for rel in q:
                    #TODO an alternative for multiple
                    #relations with the same keys
                    #l = rel_ids.setdefault((rel._thing1_id, rel._thing2_id), [])
                    #l.append(rel._id)
                    rel_ids[(rel._thing1_id, rel._thing2_id, rel._name)] = rel._id

                for p in pairs:
                    if p not in rel_ids:
                        rel_ids[p] = None

                return rel_ids

            res = sgm(cache, pairs, items_db, prefix)

            #convert the keys back into objects
            # populate up the local-cache in batch

@@ -1194,14 +1233,15 @@ def MultiRelation(name, *relations):
            return Merge(queries)

        @classmethod
-        def _fast_query(cls, sub, obj, name, data=True, eager_load=True):
+        def _fast_query(cls, sub, obj, name, data=True, eager_load=True,
+                        timestamp_optimize = False):
            #divide into types
            def type_dict(items):
                types = {}
                for i in items:
                    types.setdefault(i.__class__, []).append(i)
                return types

            sub_dict = type_dict(tup(sub))
            obj_dict = type_dict(tup(obj))

@@ -1211,7 +1251,8 @@ def MultiRelation(name, *relations):
            t1, t2 = types
            if sub_dict.has_key(t1) and obj_dict.has_key(t2):
                res.update(rel._fast_query(sub_dict[t1], obj_dict[t2], name,
-                                           data = data, eager_load=eager_load))
+                                           data = data, eager_load=eager_load,
+                                           timestamp_optimize = timestamp_optimize))

            return res
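Read together with the queries.py change above: new_vote() now calls vote._fast_query_timestamp_touch(user) on every vote, which keeps the invariant that lets callers pass timestamp_optimize = True and skip (user, thing) pairs where the thing is newer than the user's last vote; this is why get_likes() no longer needs its own cantexist pass. The core test, restated standalone with the attribute names from the diff:

    # Standalone restatement of can_skip_lookup's invariant check.
    def can_skip_lookup(user, thing, type_name):
        # a user cannot have voted on a thing created after their
        # last recorded vote of this relation type
        timestamps = getattr(user, 'fast_query_timestamp', {})
        last_done = timestamps.get(type_name)
        return bool(last_done) and thing._date > last_done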
@@ -46,7 +46,8 @@ def log_exception(e, e_type, e_value, e_traceback):
    d['traceback'] = traceback.extract_tb(e_traceback)

    d['exception_type'] = e.__class__.__name__
-    d['exception_desc'] = str(e)
+    s = str(e)
+    d['exception_desc'] = s[:10000]

    amqp.add_item(Q, pickle.dumps(d))
@@ -127,7 +127,7 @@ menu = MenuHandler(hot = _('hot'),
                   edit = _("edit this reddit"),
                   moderators = _("edit moderators"),
                   modmail = _("moderator mail"),
-                   contributors = _("edit contributors"),
-                   banned = _("ban users"),
+                   contributors = _("edit approved submitters"),
+                   banusers = _("ban users"),
@@ -380,3 +380,26 @@ def shorten_byurl_keys():
                  for (old_key, value)
                  in old.iteritems())
    g.permacache.set_multi(new)

+def prime_url_cache(f, verbosity = 10000):
+    import gzip, time
+    from pylons import g
+    handle = gzip.open(f, 'rb')
+    counter = 0
+    start_time = time.time()
+    for line in handle:
+        try:
+            tid, key, url, kind = line.split('|')
+            tid = int(tid)
+            if url.lower() != "self":
+                key = Link.by_url_key(url)
+                link_ids = g.urlcache.get(key) or []
+                if tid not in link_ids:
+                    link_ids.append(tid)
+                    g.urlcache.set(key, link_ids)
+        except ValueError:
+            print "FAIL: %s" % line
+        counter += 1
+        if counter % verbosity == 0:
+            print "%6d: %s" % (counter, line)
+            print "--> doing %5.2f / s" % (float(counter) / (time.time() - start_time))
171
r2/r2/lib/mr_tools.py
Executable file
@@ -0,0 +1,171 @@
import sys

class LineReader(object):
    """A simple class to read lines from a File (like stdin) that
    supports pushing lines back into the buffer"""
    def __init__(self, stream):
        self.stream = stream
        self.pushed_back = []

    def readline(self):
        if self.pushed_back:
            return self.pushed_back.pop()
        else:
            return self.stream.readline()

    def push_back(self, line):
        self.pushed_back.append(line)

def in_chunks(it, size=25):
    chunk = []
    it = iter(it)
    try:
        while True:
            chunk.append(it.next())
            if len(chunk) >= size:
                yield chunk
                chunk = []
    except StopIteration:
        if chunk:
            yield chunk

def valiter(key, lr, firstline):
    line = firstline
    while line:
        linevals = line.strip('\n').split('\t')
        readkey, vals = linevals[0], linevals[1:]
        if readkey == key:
            yield vals
            line = lr.readline()
        else:
            lr.push_back(line)
            line = None

def keyiter(stream):
    lr = LineReader(stream)

    line = lr.readline()
    while line:
        key = line.strip('\n').split('\t',1)[0]

        vi = valiter(key, lr, line)
        yield key, vi
        # read the rest of the valueiter before reading any more lines
        try:
            while vi.next():
                pass
        except StopIteration:
            pass

        line = lr.readline()

def status(msg, **opts):
    if opts:
        msg = msg % opts
    sys.stderr.write("%s\n" % msg)

def emit(vals):
    print '\t'.join(str(val) for val in vals)

def emit_all(vals):
    for val in vals:
        emit(val)

class Storage(dict):
    def __getattr__(self, attr):
        return self[attr]

def format_dataspec(msg, specs):
    # spec() =:= name | (name, fn)
    # specs =:= [ spec() ]
    ret = Storage()
    for val, spec in zip(msg, specs):
        if isinstance(spec, basestring):
            name = spec
            ret[name] = val
        else:
            name, fn = spec
            ret[name] = fn(val)
    return Storage(**ret)

class dataspec_m(object):
    def __init__(self, *specs):
        self.specs = specs

    def __call__(self, fn):
        specs = self.specs
        def wrapped_fn(args):
            return fn(format_dataspec(args, specs))
        return wrapped_fn

class dataspec_r(object):
    def __init__(self, *specs):
        self.specs = specs

    def __call__(self, fn):
        specs = self.specs
        def wrapped_fn(key, msgs):
            return fn(key, ( format_dataspec(msg, specs)
                             for msg in msgs ))
        return wrapped_fn

def mr_map(process, fd = sys.stdin):
    for line in fd:
        vals = line.strip('\n').split('\t')
        for res in process(vals):
            emit(res)

def mr_reduce(process, fd = sys.stdin):
    for key, vals in keyiter(fd):
        for res in process(key, vals):
            emit(res)

def mr_foldl(process, init, do_emit = False, fd = sys.stdin):
    # the flag can't be called "emit" or it would shadow the emit() helper
    acc = init
    for key, vals in keyiter(fd):
        acc = process(key, vals, acc)

    if do_emit:
        emit(acc)

    return acc

def mr_max(process, idx = 0, num = 10, do_emit = False, fd = sys.stdin):
    """a reducer that, in the process of reduction, only returns the
       top N results"""
    maxes = []
    for key, vals in keyiter(fd):
        for newvals in in_chunks(process(key, vals)):
            for val in newvals:
                if len(maxes) < num or val[idx] > maxes[-1][idx]:
                    maxes.append(val)
                    maxes.sort(reverse=True)
                    maxes = maxes[:num]

    if do_emit:
        emit_all(maxes)

    return maxes

def mr_reduce_max_per_key(sort_key, post = None, num = 10, fd = sys.stdin):
    def process(key, vals):
        maxes = []
        for val_chunk in in_chunks(vals, num):
            maxes.extend(val_chunk)
            maxes.sort(reverse=True, key=sort_key)
            maxes = maxes[:num]
        if post:
            # if we were passed a "post" function, it takes
            # responsibility for emitting
            post(key, maxes)
        else:
            for item in maxes:
                yield [key] + item

    return mr_reduce(process, fd = fd)

if __name__ == '__main__':
    for key, vals in keyiter(sys.stdin):
        print key, vals
        for val in vals:
            print '\t', val
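mr_tools.py is glue for hadoop-streaming-style jobs: mappers and reducers read
tab-separated lines on stdin and write tab-separated lines on stdout, with
keyiter() grouping a sorted stream by its first column. A hypothetical job
built on top of it (the module path is real, but the dump columns and function
names below are made up for illustration):

    # mapper: reads lines of "author<TAB>ups<TAB>downs", emits "author<TAB>score"
    from r2.lib import mr_tools

    @mr_tools.dataspec_m('author', ('ups', int), ('downs', int))
    def links_by_author(link):
        yield (link.author, link.ups - link.downs)

    # reducer: reads the sorted mapper output, emits one total per author
    def total_by_author(key, vals):
        # vals yields the value columns of every line sharing this key
        yield (key, sum(int(score) for score, in vals))

    # mr_tools.mr_map(links_by_author)      # run in the mapper process
    # mr_tools.mr_reduce(total_by_author)   # run in the reducer process

wired together in the usual streaming fashion: cat dump | mapper | sort | reducer.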
@@ -129,7 +129,9 @@ class Reddit(Templated):
        #add the infobar
        self.infobar = None
        if self.show_firsttext and not infotext:
            if c.firsttime == 'iphone':
            if g.read_only_mode:
                infotext = strings.read_only_msg
            elif c.firsttime == 'iphone':
                infotext = strings.iphone_first
            elif c.firsttime and c.site.firsttext:
                infotext = c.site.firsttext
@@ -156,9 +158,9 @@ class Reddit(Templated):
            if c.site.type != 'public':
                buttons.append(NamedButton('contributors',
                                           css_class = 'reddit-contributors'))
            elif (c.user_is_loggedin and c.site.use_whitelist and
            elif (c.user_is_loggedin and
                  (c.site.is_moderator(c.user) or c.user_is_admin)):
                buttons.append(NavButton(menu.whitelist, "contributors",
                buttons.append(NavButton(menu.contributors, "contributors",
                                         css_class = 'reddit-contributors'))

            buttons.extend([
@@ -184,7 +186,7 @@ class Reddit(Templated):
        if self.searchbox:
            ps.append(SearchForm())

        if not c.user_is_loggedin and self.loginbox:
        if not c.user_is_loggedin and self.loginbox and not g.read_only_mode:
            ps.append(LoginFormWide())

        if not isinstance(c.site, FakeSubreddit):
@@ -456,7 +458,7 @@ class SubredditInfoBar(CachedTemplate):
    def nav(self):
        buttons = [NavButton(plurals.moderators, 'moderators')]
        if self.type != 'public':
            buttons.append(NavButton(plurals.contributors, 'contributors'))
            buttons.append(NavButton(getattr(plurals, "approved submitters"), 'contributors'))

        if self.is_moderator or self.is_admin:
            buttons.extend([
@@ -783,6 +785,107 @@ class LinkInfoPage(Reddit):
            rb.insert(1, LinkInfoBar(a = self.link))
        return rb

class CommentPane(Templated):
    def cache_key(self):
        return "_".join(map(str, ["commentpane", self.article._fullname,
                                  self.article.num_comments,
                                  self.sort, self.num, c.lang,
                                  self.can_reply, c.render_style]))

    def __init__(self, article, sort, comment, context, num, **kw):
        # keys: lang, num, can_reply, render_style
        # disable: admin

        from r2.models import CommentBuilder, NestedListing
        from r2.controllers.reddit_base import UnloggedUser

        self.sort = sort
        self.num = num
        self.article = article

        # don't cache on permalinks or contexts, and keep it to html
        try_cache = not comment and not context and (c.render_style == "html")
        self.can_reply = False
        if try_cache and c.user_is_loggedin:
            sr = article.subreddit_slow
            c.can_reply = self.can_reply = sr.can_comment(c.user)
            # don't cache if the current user can ban comments in the listing
            try_cache = not sr.can_ban(c.user)
            # don't cache for users with custom hide thresholds
            try_cache &= (c.user.pref_min_comment_score ==
                          Account._defaults["pref_min_comment_score"])

        def renderer():
            builder = CommentBuilder(article, sort, comment, context, **kw)
            listing = NestedListing(builder, num = num,
                                    parent_name = article._fullname)
            return listing.listing()

        # generate the listing we would make for this user if caching is disabled.
        my_listing = renderer()

        # for now, disable the cache if the user happens to be an author of anything.
        if try_cache:
            for t in self.listing_iter(my_listing):
                if getattr(t, "is_author", False):
                    try_cache = False
                    break

        if try_cache:
            # try to fetch the comment tree from the cache
            key = self.cache_key()
            self.rendered = g.cache.get(key)
            if not self.rendered:
                # spoof an unlogged-in user
                user = c.user
                logged_in = c.user_is_loggedin
                try:
                    c.user = UnloggedUser([c.lang])
                    c.user_is_loggedin = False

                    # render as if not logged in (but possibly with reply buttons)
                    self.rendered = renderer().render()
                    g.cache.set(key, self.rendered, time = 30)

                finally:
                    # undo the spoofing
                    c.user = user
                    c.user_is_loggedin = logged_in

            # figure out what needs to be updated on the listing
            likes = []
            dislikes = []
            is_friend = set()
            for t in self.listing_iter(my_listing):
                if not hasattr(t, "likes"):
                    # this is for MoreComments and MoreRecursion
                    continue
                if getattr(t, "friend", False):
                    is_friend.add(t.author._fullname)
                if t.likes:
                    likes.append(t._fullname)
                if t.likes is False:
                    dislikes.append(t._fullname)
            self.rendered += ThingUpdater(likes = likes,
                                          dislikes = dislikes,
                                          is_friend = is_friend).render()
            g.log.debug("using comment page cache")
        else:
            self.rendered = my_listing.render()

    def listing_iter(self, l):
        for t in l:
            yield t
            for x in self.listing_iter(getattr(t, "child", [])):
                yield x

    def render(self, *a, **kw):
        return self.rendered

class ThingUpdater(Templated):
    pass
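For reference, CommentPane.cache_key() above flattens its inputs into a single
memcached key, so any change in comment count, sort, page size, language,
reply permission, or render style is a cache miss. With hypothetical values:

    >>> "_".join(map(str, ["commentpane", "t3_1a2b", 42,
    ...                    "confidence", 200, "en", True, "html"]))
    'commentpane_t3_1a2b_42_confidence_200_en_True_html'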
class LinkInfoBar(Templated):
    """Right box for providing info about a link."""
    def __init__(self, a = None):
@@ -978,7 +1081,7 @@ class RedditError(BoringPage):
class Reddit404(BoringPage):
    site_tracking = False
    def __init__(self):
        ch=random.choice(['a','b','c','d','e'])
        ch=random.choice(['a','b','c'])
        BoringPage.__init__(self, _("page not found"), loginbox=False,
                            show_sidebar = False,
                            content=UnfoundPage(ch))
@@ -996,9 +1099,10 @@ class ErrorPage(Templated):
class Profiling(Templated):
    """Debugging template for code profiling using built in python
       library (only used in middleware)"""
    def __init__(self, header = '', table = [], caller = [], callee = [], path = ''):
    def __init__(self, header = '', table = [], caller = [], callee = [],
                 path = '', sort_order = ''):
        Templated.__init__(self, header = header, table = table, caller = caller,
                           callee = callee, path = path)
                           callee = callee, path = path)

class Over18(Templated):
    """The creepy 'over 18' check page for nsfw content."""
@@ -1851,6 +1955,7 @@ class WrappedUser(CachedTemplate):
                               karma = karma,
                               ip_span = ip_span,
                               context_deleted = context_deleted,
                               fullname = user._fullname,
                               user_deleted = user._deleted)

# Classes for dealing with friend/moderator/contributor/banned lists

@@ -1883,6 +1988,7 @@ class UserList(Templated):
    _class = ""
    destination = "friend"
    remove_action = "unfriend"
    editable_fn = None

    def __init__(self, editable = True):
        self.editable = editable

@@ -1892,8 +1998,13 @@ class UserList(Templated):
        """Convenience method for constructing a UserTableItem
        instance of the user with type, container_name, etc. of this
        UserList instance"""
        editable = self.editable

        if self.editable_fn and not self.editable_fn(user):
            editable = False

        return UserTableItem(user, self.type, self.cells, self.container_name,
                             self.editable, self.remove_action)
                             editable, self.remove_action)

    @property
    def users(self, site = None):
@@ -1940,13 +2051,11 @@ class ContributorList(UserList):

    @property
    def form_title(self):
        if c.site.type == "public":
            return _("add to whitelist")
        return _('add contributor')
        return _("add approved submitter")

    @property
    def table_title(self):
        return _("contributors to %(reddit)s") % dict(reddit = c.site.name)
        return _("approved submitters for %(reddit)s") % dict(reddit = c.site.name)

    def user_ids(self):
        return c.site.contributors
@@ -1963,6 +2072,14 @@ class ModList(UserList):
    def table_title(self):
        return _("moderators to %(reddit)s") % dict(reddit = c.site.name)

    def editable_fn(self, user):
        if not c.user_is_loggedin:
            return False
        elif c.user_is_admin:
            return True
        else:
            return c.site.can_demod(c.user, user)

    def user_ids(self):
        return c.site.moderators
@@ -2412,8 +2529,12 @@ class RedditTraffic(Traffic):
        if self.has_data:
            imp_by_day = [[] for i in range(7)]
            uni_by_day = [[] for i in range(7)]
            dates, imps = zip(*self.impressions_day_chart)
            dates, uniques = zip(*self.uniques_day_chart)
            if c.site.domain:
                dates, imps, foo = zip(*self.impressions_day_chart)
                dates, uniques, foo = zip(*self.uniques_day_chart)
            else:
                dates, imps = zip(*self.impressions_day_chart)
                dates, uniques = zip(*self.uniques_day_chart)
            self.uniques_mean = sum(map(float, uniques))/len(uniques)
            self.impressions_mean = sum(map(float, imps))/len(imps)
            for i, d in enumerate(dates):
@@ -34,8 +34,13 @@ class PrintableButtons(Styled):
    def __init__(self, style, thing,
                 show_delete = False, show_report = True,
                 show_distinguish = False,
                 show_indict = False, **kw):
                 show_indict = False, is_link=False, **kw):
        show_report = show_report and c.user_is_loggedin
        show_ignore = (thing.show_reports or
                       (thing.reveal_trial_info and not thing.show_spam))
        approval_checkmark = getattr(thing, "approval_checkmark", None)
        show_approve = (thing.show_spam or show_ignore or
                        (is_link and approval_checkmark is None))

        Styled.__init__(self, style = style,
                        thing = thing,
@@ -43,11 +48,10 @@ class PrintableButtons(Styled):
                        can_ban = thing.can_ban,
                        show_spam = thing.show_spam,
                        show_reports = thing.show_reports,
                        show_ignore = thing.show_reports or
                            (thing.reveal_trial_info and not thing.show_spam),
                        approval_checkmark = getattr(thing,
                                                     "approval_checkmark", None),
                        show_ignore = show_ignore,
                        approval_checkmark = approval_checkmark,
                        show_delete = show_delete,
                        show_approve = show_approve,
                        show_report = show_report,
                        show_indict = show_indict,
                        show_distinguish = show_distinguish,
@@ -64,7 +68,9 @@ class LinkButtons(PrintableButtons):
        is_author = (c.user_is_loggedin and thing.author and
                     c.user.name == thing.author.name)
        # do we show the report button?
        show_report = not is_author and report
        show_report = (not is_author and
                       report and
                       getattr(thing, "promoted", None) is None)

        if c.user_is_admin and thing.promoted is None:
            show_report = False
@@ -112,13 +118,13 @@ class LinkButtons(PrintableButtons):
                                  show_comments = comments,
                                  # promotion
                                  promoted = thing.promoted,
                                  is_link = True,
                                  **kw)

class CommentButtons(PrintableButtons):
    def __init__(self, thing, delete = True, report = True):
        # is the current user the author?
        is_author = (c.user_is_loggedin and thing.author and
                     c.user.name == thing.author.name)
        is_author = thing.is_author
        # do we show the report button?
        show_report = not is_author and report
        # do we show the delete button?
@@ -310,7 +310,7 @@ def new_promotion(title, url, user, ip):
    l._commit()

    # set the status of the link, populating the query queue
    if c.user_is_sponsor or getattr(user, "trusted_sponsor", False):
    if c.user_is_sponsor or user.trusted_sponsor:
        set_status(l, STATUS.accepted)
    else:
        set_status(l, STATUS.unpaid)
@@ -84,7 +84,7 @@ string_dict = dict(
    msg_add_friend = dict(
        friend = None,
        moderator = _("you have been added as a moderator to [%(title)s](%(url)s)."),
        contributor = _("you have been added as a contributor to [%(title)s](%(url)s)."),
        contributor = _("you have been added as an approved submitter to [%(title)s](%(url)s)."),
        banned = _("you have been banned from posting to [%(title)s](%(url)s)."),
        traffic = _('you have been added to the list of users able to see [traffic for the sponsored link "%(title)s"](%(traffic_url)s).')
        ),
@@ -92,7 +92,7 @@ string_dict = dict(
    subj_add_friend = dict(
        friend = None,
        moderator = _("you are a moderator"),
        contributor = _("you are a contributor"),
        contributor = _("you are an approved submitter"),
        banned = _("you've been banned"),
        traffic = _("you can view traffic on a promoted link")
        ),
@@ -100,7 +100,7 @@ string_dict = dict(
    sr_messages = dict(
        empty = _('you have not subscribed to any reddits.'),
        subscriber = _('below are the reddits you have subscribed to'),
        contributor = _('below are the reddits that you have contributor access to.'),
        contributor = _('below are the reddits that you are an approved submitter on.'),
        moderator = _('below are the reddits that you have moderator access to.')
        ),

@@ -135,6 +135,7 @@ string_dict = dict(
    search_failed = _("Our search machines are under too much load to handle your request right now. :( Sorry for the inconvenience. [Try again](%(link)s) in a little bit -- but please don't mash reload; that only makes the problem worse."),
    verified_quota_msg = _("You've submitted several links recently that haven't been doing very well. You'll have to wait a while before you can submit again, or [write to the moderators of this reddit](%(link)s) and ask for an exemption."),
    unverified_quota_msg = _("You haven't [verified your email address](%(link1)s); until you do, your submitting privileges will be severely limited. Please try again in an hour or verify your email address. If you'd like an exemption from this rule, please [write to the moderators of this reddit](%(link2)s)."),
    read_only_msg = _("reddit is in \"emergency read-only mode\" right now. :( you won't be able to log in. we're sorry, and are working frantically to fix the problem."),
    )

class StringHandler(object):
@@ -206,7 +207,7 @@ plurals = PluralManager([P_("comment", "comments"),
                         # people
                         P_("reader", "readers"),
                         P_("subscriber", "subscribers"),
                         P_("contributor", "contributors"),
                         P_("approved submitter", "approved submitters"),
                         P_("moderator", "moderators"),

                         # time words
@@ -27,4 +27,4 @@ from cmd_utils import *
try:
    from r2admin.lib.admin_utils import *
except ImportError:
    pass
    from admin_utils import *
@@ -60,7 +60,7 @@ def indict(defendant):
    rv = False
    if defendant._deleted:
        result = "already deleted"
    elif hasattr(defendant, "promoted") and defendant.promoted:
    elif getattr(defendant, "promoted", None) is not None:
        result = "it's promoted"
    elif hasattr(defendant, "verdict") and defendant.verdict is not None:
        result = "it already has a verdict"
@@ -1273,7 +1273,7 @@ def progress(it, verbosity=100, key=repr, estimate=None, persec=False):
    except:
        pass

    print 'Starting at %s' % (start,)
    sys.stderr.write('Starting at %s\n' % (start,))

    seen = 0
    for item in it:
@@ -1300,20 +1300,19 @@ def progress(it, verbosity=100, key=repr, estimate=None, persec=False):
                key_str = ''

            if persec and elapsed_seconds > 0:
                persec_str = ' (%.2f/s)' % (seen/elapsed_seconds,)
                persec_str = ' (%.2f/s)' % (float(seen)/elapsed_seconds,)
            else:
                persec_str = ''

            sys.stdout.write('%s%s, %s%s%s\n'
            sys.stderr.write('%s%s, %s%s%s\n'
                             % (count_str, persec_str,
                                elapsed, estimate_str, key_str))
            sys.stdout.flush()
            this_chunk = 0
        yield item

    now = datetime.now()
    elapsed = now - start
    print 'Processed %d items in %s..%s (%s)' % (seen, start, now, elapsed)
    sys.stderr.write('Processed %d items in %s..%s (%s)\n' % (seen, start, now, elapsed))

class Hell(object):
    def __str__(self):
@@ -62,6 +62,7 @@ class Account(Thing):
                     pref_threaded_messages = True,
                     pref_collapse_read_messages = False,
                     pref_private_feeds = True,
                     trusted_sponsor = False,
                     reported = 0,
                     report_made = 0,
                     report_correct = 0,
@@ -397,6 +398,9 @@ def valid_cookie(cookie):
    except:
        return (False, False)

    if g.read_only_mode:
        return (False, False)

    try:
        account = Account._byID(uid, True)
        if account._deleted:
@@ -43,6 +43,9 @@ class AdminTools(object):
        Report.accept(new_things, True)

        for t in all_things:
            if getattr(t, "promoted", None) is not None:
                g.log.debug("Refusing to mark promotion %r as spam" % t)
                continue
            t._spam = True
            ban_info = copy(getattr(t, 'ban_info', {}))
            ban_info.update(auto = auto,
@@ -198,8 +201,6 @@ def filter_quotas(unfiltered):
            quotas_changed = True
            continue

        score = item._downs - item._ups

        verdict = getattr(item, "verdict", None)
        approved = verdict and verdict in (
            'admin-approved', 'mod-approved')
@@ -211,9 +212,9 @@ def filter_quotas(unfiltered):
            pass
        elif item._deleted:
            pass
        elif score <= 0:
        elif item._score <= 0:
            pass
        elif age < 86400 and score <= g.QUOTA_THRESHOLD and not approved:
        elif age < 86400 and item._score <= g.QUOTA_THRESHOLD and not approved:
            pass
        else:
            quotas_changed = True
@@ -204,7 +204,8 @@ class Builder(object):
              or (user
                  and hasattr(item,'sr_id')
                  and item.sr_id in can_ban_set)):
            w.can_ban = True
            if getattr(item, "promoted", None) is None:
                w.can_ban = True

        ban_info = getattr(item, 'ban_info', {})
        w.unbanner = ban_info.get('unbanner')
@@ -498,7 +499,7 @@ class CommentBuilder(Builder):
    def get_items(self, num):
        r = link_comments(self.link._id)
        cids, comment_tree, depth, num_children = r
        cids, cid_tree, depth, num_children = r

        if (not isinstance(self.comment, utils.iters)
            and self.comment and not self.comment._id in depth):
@@ -506,7 +507,7 @@ class CommentBuilder(Builder):
                        % self.comment._id)

            r = link_comments(self.link._id, _update=True)
            cids, comment_tree, depth, num_children = r
            cids, cid_tree, depth, num_children = r

            if not self.comment._id in depth:
                g.log.error("Update didn't help. This is gonna end in tears.")
@@ -520,9 +521,9 @@ class CommentBuilder(Builder):
        comment_dict = dict((cm._id, cm) for cm in comments)

        #convert tree into objects
        for k, v in comment_tree.iteritems():
            comment_tree[k] = [comment_dict[cid] for cid in comment_tree[k]]

        comment_tree = {}
        for k, v in cid_tree.iteritems():
            comment_tree[k] = [comment_dict[cid] for cid in cid_tree[k]]
        items = []
        extra = {}
        top = None
@@ -80,7 +80,7 @@ class Link(Thing, Printable):
        sr = None

        url = cls.by_url_key(url)
        link_ids = g.permacache.get(url)
        link_ids = g.urlcache.get(url)
        if link_ids:
            links = Link._byID(link_ids, data = True, return_dict = False)
            links = [l for l in links if not l._deleted]
@@ -97,20 +97,20 @@ class Link(Thing, Printable):
    def set_url_cache(self):
        if self.url != 'self':
            key = self.by_url_key(self.url)
            link_ids = g.permacache.get(key) or []
            link_ids = g.urlcache.get(key) or []
            if self._id not in link_ids:
                link_ids.append(self._id)
            g.permacache.set(key, link_ids)
            g.urlcache.set(key, link_ids)

    def update_url_cache(self, old_url):
        """Remove the old url from the by_url cache then update the
        cache with the new url."""
        if old_url != 'self':
            key = self.by_url_key(old_url)
            link_ids = g.permacache.get(key) or []
            link_ids = g.urlcache.get(key) or []
            while self._id in link_ids:
                link_ids.remove(self._id)
            g.permacache.set(key, link_ids)
            g.urlcache.set(key, link_ids)
        self.set_url_cache()

    @property
@@ -142,16 +142,19 @@ class Link(Thing, Printable):
    @classmethod
    def _somethinged(cls, rel, user, link, name):
        return rel._fast_query(tup(user), tup(link), name = name)
        return rel._fast_query(tup(user), tup(link), name = name,
                               timestamp_optimize = True)

    def _something(self, rel, user, somethinged, name):
        try:
            saved = rel(user, self, name=name)
            saved._commit()
            return saved
        except CreationError, e:
            return somethinged(user, self)[(user, self, name)]

        rel._fast_query_timestamp_touch(user)
        return saved

    def _unsomething(self, user, somethinged, name):
        saved = somethinged(user, self)[(user, self, name)]
        if saved:
@@ -498,6 +501,7 @@ class Comment(Thing, Printable):
    _defaults = dict(reported = 0, parent_id = None,
                     moderator_banned = False, new = False,
                     banned_before_moderator = False)
    _essentials = ('link_id', 'author_id')

    def _markdown(self):
        pass
@@ -583,9 +587,7 @@ class Comment(Thing, Printable):
    def add_props(cls, user, wrapped):
        from r2.lib.template_helpers import add_attr
        from r2.lib import promote

        #fetch parent links

        links = Link._byID(set(l.link_id for l in wrapped), data = True,
                           return_dict = True)
@@ -642,7 +644,7 @@ class Comment(Thing, Printable):
            else:
                item.parent_permalink = None

            item.can_reply = (item.sr_id in can_reply_srs)
            item.can_reply = c.can_reply or (item.sr_id in can_reply_srs)


            # not deleted on profile pages,
@@ -915,7 +917,7 @@ class Message(Thing, Printable):
        # load the inbox relations for the messages to determine new-ness
        # TODO: query cache?
        inbox = Inbox._fast_query(c.user,
        inbox = Inbox._fast_query(c.user,
                                  [item.lookups[0] for item in wrapped],
                                  ['inbox', 'selfreply'])

@@ -923,15 +925,17 @@
        inbox = dict((m._fullname, v)
                     for (u, m, n), v in inbox.iteritems() if v)

        modinbox = ModeratorInbox._query(
            ModeratorInbox.c._thing2_id == [item._id for item in wrapped],
            data = True)
        msgs = filter (lambda x: isinstance(x.lookups[0], Message), wrapped)

        modinbox = ModeratorInbox._fast_query(m_subreddits.values(),
                                              msgs,
                                              ['inbox'] )

        # best to not have to eager_load the things
        def make_message_fullname(mid):
            return "t%s_%s" % (utils.to36(Message._type_id), utils.to36(mid))
        modinbox = dict((make_message_fullname(v._thing2_id), v)
                        for v in modinbox)
                        for (u, m, n), v in modinbox.iteritems() if v)

        for item in wrapped:
            item.to = tos.get(item.to_id)
@@ -1036,10 +1040,15 @@ class Inbox(MultiRelation('inbox',
        return i

    @classmethod
    def set_unread(cls, thing, unread):
    def set_unread(cls, thing, unread, to = None):
        inbox_rel = cls.rel(Account, thing.__class__)
        inbox = inbox_rel._query(inbox_rel.c._thing2_id == thing._id,
                                 eager_load = True)
        if to:
            inbox = inbox_rel._query(inbox_rel.c._thing2_id == thing._id,
                                     inbox_rel.c._thing1_id == to._id,
                                     eager_load = True)
        else:
            inbox = inbox_rel._query(inbox_rel.c._thing2_id == thing._id,
                                     eager_load = True)
        res = []
        for i in inbox:
            if i:
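The new "to" argument narrows the update to a single recipient's inbox
relation instead of touching the relation for every recipient; a hypothetical
call (names are illustrative):

    # mark a message read for one account only
    Inbox.set_unread(message, False, to = recipient_account)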
@@ -21,51 +21,53 @@
################################################################################
from r2.models import *
from r2.lib.utils import fetch_things2
from pylons import g
from r2.lib.db import queries


import string
import random

def populate(sr_name = 'reddit.com', sr_title = "reddit.com: what's new online",
             num = 100):
    create_accounts(num)

    a = list(Account._query(limit = 1))[0]

def populate(num_srs = 10, num_users = 10, num_links = 100):
    try:
        sr = Subreddit._new(name = sr_name, title = sr_title,
                            ip = '0.0.0.0', author_id = a._id)
        sr._commit()
    except SubredditExists:
        pass
        a = Account._by_name(g.system_user)
    except NotFound:
        a = register(g.system_user, "password")

    create_links(num)

def create_accounts(num):
    for i in range(num):
    srs = []
    for i in range(num_srs):
        name = "reddit_test%d" % i
        try:
            sr = Subreddit._new(name = name, title = "everything about #%d"%i,
                                ip = '0.0.0.0', author_id = a._id)
            sr._downs = 10
            sr.lang = "en"
            sr._commit()
        except SubredditExists:
            sr = Subreddit._by_name(name)
        srs.append(sr)

    accounts = []
    for i in range(num_users):
        name_ext = ''.join([ random.choice(string.letters)
                             for x
                             in range(int(random.uniform(1, 10))) ])
        name = 'test_' + name_ext
        try:
            register(name, name)
            a = register(name, name)
        except AccountExists:
            pass
            a = Account._by_name(name)
        accounts.append(a)

def create_links(num):
    from r2.lib.db import queries

    accounts = list(Account._query(limit = num, data = True))
    subreddits = list(Subreddit._query(limit = num, data = True))
    for i in range(num):
    for i in range(num_links):
        id = random.uniform(1,100)
        title = url = 'http://google.com/?q=' + str(id)
        user = random.choice(accounts)
        sr = random.choice(subreddits)
        sr = random.choice(srs)
        l = Link._submit(title, url, user, sr, '127.0.0.1')
        queries.new_link(l)

    queries.worker.join()



def by_url_cache():
@@ -57,9 +57,8 @@ class Subreddit(Thing, Printable):
                     valid_votes = 0,
                     show_media = False,
                     css_on_cname = True,
                     use_whitelist = False,
                     domain = None,
                     over_18 = False,
                     over_18 = None,
                     mod_actions = 0,
                     sponsorship_text = "this reddit is sponsored by",
                     sponsorship_url = None,
@@ -215,9 +214,7 @@ class Subreddit(Thing, Printable):
        return (user
                and (c.user_is_admin
                     or self.is_moderator(user)
                     or ((self.type in ('restricted', 'private') or
                          self.use_whitelist) and
                         self.is_contributor(user))))
                     or self.is_contributor(user)))

    def can_give_karma(self, user):
        return self.is_special(user)
@@ -243,6 +240,15 @@ class Subreddit(Thing, Printable):
            #private requires contributorship
            return self.is_contributor(user) or self.is_moderator(user)

    def can_demod(self, bully, victim):
        # This works because the is_*() functions return the relation
        # when True. So we can compare the dates on the relations.
        bully_rel = self.is_moderator(bully)
        victim_rel = self.is_moderator(victim)
        if bully_rel is None or victim_rel is None:
            return False
        return bully_rel._date <= victim_rel._date

    @classmethod
    def load_subreddits(cls, links, return_dict = True):
        """returns the subreddits for a list of links. it also preloads the
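Since is_moderator() returns the relation itself, the comparison boils down to
"was the acting moderator added no later than the target". A minimal
illustration with bare dates (hypothetical values):

    from datetime import datetime

    bully_date  = datetime(2009, 1, 1)    # when the acting mod was added
    victim_date = datetime(2009, 6, 15)   # when the target mod was added
    print bully_date <= victim_date       # True: senior mods can demod junior ones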
@@ -787,7 +793,7 @@ class ModSR(ModContribSR):
class ContribSR(ModContribSR):
    name = "contrib"
    title = "communities you're a contributor on"
    title = "communities you're approved on"
    query_param = "contributor"
    real_path = "contrib"
@@ -101,9 +101,12 @@ class Trial(Storage):
            g.log.debug("not enough votes yet")
            return (None, koshers, spams)

        # Stop showing this in the spotlight box once it has 30 votes
        if total_votes >= 30:
        # Stop showing this in the spotlight box once it has 20 votes
        if total_votes >= 20:
            g.cache.set("quench_jurors-" + self.defendant._fullname, True)
            quenching = True
        else:
            quenching = False

        # If a trial is less than an hour old, and votes are still trickling
        # in (i.e., there was one in the past five minutes), we're going to
@@ -127,9 +130,20 @@ class Trial(Storage):
            return ("guilty", koshers, spams)
        elif kosher_pct > 0.66:
            return ("innocent", koshers, spams)
        elif not quenching:
            g.log.debug("not yet quenching")
            return (None, koshers, spams)
        # At this point, we're not showing the link to any new jurors, and
        # the existing jurors haven't changed or submitted votes for several
        # minutes, so we're not really expecting to get many more votes.
        # Thus, lower our standards for consensus.
        elif kosher_pct < 0.3999:
            return ("guilty", koshers, spams)
        elif kosher_pct > 0.6001:
            return ("innocent", koshers, spams)
        elif total_votes >= 100:
            # This should never really happen; quenching should kick in
            # after 30 votes, so new jurors won't be assigned to the
            # after 20 votes, so new jurors won't be assigned to the
            # trial. Just in case something goes wrong, close any trials
            # with more than 100 votes.
            return ("hung jury", koshers, spams)
@@ -173,7 +187,7 @@ class Trial(Storage):
    defs = Thing._by_fullname(all, data=True).values()

    if quench:
        # Used for the spotlight, to filter out trials with over 30 votes;
        # Used for the spotlight, to filter out trials with over 20 votes;
        # otherwise, hung juries would hog the spotlight for an hour as
        # their vote counts continued to skyrocket
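Condensed, the verdict logic reads as the sketch below. The guilty cutoff sits
above this hunk and is not shown; 0.34 here is assumed as the mirror of the
0.66 innocent cutoff:

    def verdict(kosher_pct, total_votes, quenching):
        if kosher_pct < 0.34:            # assumed cutoff (not in the hunk)
            return "guilty"
        elif kosher_pct > 0.66:
            return "innocent"
        elif not quenching:
            return None                  # still collecting votes
        # quenched: no new jurors are being assigned, so relax the bands
        elif kosher_pct < 0.3999:
            return "guilty"
        elif kosher_pct > 0.6001:
            return "innocent"
        elif total_votes >= 100:
            return "hung jury"           # safety valve
        return None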
@@ -102,16 +102,7 @@ class Vote(MultiRelation('vote',
            v._commit()
            g.cache.delete(queries.prequeued_vote_key(sub, obj))

            lastvote_attr_name = 'last_vote_' + obj.__class__.__name__
            try:
                setattr(sub, lastvote_attr_name, datetime.now(g.tz))
            except TypeError:
                # this temporarily works around an issue with timezones in
                # a really hacky way. Remove me later
                setattr(sub, lastvote_attr_name, None)
            sub._commit()
            setattr(sub, lastvote_attr_name, datetime.now(g.tz))
            sub._commit()
            v._fast_query_timestamp_touch(sub)

            up_change, down_change = score_changes(amount, oldamount)

@@ -136,7 +127,8 @@ class Vote(MultiRelation('vote',
    @classmethod
    def likes(cls, sub, obj):
        votes = cls._fast_query(sub, obj, ('1', '-1'),
                                data=False, eager_load=False)
                                data=False, eager_load=False,
                                timestamp_optimize=True)
        votes = dict((tuple(k[:2]), v) for k, v in votes.iteritems() if v)
        return votes
@@ -3983,4 +3983,4 @@ a.pretty-button.positive.pressed {
.oatmeal img {
  display: block;
  margin: 5px auto;
}
}
@@ -254,9 +254,12 @@ rate_limit = function() {
}()


$.fn.vote = function(vh, callback, event) {
$.fn.vote = function(vh, callback, event, ui_only) {
    /* for vote to work, $(this) should be the clicked arrow */
    if($(this).hasClass("arrow")) {
    if (!reddit.logged) {
        showcover(true, 'vote_' + $(this).thing_id());
    }
    else if($(this).hasClass("arrow")) {
        var dir = ( $(this).hasClass(up_cls) ? 1 :
                    ( $(this).hasClass(down_cls) ? -1 : 0) );
        var things = $(this).all_things_by_id();
@@ -267,7 +270,7 @@ $.fn.vote = function(vh, callback, event) {
        var u_before = (dir == 1) ? up_cls : upmod_cls;
        var u_after = (dir == 1) ? upmod_cls : up_cls;
        arrows.filter("."+u_before).removeClass(u_before).addClass(u_after);


        var d_before = (dir == -1) ? down_cls : downmod_cls;
        var d_after = (dir == -1) ? downmod_cls : down_cls;
        arrows.filter("."+d_before).removeClass(d_before).addClass(d_after);
@@ -276,20 +279,22 @@ $.fn.vote = function(vh, callback, event) {
        if(reddit.logged) {
            things.each(function() {
                    var entry = $(this).find(".entry:first, .midcol:first");
                    if(dir > 0)
                    if(dir > 0)
                        entry.addClass('likes')
                            .removeClass('dislikes unvoted');
                    else if(dir < 0)
                    else if(dir < 0)
                        entry.addClass('dislikes')
                            .removeClass('likes unvoted');
                    else
                        entry.addClass('unvoted')
                            .removeClass('likes dislikes');
                });
            var thing_id = things.filter(":first").thing_id();
            /* IE6 hack */
            vh += event ? "" : ("-" + thing_id);
            $.request("vote", {id: thing_id, dir : dir, vh : vh});
            if(!$.defined(ui_only)) {
                var thing_id = things.filter(":first").thing_id();
                /* IE6 hack */
                vh += event ? "" : ("-" + thing_id);
                $.request("vote", {id: thing_id, dir : dir, vh : vh});
            }
        }
        /* execute any callbacks passed in. */
        if(callback)
@@ -1283,3 +1283,22 @@ $(function() {
        last_click();

});

function friend(account_fullname) {
    var ua = $(".author.id-" + account_fullname).addClass("friend")
        .next(".userattrs");
    var add_braces = (!ua.html());

    ua.html(((add_braces) ? " [" : "") +
            '<a class="friend" title="friend" href="/prefs/friends">F</a>' +
            ((add_braces) ? "]" : ""));
}

function unfriend(account_fullname) {
    var ua = $(".author.id-" + account_fullname).removeClass("friend")
        .next(".userattrs");
    ua.find("a.friend").remove();
    if (ua.find("a").length == 0) {
        ua.html("");
    }
}
(three binary images modified: 5.2 KiB -> 195 KiB, 5.6 KiB -> 19 KiB, 5.3 KiB -> 23 KiB)
@@ -8,4 +8,7 @@ Disallow: /*after=
Disallow: /*before=
Disallow: /domain/*t=
Disallow: /login
Disallow: /reddits/search
Disallow: /search
Disallow: /r/*/search
Allow: /
(one binary image removed: 9.4 KiB)
BIN
r2/r2/public/static/youbrokeit1.png
Normal file (new image, 28 KiB)
BIN
r2/r2/public/static/youbrokeit2.png
Normal file (new image, 5.4 KiB)
BIN
r2/r2/public/static/youbrokeit3.png
Normal file (new image, 170 KiB)
@@ -117,7 +117,6 @@ ${parent.Child(not thing.collapsed)}
${parent.midcol()}
</%def>


<%def name="buttons()">
  ${CommentButtons(thing)}
  ${self.admintagline()}
@@ -130,9 +130,9 @@ function update_title(elem) {
      <table >
        ${radio_type("public", _("public"), _("anyone can view and submit"))}
        ${radio_type("restricted", _("restricted"),
              _("anyone can view, but only contributors can submit links"))}
              _("anyone can view, but only some are approved to submit links"))}
        ${radio_type("private", _("private"),
              _("only contributors can view and submit"))}
              _("only approved members can view and submit"))}
      </table>
    </div>
</%utils:line_field>
@@ -163,14 +163,6 @@ function update_title(elem) {
            ${_("show thumbnail images of content")}
          </label>
        </li>
        <li>
          <input class="nomargin" type="checkbox"
                 name="use_whitelist" id="whitelist"
                 ${thing.site and thing.site.use_whitelist and "checked='checked'" or ""}/>
          <label for="whitelist">
            ${_("enable whitelisting on this reddit")}
          </label>
        </li>
      </ul>
    </div>
</%utils:line_field>
@@ -40,6 +40,27 @@
<form action="/post/update" method="post"
      onsubmit="return post_form(this, 'update')" id="pref-update">

  <%
     if not c.user.email:
         description = _("not set")
         v_link = None
     elif c.user.email_verified:
         description = _("verified")
         v_link = None
     elif c.user.email_verified is False:
         description = _("verification pending")
         v_link = _("click to resend")
     else:
         description = _("unverified")
         v_link = _("click to verify")

     if v_link and not thing.verify:
         description = "(%s; <a href='/verify'>%s</a>)" % (description, v_link)
         description = unsafe(description)
     else:
         description = "(%s)" % description
   %>

  <div class="spacer">
    <%utils:round_field title="${_('current password')}" description="${_('(required)')}">
      <input type="password" name="curpass" />
@@ -49,21 +70,11 @@

    %if thing.email:
    <div class="spacer">
      <%
         if c.user.email_verified:
             description = _("verified")
         elif c.user.email_verified is False:
             description = _("verification pending")
         else:
             description = _("unverified")
         description = "(%s)" % description
       %>
      <%utils:round_field title="${_('email')}" description="${description}">
        <input type="text" name="email" value="${getattr(c.user, 'email', '')}"/>
        ${error_field("BAD_EMAILS", "email")}
      </%utils:round_field>
    </div>
    %endif
  </div>

  %if thing.password:
  <div class="spacer">
@@ -122,11 +122,7 @@ thing id-${what._fullname}
   fullname = this._fullname
   %>
  <div class="arrow ${_class}"
       %if c.user_is_loggedin:
         onclick="$(this).vote('${thing.votehash}', null, event)"
       %else:
         onclick="showcover(true, 'vote_${fullname}')"
       %endif
       onclick="$(this).vote('${thing.votehash}', null, event)"
       >
  </div>
</%def>
@@ -47,7 +47,7 @@
  </li>
  %endif

  %if thing.show_spam or thing.show_ignore:
  %if thing.show_approve:
  <li>
    ${self.state_button("approve", _("approve"),
        "return change_state(this, 'approve');", _("approved"))}
@@ -20,6 +20,19 @@
## CondeNet, Inc. All Rights Reserved.
################################################################################

<%
  boring_starters = ("{", "<")
  boring_middles = ("Pylons", "r2/lib/base.py", "middleware.py")

  def boring(line):
      if any(line.startswith(b) for b in boring_starters):
          return True
      elif any(b in line for b in boring_middles):
          return True
      else:
          return False
%>

<html>
  <head>
    <title>Code Profile</title>
@@ -69,7 +82,7 @@
      <td><pre>${s}</pre></td>
      %endfor
      <% line = t[-1] %>
      %if line.startswith('{'):
      %if boring(line):
        <td class="bltin">${line|h}</td>
      %else:
        <td class="a"><a href="${thing.path}?profile=${line|u}">${line|h}</a></td>
@@ -275,7 +275,7 @@
  <input type="hidden" name="link_id" value="${to36(thing.link._id)}"/>
  %endif
  <%
     trusted = c.user_is_sponsor or getattr(c.user, "trusted_sponsor", False)
     trusted = c.user_is_sponsor or c.user.trusted_sponsor
   %>
  <%utils:line_field title="${_('title')}" id="title-field">
    <textarea name="title" rows="2" cols="1"
@@ -71,13 +71,15 @@
       mail_img += ".png"
       mail_img_class = 'havemail'
       mail_img_title = "new mail!"
       mail_path = "/message/unread/"
     else:
       mail_img += "gray.png"
       mail_img_class = 'nohavemail'
       mail_img_title = "no new mail"
       mail_path = "/message/inbox/"
     mail_img = static(mail_img)
   %>
  ${img_link(_("messages"), mail_img, path="/message/inbox/",
  ${img_link(_("messages"), mail_img, path=mail_path,
             alt = mail_img_title, title = mail_img_title,
             _id = "mail", _class=mail_img_class)}
  ${separator("|")}
@@ -88,13 +90,15 @@
       mail_img += ".png"
       mail_img_class = 'havemail'
       mail_img_title = "new mail!"
       mail_path = "/message/moderator/"
     else:
       mail_img += "gray.png"
       mail_img_class = 'nohavemail'
       mail_img_title = "no new mail"
       mail_path = "/message/moderator/"
     mail_img = static(mail_img)
   %>
  ${img_link(_("mod messages"), mail_img , path="/message/moderator/",
  ${img_link(_("mod messages"), mail_img , path=mail_path,
             alt = mail_img_title, title = mail_img_title,
             _id = "modmail", _class=mail_img_class)}
  ${separator("|")}
@@ -77,12 +77,12 @@
    %if sr.type in ("restricted", "private") and not sr.moderator:
      <img class="sr-type-img"
           %if sr.contributor:
             alt="${_('contributor')}"
             title="${_('contributor')}"
             alt="${_('approved submitter')}"
             title="${_('approved submitter')}"
             src="/static/pencil.png"
           %else:
             alt="${_('not contributor')}"
             title="${_('not contributor')}"
             alt="${_('not approved')}"
             title="${_('not approved')}"
             src="/static/pencil-gray.png"
           %endif
           />
@@ -52,9 +52,9 @@
  %if thing.sr.contributor:
    ${moderate_button("leavecontributor",
                      _("you are a contributor of this reddit. (%(leave)s)"),
                      _("stop being a contributor?"),
                      _("you are no longer a contributor"))}
                      _("you are an approved submitter on this reddit. (%(leave)s)"),
                      _("stop being an approved submitter?"),
                      _("you are no longer an approved submitter"))}
  %endif

  %if thing.sr.description:
16
r2/r2/templates/thingupdater.html
Normal file
@@ -0,0 +1,16 @@
<%
  import simplejson
%>
<script type="text/javascript">
  var likes = ${unsafe(simplejson.dumps(thing.likes))};
  var dislikes = ${unsafe(simplejson.dumps(thing.dislikes))};
  var friends = ${unsafe(simplejson.dumps(list(thing.is_friend)))};
  $.map(likes, function(l) {
      $(".id-" + l).find(".arrow.up:first").vote("", null, null, true);
  });
  $.map(dislikes, function(l) {
      $(".id-" + l).find(".arrow.down:first").vote("", null, null, true);
  });
  $.map(friends, friend);

</script>
@@ -63,16 +63,28 @@
  <br/>

  %if hasattr(trophy, "description"):
    on
    ${trophy.description}
  %endif
  %if hasattr(trophy, "description") and not trophy.description.startswith("20"):
    (
    %if hasattr(trophy, "url"):
      <a href="${trophy.url}">
        ${trophy.description}
      </a>
    %else:
      ${trophy.description}
    %endif
    )
  %else:
    %if hasattr(trophy, "description"):
      on
      ${trophy.description}
    %endif

    %if hasattr(trophy, "url"):
       
      <a href="${trophy.url}">
        ${_("for this")}
      </a>
    %if hasattr(trophy, "url"):
       
      <a href="${trophy.url}">
        ${_("for this")}
      </a>
    %endif
  %endif
</div>
@@ -46,11 +46,15 @@
    ${plain_link(_("send message"),
                 "/message/compose?to=%s" % (thing.user.name))}

  %elif thing.name == "remove" and thing.editable:
    ${ynbutton(_("remove"), "removed", thing.remove_action,
               callback="deleteRow",
               hidden_data = dict(type = thing.type,
                                  id = thing.user._fullname,
                                  container = thing.container_name))}
  %elif thing.name == "remove":
    %if thing.editable:
      ${ynbutton(_("remove"), "removed", thing.remove_action,
                 callback="deleteRow",
                 hidden_data = dict(type = thing.type,
                                    id = thing.user._fullname,
                                    container = thing.container_name))}
    %else:
      <span class="gray">${_("can't remove")}</span>
    %endif
  %endif
</%def>
@@ -29,9 +29,10 @@
    <span>[deleted]</span>
  %else:
    ${plain_link(thing.name + thing.karma, "/user/%s" % thing.name,
                 _class = thing.author_cls, _sr_path = False, target=target)}
                 _class = thing.author_cls + (" id-%s" % thing.fullname),
                 _sr_path = False, target=target)}
    <span class="userattrs">
    %if thing.attribs:
      <span class="userattrs">
         [
        %for priority, abbv, css_class, label, attr_link, img in thing.attribs:
          %if attr_link:
@@ -56,8 +57,8 @@
          %endif
        %endfor
        ]
      </span>
    %endif
    </span>
  %endif
  %endif
@@ -75,13 +75,13 @@ except ImportError:
# we're using a custom build of pylibmc at the moment, so we need to
# be sure that we have the right version
pylibmc_version = '1.0-reddit-03'
pylibmc_version = '1.0-reddit-04'
try:
    import pylibmc
    assert pylibmc.__version__ == pylibmc_version
except (ImportError, AssertionError):
    print "Installing pylibmc"
    easy_install(["http://github.com/downloads/ketralnis/pylibmc/pylibmc-1.0-reddit-03.tar.gz"])
    easy_install(["http://github.com/downloads/ketralnis/pylibmc/pylibmc-1.0-reddit-04.tar.gz"])

filtermod = Extension('Cfilters',
                      sources = ['r2/lib/c/filters.c'])
35
r2/updateini.py
Executable file
@@ -0,0 +1,35 @@
#!/usr/bin/env python

import re, sys

line_rx = re.compile(r'^([-_a-zA-Z0-9 ]*[-_a-zA-Z0-9]+)\s*=\s*(.*)')

def parse_line(line):
    m = line_rx.match(line)
    if m:
        return m.groups()

def main(source_ini, update_ini):
    #read in update file
    update_parts = {}
    for line in open(update_ini):
        m = parse_line(line)
        if m:
            update_parts[m[0]] = m[1]

    #pass through main file
    m = None
    for line in open(source_ini):
        line = line.strip()
        m = parse_line(line)
        if m and update_parts.has_key(m[0]):
            line = '%s = %s' % (m[0], update_parts[m[0]])
        print line

if __name__ == '__main__':
    args = sys.argv
    if len(args) != 3:
        print 'usage: updateini.py [source] [update]'
        sys.exit(1)
    else:
        main(sys.argv[1], sys.argv[2])
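In other words, updateini.py prints the source ini with any "key = value"
lines overridden by the update file, passing everything else through
untouched. A hypothetical run (file names are made up):

    # prints example.ini to stdout, with keys that also appear in
    # prod.update.ini (say, "debug = false") taking the updated values
    import updateini
    updateini.main("example.ini", "prod.update.ini")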
@@ -57,17 +57,33 @@ def run(limit=100, streamfile=None, verbose=False):
    tb = []

    key_material = "exc_type"
    key_material = exc_type
    pretty_lines = []

    make_lock_seen = False

    for tpl in d['traceback']:
        tb.append(tpl)
        filename, lineno, funcname, text = tpl
        if text is None:
            pass
        elif (text.startswith("with g.make_lock(") or
              text.startswith("with make_lock(")):
            make_lock_seen = True
        key_material += "%s %s " % (filename, funcname)
        pretty_lines.append ("%s:%s: %s()" % (filename, lineno, funcname))
        pretty_lines.append (" %s" % text)

    fingerprint = md5(key_material).hexdigest()
    if exc_desc.startswith("QueuePool limit of size"):
        fingerprint = "QueuePool_overflow"
    elif exc_desc.startswith("error 2 from memcached_get: HOSTNAME "):
        fingerprint = "memcache_suckitude"
    elif exc_type == "TimeoutExpired" and make_lock_seen:
        fingerprint = "make_lock_timeout"
    elif exc_type == "NoServerAvailable":
        fingerprint = "cassandra_suckitude"
    else:
        fingerprint = md5(key_material).hexdigest()

    nickname_key = "error_nickname-" + fingerprint
    status_key = "error_status-" + fingerprint
@@ -160,13 +176,13 @@ def run(limit=100, streamfile=None, verbose=False):
        elif d['type'] == 'exception':
            try:
                log_exception(d, daystring)
            except:
                print "Error in log_exception()"
            except Exception as e:
                print "Error in log_exception(): %r" % e
        elif d['type'] == 'text':
            try:
                log_text(d, daystring)
            except:
                print "Error in log_text()"
            except Exception as e:
                print "Error in log_text(): %r" % e
        else:
            streamlog ("wtf is %r" % d['type'], True)
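The effect of the fingerprinting change: two occurrences of the same exception
type along the same code path hash to the same bucket, while a few known-noisy
failure modes get fixed, human-readable bucket names. A sketch with
hypothetical values:

    from hashlib import md5

    key_material = "KeyError" + "r2/lib/foo.py process " + "r2/lib/bar.py handle "
    fingerprint = md5(key_material).hexdigest()   # stable across occurrences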