diff --git a/r2/Makefile b/r2/Makefile
index f744460e1..770d84723 100644
--- a/r2/Makefile
+++ b/r2/Makefile
@@ -21,7 +21,7 @@
################################################################################
# Javascript files to be compressified
-js_targets = jquery.js jquery.json.js jquery.reddit.js reddit.js ui.core.js ui.datepicker.js sponsored.js
+js_targets = jquery.js jquery.json.js jquery.reddit.js reddit.js ui.core.js ui.datepicker.js sponsored.js jquery.flot.js jquery.lazyload.js
# CSS targets
main_css = reddit.css
css_targets = reddit-ie6-hax.css reddit-ie7-hax.css mobile.css spreadshirt.css
diff --git a/r2/example.ini b/r2/example.ini
index b4b79804d..8552c373b 100644
--- a/r2/example.ini
+++ b/r2/example.ini
@@ -161,7 +161,7 @@ tracking_secret = abcdefghijklmnopqrstuvwxyz0123456789
ip_hash =
S3KEY_ID = ABCDEFGHIJKLMNOP1234
S3SECRET_KEY = aBcDeFgHiJkLmNoPqRsTuVwXyZ1234567890AbCd
-s3_thumb_bucket = /your.bucket.here/
+s3_thumb_bucket = your.bucket.here
default_thumb = /static/noimage.png
MIN_DOWN_LINK = 0
@@ -182,6 +182,8 @@ rising_period = 12 hours
# time of ratelimit purgatory (min)
RATELIMIT = 10
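+# threshold for the new per-user link-submission quota (checked via quota_full
+# in POST_submit); the default of 0 appears to leave the quota disabled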
+QUOTA_THRESHOLD = 0
+
num_comments = 200
max_comments = 500
num_default_reddits = 10
@@ -200,6 +202,7 @@ share_reply = noreply@yourdomain.com
agents =
feedback_email = abuse@localhost
+system_user = reddit
# t-shirt stuff
spreadshirt_url =
diff --git a/r2/r2/config/routing.py b/r2/r2/config/routing.py
index a700965bb..be6a6aa98 100644
--- a/r2/r2/config/routing.py
+++ b/r2/r2/config/routing.py
@@ -32,13 +32,13 @@ def make_map(global_conf={}, app_conf={}):
admin_routes.add(mc)
- mc('/login', controller='front', action='login')
- mc('/logout', controller='front', action='logout')
- mc('/verify', controller='front', action='verify')
- mc('/adminon', controller='front', action='adminon')
- mc('/adminoff', controller='front', action='adminoff')
+ mc('/login', controller='forms', action='login')
+ mc('/logout', controller='forms', action='logout')
+ mc('/verify', controller='forms', action='verify')
+ mc('/adminon', controller='forms', action='adminon')
+ mc('/adminoff', controller='forms', action='adminoff')
mc('/submit', controller='front', action='submit')
- mc('/validuser', controller='front', action='validuser')
+ mc('/validuser', controller='forms', action='validuser')
mc('/over18', controller='post', action='over18')
@@ -63,6 +63,7 @@ def make_map(global_conf={}, app_conf={}):
requirements=dict(where='subscriber|contributor|moderator'))
mc('/buttons', controller='buttons', action='button_demo_page')
+ mc('/upgradebuttons', controller='buttons', action='upgrade_buttons')
#the frame
mc('/button_content', controller='buttons', action='button_content')
#/button.js and buttonlite.js - the embeds
@@ -78,7 +79,6 @@ def make_map(global_conf={}, app_conf={}):
mc('/feedback', controller='feedback', action='feedback')
mc('/ad_inq', controller='feedback', action='ad_inq')
-
mc('/admin/i18n', controller='i18n', action='list')
mc('/admin/i18n/:action', controller='i18n')
mc('/admin/i18n/:action/:lang', controller='i18n')
@@ -103,10 +103,10 @@ def make_map(global_conf={}, app_conf={}):
mc('/user/:username/:where', controller='user', action='listing',
where='overview')
- mc('/prefs/:location', controller='front',
+ mc('/prefs/:location', controller='forms',
action='prefs', location='options')
- mc('/juryduty', controller='front', action='juryduty')
+ mc('/depmod', controller='forms', action='depmod')
mc('/info/0:article/*rest', controller = 'front',
action='oldinfo', dest='comments', type='ancient')
@@ -126,8 +126,8 @@ def make_map(global_conf={}, app_conf={}):
mc('/duplicates/:article/:title', controller = 'front',
action = 'duplicates', title=None)
- mc('/mail/optout', controller='front', action = 'optout')
- mc('/mail/optin', controller='front', action = 'optin')
+ mc('/mail/optout', controller='forms', action = 'optout')
+ mc('/mail/optin', controller='forms', action = 'optin')
mc('/stylesheet', controller = 'front', action = 'stylesheet')
mc('/frame', controller='front', action = 'frame')
mc('/framebuster/:blah', controller='front', action = 'framebuster')
@@ -136,12 +136,12 @@ def make_map(global_conf={}, app_conf={}):
mc('/promoted/edit_promo/:link',
controller='promote', action = 'edit_promo')
- mc('/promoted/pay/:link',
+ mc('/promoted/pay/:link/:indx',
controller='promote', action = 'pay')
mc('/promoted/graph',
controller='promote', action = 'graph')
mc('/promoted/:action', controller='promote',
- requirements = dict(action = "new_promo"))
+ requirements = dict(action = "edit_promo|new_promo|roadblock"))
mc('/promoted/:sort', controller='promote', action = "listing")
mc('/promoted/', controller='promoted', action = "listing",
sort = "")
@@ -151,7 +151,7 @@ def make_map(global_conf={}, app_conf={}):
mc('/', controller='hot', action='listing')
- listing_controllers = "hot|saved|new|recommended|randomrising|comments"
+ listing_controllers = "hot|saved|new|randomrising|comments"
mc('/:controller', action='listing',
requirements=dict(controller=listing_controllers))
@@ -168,8 +168,9 @@ def make_map(global_conf={}, app_conf={}):
mc('/message/moderator/:subwhere', controller='message', action='listing',
where = 'moderator')
+ mc('/password', controller='forms', action="password")
mc('/:action', controller='front',
- requirements=dict(action="password|random|framebuster"))
+ requirements=dict(action="random|framebuster|selfserviceoatmeal"))
mc('/:action', controller='embed',
requirements=dict(action="help|blog"))
mc('/help/*anything', controller='embed', action='help')
@@ -187,11 +188,11 @@ def make_map(global_conf={}, app_conf={}):
mc('/d/:what', controller='api', action='bookmarklet')
- mc('/resetpassword/:key', controller='front',
+ mc('/resetpassword/:key', controller='forms',
action='resetpassword')
- mc('/verification/:key', controller='front',
+ mc('/verification/:key', controller='forms',
action='verify_email')
- mc('/resetpassword', controller='front',
+ mc('/resetpassword', controller='forms',
action='resetpassword')
mc('/post/:action/:url_user', controller='post',
@@ -205,7 +206,7 @@ def make_map(global_conf={}, app_conf={}):
mc('/api/gadget/click/:ids', controller = 'api', action='gadget', type='click')
mc('/api/gadget/:type', controller = 'api', action='gadget')
mc('/api/:action', controller='promote',
- requirements=dict(action="promote|unpromote|new_promo|link_thumb|freebie|promote_note|update_pay|refund|traffic_viewer|rm_traffic_viewer"))
+ requirements=dict(action="promote|unpromote|edit_promo|link_thumb|freebie|promote_note|update_pay|refund|traffic_viewer|rm_traffic_viewer|edit_campaign|delete_campaign|meta_promo|add_roadblock|rm_roadblock"))
mc('/api/:action', controller='api')
mc("/button_info", controller="api", action="info", limit = 1)
diff --git a/r2/r2/controllers/__init__.py b/r2/r2/controllers/__init__.py
index 90ca79f8d..17452febb 100644
--- a/r2/r2/controllers/__init__.py
+++ b/r2/r2/controllers/__init__.py
@@ -24,7 +24,6 @@ from listingcontroller import HotController
from listingcontroller import SavedController
from listingcontroller import NewController
from listingcontroller import BrowseController
-from listingcontroller import RecommendedController
from listingcontroller import MessageController
from listingcontroller import RedditsController
from listingcontroller import ByIDController as ByidController
@@ -35,6 +34,7 @@ from listingcontroller import CommentsController
from listingcontroller import MyredditsController
from feedback import FeedbackController
+from front import FormsController
from front import FrontController
from health import HealthController
from buttons import ButtonsController
diff --git a/r2/r2/controllers/ads.py b/r2/r2/controllers/ads.py
index 4d53f080e..3aba5a5ee 100644
--- a/r2/r2/controllers/ads.py
+++ b/r2/r2/controllers/ads.py
@@ -26,14 +26,14 @@ from validator import *
class AdsController(RedditController):
- @validate(VAdmin())
+ @validate(VSponsorAdmin())
def GET_index(self):
res = AdminPage(content = AdminAds(),
show_sidebar = False,
title = 'ads').render()
return res
- @validate(VAdmin(),
+ @validate(VSponsorAdmin(),
ad = VAdByCodename('adcn'))
def GET_assign(self, ad):
if ad is None:
@@ -44,7 +44,7 @@ class AdsController(RedditController):
title='assign an ad to a community').render()
return res
- @validate(VAdmin(),
+ @validate(VSponsorAdmin(),
ad = VAdByCodename('adcn'))
def GET_srs(self, ad):
if ad is None:
diff --git a/r2/r2/controllers/api.py b/r2/r2/controllers/api.py
index 9816c6390..5b00a6f5b 100644
--- a/r2/r2/controllers/api.py
+++ b/r2/r2/controllers/api.py
@@ -35,7 +35,7 @@ from r2.lib.utils import timeago, tup, filter_links
from r2.lib.pages import FriendList, ContributorList, ModList, \
BannedList, BoringPage, FormPage, CssError, UploadedImage, \
ClickGadget
-from r2.lib.utils.trial_utils import indict, on_trial
+from r2.lib.utils.trial_utils import indict, end_trial, trial_info
from r2.lib.pages.things import wrap_links, default_thing_wrapper
from r2.lib import spreadshirt
@@ -52,6 +52,7 @@ from r2.lib.comment_tree import add_comment, delete_comment
from r2.lib import tracking, cssfilter, emailer
from r2.lib.subreddit_search import search_reddits
from r2.lib.log import log_text
+from r2.lib.filters import safemarkdown
from datetime import datetime, timedelta
from md5 import md5
@@ -164,11 +165,12 @@ class ApiController(RedditController):
default='comments'))
def POST_submit(self, form, jquery, url, banmsg, selftext, kind, title,
save, sr, ip, then):
- #backwards compatability
- if url == 'self':
- kind = 'self'
if isinstance(url, (unicode, str)):
+ #backwards compatibility
+ if url.lower() == 'self':
+ url = kind = 'self'
+
# VUrl may have replaced 'url' by adding 'http://'
form.set_inputs(url = url)
@@ -217,6 +219,29 @@ class ApiController(RedditController):
if form.has_error() or not title:
return
+ if should_ratelimit:
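+ # quota check: users who have filled their per-period link quota get a
+ # message pointing to email verification or a moderator exemption
+ # request instead of a submission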
+ filled_quota = c.user.quota_full('link')
+ if filled_quota is not None and not c.user._spam:
+ log_text ("over-quota",
+ "%s just went over their per-%s quota" %
+ (c.user.name, filled_quota), "info")
+
+ compose_link = ("/message/compose?to=%23" + sr.name +
+ "&subject=Exemption+request")
+
+ verify_link = "/verify?reason=submit"
+
+ if c.user.email_verified:
+ msg = strings.verified_quota_msg % dict(link=compose_link)
+ else:
+ msg = strings.unverified_quota_msg % dict(link1=verify_link,
+ link2=compose_link)
+
+ md = safemarkdown(msg)
+ form.set_html(".status", md)
+ return
+
+
# well, nothing left to do but submit it
l = Link._submit(request.post.title, url if kind == 'link' else 'self',
c.user, sr, ip)
@@ -240,6 +265,7 @@ class ApiController(RedditController):
#set the ratelimiter
if should_ratelimit:
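+ # clog_quota presumably records the new link against the author's quota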
+ c.user.clog_quota('link', l)
VRatelimit.ratelimit(rate_user=True, rate_ip = True,
prefix = "rate_submit_")
@@ -363,6 +389,7 @@ class ApiController(RedditController):
"""
if container and container.is_moderator(c.user):
container.remove_moderator(c.user)
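+ # _update=True forces a refresh of the cached list of reddits this user moderates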
+ Subreddit.special_reddits(c.user, "moderator", _update=True)
@noresponse(VUser(),
VModhash(),
@@ -373,8 +400,8 @@ class ApiController(RedditController):
"""
if container and container.is_contributor(c.user):
container.remove_contributor(c.user)
+ Subreddit.special_reddits(c.user, "contributor", _update=True)
-
@noresponse(VUser(),
VModhash(),
nuser = VExistingUname('name'),
@@ -394,7 +421,7 @@ class ApiController(RedditController):
# The user who made the request must be an admin or a moderator
# for the privilege change to succeed.
if (not c.user_is_admin
- and (type in ('moderator','contributer','banned')
+ and (type in ('moderator','contributor','banned')
and not c.site.is_moderator(c.user))):
abort(403, 'forbidden')
# if we are (strictly) unfriending, the container had better
@@ -404,6 +431,9 @@ class ApiController(RedditController):
fn = getattr(container, 'remove_' + type)
fn(iuser or nuser)
+ if type in ("moderator", "contributor"):
+ Subreddit.special_reddits(iuser or nuser, type, _update=True)
+
@validatedForm(VUser(),
@@ -424,7 +454,7 @@ class ApiController(RedditController):
# The user who made the request must be an admin or a moderator
# for the privilege change to succeed.
if (not c.user_is_admin
- and (type in ('moderator','contributer', 'banned')
+ and (type in ('moderator','contributor', 'banned')
and not c.site.is_moderator(c.user))):
abort(403,'forbidden')
@@ -433,34 +463,39 @@ class ApiController(RedditController):
if type == "friend" and container != c.user:
abort(403,'forbidden')
- elif not form.has_errors("name",
- errors.USER_DOESNT_EXIST, errors.NO_USER):
- new = fn(friend)
- cls = dict(friend=FriendList,
- moderator=ModList,
- contributor=ContributorList,
- banned=BannedList).get(type)
- form.set_inputs(name = "")
- form.set_html(".status:first", _("added"))
- if new and cls:
- user_row = cls().user_row(friend)
- jquery("#" + type + "-table").show(
- ).find("table").insert_table_rows(user_row)
+ elif form.has_errors("name", errors.USER_DOESNT_EXIST, errors.NO_USER):
+ return
- if type != 'friend':
- msg = strings.msg_add_friend.get(type)
- subj = strings.subj_add_friend.get(type)
- if msg and subj and friend.name != c.user.name:
- # fullpath with domain needed or the markdown link
- # will break
- d = dict(url = container.path,
- title = container.title)
- msg = msg % d
- subj = subj % d
- item, inbox_rel = Message._new(c.user, friend,
- subj, msg, ip)
+ new = fn(friend)
- queries.new_message(item, inbox_rel)
+ if type in ("moderator", "contributor"):
+ Subreddit.special_reddits(friend, type, _update=True)
+
+ cls = dict(friend=FriendList,
+ moderator=ModList,
+ contributor=ContributorList,
+ banned=BannedList).get(type)
+ form.set_inputs(name = "")
+ form.set_html(".status:first", _("added"))
+ if new and cls:
+ user_row = cls().user_row(friend)
+ jquery("#" + type + "-table").show(
+ ).find("table").insert_table_rows(user_row)
+
+ if type != 'friend':
+ msg = strings.msg_add_friend.get(type)
+ subj = strings.subj_add_friend.get(type)
+ if msg and subj and friend.name != c.user.name:
+ # fullpath with domain needed or the markdown link
+ # will break
+ d = dict(url = container.path,
+ title = container.title)
+ msg = msg % d
+ subj = subj % d
+ item, inbox_rel = Message._new(c.user, friend,
+ subj, msg, ip)
+
+ queries.new_message(item, inbox_rel)
@validatedForm(VUser('curpass', default = ''),
@@ -536,8 +571,9 @@ class ApiController(RedditController):
if not thing: return
'''for deleting all sorts of things'''
thing._deleted = True
- if getattr(thing, "promoted", None) is not None:
- promote.delete_promo(thing)
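+ # promos that never went live are rejected outright on delete;
+ # already-promoted links just get the deleted flag below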
+ if (getattr(thing, "promoted", None) is not None and
+ not promote.is_promoted(thing)):
+ promote.reject_promotion(thing)
thing._commit()
# flag search indexer that something has changed
@@ -784,7 +820,7 @@ class ApiController(RedditController):
j = Jury.by_account_and_defendant(c.user, thing)
- if not on_trial([thing]).get(thing._fullname,False):
+ if not trial_info([thing]).get(thing._fullname,False):
log_text("juryvote: not on trial", level="warning")
return
@@ -793,7 +829,7 @@ class ApiController(RedditController):
return
log_text("juryvote",
- "%s cast a %d juryvote on %r" % (c.user.name, dir, thing),
+ "%s cast a %d juryvote on %s" % (c.user.name, dir, thing._id36),
level="info")
j._name = str(dir)
@@ -1143,23 +1179,20 @@ class ApiController(RedditController):
jquery.refresh()
@noresponse(VUser(), VModhash(),
- VSrCanBan('id'),
+ why = VSrCanBan('id'),
thing = VByName('id'))
- def POST_ban(self, thing):
+ def POST_remove(self, why, thing):
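+ # wrap up any pending trial on this item, noting why it was removed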
+ end_trial(thing, why + "-removed")
admintools.spam(thing, False, not c.user_is_admin, c.user.name)
@noresponse(VUser(), VModhash(),
- VSrCanBan('id'),
+ why = VSrCanBan('id'),
thing = VByName('id'))
- def POST_unban(self, thing):
- admintools.unspam(thing, c.user.name)
-
- @noresponse(VUser(), VModhash(),
- VSrCanBan('id'),
- thing = VByName('id'))
- def POST_ignore(self, thing):
+ def POST_approve(self, why, thing):
if not thing: return
- Report.accept(thing, False)
+
+ end_trial(thing, why + "-approved")
+ admintools.unspam(thing, c.user.name)
@validatedForm(VUser(), VModhash(),
VSrCanDistinguish('id'),
@@ -1714,12 +1747,15 @@ class ApiController(RedditController):
sponsorships = VByName('ids', thing_cls = Subreddit,
multiple = True))
def POST_onload(self, form, jquery, promoted, sponsorships, *a, **kw):
+ suffix = ""
+ if not isinstance(c.site, FakeSubreddit):
+ suffix = "-" + c.site.name
def add_tracker(dest, where, what):
jquery.set_tracker(
where,
- tracking.PromotedLinkInfo.gen_url(fullname=what,
+ tracking.PromotedLinkInfo.gen_url(fullname=what + suffix,
ip = request.ip),
- tracking.PromotedLinkClickInfo.gen_url(fullname = what,
+ tracking.PromotedLinkClickInfo.gen_url(fullname = what + suffix,
dest = dest,
ip = request.ip)
)
diff --git a/r2/r2/controllers/buttons.py b/r2/r2/controllers/buttons.py
index 98ed43547..10304a258 100644
--- a/r2/r2/controllers/buttons.py
+++ b/r2/r2/controllers/buttons.py
@@ -21,7 +21,7 @@
################################################################################
from reddit_base import RedditController, MinimalController, make_key
from r2.lib.pages import Button, ButtonNoBody, ButtonEmbed, ButtonLite, \
- ButtonDemoPanel, WidgetDemoPanel, Bookmarklets, BoringPage
+ ButtonDemoPanel, WidgetDemoPanel, Bookmarklets, BoringPage, UpgradeButtons
from r2.lib.pages.things import wrap_links
from r2.models import *
from r2.lib.utils import tup, query_string
@@ -124,40 +124,16 @@ class ButtonsController(RedditController):
return wrapper(None)
- @validate(url = VSanitizedUrl('url'),
- title = nop('title'),
- css = nop('css'),
- vote = VBoolean('vote', default=True),
- newwindow = VBoolean('newwindow'),
- width = VInt('width', 0, 800),
- l = VByName('id'))
- def GET_button_content(self, url, title, css, vote, newwindow, width, l):
- # no buttons on domain listings
- if isinstance(c.site, DomainSR):
- c.site = Default
- return self.redirect(request.path + query_string(request.GET))
-
- #disable css hack
- if (css != 'http://blog.wired.com/css/redditsocial.css' and
- css != 'http://www.wired.com/css/redditsocial.css'):
- css = None
-
- if l:
- url = l.url
- title = l.title
- kw = {}
- if title:
- kw = dict(title = title)
- wrapper = make_wrapper(Button if vote else ButtonNoBody,
- url = url,
- target = "_new" if newwindow else "_parent",
- vote = vote, bgcolor = c.bgcolor,
- width = width, css = css,
- button = self.buttontype(), **kw)
-
- l = self.get_wrapped_link(url, l, wrapper)
- return l.render()
-
+ def GET_button_content(self, *a, **kw):
+ return """
+
+
+
+
+ upgrade
+
+
+ """
@validate(buttontype = VInt('t', 1, 5),
url = VSanitizedUrl("url"),
@@ -226,6 +202,14 @@ class ButtonsController(RedditController):
show_sidebar = False,
content=ButtonDemoPanel()).render()
+ def GET_upgrade_buttons(self):
+ # no buttons for domain listings -> redirect to top level
+ if isinstance(c.site, DomainSR):
+ return self.redirect('/buttons')
+ return BoringPage(_("reddit buttons"),
+ show_sidebar = False,
+ content=UpgradeButtons()).render()
+
def GET_widget_demo_page(self):
return BoringPage(_("reddit widget"),
diff --git a/r2/r2/controllers/errors.py b/r2/r2/controllers/errors.py
index 28bcacf10..1e4ba292e 100644
--- a/r2/r2/controllers/errors.py
+++ b/r2/r2/controllers/errors.py
@@ -67,6 +67,7 @@ error_list = dict((
('BAD_EMAILS', _('the following emails are invalid: %(emails)s')),
('NO_EMAILS', _('please enter at least one email address')),
('TOO_MANY_EMAILS', _('please only share to %(num)s emails at a time.')),
+ ('OVERSOLD', _('that reddit has already been oversold from %(start)s to %(end)s. please pick another reddit or date.')),
('BAD_DATE', _('please provide a date of the form mm/dd/yyyy')),
('BAD_DATE_RANGE', _('the dates need to be in order and not identical')),
('BAD_FUTURE_DATE', _('please enter a date at least %(day)s days in the future')),
diff --git a/r2/r2/controllers/front.py b/r2/r2/controllers/front.py
index ef8fac2ba..b5cdb5f3a 100644
--- a/r2/r2/controllers/front.py
+++ b/r2/r2/controllers/front.py
@@ -51,6 +51,8 @@ from urllib import quote_plus
class FrontController(RedditController):
+ allow_stylesheets = True
+
@validate(article = VLink('article'),
comment = VCommentID('comment'))
def GET_oldinfo(self, article, type, dest, rest=None, comment=''):
@@ -102,71 +104,6 @@ class FrontController(RedditController):
else:
return self.redirect(add_sr('/'))
- def GET_password(self):
- """The 'what is my password' page"""
- return BoringPage(_("password"), content=Password()).render()
-
- @validate(VUser(),
- dest = VDestination())
- def GET_verify(self, dest):
- if c.user.email_verified:
- content = InfoBar(message = strings.email_verified)
- if dest:
- return self.redirect(dest)
- else:
- content = PaneStack(
- [InfoBar(message = strings.verify_email),
- PrefUpdate(email = True, verify = True,
- password = False)])
- return BoringPage(_("verify email"), content = content).render()
-
- @validate(VUser(),
- cache_evt = VCacheKey('email_verify', ('key',)),
- key = nop('key'),
- dest = VDestination(default = "/prefs/update"))
- def GET_verify_email(self, cache_evt, key, dest):
- if c.user_is_loggedin and c.user.email_verified:
- cache_evt.clear()
- return self.redirect(dest)
- elif not (cache_evt.user and
- key == passhash(cache_evt.user.name, cache_evt.user.email)):
- content = PaneStack(
- [InfoBar(message = strings.email_verify_failed),
- PrefUpdate(email = True, verify = True,
- password = False)])
- return BoringPage(_("verify email"), content = content).render()
- elif c.user != cache_evt.user:
- # wrong user. Log them out and try again.
- self.logout()
- return self.redirect(request.fullpath)
- else:
- cache_evt.clear()
- c.user.email_verified = True
- c.user._commit()
- Award.give_if_needed("verified_email", c.user)
- return self.redirect(dest)
-
- @validate(cache_evt = VCacheKey('reset', ('key',)),
- key = nop('key'))
- def GET_resetpassword(self, cache_evt, key):
- """page hit once a user has been sent a password reset email
- to verify their identity before allowing them to update their
- password."""
-
- #if another user is logged-in, log them out
- if c.user_is_loggedin:
- self.logout()
- return self.redirect(request.path)
-
- done = False
- if not key and request.referer:
- referer_path = request.referer.split(g.domain)[-1]
- done = referer_path.startswith(request.fullpath)
- elif not getattr(cache_evt, "user", None):
- return self.abort404()
- return BoringPage(_("reset password"),
- content=ResetPassword(key=key, done=done)).render()
-
@validate(VAdmin(),
article = VLink('article'))
def GET_details(self, article):
@@ -176,6 +113,13 @@ class FrontController(RedditController):
return DetailsPage(link = article, expand_children=False).render()
+ def GET_selfserviceoatmeal(self):
+ return BoringPage(_("self service help"),
+ show_sidebar = False,
+ content = SelfServiceOatmeal()).render()
+
+
@validate(article = VLink('article'))
def GET_shirt(self, article):
if not can_view_link_comments(article):
@@ -275,78 +219,6 @@ class FrontController(RedditController):
infotext = infotext).render()
return res
- @validate(VUser())
- def GET_juryduty(self):
- displayPane = PaneStack()
-
- active_trials = {}
- finished_trials = {}
-
- juries = Jury.by_account(c.user)
-
- trials = on_trial([j._thing2 for j in juries])
-
- for j in juries:
- defendant = j._thing2
-
- if trials.get(defendant._fullname, False):
- active_trials[defendant._fullname] = j._name
- else:
- finished_trials[defendant._fullname] = j._name
-
- if active_trials:
- fullnames = sorted(active_trials.keys(), reverse=True)
-
- def my_wrap(thing):
- w = Wrapped(thing)
- w.hide_score = True
- w.likes = None
- w.trial_mode = True
- w.render_class = LinkOnTrial
- w.juryvote = active_trials[thing._fullname]
- return w
-
- listing = wrap_links(fullnames, wrapper=my_wrap)
- displayPane.append(InfoBar(strings.active_trials,
- extra_class="mellow"))
- displayPane.append(listing)
-
- if finished_trials:
- fullnames = sorted(finished_trials.keys(), reverse=True)
- listing = wrap_links(fullnames)
- displayPane.append(InfoBar(strings.finished_trials,
- extra_class="mellow"))
- displayPane.append(listing)
-
- displayPane.append(InfoBar(strings.more_info_link %
- dict(link="/help/juryduty"),
- extra_class="mellow"))
-
- return Reddit(content = displayPane).render()
-
- @validate(VUser(),
- location = nop("location"))
- def GET_prefs(self, location=''):
- """Preference page"""
- content = None
- infotext = None
- if not location or location == 'options':
- content = PrefOptions(done=request.get.get('done'))
- elif location == 'friends':
- content = PaneStack()
- infotext = strings.friends % Friends.path
- content.append(FriendList())
- elif location == 'update':
- content = PrefUpdate()
- elif location == 'feeds' and c.user.pref_private_feeds:
- content = PrefFeeds()
- elif location == 'delete':
- content = PrefDelete()
- else:
- return self.abort404()
-
- return PrefsPage(content = content, infotext=infotext).render()
-
@validate(VUser(),
name = nop('name'))
def GET_newreddit(self, name):
@@ -360,22 +232,98 @@ class FrontController(RedditController):
def GET_stylesheet(self):
if hasattr(c.site,'stylesheet_contents') and not g.css_killswitch:
- self.check_modified(c.site,'stylesheet_contents')
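+ # subreddit stylesheets are effectively public and change rarely, so
+ # allow caching for logged-in users too (up to a week, no forced revalidation)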
+ c.allow_loggedin_cache = True
+ self.check_modified(c.site,'stylesheet_contents',
+ private=False, max_age=7*24*60*60,
+ must_revalidate=False)
c.response_content_type = 'text/css'
c.response.content = c.site.stylesheet_contents
return c.response
else:
return self.abort404()
- @base_listing
- @validate(location = nop('location'),
- created = VOneOf('created', ('true','false'),
- default = 'false'))
- def GET_editreddit(self, location, num, after, reverse, count, created):
- """Edit reddit form."""
- if isinstance(c.site, FakeSubreddit):
+ def _make_spamlisting(self, location, num, after, reverse, count):
+ if location == 'reports':
+ query = c.site.get_reported()
+ elif location == 'spam':
+ query = c.site.get_spam()
+ elif location == 'trials':
+ query = c.site.get_trials()
+ elif location == 'modqueue':
+ query = c.site.get_modqueue()
+ else:
+ raise ValueError
+
+ if isinstance(query, thing.Query):
+ builder_cls = QueryBuilder
+ elif isinstance(query, list):
+ builder_cls = QueryBuilder
+ else:
+ builder_cls = IDBuilder
+
+ def keep_fn(x):
+ # no need to bother mods with banned users, or deleted content
+ if x.hidden or x._deleted:
+ return False
+
+ if location == "reports":
+ return x.reported > 0 and not x._spam
+ elif location == "spam":
+ return x._spam
+ elif location == "trials":
+ return not getattr(x, "verdict", None)
+ elif location == "modqueue":
+ if x.reported > 0 and not x._spam:
+ return True # reported but not banned
+ verdict = getattr(x, "verdict", None)
+ if verdict is None:
+ return True # anything without a verdict (i.e., trials)
+ if x._spam and verdict != 'mod-removed':
+ return True # spam, unless banned by a moderator
+ return False
+ else:
+ raise ValueError
+
+ builder = builder_cls(query,
+ skip = True,
+ num = num, after = after,
+ keep_fn = keep_fn,
+ count = count, reverse = reverse,
+ wrap = ListingController.builder_wrapper)
+ listing = LinkListing(builder)
+ pane = listing.listing()
+
+ return pane
+
+ def _edit_modcontrib_reddit(self, location, num, after, reverse, count, created):
+ extension_handling = False
+
+ if not c.user_is_loggedin:
+ return self.abort404()
+ if isinstance(c.site, ModSR):
+ level = 'mod'
+ elif isinstance(c.site, ContribSR):
+ level = 'contrib'
+ elif isinstance(c.site, AllSR):
+ level = 'all'
+ else:
+ raise ValueError
+
+ if ((level == 'mod' and
+ location in ('reports', 'spam', 'trials', 'modqueue'))
+ or
+ (level == 'all' and
+ location == 'trials')):
+ pane = self._make_spamlisting(location, num, after, reverse, count)
+ if c.user.pref_private_feeds:
+ extension_handling = "private"
+ else:
return self.abort404()
+ return EditReddit(content = pane,
+ extension_handling = extension_handling).render()
+
+ def _edit_normal_reddit(self, location, num, after, reverse, count, created):
# moderator is either reddit's moderator or an admin
is_moderator = c.user_is_loggedin and c.site.is_moderator(c.user) or c.user_is_admin
extension_handling = False
@@ -404,29 +352,8 @@ class FrontController(RedditController):
stylesheet_contents = ''
pane = SubredditStylesheet(site = c.site,
stylesheet_contents = stylesheet_contents)
- elif location in ('reports', 'spam') and is_moderator:
- query = (c.site.get_reported() if location == 'reports'
- else c.site.get_spam())
- builder_cls = (QueryBuilder if isinstance(query, thing.Query)
- else IDBuilder)
- def keep_fn(x):
- # no need to bother mods with banned users, or deleted content
- if x.hidden or x._deleted:
- return False
- if location == "reports" and not x._spam:
- return (x.reported > 0)
- if location == "spam":
- return x._spam
- return True
-
- builder = builder_cls(query,
- skip = True,
- num = num, after = after,
- keep_fn = keep_fn,
- count = count, reverse = reverse,
- wrap = ListingController.builder_wrapper)
- listing = LinkListing(builder)
- pane = listing.listing()
+ elif location in ('reports', 'spam', 'trials', 'modqueue') and is_moderator:
+ pane = self._make_spamlisting(location, num, after, reverse, count)
if c.user.pref_private_feeds:
extension_handling = "private"
elif is_moderator and location == 'traffic':
@@ -439,6 +366,25 @@ class FrontController(RedditController):
return EditReddit(content = pane,
extension_handling = extension_handling).render()
+ @base_listing
+ @validate(location = nop('location'),
+ created = VOneOf('created', ('true','false'),
+ default = 'false'))
+ def GET_editreddit(self, location, num, after, reverse, count, created):
+ """Edit reddit form."""
+ if isinstance(c.site, ModContribSR):
+ return self._edit_modcontrib_reddit(location, num, after, reverse,
+ count, created)
+ elif isinstance(c.site, AllSR) and c.user_is_admin:
+ return self._edit_modcontrib_reddit(location, num, after, reverse,
+ count, created)
+ elif isinstance(c.site, FakeSubreddit):
+ return self.abort404()
+ else:
+ return self._edit_normal_reddit(location, num, after, reverse,
+ count, created)
+
+
def GET_awards(self):
"""The awards page."""
return BoringPage(_("awards"), content = UserAwards()).render()
@@ -605,63 +551,6 @@ class FrontController(RedditController):
return builder.total_num, timing, res
- @validate(dest = VDestination())
- def GET_login(self, dest):
- """The /login form. No link to this page exists any more on
- the site (all actions invoking it now go through the login
- cover). However, this page is still used for logging the user
- in during submission or voting from the bookmarklets."""
-
- if (c.user_is_loggedin and
- not request.environ.get('extension') == 'embed'):
- return self.redirect(dest)
- return LoginPage(dest = dest).render()
-
- @validate(VUser(),
- VModhash(),
- dest = VDestination())
- def GET_logout(self, dest):
- return self.redirect(dest)
-
- @validate(VUser(),
- VModhash(),
- dest = VDestination())
- def POST_logout(self, dest):
- """wipe login cookie and redirect to referer."""
- self.logout()
- return self.redirect(dest)
-
-
- @validate(VUser(),
- dest = VDestination())
- def GET_adminon(self, dest):
- """Enable admin interaction with site"""
- #check like this because c.user_is_admin is still false
- if not c.user.name in g.admins:
- return self.abort404()
- self.login(c.user, admin = True)
- return self.redirect(dest)
-
- @validate(VAdmin(),
- dest = VDestination())
- def GET_adminoff(self, dest):
- """disable admin interaction with site."""
- if not c.user.name in g.admins:
- return self.abort404()
- self.login(c.user, admin = False)
- return self.redirect(dest)
-
- def GET_validuser(self):
- """checks login cookie to verify that a user is logged in and
- returns their user name"""
- c.response_content_type = 'text/plain'
- if c.user_is_loggedin:
- perm = str(c.user.can_wiki())
- c.response.content = c.user.name + "," + perm
- else:
- c.response.content = ''
- return c.response
-
@validate(VAdmin(),
comment = VCommentByID('comment_id'))
def GET_comment_by_id(self, comment):
@@ -711,22 +600,6 @@ class FrontController(RedditController):
sent = sent,
msg_hash = msg_hash)).render()
- @validate(msg_hash = nop('x'))
- def GET_optout(self, msg_hash):
- """handles /mail/optout to add an email to the optout mailing
- list. The actual email addition comes from the user posting
- the subsequently rendered form and is handled in
- ApiController.POST_optout."""
- return self._render_opt_in_out(msg_hash, True)
-
- @validate(msg_hash = nop('x'))
- def GET_optin(self, msg_hash):
- """handles /mail/optin to remove an email address from the
- optout list. The actual email removal comes from the user
- posting the subsequently rendered form and is handled in
- ApiController.POST_optin."""
- return self._render_opt_in_out(msg_hash, False)
-
def GET_frame(self):
"""used for cname support. makes a frame and
puts the proper url as the frame source"""
@@ -801,3 +674,223 @@ class FrontController(RedditController):
def GET_site_traffic(self):
return BoringPage("traffic",
content = RedditTraffic()).render()
+
+class FormsController(RedditController):
+
+ def GET_password(self):
+ """The 'what is my password' page"""
+ return BoringPage(_("password"), content=Password()).render()
+
+ @validate(VUser(),
+ dest = VDestination(),
+ reason = nop('reason'))
+ def GET_verify(self, dest, reason):
+ if c.user.email_verified:
+ content = InfoBar(message = strings.email_verified)
+ if dest:
+ return self.redirect(dest)
+ else:
+ if reason == "submit":
+ infomsg = strings.verify_email_submit
+ else:
+ infomsg = strings.verify_email
+
+ content = PaneStack(
+ [InfoBar(message = infomsg),
+ PrefUpdate(email = True, verify = True,
+ password = False)])
+ return BoringPage(_("verify email"), content = content).render()
+
+ @validate(VUser(),
+ cache_evt = VCacheKey('email_verify', ('key',)),
+ key = nop('key'),
+ dest = VDestination(default = "/prefs/update"))
+ def GET_verify_email(self, cache_evt, key, dest):
+ if c.user_is_loggedin and c.user.email_verified:
+ cache_evt.clear()
+ return self.redirect(dest)
+ elif not (cache_evt.user and
+ key == passhash(cache_evt.user.name, cache_evt.user.email)):
+ content = PaneStack(
+ [InfoBar(message = strings.email_verify_failed),
+ PrefUpdate(email = True, verify = True,
+ password = False)])
+ return BoringPage(_("verify email"), content = content).render()
+ elif c.user != cache_evt.user:
+ # wrong user. Log them out and try again.
+ self.logout()
+ return self.redirect(request.fullpath)
+ else:
+ cache_evt.clear()
+ c.user.email_verified = True
+ c.user._commit()
+ Award.give_if_needed("verified_email", c.user)
+ return self.redirect(dest)
+
+ @validate(cache_evt = VCacheKey('reset', ('key',)),
+ key = nop('key'))
+ def GET_resetpassword(self, cache_evt, key):
+ """page hit once a user has been sent a password reset email
+ to verify their identity before allowing them to update their
+ password."""
+
+ #if another user is logged-in, log them out
+ if c.user_is_loggedin:
+ self.logout()
+ return self.redirect(request.path)
+
+ done = False
+ if not key and request.referer:
+ referer_path = request.referer.split(g.domain)[-1]
+ done = referer_path.startswith(request.fullpath)
+ elif not getattr(cache_evt, "user", None):
+ return self.abort404()
+ return BoringPage(_("reset password"),
+ content=ResetPassword(key=key, done=done)).render()
+
+ @validate(VUser())
+ def GET_depmod(self):
+ displayPane = PaneStack()
+
+ active_trials = {}
+ finished_trials = {}
+
+ juries = Jury.by_account(c.user)
+
+ trials = trial_info([j._thing2 for j in juries])
+
+ for j in juries:
+ defendant = j._thing2
+
+ if trials.get(defendant._fullname, False):
+ active_trials[defendant._fullname] = j._name
+ else:
+ finished_trials[defendant._fullname] = j._name
+
+ if active_trials:
+ fullnames = sorted(active_trials.keys(), reverse=True)
+
+ def my_wrap(thing):
+ w = Wrapped(thing)
+ w.hide_score = True
+ w.likes = None
+ w.trial_mode = True
+ w.render_class = LinkOnTrial
+ w.juryvote = active_trials[thing._fullname]
+ return w
+
+ listing = wrap_links(fullnames, wrapper=my_wrap)
+ displayPane.append(InfoBar(strings.active_trials,
+ extra_class="mellow"))
+ displayPane.append(listing)
+
+ if finished_trials:
+ fullnames = sorted(finished_trials.keys(), reverse=True)
+ listing = wrap_links(fullnames)
+ displayPane.append(InfoBar(strings.finished_trials,
+ extra_class="mellow"))
+ displayPane.append(listing)
+
+ displayPane.append(InfoBar(strings.more_info_link %
+ dict(link="/help/deputies"),
+ extra_class="mellow"))
+
+ return Reddit(content = displayPane).render()
+
+ @validate(VUser(),
+ location = nop("location"))
+ def GET_prefs(self, location=''):
+ """Preference page"""
+ content = None
+ infotext = None
+ if not location or location == 'options':
+ content = PrefOptions(done=request.get.get('done'))
+ elif location == 'friends':
+ content = PaneStack()
+ infotext = strings.friends % Friends.path
+ content.append(FriendList())
+ elif location == 'update':
+ content = PrefUpdate()
+ elif location == 'feeds' and c.user.pref_private_feeds:
+ content = PrefFeeds()
+ elif location == 'delete':
+ content = PrefDelete()
+ else:
+ return self.abort404()
+
+ return PrefsPage(content = content, infotext=infotext).render()
+
+
+ @validate(dest = VDestination())
+ def GET_login(self, dest):
+ """The /login form. No link to this page exists any more on
+ the site (all actions invoking it now go through the login
+ cover). However, this page is still used for logging the user
+ in during submission or voting from the bookmarklets."""
+
+ if (c.user_is_loggedin and
+ not request.environ.get('extension') == 'embed'):
+ return self.redirect(dest)
+ return LoginPage(dest = dest).render()
+
+ @validate(VUser(),
+ VModhash(),
+ dest = VDestination())
+ def GET_logout(self, dest):
+ return self.redirect(dest)
+
+ @validate(VUser(),
+ VModhash(),
+ dest = VDestination())
+ def POST_logout(self, dest):
+ """wipe login cookie and redirect to referer."""
+ self.logout()
+ return self.redirect(dest)
+
+
+ @validate(VUser(),
+ dest = VDestination())
+ def GET_adminon(self, dest):
+ """Enable admin interaction with site"""
+ #check like this because c.user_is_admin is still false
+ if not c.user.name in g.admins:
+ return self.abort404()
+ self.login(c.user, admin = True)
+ return self.redirect(dest)
+
+ @validate(VAdmin(),
+ dest = VDestination())
+ def GET_adminoff(self, dest):
+ """disable admin interaction with site."""
+ if not c.user.name in g.admins:
+ return self.abort404()
+ self.login(c.user, admin = False)
+ return self.redirect(dest)
+
+ def GET_validuser(self):
+ """checks login cookie to verify that a user is logged in and
+ returns their user name"""
+ c.response_content_type = 'text/plain'
+ if c.user_is_loggedin:
+ perm = str(c.user.can_wiki())
+ c.response.content = c.user.name + "," + perm
+ else:
+ c.response.content = ''
+ return c.response
+
+ @validate(msg_hash = nop('x'))
+ def GET_optout(self, msg_hash):
+ """handles /mail/optout to add an email to the optout mailing
+ list. The actual email addition comes from the user posting
+ the subsequently rendered form and is handled in
+ ApiController.POST_optout."""
+ return self._render_opt_in_out(msg_hash, True)
+
+ @validate(msg_hash = nop('x'))
+ def GET_optin(self, msg_hash):
+ """handles /mail/optin to remove an email address from the
+ optout list. The actual email removal comes from the user
+ posting the subsequently rendered form and is handled in
+ ApiController.POST_optin."""
+ return self._render_opt_in_out(msg_hash, False)
+
diff --git a/r2/r2/controllers/health.py b/r2/r2/controllers/health.py
index 0668245c0..ac9e2f007 100644
--- a/r2/r2/controllers/health.py
+++ b/r2/r2/controllers/health.py
@@ -5,12 +5,12 @@ import time
from pylons.controllers.util import abort
from pylons import c, g
-from reddit_base import RedditController
+from reddit_base import MinimalController
from r2.lib.amqp import worker
from validator import *
-class HealthController(RedditController):
+class HealthController(MinimalController):
def shutdown(self):
thread_pool = c.thread_pool
def _shutdown():
diff --git a/r2/r2/controllers/listingcontroller.py b/r2/r2/controllers/listingcontroller.py
index 5c466cbc4..8c31c6ecc 100644
--- a/r2/r2/controllers/listingcontroller.py
+++ b/r2/r2/controllers/listingcontroller.py
@@ -19,7 +19,7 @@
# All portions of the code written by CondeNet are Copyright (c) 2006-2010
# CondeNet, Inc. All Rights Reserved.
################################################################################
-from reddit_base import RedditController, base_listing
+from reddit_base import RedditController, base_listing, organic_pos
from validator import *
from r2.models import *
@@ -30,7 +30,6 @@ from r2.lib.menus import ControversyTimeMenu
from r2.lib.rising import get_rising
from r2.lib.wrapped import Wrapped
from r2.lib.normalized_hot import normalized_hot, get_hot
-from r2.lib.recommendation import get_recommended
from r2.lib.db.thing import Query, Merge, Relations
from r2.lib.db import queries
from r2.lib.strings import Score
@@ -40,7 +39,7 @@ from r2.lib.solrsearch import SearchQuery
from r2.lib.utils import iters, check_cheating, timeago
from r2.lib.utils.trial_utils import populate_spotlight
from r2.lib import sup
-from r2.lib.promote import PromoteSR
+from r2.lib.promote import randomized_promotion_list, get_promote_srid
from r2.lib.contrib.pysolr import SolrError
from admin import admin_profile_query
@@ -56,6 +55,9 @@ class ListingController(RedditController):
# toggle skipping of links based on the users' save/hide/vote preferences
skip = True
+ # allow stylesheets on listings
+ allow_stylesheets = True
+
# toggles showing numbers
show_nums = True
@@ -149,7 +151,8 @@ class ListingController(RedditController):
def listing(self):
"""Listing to generate from the builder"""
- if c.site.path == PromoteSR.path and not c.user_is_sponsor:
+ if (getattr(c.site, "_id", -1) == get_promote_srid() and
+ not c.user_is_sponsor):
abort(403, 'forbidden')
listing = LinkListing(self.builder_obj, show_nums = self.show_nums)
try:
@@ -187,6 +190,7 @@ class ListingController(RedditController):
builder_wrapper = staticmethod(default_thing_wrapper())
def GET_listing(self, **env):
+ check_cheating('site')
return self.build_listing(**env)
class FixListing(object):
@@ -219,55 +223,73 @@ class HotController(FixListing, ListingController):
where = 'hot'
def spotlight(self):
- spotlight_links, pos = organic.organic_links(c.user)
+ if (c.site == Default
+ and (not c.user_is_loggedin
+ or (c.user_is_loggedin and c.user.pref_organic))):
- trial = populate_spotlight()
+ spotlight_links = organic.organic_links(c.user)
+ pos = organic_pos()
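+ # organic_pos appears to hold the viewer's saved position in the
+ # organic listing; it is normalized into range just below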
- if trial:
- spotlight_links.insert(pos, trial._fullname)
+ if not spotlight_links:
+ pos = 0
+ elif pos != 0:
+ pos = pos % len(spotlight_links)
- if not spotlight_links:
- return None
+ spotlight_links, pos = promote.insert_promoted(spotlight_links, pos)
+ trial = populate_spotlight()
- # get links in proximity to pos
- num_tl = len(spotlight_links)
- if num_tl <= 3:
- disp_links = spotlight_links
- else:
- left_side = max(-1, min(num_tl - 3, 8))
- disp_links = [spotlight_links[(i + pos) % num_tl]
- for i in xrange(-2, left_side)]
+ if trial:
+ spotlight_links.insert(pos, trial._fullname)
- def keep_fn(item):
- if trial and trial._fullname == item._fullname:
- return True
- elif item.likes is not None:
- return False
+ if not spotlight_links:
+ return None
+
+ # get links in proximity to pos
+ num_tl = len(spotlight_links)
+ if num_tl <= 3:
+ disp_links = spotlight_links
else:
- return item.keep_item(item)
+ left_side = max(-1, min(num_tl - 3, 8))
+ disp_links = [spotlight_links[(i + pos) % num_tl]
+ for i in xrange(-2, left_side)]
+ def keep_fn(item):
+ if trial and trial._fullname == item._fullname:
+ return True
+ return organic.keep_fresh_links(item)
- def wrap(item):
- if item is trial:
- w = Wrapped(item)
- w.trial_mode = True
- w.render_class = LinkOnTrial
- return w
- return self.builder_wrapper(item)
+ def wrap(item):
+ if item is trial:
+ w = Wrapped(item)
+ w.trial_mode = True
+ w.render_class = LinkOnTrial
+ return w
+ return self.builder_wrapper(item)
+ b = IDBuilder(disp_links, wrap = wrap,
+ num = organic.organic_length,
+ skip = True, keep_fn = keep_fn)
- b = IDBuilder(disp_links, wrap = wrap,
- skip = True, keep_fn = keep_fn)
+ s = SpotlightListing(b,
+ spotlight_links = spotlight_links,
+ visible_link = spotlight_links[pos],
+ max_num = self.listing_obj.max_num,
+ max_score = self.listing_obj.max_score).listing()
- s = SpotlightListing(b,
- spotlight_links = spotlight_links,
- visible_link = spotlight_links[pos],
- max_num = self.listing_obj.max_num,
- max_score = self.listing_obj.max_score).listing()
+ if len(s.things) > 0:
+ # only pass through a listing if the links made it
+ # through our builder
+ organic.update_pos(pos+1)
+ return s
+
+ # no organic box on this hot page, so show a random promoted link instead
+ elif c.site != Default:
+ link_ids = randomized_promotion_list(c.user, c.site)
+ if link_ids:
+ res = wrap_links(link_ids, wrapper = self.builder_wrapper,
+ num = 1, keep_fn = lambda x: x.fresh,
+ skip = True)
+ if res.things:
+ return res
- if len(s.things) > 0:
- # only pass through a listing if the links made it
- # through our builder
- organic.update_pos(pos+1)
- return s
def query(self):
@@ -289,9 +311,7 @@ class HotController(FixListing, ListingController):
def content(self):
# only send a spotlight listing for HTML rendering
- if (c.site == Default and c.render_style == "html"
- and (not c.user_is_loggedin
- or (c.user_is_loggedin and c.user.pref_organic))):
+ if c.render_style == "html":
spotlight = self.spotlight()
if spotlight:
return PaneStack([spotlight, self.listing_obj], css_class='spacer')
@@ -414,22 +434,22 @@ class ByIDController(ListingController):
return ListingController.GET_listing(self, **env)
-class RecommendedController(ListingController):
- where = 'recommended'
- title_text = _('recommended for you')
-
- @property
- def menus(self):
- return [RecSortMenu(default = self.sort)]
-
- def query(self):
- return get_recommended(c.user._id, sort = self.sort)
-
- @validate(VUser(),
- sort = VMenu("controller", RecSortMenu))
- def GET_listing(self, sort, **env):
- self.sort = sort
- return ListingController.GET_listing(self, **env)
+#class RecommendedController(ListingController):
+# where = 'recommended'
+# title_text = _('recommended for you')
+#
+# @property
+# def menus(self):
+# return [RecSortMenu(default = self.sort)]
+#
+# def query(self):
+# return get_recommended(c.user._id, sort = self.sort)
+#
+# @validate(VUser(),
+# sort = VMenu("controller", RecSortMenu))
+# def GET_listing(self, sort, **env):
+# self.sort = sort
+# return ListingController.GET_listing(self, **env)
class UserController(ListingController):
render_cls = ProfilePage
@@ -527,6 +547,7 @@ class MessageController(ListingController):
show_sidebar = False
show_nums = False
render_cls = MessagePage
+ allow_stylesheets = False
@property
def menus(self):
@@ -671,7 +692,7 @@ class MessageController(ListingController):
def GET_compose(self, to, subject, message, success):
captcha = Captcha() if c.user.needs_captcha() else None
content = MessageCompose(to = to, subject = subject,
- captcha = captcha,
+ captcha = captcha,
message = message,
success = success)
return MessagePage(content = content).render()
@@ -742,7 +763,7 @@ class MyredditsController(ListingController):
stack.append(InfoBar(message=message))
stack.append(self.listing_obj)
-
+
return stack
@validate(VUser())
@@ -754,5 +775,9 @@ class CommentsController(ListingController):
title_text = _('comments')
def query(self):
- return queries.get_all_comments()
+ return c.site.get_all_comments()
+
+ def GET_listing(self, **env):
+ c.profilepage = True
+ return ListingController.GET_listing(self, **env)
diff --git a/r2/r2/controllers/mediaembed.py b/r2/r2/controllers/mediaembed.py
index 9ab54ad32..cb147b8bb 100644
--- a/r2/r2/controllers/mediaembed.py
+++ b/r2/r2/controllers/mediaembed.py
@@ -22,7 +22,7 @@
from validator import *
from reddit_base import MinimalController
-from r2.lib.scraper import scrapers
+from r2.lib.scraper import get_media_embed
from r2.lib.pages import MediaEmbedBody, ComScore, render_ad
from pylons import request
@@ -48,9 +48,7 @@ class MediaembedController(MinimalController):
elif isinstance(link.media_object, dict):
# otherwise it's the new style, which is a dict(type=type, **args)
- media_object_type = link.media_object['type']
- scraper = scrapers[media_object_type]
- media_embed = scraper.media_embed(**link.media_object)
+ media_embed = get_media_embed(link.media_object)
content = media_embed.content
return MediaEmbedBody(body = content).render()
diff --git a/r2/r2/controllers/promotecontroller.py b/r2/r2/controllers/promotecontroller.py
index 73539d5f6..62ba61170 100644
--- a/r2/r2/controllers/promotecontroller.py
+++ b/r2/r2/controllers/promotecontroller.py
@@ -25,13 +25,13 @@ from r2.models import *
from r2.lib.authorize import get_account_info, edit_profile
from r2.lib.pages import *
from r2.lib.pages.things import wrap_links
+from r2.lib.strings import strings
from r2.lib.menus import *
from r2.controllers import ListingController
from r2.controllers.reddit_base import RedditController
-from r2.lib.promote import get_promoted, STATUS, PromoteSR
-from r2.lib.utils import timetext
+from r2.lib.utils import timetext, make_offset_date
from r2.lib.media import force_thumbnail, thumbnail_url
from r2.lib import cssfilter
from datetime import datetime
@@ -46,33 +46,20 @@ class PromoteController(ListingController):
return _('promoted by you')
def query(self):
- q = Link._query(Link.c.sr_id == PromoteSR._id)
- if not c.user_is_sponsor:
- # get user's own promotions
- q._filter(Link.c.author_id == c.user._id)
- q._filter(Link.c._spam == (True, False),
- Link.c.promoted == (True, False))
- q._sort = desc('_date')
-
+ author_id = None if c.user_is_sponsor else c.user._id
if self.sort == "future_promos":
- q._filter(Link.c.promote_status == STATUS.unseen)
+ return promote.get_unapproved_links(author_id)
elif self.sort == "pending_promos":
- if c.user_is_admin:
- q._filter(Link.c.promote_status == STATUS.pending)
- else:
- q._filter(Link.c.promote_status == (STATUS.unpaid,
- STATUS.unseen,
- STATUS.accepted,
- STATUS.rejected))
+ return promote.get_accepted_links(author_id)
elif self.sort == "unpaid_promos":
- q._filter(Link.c.promote_status == STATUS.unpaid)
+ return promote.get_unpaid_links(author_id)
elif self.sort == "rejected_promos":
- q._filter(Link.c.promote_status == STATUS.rejected)
+ return promote.get_rejected_links(author_id)
elif self.sort == "live_promos":
- q._filter(Link.c.promote_status == STATUS.promoted)
-
- return q
+ return promote.get_live_links(author_id)
+ return promote.get_all_links(author_id)
+ @validate(VSponsor())
def GET_listing(self, sort = "", **env):
if not c.user_is_loggedin or not c.user.email_verified:
return self.redirect("/ad_inq")
@@ -81,7 +68,7 @@ class PromoteController(ListingController):
GET_index = GET_listing
- @validate(VVerifiedUser())
+ @validate(VSponsor())
def GET_new_promo(self):
return PromotePage('content', content = PromoteLinkForm()).render()
@@ -90,113 +77,77 @@ class PromoteController(ListingController):
def GET_edit_promo(self, link):
if link.promoted is None:
return self.abort404()
- rendered = wrap_links(link)
- timedeltatext = ''
- if link.promote_until:
- timedeltatext = timetext(link.promote_until - datetime.now(g.tz),
- resultion=2)
+ rendered = wrap_links(link, wrapper = promote.sponsor_wrapper,
+ skip = False)
form = PromoteLinkForm(link = link,
listing = rendered,
- timedeltatext = timedeltatext)
+ timedeltatext = "")
+
page = PromotePage('new_promo', content = form)
return page.render()
- @validate(VVerifiedUser())
+ @validate(VSponsor())
def GET_graph(self):
content = Promote_Graph()
if c.user_is_sponsor and c.render_style == 'csv':
c.response.content = content.as_csv()
return c.response
- return PromotePage("grpaph", content = content).render()
+ return PromotePage("graph", content = content).render()
### POST controllers below
- @validatedForm(VSponsor(),
- link = VByName("link"),
- bid = VBid('bid', "link"))
- def POST_freebie(self, form, jquery, link, bid):
- if link and link.promoted is not None and bid:
- promote.auth_paid_promo(link, c.user, -1, bid)
- jquery.refresh()
+ @validatedForm(VSponsorAdmin(),
+ link = VLink("link_id"),
+ indx = VInt("indx"))
+ def POST_freebie(self, form, jquery, link, indx):
+ if promote.is_promo(link) and indx is not None:
+ promote.free_campaign(link, indx, c.user)
+ form.redirect(promote.promo_edit_url(link))
- @validatedForm(VSponsor(),
+ @validatedForm(VSponsorAdmin(),
link = VByName("link"),
note = nop("note"))
def POST_promote_note(self, form, jquery, link, note):
- if link and link.promoted is not None:
+ if promote.is_promo(link):
form.find(".notes").children(":last").after(
"
" + promote.promotion_log(link, note, True) + "
")
- @validatedForm(VSponsor(),
- link = VByName("link"),
- refund = VFloat("refund"))
- def POST_refund(self, form, jquery, link, refund):
- if link:
- # make sure we don't refund more than we should
- author = Account._byID(link.author_id)
- promote.refund_promo(link, author, refund)
- jquery.refresh()
-
- @noresponse(VSponsor(),
+ @noresponse(VSponsorAdmin(),
thing = VByName('id'))
def POST_promote(self, thing):
- if thing:
- now = datetime.now(g.tz)
- # make accepted if unseen or already rejected
- if thing.promote_status in (promote.STATUS.unseen,
- promote.STATUS.rejected):
- promote.accept_promo(thing)
- # if not finished and the dates are current
- elif (thing.promote_status < promote.STATUS.finished and
- thing._date <= now and thing.promote_until > now):
- # if already pending, cron job must have failed. Promote.
- if thing.promote_status == promote.STATUS.accepted:
- promote.pending_promo(thing)
- promote.promote(thing)
+ if promote.is_promo(thing):
+ promote.accept_promotion(thing)
- @noresponse(VSponsor(),
+ @noresponse(VSponsorAdmin(),
thing = VByName('id'),
reason = nop("reason"))
def POST_unpromote(self, thing, reason):
- if thing:
- # reject anything that hasn't yet been promoted
- if (c.user_is_sponsor and
- thing.promote_status < promote.STATUS.promoted):
- promote.reject_promo(thing, reason = reason)
- # also reject anything that is live but has a reason given
- elif (c.user_is_sponsor and reason and
- thing.promote_status == promote.STATUS.promoted):
- promote.reject_promo(thing, reason = reason)
- # otherwise, mark it as "finished"
- else:
- promote.unpromote(thing)
+ if promote.is_promo(thing):
+ promote.reject_promotion(thing, reason = reason)
@validatedForm(VSponsor('link_id'),
VModhash(),
VRatelimit(rate_user = True,
rate_ip = True,
prefix = 'create_promo_'),
- ip = ValidIP(),
l = VLink('link_id'),
title = VTitle('title'),
- url = VUrl('url', allow_self = False),
- dates = VDateRange(['startdate', 'enddate'],
- future = g.min_promote_future,
- reference_date = promote.promo_datetime_now,
- business_days = True,
- admin_override = True),
+ url = VUrl('url', allow_self = False, lookup = False),
+ ip = ValidIP(),
disable_comments = VBoolean("disable_comments"),
set_clicks = VBoolean("set_maximum_clicks"),
max_clicks = VInt("maximum_clicks", min = 0),
set_views = VBoolean("set_maximum_views"),
max_views = VInt("maximum_views", min = 0),
- bid = VBid('bid', 'link_id'))
- def POST_new_promo(self, form, jquery, l, ip, title, url, dates,
- disable_comments,
- set_clicks, max_clicks, set_views, max_views, bid):
+ )
+ def POST_edit_promo(self, form, jquery, ip, l, title, url,
+ disable_comments,
+ set_clicks, max_clicks,
+ set_views, max_views):
+
should_ratelimit = False
if not c.user_is_sponsor:
set_clicks = False
@@ -209,7 +160,7 @@ class PromoteController(ListingController):
if not should_ratelimit:
c.errors.remove((errors.RATELIMIT, 'ratelimit'))
-
+
# demangle URL in canonical way
if url:
if isinstance(url, (unicode, str)):
@@ -220,102 +171,168 @@ class PromoteController(ListingController):
# want the URL
url = url[0].url
+ # users can change the disable_comments on promoted links
+ if ((not l or not promote.is_promoted(l)) and
+ (form.has_errors('title', errors.NO_TEXT,
+ errors.TOO_LONG) or
+ form.has_errors('url', errors.NO_URL, errors.BAD_URL) or
+ jquery.has_errors('ratelimit', errors.RATELIMIT))):
+ return
+
+ if not l:
+ l = promote.new_promotion(title, url, c.user, ip)
+ elif promote.is_promo(l):
+ changed = False
+ # live items can only be changed by a sponsor, and also
+ # pay the cost of de-approving the link
+ trusted = c.user_is_sponsor or \
+ getattr(c.user, "trusted_sponsor", False)
+ if not promote.is_promoted(l) or trusted:
+ if title and title != l.title:
+ l.title = title
+ changed = not trusted
+ if url and url != l.url:
+ l.url = url
+ changed = not trusted
+
+ # only trips if the title or url is changed by a non-trusted user
+ if changed and not promote.is_unpaid(l):
+ promote.unapprove_promotion(l)
+
+ if c.user_is_sponsor:
+ l.maximum_clicks = max_clicks
+ l.maximum_views = max_views
+
+ # comment disabling is free to change at any time.
+ l.disable_comments = disable_comments
+ l._commit()
+
+ form.redirect(promote.promo_edit_url(l))
+
+ @validate(VSponsorAdmin())
+ def GET_roadblock(self):
+ return PromotePage('content', content = Roadblocks()).render()
+
+ @validatedForm(VSponsorAdmin(),
+ VModhash(),
+ dates = VDateRange(['startdate', 'enddate'],
+ future = 1,
+ reference_date = promote.promo_datetime_now,
+ business_days = False,
+ admin_override = True),
+ sr = VSubmitSR('sr'))
+ def POST_add_roadblock(self, form, jquery, dates, sr):
+ if (form.has_errors('startdate', errors.BAD_DATE,
+ errors.BAD_FUTURE_DATE) or
+ form.has_errors('enddate', errors.BAD_DATE,
+ errors.BAD_FUTURE_DATE, errors.BAD_DATE_RANGE)):
+ return
+ if form.has_errors('sr', errors.SUBREDDIT_NOEXIST,
+ errors.SUBREDDIT_NOTALLOWED,
+ errors.SUBREDDIT_REQUIRED):
+ return
+ if dates and sr:
+ sd, ed = dates
+ promote.roadblock_reddit(sr.name, sd.date(), ed.date())
+ jquery.refresh()
+
+ @validatedForm(VSponsorAdmin(),
+ VModhash(),
+ dates = VDateRange(['startdate', 'enddate'],
+ future = 1,
+ reference_date = promote.promo_datetime_now,
+ business_days = False,
+ admin_override = True),
+ sr = VSubmitSR('sr'))
+ def POST_rm_roadblock(self, form, jquery, dates, sr):
+ if dates and sr:
+ sd, ed = dates
+ promote.unroadblock_reddit(sr.name, sd.date(), ed.date())
+ jquery.refresh()
+
+ @validatedForm(VSponsor('link_id'),
+ VModhash(),
+ dates = VDateRange(['startdate', 'enddate'],
+ future = 1,
+ reference_date = promote.promo_datetime_now,
+ business_days = False,
+ admin_override = True),
+ l = VLink('link_id'),
+ bid = VBid('bid', 'link_id', 'sr'),
+ sr = VSubmitSR('sr'),
+ indx = VInt("indx"),
+ targeting = VLength("targeting", 10))
+ def POST_edit_campaign(self, form, jquery, l, indx,
+ dates, bid, sr, targeting):
+ if not l:
+ return
+
+ start, end = [x.date() for x in dates] if dates else (None, None)
+
+ if start and end and not promote.is_accepted(l) and not c.user_is_sponsor:
+ # if the ad is not approved already, ensure the start date
+ # is at least 2 days in the future
+ now = promote.promo_datetime_now()
+ future = make_offset_date(now, g.min_promote_future,
+ business_days = True)
+ if start < future.date():
+ c.errors.add(errors.BAD_FUTURE_DATE,
+ msg_params = dict(day=g.min_promote_future),
+ field = "startdate")
+
+
+ if (form.has_errors('startdate', errors.BAD_DATE,
+ errors.BAD_FUTURE_DATE) or
+ form.has_errors('enddate', errors.BAD_DATE,
+ errors.BAD_FUTURE_DATE, errors.BAD_DATE_RANGE)):
+ return
+
+ duration = max((end - start).days, 1)
+
if form.has_errors('bid', errors.BAD_BID):
return
- # check dates and date range
- start, end = [x.date() for x in dates] if dates else (None, None)
- if (not l or
- (l.promote_status != promote.STATUS.promoted and
- (l._date.date(), l.promote_until.date()) != (start,end))):
- if (form.has_errors('startdate', errors.BAD_DATE,
- errors.BAD_FUTURE_DATE) or
- form.has_errors('enddate', errors.BAD_DATE,
- errors.BAD_FUTURE_DATE, errors.BAD_DATE_RANGE)):
- return
- # if the dates have been updated, it is possible that the
- # bid is no longer valid
- duration = max((end - start).days, 1)
- if float(bid) / duration < g.min_promote_bid:
- c.errors.add(errors.BAD_BID, field = 'bid',
- msg_params = {"min": g.min_promote_bid,
- "max": g.max_promote_bid})
-
- # dates have been validated at this point. Next validate title, etc.
- if (form.has_errors('title', errors.NO_TEXT,
- errors.TOO_LONG) or
- form.has_errors('url', errors.NO_URL, errors.BAD_URL) or
- (not l and jquery.has_errors('ratelimit', errors.RATELIMIT))):
+ if bid is None or float(bid) / duration < g.min_promote_bid:
+ c.errors.add(errors.BAD_BID, field = 'bid',
+ msg_params = {"min": g.min_promote_bid,
+ "max": g.max_promote_bid})
+ form.has_errors('bid', errors.BAD_BID)
return
- elif l:
- if l.promote_status == promote.STATUS.finished:
- form.parent().set_html(".status",
- _("that promoted link is already finished."))
- else:
- # we won't penalize for changes of dates provided
- # the submission isn't pending (or promoted, or
- # finished)
- changed = False
- if dates and not promote.update_promo_dates(l, *dates):
- form.parent().set_html(".status",
- _("too late to change the date."))
- else:
- changed = True
- # check for changes in the url and title
- if promote.update_promo_data(l, title, url):
- changed = True
- # sponsors can change the bid value (at the expense of making
- # the promotion a freebie)
- if c.user_is_sponsor and bid != l.promote_bid:
- promote.auth_paid_promo(l, c.user, -1, bid)
- promote.accept_promo(l)
- changed = True
+ if targeting == 'one':
+ if form.has_errors('sr', errors.SUBREDDIT_NOEXIST,
+ errors.SUBREDDIT_NOTALLOWED,
+ errors.SUBREDDIT_REQUIRED):
+ # checking to get the error set in the form, but we can't
+ # check for roadblocking if there's no subreddit
+ return
+ oversold = promote.is_roadblocked(sr.name, start, end)
+ if oversold:
+ c.errors.add(errors.OVERSOLD, field = 'sr',
+ msg_params = {"start": oversold[0].strftime('%m/%d/%Y'),
+ "end": oversold[1].strftime('%m/%d/%Y')})
+ form.has_errors('sr', errors.OVERSOLD)
+ return
+ if targeting == 'none':
+ sr = None
- if c.user_is_sponsor:
- l.maximum_clicks = max_clicks
- l.maximum_views = max_views
- changed = True
+ if indx is not None:
+ promote.edit_campaign(l, indx, dates, bid, sr)
+ l = promote.editable_add_props(l)
+ jquery.update_campaign(*l.campaigns[indx])
+ else:
+ indx = promote.new_campaign(l, dates, bid, sr)
+ l = promote.editable_add_props(l)
+ jquery.new_campaign(*l.campaigns[indx])
- l.disable_comments = disable_comments
- l._commit()
+ @validatedForm(VSponsor('link_id'),
+ VModhash(),
+ l = VLink('link_id'),
+ indx = VInt("indx"))
+ def POST_delete_campaign(self, form, jquery, l, indx):
+ if l and indx is not None:
+ promote.delete_campaign(l, indx)
- if changed:
- jquery.refresh()
-
- # no link so we are creating a new promotion
- elif dates:
- promote_start, promote_end = dates
- # check that the bid satisfies the minimum
- duration = max((promote_end - promote_start).days, 1)
- if bid / duration >= g.min_promote_bid:
- l = promote.new_promotion(title, url, c.user, ip,
- promote_start, promote_end, bid,
- disable_comments = disable_comments,
- max_clicks = max_clicks,
- max_views = max_views)
- # if the submitter is a sponsor (or implicitly an admin) we can
- # fast-track the approval and auto-accept the bid
- if c.user_is_sponsor:
- promote.auth_paid_promo(l, c.user, -1, bid)
- promote.accept_promo(l)
-
- # register a vote
- v = Vote.vote(c.user, l, True, ip)
-
- # set the rate limiter
- if should_ratelimit:
- VRatelimit.ratelimit(rate_user=True, rate_ip = True,
- prefix = "create_promo_",
- seconds = 60)
-
- form.redirect(promote.promo_edit_url(l))
- else:
- c.errors.add(errors.BAD_BID,
- msg_params = dict(min=g.min_promote_bid,
- max=g.max_promote_bid),
- field = 'bid')
- form.set_error(errors.BAD_BID, "bid")
@validatedForm(VSponsor('container'),
VModhash(),
@@ -361,8 +378,8 @@ class PromoteController(ListingController):
@validatedForm(VSponsor('link'),
link = VByName("link"),
+ indx = VInt("indx"),
customer_id = VInt("customer_id", min = 0),
- bid = VBid("bid", "link"),
pay_id = VInt("account", min = 0),
edit = VBoolean("edit"),
address = ValidAddress(["firstName", "lastName",
@@ -372,7 +389,7 @@ class PromoteController(ListingController):
usa_only = True),
creditcard = ValidCard(["cardNumber", "expirationDate",
"cardCode"]))
- def POST_update_pay(self, form, jquery, bid, link, customer_id, pay_id,
+ def POST_update_pay(self, form, jquery, link, indx, customer_id, pay_id,
edit, address, creditcard):
address_modified = not pay_id or edit
if address_modified:
@@ -385,35 +402,32 @@ class PromoteController(ListingController):
pass
else:
pay_id = edit_profile(c.user, address, creditcard, pay_id)
- if form.has_errors('bid', errors.BAD_BID) or not bid:
- pass
# if link is in use or finished, don't make a change
- elif link.promote_status == promote.STATUS.promoted:
- form.set_html(".status",
- _("that link is currently promoted. "
- "you can't update your bid now."))
- elif link.promote_status == promote.STATUS.finished:
- form.set_html(".status",
- _("that promotion is already over, so updating "
- "your bid is kind of pointless, don't you think?"))
- elif pay_id:
+ if pay_id:
# valid bid and created or existing bid id.
# check if already a transaction
- if promote.auth_paid_promo(link, c.user, pay_id, bid):
+ success, reason = promote.auth_campaign(link, indx, c.user, pay_id)
+ if success:
form.redirect(promote.promo_edit_url(link))
else:
form.set_html(".status",
+ reason or
_("failed to authenticate card. sorry."))
@validate(VSponsor("link"),
- article = VLink("link"))
- def GET_pay(self, article):
- data = get_account_info(c.user)
+ article = VLink("link"),
+ indx = VInt("indx"))
+ def GET_pay(self, article, indx):
# no need for admins to play in the credit card area
if c.user_is_loggedin and c.user._id != article.author_id:
return self.abort404()
- content = PaymentForm(link = article,
+ # make sure this is a valid campaign index
+ if indx not in getattr(article, "campaigns", {}):
+ return self.abort404()
+
+ data = get_account_info(c.user)
+ content = PaymentForm(article, indx,
customer_id = data.customerProfileId,
profiles = data.paymentProfiles)
res = LinkInfoPage(link = article,
@@ -441,8 +455,8 @@ class PromoteController(ListingController):
if any(errors.values()):
return UploadedImage("", "", "upload", errors = errors).render()
else:
- if not c.user_is_sponsor:
- promote.unapproved_promo(link)
+ if not c.user_is_sponsor and not promote.is_unpaid(link):
+ promote.unapprove_promotion(link)
return UploadedImage(_('saved'), thumbnail_url(link), "",
errors = errors).render()
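
Aside: the promotion hunks above make the minimum bid a per-day figure (note `duration = max((end - start).days, 1)`), and the VBid change further down charges a 1.5x premium when a campaign targets a single subreddit. A rough, self-contained sketch of that pricing rule, with a made-up MIN_BID_PER_DAY standing in for g.min_promote_bid:

from datetime import date

MIN_BID_PER_DAY = 20.0   # stand-in for g.min_promote_bid
TARGETING_PREMIUM = 1.5  # targeted campaigns pay 1.5x the minimum

def min_acceptable_bid(start, end, targeted):
    # bids are judged per day; a same-day campaign still counts as one day
    duration = max((end - start).days, 1)
    floor = MIN_BID_PER_DAY * (TARGETING_PREMIUM if targeted else 1.0)
    return floor * duration

# a three-day untargeted campaign needs at least 60.0 ...
assert min_acceptable_bid(date(2010, 1, 1), date(2010, 1, 4), False) == 60.0
# ... and targeting a single subreddit raises that floor to 90.0
assert min_acceptable_bid(date(2010, 1, 1), date(2010, 1, 4), True) == 90.0
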
diff --git a/r2/r2/controllers/reddit_base.py b/r2/r2/controllers/reddit_base.py
index 1d97349c1..f9400bb8f 100644
--- a/r2/r2/controllers/reddit_base.py
+++ b/r2/r2/controllers/reddit_base.py
@@ -183,7 +183,11 @@ def firsttime():
def get_redditfirst(key,default=None):
try:
- cookie = simplejson.loads(c.cookies['reddit_first'].value)
+ val = c.cookies['reddit_first'].value
+ # if the cookie is present at all, default to True
+ if default is None:
+ default = True
+ cookie = simplejson.loads(val)
return cookie[key]
except (ValueError,TypeError,KeyError),e:
# it's not a proper json dict, or the cookie isn't present, or
@@ -259,7 +263,6 @@ def set_subreddit():
redirect_to("/reddits/create?name=%s" % sr_name)
elif not c.error_page:
abort(404, "not found")
-
#if we didn't find a subreddit, check for a domain listing
if not sr_name and c.site == Default and domain:
c.site = DomainSR(domain)
@@ -437,6 +440,8 @@ def base_listing(fn):
class MinimalController(BaseController):
+ allow_stylesheets = False
+
def request_key(self):
# note that this references the cookie at request time, not
# the current value of it
@@ -446,7 +451,7 @@ class MinimalController(BaseController):
except CookieError:
cookies_key = ''
- return make_key('request_key',
+ return make_key('request_key_',
c.lang,
c.content_langs,
request.host,
@@ -470,6 +475,8 @@ class MinimalController(BaseController):
ratelimit_agents()
ratelimit_throttled()
+ c.allow_loggedin_cache = False
+
# the domain has to be set before Cookies get initialized
set_subreddit()
c.errors = ErrorSet()
@@ -480,6 +487,7 @@ class MinimalController(BaseController):
if not c.user_is_loggedin:
r = g.rendercache.get(self.request_key())
if r and request.method == 'GET':
+ r, c.cookies = r
response = c.response
response.headers = r.headers
response.content = r.content
@@ -517,38 +525,35 @@ class MinimalController(BaseController):
if c.response_access_control:
c.response.headers['Access-Control'] = c.response_access_control
- if c.user_is_loggedin:
+ if c.user_is_loggedin and not c.allow_loggedin_cache:
response.headers['Cache-Control'] = 'no-cache'
response.headers['Pragma'] = 'no-cache'
- # send cookies
- if not c.used_cache and c.cookies:
- # if we used the cache, these cookies should be set by the
- # cached response object instead
- for k,v in c.cookies.iteritems():
- if v.dirty:
- response.set_cookie(key = k,
- value = quote(v.value),
- domain = v.domain,
- expires = v.expires)
-
#return
#set content cache
if (g.page_cache_time
and request.method == 'GET'
- and not c.user_is_loggedin
+ and (not c.user_is_loggedin or c.allow_loggedin_cache)
and not c.used_cache
and not c.dontcache
and response.status_code != 503
and response.content and response.content[0]):
try:
g.rendercache.set(self.request_key(),
- response,
+ (response, c.cookies),
g.page_cache_time)
except MemcachedError:
# the key was too big to set in the rendercache
g.log.debug("Ignored too-big render cache")
+ # send cookies
+ for k,v in c.cookies.iteritems():
+ if v.dirty:
+ response.set_cookie(key = k,
+ value = quote(v.value),
+ domain = v.domain,
+ expires = v.expires)
+
if g.usage_sampling <= 0.0:
return
@@ -649,6 +654,15 @@ class RedditController(MinimalController):
if not isinstance(c.site, FakeSubreddit):
request.environ['REDDIT_NAME'] = c.site.name
+ # random reddit trickery -- have to do this after the content lang is set
+ if c.site == Random:
+ c.site = Subreddit.random_reddit()
+ redirect_to("/" + c.site.path.strip('/') + request.path)
+ elif c.site == RandomNSFW:
+ c.site = Subreddit.random_reddit(over18 = True)
+ redirect_to("/" + c.site.path.strip('/') + request.path)
+
+
# check that the site is available:
if c.site._spam and not c.user_is_admin and not c.error_page:
abort(404, "not found")
@@ -664,7 +678,7 @@ class RedditController(MinimalController):
return self.intermediate_redirect("/over18")
#check whether to allow custom styles
- c.allow_styles = True
+ c.allow_styles = self.allow_stylesheets
if g.css_killswitch:
c.allow_styles = False
#if the preference is set and we're not at a cname
@@ -674,14 +688,22 @@ class RedditController(MinimalController):
elif c.site.domain and c.site.css_on_cname and not c.cname:
c.allow_styles = False
- def check_modified(self, thing, action):
- if c.user_is_loggedin:
+ def check_modified(self, thing, action,
+ private=True, max_age=0, must_revalidate=True):
+ if c.user_is_loggedin and not c.allow_loggedin_cache:
return
last_modified = utils.last_modified_date(thing, action)
date_str = http_utils.http_date_str(last_modified)
c.response.headers['last-modified'] = date_str
- c.response.headers['cache-control'] = "private, max-age=0, must-revalidate"
+
+ cache_control = []
+ if private:
+ cache_control.append('private')
+ cache_control.append('max-age=%d' % max_age)
+ if must_revalidate:
+ cache_control.append('must-revalidate')
+ c.response.headers['cache-control'] = ', '.join(cache_control)
modified_since = request.if_modified_since
if modified_since and modified_since >= last_modified:
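
Aside: check_modified() previously hard-coded `private, max-age=0, must-revalidate`; the parameterized version keeps those as defaults while letting cacheable endpoints relax them. A standalone sketch of the header assembly mirroring the hunk above:

def cache_control_header(private=True, max_age=0, must_revalidate=True):
    # the defaults reproduce the old hard-coded header exactly
    parts = []
    if private:
        parts.append('private')
    parts.append('max-age=%d' % max_age)
    if must_revalidate:
        parts.append('must-revalidate')
    return ', '.join(parts)

assert cache_control_header() == 'private, max-age=0, must-revalidate'
assert cache_control_header(private=False, max_age=300,
                            must_revalidate=False) == 'max-age=300'
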
diff --git a/r2/r2/controllers/toolbar.py b/r2/r2/controllers/toolbar.py
index e76c8c4bd..e09984d8f 100644
--- a/r2/r2/controllers/toolbar.py
+++ b/r2/r2/controllers/toolbar.py
@@ -75,6 +75,9 @@ def auto_expand_panel(link):
return c.user.pref_frame_commentspanel
class ToolbarController(RedditController):
+
+ allow_stylesheets = True
+
@validate(link1 = VByName('id'),
link2 = VLink('id', redirect = False))
def GET_goto(self, link1, link2):
@@ -89,7 +92,7 @@ class ToolbarController(RedditController):
"/tb/$id36, show a given link with the toolbar"
if not link:
return self.abort404()
- elif link.is_self:
+ elif link.is_self or not link.subreddit_slow.can_view(c.user):
return self.redirect(link.url)
res = Frame(title = link.title,
diff --git a/r2/r2/controllers/validator/validator.py b/r2/r2/controllers/validator/validator.py
index 15abbfa78..f2303b4fd 100644
--- a/r2/r2/controllers/validator/validator.py
+++ b/r2/r2/controllers/validator/validator.py
@@ -560,6 +560,19 @@ class VVerifiedUser(VUser):
if not c.user.email_verified:
raise VerifiedUserRequiredException
+class VSponsorAdmin(VVerifiedUser):
+ """
+ Validator which checks c.user_is_sponsor
+ """
+ def user_test(self, thing):
+ return (thing.author_id == c.user._id)
+
+ def run(self, link_id = None):
+ VVerifiedUser.run(self)
+ if c.user_is_sponsor:
+ return
+ abort(403, 'forbidden')
+
class VSponsor(VVerifiedUser):
"""
Not intended to be used as a check for c.user_is_sponsor, but
@@ -616,15 +629,17 @@ class VSrCanDistinguish(VByName):
class VSrCanBan(VByName):
def run(self, thing_name):
if c.user_is_admin:
- return True
+ return 'admin'
elif c.user_is_loggedin:
item = VByName.run(self, thing_name)
# will throw a legitimate 500 if this isn't a link or
# comment, because this should only be used on links and
# comments
subreddit = item.subreddit_slow
- if subreddit.can_ban(c.user):
- return True
+ if subreddit.is_moderator(c.user):
+ return 'mod'
+ # elif subreddit.is_contributor(c.user):
+ # return 'contributor'
abort(403,'forbidden')
class VSrSpecial(VByName):
@@ -747,8 +762,9 @@ class VSanitizedUrl(Validator):
return utils.sanitize_url(url)
class VUrl(VRequired):
- def __init__(self, item, allow_self = True, *a, **kw):
+ def __init__(self, item, allow_self = True, lookup = True, *a, **kw):
self.allow_self = allow_self
+ self.lookup = lookup
VRequired.__init__(self, item, errors.NO_URL, *a, **kw)
def run(self, url, sr = None):
@@ -769,6 +785,8 @@ class VUrl(VRequired):
if url == 'self':
if self.allow_self:
return url
+ elif not self.lookup:
+ return url
elif url:
try:
l = Link._by_url(url, sr)
@@ -786,7 +804,7 @@ class VExistingUname(VRequired):
if name and name.startswith('~') and c.user_is_admin:
try:
user_id = int(name[1:])
- return Account._byID(user_id)
+ return Account._byID(user_id, True)
except (NotFound, ValueError):
return self.error(errors.USER_DOESNT_EXIST)
@@ -868,16 +886,18 @@ class VFloat(VNumber):
return float(val)
class VBid(VNumber):
- def __init__(self, bid, link_id):
+ def __init__(self, bid, link_id, sr):
self.duration = 1
- VNumber.__init__(self, (bid, link_id), min = g.min_promote_bid,
+ VNumber.__init__(self, (bid, link_id, sr),
+ # targeting is a little more expensive
+ min = g.min_promote_bid,
max = g.max_promote_bid, coerce = False,
error = errors.BAD_BID)
def cast(self, val):
return float(val)/self.duration
- def run(self, bid, link_id):
+ def run(self, bid, link_id, sr = None):
if link_id:
try:
link = Thing._by_fullname(link_id, return_dict = False,
@@ -886,7 +906,14 @@ class VBid(VNumber):
except NotFound:
pass
if VNumber.run(self, bid):
- return float(bid)
+ if sr:
+ if self.cast(bid) >= self.min * 1.5:
+ return float(bid)
+ else:
+ self.set_error(self.error, msg_params = dict(min=self.min * 1.5,
+ max=self.max))
+ else:
+ return float(bid)
@@ -953,7 +980,7 @@ class VRatelimit(Validator):
expire_time = max(r.values())
time = utils.timeuntil(expire_time)
- print "rate-limiting %s from %s" % (self.prefix, r.keys())
+ g.log.debug("rate-limiting %s from %s" % (self.prefix, r.keys()))
# when errors have associated field parameters, we'll need
# to add that here
@@ -974,8 +1001,7 @@ class VRatelimit(Validator):
to_set['user' + str(c.user._id36)] = expire_time
if rate_ip:
to_set['ip' + str(request.ip)] = expire_time
-
- g.cache.set_multi(to_set, prefix, time = seconds)
+ g.cache.set_multi(to_set, prefix = prefix, time = seconds)
class VCommentIDs(Validator):
#id_str is a comma separated list of id36's
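
Aside: VSrCanBan switching from True to the strings 'admin'/'mod' works because non-empty strings are truthy, so existing boolean checks keep passing while new callers can branch on the privilege level. A toy illustration of that design choice:

def can_ban_level(user_is_admin, is_moderator):
    # non-empty strings are truthy, so `if validator_result:` still works
    if user_is_admin:
        return 'admin'
    if is_moderator:
        return 'mod'
    return None

level = can_ban_level(False, True)
assert level             # old boolean-style checks are unaffected
assert level == 'mod'    # new callers can distinguish admin from mod
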
diff --git a/r2/r2/lib/app_globals.py b/r2/r2/lib/app_globals.py
index 34ea36f92..3e926ac74 100644
--- a/r2/r2/lib/app_globals.py
+++ b/r2/r2/lib/app_globals.py
@@ -24,7 +24,7 @@ from pylons import config
import pytz, os, logging, sys, socket, re, subprocess, random
from datetime import timedelta, datetime
from r2.lib.cache import LocalCache, SelfEmptyingCache
-from r2.lib.cache import PyMemcache, CMemcache
+from r2.lib.cache import CMemcache
from r2.lib.cache import HardCache, MemcacheChain, MemcacheChain, HardcacheChain
from r2.lib.cache import CassandraCache, CassandraCacheChain
from r2.lib.db.stats import QueryStats
@@ -45,6 +45,7 @@ class Globals(object):
'HOT_PAGE_AGE',
'MODWINDOW',
'RATELIMIT',
+ 'QUOTA_THRESHOLD',
'num_comments',
'max_comments',
'num_default_reddits',
@@ -77,6 +78,8 @@ class Globals(object):
tuple_props = ['memcaches',
'rec_cache',
'rendercaches',
+ 'local_rendercache',
+ 'servicecaches',
'permacache_memcaches',
'cassandra_seeds',
'permacaches',
@@ -135,40 +138,55 @@ class Globals(object):
localcache_cls = (SelfEmptyingCache if self.running_as_script
else LocalCache)
- num_mc_clients = 2 if self.running_as_script else 10
+ num_mc_clients = 2 # if self.running_as_script else 10
- py_mc = PyMemcache(self.memcaches)
- c_mc = CMemcache(self.memcaches, num_clients = num_mc_clients)
- rmc = CMemcache(self.rendercaches, num_clients = num_mc_clients)
+ c_mc = CMemcache(self.memcaches, num_clients = num_mc_clients, legacy=True)
+ rmc = CMemcache(self.rendercaches, num_clients = num_mc_clients,
+ noreply=True, no_block=True)
+ lrmc = None
+ if self.local_rendercache:
+ lrmc = CMemcache(self.local_rendercache,
+ num_clients = num_mc_clients,
+ noreply=True, no_block=True)
+ smc = CMemcache(self.servicecaches, num_clients = num_mc_clients)
rec_cache = None # we're not using this for now
pmc_chain = (localcache_cls(),)
if self.permacache_memcaches:
- pmc_chain += (PyMemcache(self.permacache_memcaches),)
+ pmc_chain += (CMemcache(self.permacache_memcaches,
+ num_clients=num_mc_clients,
+ legacy=True),)
if self.cassandra_seeds:
self.cassandra_seeds = list(self.cassandra_seeds)
random.shuffle(self.cassandra_seeds)
pmc_chain += (CassandraCache('permacache', 'permacache',
self.cassandra_seeds),)
if self.permacaches:
- pmc_chain += (PyMemcache(self.permacaches),)
+ pmc_chain += (CMemcache(self.permacaches,
+ num_clients=num_mc_clients,
+ legacy=True),)
if len(pmc_chain) == 1:
print 'Warning: proceeding without a permacache'
-
- self.permacache = CassandraCacheChain(pmc_chain)
+
+ self.permacache = CassandraCacheChain(pmc_chain, cache_negative_results = True)
# hardcache is done after the db info is loaded, and then the
# chains are reset to use the appropriate initial entries
- self.memcache = py_mc # we'll keep using this one for locks
- # intermediately
+ self.memcache = c_mc # we'll keep using this one for locks
+ # intermediately
- self.cache = MemcacheChain((localcache_cls(), py_mc))
- self.rendercache = MemcacheChain((localcache_cls(), rmc))
+ self.cache = MemcacheChain((localcache_cls(), c_mc))
+ if lrmc:
+ self.rendercache = MemcacheChain((localcache_cls(), lrmc, rmc))
+ else:
+ self.rendercache = MemcacheChain((localcache_cls(), rmc))
+ self.servicecache = MemcacheChain((localcache_cls(), smc))
self.rec_cache = rec_cache
self.make_lock = make_lock_factory(self.memcache)
- cache_chains = [self.cache, self.permacache, self.rendercache]
+ cache_chains = [self.cache, self.permacache, self.rendercache,
+ self.servicecache]
# set default time zone if one is not set
tz = global_conf.get('timezone')
@@ -181,7 +199,7 @@ class Globals(object):
self.dbm = self.load_db_params(global_conf)
# can't do this until load_db_params() has been called
- self.hardcache = HardcacheChain((localcache_cls(), py_mc,
+ self.hardcache = HardcacheChain((localcache_cls(), c_mc,
HardCache(self)),
cache_negative_results = True)
cache_chains.append(self.hardcache)
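
Aside: every chain built here (cache, rendercache, servicecache, hardcache) consults its members nearest-first, which is why the optional local_rendercache tier slots in between the in-process cache and the shared rendercache. A minimal sketch of that nearest-first lookup, using plain dicts as stand-ins for the cache clients:

def chain_get(chain, key):
    # return the first hit, checking the cheapest cache first
    for cache in chain:
        value = cache.get(key)
        if value is not None:
            return value
    return None

local, shared = {}, {'front_page': '<html>...</html>'}
assert chain_get([local, shared], 'front_page') == '<html>...</html>'
assert chain_get([local, shared], 'missing') is None
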
diff --git a/r2/r2/lib/authorize/interaction.py b/r2/r2/lib/authorize/interaction.py
index dd7eef124..093e6aa5a 100644
--- a/r2/r2/lib/authorize/interaction.py
+++ b/r2/r2/lib/authorize/interaction.py
@@ -99,7 +99,7 @@ def _make_transaction(trans_cls, amount, user, pay_id,
return req.make_request()
-def auth_transaction(amount, user, payid, thing, test = None):
+def auth_transaction(amount, user, payid, thing, campaign, test = None):
# use negative pay_ids to identify freebies, coupons, or anything
# that doesn't require a CC.
if payid < 0:
@@ -107,12 +107,13 @@ def auth_transaction(amount, user, payid, thing, test = None):
# update previous freebie transactions if we can
try:
bid = Bid.one(thing_id = thing._id,
- pay_id = payid)
+ transaction = trans_id,
+ campaign = campaign)
bid.bid = amount
bid.auth()
except NotFound:
- bid = Bid._new(trans_id, user, payid, thing._id, amount)
- return bid.transaction
+ bid = Bid._new(trans_id, user, payid, thing._id, amount, campaign)
+ return bid.transaction, ""
elif int(payid) in PayID.get_ids(user):
order = Order(invoiceNumber = "%dT%d" % (user._id, thing._id))
@@ -120,8 +121,12 @@ def auth_transaction(amount, user, payid, thing, test = None):
amount, user, payid,
order = order, test = test)
if success:
- Bid._new(res.trans_id, user, payid, thing._id, amount)
- return res.trans_id
+ if test:
+ return auth_transaction(amount, user, -1, thing, campaign,
+ test = test)
+ else:
+ Bid._new(res.trans_id, user, payid, thing._id, amount, campaign)
+ return res.trans_id, ""
elif res is None:
# we are in test mode!
return auth_transaction(amount, user, -1, thing, test = test)
@@ -132,45 +137,62 @@ def auth_transaction(amount, user, payid, thing, test = None):
Bid.one(res.trans_id)
except NotFound:
Bid._new(res.trans_id, user, payid, thing._id, amount)
- return res.trans_id
+ return res.trans_id, res.response_reason_text
-def void_transaction(user, trans_id, test = None):
- bid = Bid.one(trans_id)
+def void_transaction(user, trans_id, campaign, test = None):
+ bid = Bid.one(transaction = trans_id, campaign = campaign)
bid.void()
- # verify that the transaction has the correct ownership
- if bid.account_id == user._id and trans_id > 0:
+ if trans_id > 0:
res = _make_transaction(ProfileTransVoid,
None, user, None, trans_id = trans_id,
test = test)
return res
-def charge_transaction(user, trans_id, test = None):
- bid = Bid.one(trans_id)
- bid.charged()
- if trans_id < 0:
- # freebies are automatically approved
- return True
- elif bid.account_id == user._id:
- res = _make_transaction(ProfileTransPriorAuthCapture,
- bid.bid, user,
- bid.pay_id, trans_id = trans_id,
- test = test)
- return bool(res)
+def is_charged_transaction(trans_id, campaign):
+ bid = Bid.one(transaction = trans_id, campaign = campaign)
+ return bid.is_charged()
+
+def charge_transaction(user, trans_id, campaign, test = None):
+ bid = Bid.one(transaction = trans_id, campaign = campaign)
+ if not bid.is_charged():
+ bid.charged()
+ if trans_id < 0:
+ # freebies are automatically authorized
+ return True
+ elif bid.account_id == user._id:
+ res = _make_transaction(ProfileTransPriorAuthCapture,
+ bid.bid, user,
+ bid.pay_id, trans_id = trans_id,
+ test = test)
+ return bool(res)
+
+ # already charged
+ return True
-def refund_transaction(amount, user, trans_id, test = None):
- bid = Bid.one(trans_id)
- if trans_id > 0:
- # create a new bid to identify the refund
- success, res = _make_transaction(ProfileTransRefund,
- amount, user, bid.pay_id,
- trans_id = trans_id,
- test = test)
- if success:
- bid = Bid._new(res.trans_id, user, -1, bid.thing_id, amount)
- bid.refund()
- return bool(res.trans_id)
+#def refund_transaction(amount, user, trans_id, campaign, test = None):
+# bid = Bid.one(transaction = trans_id, campaign = campaign)
+# if trans_id > 0:
+# # create a new bid to identify the refund
+# success, res = _make_transaction(ProfileTransRefund,
+# amount, user, bid.pay_id,
+# trans_id = trans_id,
+# test = test)
+# if success:
+# bid = Bid._new(res.trans_id, user, -1, bid.thing_id, amount)
+# bid.refund()
+# return bool(res.trans_id)
+def get_transactions(*trans_keys):
+ from sqlalchemy import and_, or_
+
+ if trans_keys:
+ f = or_(*[and_(Bid.transaction == trans_id, Bid.campaign == camp)
+ for trans_id, camp in trans_keys])
+ q = Bid.query()
+ q = q.filter(f)
+ return dict(((p.transaction, p.campaign), p) for p in q)
+ return {}
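
Aside: get_transactions() needs the OR-of-ANDs filter because a transaction id alone no longer identifies a bid once bids are keyed by (transaction, campaign). A pure-Python equivalent of that keying, with plain dicts standing in for Bid rows:

def get_transactions_eq(bids, trans_keys):
    # a bid matches iff its (transaction, campaign) pair was requested,
    # which is what the or_(and_(...)) SQLAlchemy filter expresses
    wanted = set(trans_keys)
    return dict(((b['transaction'], b['campaign']), b)
                for b in bids
                if (b['transaction'], b['campaign']) in wanted)

bids = [dict(transaction=101, campaign=0),
        dict(transaction=101, campaign=1),
        dict(transaction=202, campaign=0)]
found = get_transactions_eq(bids, [(101, 1), (202, 0)])
assert sorted(found) == [(101, 1), (202, 0)]
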
diff --git a/r2/r2/lib/cache.py b/r2/r2/lib/cache.py
index b6e24f625..13c7b30bb 100644
--- a/r2/r2/lib/cache.py
+++ b/r2/r2/lib/cache.py
@@ -29,8 +29,8 @@ from _pylibmc import MemcachedError
import pycassa
import cassandra.ttypes
-from contrib import memcache
-from utils import lstrips, in_chunks, tup
+from r2.lib.contrib import memcache
+from r2.lib.utils import lstrips, in_chunks, tup
from r2.lib.hardcachebackend import HardCacheBackend
from r2.lib.utils import trace
@@ -71,7 +71,6 @@ class PyMemcache(CacheUtils, memcache.Client):
memcache.Client.__init__(self, servers, pickleProtocol = 1)
def set_multi(self, keys, prefix='', time=0):
-
new_keys = {}
for k,v in keys.iteritems():
new_keys[str(k)] = v
@@ -97,24 +96,67 @@ class CMemcache(CacheUtils):
def __init__(self,
servers,
debug = False,
- binary = True,
noreply = False,
- num_clients = 10):
+ no_block = False,
+ num_clients = 10,
+ legacy = False):
self.servers = servers
+ self.legacy = legacy
self.clients = pylibmc.ClientPool(n_slots = num_clients)
for x in xrange(num_clients):
- client = pylibmc.Client(servers, binary=binary)
+ client = pylibmc.Client(servers, binary=not self.legacy)
behaviors = {
- 'no_block': True, # use async I/O
+ 'no_block': no_block, # use async I/O
'cache_lookups': True, # cache DNS lookups
'tcp_nodelay': True, # no nagle
- 'ketama': True, # consistant hashing
'_noreply': int(noreply),
'verify_key': int(debug), # spend the CPU to verify keys
}
+
+ # so that we can drop this in to share keys with
+ # python-memcached, we have a 'legacy' mode that MD5s the
+ # keys and uses the old CRC32-modulo mapping
+ if self.legacy:
+ behaviors['hash'] = 'crc'
+ else:
+ behaviors['ketama'] = True # consistent hashing
+
client.behaviors.update(behaviors)
self.clients.put(client)
+ self.min_compress_len = 512*1024
+
+ def hashkey(fn):
+ # decorator to MD5 the key for a single-action command when
+ # self.legacy is set
+ def _fn(self, key, *a, **kw):
+ if self.legacy:
+ key = md5(key).hexdigest()
+ return fn(self, key, *a, **kw)
+ return _fn
+
+ def hashmap(fn):
+ # same as hashkey, but for _multi commands taking a dictionary
+ def _fn(self, keys, *a, **kw):
+ if self.legacy:
+ prefix = kw.pop('prefix', '')
+ keys = dict((md5('%s%s' % (prefix, key)).hexdigest(), value)
+ for (key, value)
+ in keys.iteritems())
+ return fn(self, keys, *a, **kw)
+ return _fn
+
+ def hashkeys(fn):
+ # same as hashkey, but for _multi commands taking a list
+ def _fn(self, keys, *a, **kw):
+ if self.legacy:
+ prefix = kw.pop('prefix', '')
+ keys = [md5('%s%s' % (prefix, key)).hexdigest()
+ for key in keys]
+ return fn(self, keys, *a, **kw)
+ return _fn
+
+ @hashkey
def get(self, key, default = None):
with self.clients.reserve() as mc:
ret = mc.get(key)
@@ -123,8 +165,22 @@ class CMemcache(CacheUtils):
return ret
def get_multi(self, keys, prefix = ''):
+ if self.legacy:
+ # get_multi can't use @hashkeys for md5ing the keys
+ # because the returned keys have to be mapped back too
+ keymap = dict((md5('%s%s' % (prefix, key)).hexdigest(), key)
+ for key in keys)
+ prefix = ''
+ else:
+ keymap = dict((str(key), key)
+ for key in keys)
+
with self.clients.reserve() as mc:
- return mc.get_multi(keys, key_prefix = prefix)
+ ret = mc.get_multi(keymap.keys(), key_prefix = prefix)
+
+ return dict((keymap[retkey], retval)
+ for (retkey, retval)
+ in ret.iteritems())
# simple_get_multi exists so that a cache chain can
# single-instance the handling of prefixes for performance, but
@@ -134,27 +190,50 @@ class CMemcache(CacheUtils):
# them, so here it is
simple_get_multi = get_multi
+ @hashkey
def set(self, key, val, time = 0):
with self.clients.reserve() as mc:
- return mc.set(key, val, time = time)
+ return mc.set(key, val, time = time,
+ min_compress_len = self.min_compress_len)
+ @hashmap
def set_multi(self, keys, prefix='', time=0):
new_keys = {}
for k,v in keys.iteritems():
new_keys[str(k)] = v
with self.clients.reserve() as mc:
return mc.set_multi(new_keys, key_prefix = prefix,
+ time = time,
+ min_compress_len = self.min_compress_len)
+
+ @hashmap
+ def add_multi(self, keys, prefix='', time=0):
+ new_keys = {}
+ for k,v in keys.iteritems():
+ new_keys[str(k)] = v
+ with self.clients.reserve() as mc:
+ return mc.add_multi(new_keys, key_prefix = prefix,
time = time)
+ @hashkeys
+ def incr_multi(self, keys, prefix='', delta=1):
+ with self.clients.reserve() as mc:
+ return mc.incr_multi(map(str, keys),
+ key_prefix = prefix,
+ delta=delta)
+
+ @hashkey
def append(self, key, val, time=0):
with self.clients.reserve() as mc:
return mc.append(key, val, time=time)
+ @hashkey
def incr(self, key, delta=1, time=0):
# ignore the time on these
with self.clients.reserve() as mc:
return mc.incr(key, delta)
+ @hashkey
def add(self, key, val, time=0):
try:
with self.clients.reserve() as mc:
@@ -162,10 +241,12 @@ class CMemcache(CacheUtils):
except pylibmc.DataExists:
return None
+ @hashkey
def delete(self, key, time=0):
with self.clients.reserve() as mc:
return mc.delete(key)
+ @hashkeys
def delete_multi(self, keys, prefix='', time=0):
with self.clients.reserve() as mc:
return mc.delete_multi(keys, time = time,
@@ -330,7 +411,9 @@ class CacheChain(CacheUtils, local):
replace = make_set_fn('replace')
set_multi = make_set_fn('set_multi')
add = make_set_fn('add')
+ add_multi = make_set_fn('add_multi')
incr = make_set_fn('incr')
+ incr_multi = make_set_fn('incr_multi')
decr = make_set_fn('decr')
delete = make_set_fn('delete')
delete_multi = make_set_fn('delete_multi')
@@ -539,7 +622,7 @@ def sgm(cache, keys, miss_fn, prefix='', time=0):
nr = miss_fn([s_keys[i] for i in need])
nr = dict((str(k), v) for k,v in nr.iteritems())
r.update(nr)
- cache.set_multi(nr, prefix, time = time)
+ cache.set_multi(nr, prefix=prefix, time = time)
return dict((s_keys[k], v) for k,v in r.iteritems())
@@ -563,6 +646,21 @@ def test_cache(cache, prefix=''):
assert cache.get_multi(('%sp_3' % prefix, '%sp_4' % prefix)) == {'%sp_3'%prefix: 3,
'%sp_4'%prefix: 4}
+ # delete
+ cache.set('%s1'%prefix, 1)
+ assert cache.get('%s1'%prefix) == 1
+ cache.delete('%s1'%prefix)
+ assert cache.get('%s1'%prefix) is None
+
+ cache.set('%s1'%prefix, 1)
+ cache.set('%s2'%prefix, 2)
+ cache.set('%s3'%prefix, 3)
+ assert cache.get('%s1'%prefix) == 1 and cache.get('%s2'%prefix) == 2
+ cache.delete_multi(['%s1'%prefix, '%s2'%prefix])
+ assert (cache.get('%s1'%prefix) is None
+ and cache.get('%s2'%prefix) is None
+ and cache.get('%s3'%prefix) == 3)
+
#incr
cache.set('%s5'%prefix, 1)
cache.set('%s6'%prefix, 1)
@@ -574,6 +672,72 @@ def test_cache(cache, prefix=''):
assert cache.get('%s5'%prefix) == 5
assert cache.get('%s6'%prefix) == 2
+def test_cache_compat(caches, prefix=''):
+ """Test that that the keyspaces of a list of cache objects are
+ compatible. Used to compare legacy-mode CMemcache to
+ PyMemcache"""
+ import random
+
+ def _gen():
+ keys = dict((str(random.random()), x) for x in xrange(1000))
+ prefixed = dict(('%s%s' % (prefix, key), value)
+ for (key, value)
+ in keys.iteritems())
+ return keys, prefixed
+
+ for cache in caches:
+ print 'with source == %r' % (cache,)
+
+ # single-ops
+ keys, prefixed = _gen()
+ for key, val in prefixed.iteritems():
+ for ocache in caches:
+ cache.set(key, val)
+ assert ocache.get(key) == val
+
+ # double-prefixed
+ ocache.set('%s%s' % (prefix, key), val)
+ assert cache.get('%s%s' % (prefix, key)) == val
+
+ # pre-prefixed
+ keys, prefixed = _gen()
+ cache.set_multi(prefixed)
+ for ocache in caches:
+ assert ocache.get_multi(prefixed.keys()) == prefixed
+ for key, val in prefixed.iteritems():
+ assert ocache.get(key) == val
+
+ # prefixed in place
+ keys, prefixed = _gen()
+ cache.set_multi(keys, prefix=prefix)
+ for ocache in caches:
+ assert ocache.get_multi(keys.keys()) == keys
+ for key, val in prefixed.iteritems():
+ assert ocache.get(key) == val
+
+ # prefixed on set
+ keys, prefixed = _gen()
+ cache.set_multi(keys, prefix=prefix)
+ for ocache in caches:
+ assert ocache.get_multi(prefixed.keys()) == keys
+ for key, val in prefixed.iteritems():
+ assert ocache.get(key) == val
+
+ # prefixed on get
+ keys, prefixed = _gen()
+ cache.set_multi(prefixed)
+ for ocache in caches:
+ assert ocache.get_multi(keys.keys(), prefix=prefix) == keys
+ for key, val in prefixed.iteritems():
+ assert ocache.get(key) == val
+
+ #delete
+ keys, prefixed = _gen()
+ for ocache in caches:
+ cache.set_multi(prefixed)
+ ocache.delete_multi(prefixed.keys())
+ assert cache.get_multi(prefixed.keys()) == {}
+
def test_multi(cache):
from threading import Thread
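
Aside: the point of legacy mode is keyspace compatibility with python-memcached, which is why the prefix is folded into the key before the MD5. A short sketch of that property, under the same hashing scheme as the decorators above:

from hashlib import md5

def legacy_key(key, prefix=''):
    # the prefix is applied before hashing, so set_multi({k: v}, prefix=p)
    # and set(p + k, v) address the same md5'd cache slot
    return md5(('%s%s' % (prefix, key)).encode()).hexdigest()

assert legacy_key('thing_42', prefix='link_') == legacy_key('link_thing_42')
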
diff --git a/r2/r2/lib/contrib/memcache.py b/r2/r2/lib/contrib/memcache.py
index 30bc52ad0..93b4114ca 100755
--- a/r2/r2/lib/contrib/memcache.py
+++ b/r2/r2/lib/contrib/memcache.py
@@ -47,7 +47,7 @@ import sys
import socket
import time
import os
-from md5 import md5
+from hashlib import md5
import re
import types
try:
@@ -69,9 +69,6 @@ try:
except ImportError:
from StringIO import StringIO
-from binascii import crc32 # zlib version is not cross-platform
-serverHashFunction = crc32
-
__author__ = "Evan Martin "
__version__ = "1.43"
__copyright__ = "Copyright (C) 2003 Danga Interactive"
@@ -94,6 +91,97 @@ except ImportError:
class local(object):
pass
+# stolen wholesale from cmemcache_hash
+# so that we're using the
+# same hashing algorithm as pylibmc's 'crc'
+crc32tab = (
+ 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba,
+ 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3,
+ 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988,
+ 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91,
+ 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
+ 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,
+ 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec,
+ 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5,
+ 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172,
+ 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
+ 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940,
+ 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,
+ 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116,
+ 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f,
+ 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
+ 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d,
+ 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a,
+ 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,
+ 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818,
+ 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
+ 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,
+ 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457,
+ 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c,
+ 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,
+ 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
+ 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb,
+ 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0,
+ 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9,
+ 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086,
+ 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
+ 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4,
+ 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad,
+ 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a,
+ 0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683,
+ 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
+ 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,
+ 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe,
+ 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7,
+ 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc,
+ 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
+ 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252,
+ 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,
+ 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60,
+ 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79,
+ 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
+ 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f,
+ 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04,
+ 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,
+ 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a,
+ 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
+ 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38,
+ 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21,
+ 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e,
+ 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,
+ 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
+ 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45,
+ 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2,
+ 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db,
+ 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0,
+ 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
+ 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6,
+ 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf,
+ 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,
+ 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d
+)
+def serverHashFunction(key):
+ r"""Calculate a cmemcache-style CRC32 hash of *key*.
+
+ Note that unlike cmemcache's version, this hashes the key even if
+ only one server exists, mostly because we don't layer-violate enough to
+ know how many servers there are or aren't.
+
+ >>> serverHashFunction("Hello world")
+ 3030
+ >>> serverHashFunction("Hello worle")
+ 31953
+ >>> serverHashFunction("")
+ 1
+ """
+ crc = ~0
+
+ for c in key:
+ crc = ((crc & 0xffffffff) >> 8) ^ crc32tab[(crc ^ ord(c)) & 0xff]
+
+ crc = int((~crc >> 16) & 0x7fff)
+
+ return crc or 1
class Client(local):
"""
@@ -230,10 +318,7 @@ class Client(local):
self.buckets.append(server)
def _get_server(self, key):
- if type(key) == types.TupleType:
- serverhash, key = key
- else:
- serverhash = serverHashFunction(key)
+ serverhash = serverHashFunction(key)
# the original version suffered from a failure rate of
# 1 in n^_SERVER_RETRIES, where n = number of buckets
@@ -295,7 +380,8 @@ class Client(local):
server_keys, prefixed_to_orig_key = self._map_and_prefix_keys(keys, key_prefix)
- # send out all requests on each server before reading anything
+ # send out all requests on each server before reading
+ # anything. can this deadlock if the return buffer fills up?
dead_servers = []
rc = 1
@@ -338,8 +424,8 @@ class Client(local):
@param time: number of seconds any subsequent set / update commands should fail. Defaults to 0 for no delay.
@rtype: int
'''
- server, key = self._get_server(key)
key = check_key(key)
+ server, key = self._get_server(key)
if not server:
return 0
self._statlog('delete')
@@ -397,8 +483,8 @@ class Client(local):
return self._incrdecr("decr", key, delta)
def _incrdecr(self, cmd, key, delta):
- server, key = self._get_server(key)
key = check_key(key)
+ server, key = self._get_server(key)
if not server:
return 0
self._statlog(cmd)
@@ -501,26 +587,14 @@ class Client(local):
prefixed_to_orig_key = {}
# build up a list for each server of all the keys we want.
for orig_key in key_iterable:
- if type(orig_key) is types.TupleType:
- # Tuple of hashvalue, key ala _get_server(). Caller is essentially telling us what server to stuff this on.
- # Ensure call to _get_server gets a Tuple as well.
- str_orig_key = str(orig_key[1])
- server, key = self._get_server((orig_key[0], key_prefix + str_orig_key)) # Gotta pre-mangle key before hashing to a server. Returns the mangled key.
- else:
- str_orig_key = str(orig_key) # set_multi supports int / long keys.
- server, key = self._get_server(key_prefix + str_orig_key)
-
- # Now check to make sure key length is proper ...
- #changed by steve
- #check_key(str_orig_key, key_extra_len=key_extra_len)
- key = check_key(key_prefix + str_orig_key)
+ key = '%s%s' % (key_prefix, orig_key)
+ key = check_key(key)
+ server, key = self._get_server(key)
if not server:
continue
- if not server_keys.has_key(server):
- server_keys[server] = []
- server_keys[server].append(key)
+ server_keys.setdefault(server, []).append(key)
prefixed_to_orig_key[key] = orig_key
return (server_keys, prefixed_to_orig_key)
@@ -653,8 +727,8 @@ class Client(local):
return (flags, len(val), val)
def _set(self, cmd, key, val, time, min_compress_len = 0):
- server, key = self._get_server(key)
key = check_key(key)
+ server, key = self._get_server(key)
if not server:
return 0
@@ -678,8 +752,8 @@ class Client(local):
@return: The value or None.
'''
- server, key = self._get_server(key)
key = check_key(key)
+ server, key = self._get_server(key)
if not server:
return None
@@ -960,7 +1034,6 @@ def check_key(key, key_extra_len=0):
Contains control characters (Raises MemcachedKeyCharacterError).
Is not a string (Raises MemcachedStringEncodingError)
"""
- if type(key) == types.TupleType: key = key[1]
if not isinstance(key, str):
raise Client.MemcachedStringEncodingError, ("Keys must be str()'s, not"
"unicode. Convert your unicode strings using "
diff --git a/r2/r2/lib/cssfilter.py b/r2/r2/lib/cssfilter.py
index 1df364713..6f6c997a1 100644
--- a/r2/r2/lib/cssfilter.py
+++ b/r2/r2/lib/cssfilter.py
@@ -30,6 +30,7 @@ from pylons import g, c
from pylons.i18n import _
from mako import filters
+import os
import tempfile
from r2.lib import s3cp
from md5 import md5
@@ -192,7 +193,7 @@ def valid_url(prop,value,report):
# the label -> image number lookup is stored on the subreddit
if c.site.images.has_key(name):
num = c.site.images[name]
- value._setCssText("url(http:/%s%s_%d.png?v=%s)"
+ value._setCssText("url(http://%s/%s_%d.png?v=%s)"
% (g.s3_thumb_bucket, c.site._fullname, num,
randstr(36)))
else:
@@ -390,29 +391,32 @@ def save_sr_image(sr, data, resource = None):
"""
uploads image data to s3 as a PNG and returns its new url. Urls
will be of the form:
- http:/${g.s3_thumb_bucket}/${sr._fullname}[_${num}].png?v=${md5hash}
+ http://${g.s3_thumb_bucket}/${sr._fullname}[_${num}].png?v=${md5hash}
- [Note: g.s3_thumb_bucket begins with a "/" so the above url is valid.]
"""
hash = md5(data).hexdigest()
+ f = tempfile.NamedTemporaryFile(suffix = '.png',delete=False)
try:
- f = tempfile.NamedTemporaryFile(suffix = '.png')
f.write(data)
- f.flush()
+ f.close()
+
+ optimize_png(f.name, g.png_optimizer)
+ contents = open(f.name).read()
if resource is not None:
resource = "_%s" % resource
else:
resource = ""
- resource = g.s3_thumb_bucket + sr._fullname + resource + ".png"
+ fname = resource = sr._fullname + resource + ".png"
+
+ s3cp.send_file(g.s3_thumb_bucket, fname, contents, 'image/png')
- s3cp.send_file(optimize_png(f.name, g.png_optimizer), resource,
- 'image/png', 'public-read', None, False)
finally:
- f.close()
+ os.unlink(f.name)
- return 'http:/%s%s?v=%s' % (g.s3_thumb_bucket,
- resource.split('/')[-1], hash)
+ return 'http://%s/%s?v=%s' % (g.s3_thumb_bucket,
+ resource.split('/')[-1], hash)
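
Aside: both this hunk and the upload_thumb one further down move to NamedTemporaryFile(delete=False) so the file can be closed, handed to the external PNG optimizer by name, reopened, and then unlinked by hand. The pattern in isolation, with `process` standing in for optimize_png:

import os
import tempfile

def process_via_tempfile(data, process):
    # write and close first so an external tool can reopen the file by
    # name, then always clean up, mirroring save_sr_image()'s try/finally
    f = tempfile.NamedTemporaryFile(suffix='.png', delete=False)
    try:
        f.write(data)
        f.close()
        process(f.name)  # e.g. optimize_png(f.name, g.png_optimizer)
        return open(f.name, 'rb').read()
    finally:
        os.unlink(f.name)

assert process_via_tempfile(b'not-a-real-png', lambda path: None) == b'not-a-real-png'
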
diff --git a/r2/r2/lib/db/queries.py b/r2/r2/lib/db/queries.py
index 12a677f73..b4d2bf0c9 100644
--- a/r2/r2/lib/db/queries.py
+++ b/r2/r2/lib/db/queries.py
@@ -1,5 +1,5 @@
-from r2.models import Account, Link, Comment, Vote, SaveHide
-from r2.models import Message, Inbox, Subreddit, ModeratorInbox
+from r2.models import Account, Link, Comment, Trial, Vote, SaveHide
+from r2.models import Message, Inbox, Subreddit, ModContribSR, ModeratorInbox
from r2.lib.db.thing import Thing, Merge
from r2.lib.db.operators import asc, desc, timeago
from r2.lib.db import query_queue
@@ -296,7 +296,9 @@ def make_results(query, filter = filter_identity):
return query
def merge_results(*results):
- if g.use_query_cache:
+ if not results:
+ return QueryishList([])
+ elif g.use_query_cache:
return MergedCachedResults(results)
else:
m = Merge(results, sort = results[0]._sort)
@@ -342,7 +344,12 @@ def get_spam_comments(sr):
return make_results(q_c)
def get_spam(sr):
- return get_spam_links(sr)
+ if isinstance(sr, ModContribSR):
+ srs = Subreddit._byID(sr.sr_ids(), return_dict=False)
+ results = [ get_spam_links(sr) for sr in srs ]
+ return merge_results(*results)
+ else:
+ return get_spam_links(sr)
#return merge_results(get_spam_links(sr),
# get_spam_comments(sr))
@@ -361,10 +368,79 @@ def get_reported_comments(sr):
return make_results(q_c)
def get_reported(sr):
- return get_reported_links(sr)
+ if isinstance(sr, ModContribSR):
+ srs = Subreddit._byID(sr.sr_ids(), return_dict=False)
+ results = [ get_reported_links(sr) for sr in srs ]
+ return merge_results(*results)
+ else:
+ return get_reported_links(sr)
#return merge_results(get_reported_links(sr),
# get_reported_comments(sr))
+# TODO: Wow, what a hack. I'm doing this in a hurry to make
+# /r/blah/about/trials and /r/blah/about/modqueue work. At some point
+# before the heat death of the universe, we should start precomputing
+# these things instead. That would require an "on_trial" attribute to be
+# maintained on Links, a precomputer that keeps track of such links,
+# and changes to:
+# trial_utils.py: trial_info(), end_trial(), indict()
+# trial.py: all_defendants_cache()
+class QueryishList(list):
+ prewrap_fn = None
+ _rules = None
+ _sort = None
+
+ @property
+ def sort(self):
+ return self._sort
+
+ def _cursor(self):
+ return self
+
+ def _filter(self):
+ return True
+
+ @property
+ def data(self):
+ return [ (t._fullname, 2145945600) for t in self ]
+ # Jan 1 2038 ^^^^^^^^^^
+ # so that trials show up before spam and reports
+
+ def fetchone(self):
+ if self:
+ return self.pop(0)
+ else:
+ raise StopIteration
+
+def get_trials_links(sr):
+ l = Trial.defendants_by_sr(sr)
+ s = QueryishList(l)
+ s._sort = [db_sort('new')]
+ return s
+
+def get_trials(sr):
+ if isinstance(sr, ModContribSR):
+ srs = Subreddit._byID(sr.sr_ids(), return_dict=False)
+ return get_trials_links(srs)
+ else:
+ return get_trials_links(sr)
+
+def get_modqueue(sr):
+ results = []
+ if isinstance(sr, ModContribSR):
+ srs = Subreddit._byID(sr.sr_ids(), return_dict=False)
+ results.append(get_trials_links(srs))
+
+ for sr in srs:
+ results.append(get_reported_links(sr))
+ results.append(get_spam_links(sr))
+ else:
+ results.append(get_trials_links(sr))
+ results.append(get_reported_links(sr))
+ results.append(get_spam_links(sr))
+
+ return merge_results(*results)
+
def get_domain_links(domain, sort, time):
return DomainSearchQuery(domain, sort=search_sort[sort], timerange=time)
@@ -889,7 +965,7 @@ def run_commentstree():
def queue_vote(user, thing, dir, ip, organic = False,
cheater = False, store = True):
# set the vote in memcached so the UI gets updated immediately
- key = "registered_vote_%s_%s" % (user._id, thing._fullname)
+ key = prequeued_vote_key(user, thing)
g.cache.set(key, '1' if dir is True else '0' if dir is None else '-1')
# queue the vote to be stored unless told not to
if store:
@@ -900,19 +976,50 @@ def queue_vote(user, thing, dir, ip, organic = False,
else:
handle_vote(user, thing, dir, ip, organic)
+def prequeued_vote_key(user, item):
+ return 'registered_vote_%s_%s' % (user._id, item._fullname)
+
def get_likes(user, items):
if not user or not items:
return {}
keys = {}
res = {}
- for i in items:
- keys['registered_vote_%s_%s' % (user._id, i._fullname)] = (user, i)
+ keys = dict((prequeued_vote_key(user, item), (user,item))
+ for item in items)
r = g.cache.get_multi(keys.keys())
# populate the result set based on what we fetched from the cache first
for k, v in r.iteritems():
res[keys[k]] = v
+ # performance hack: if their last vote came in before this thing
+ # was created, they can't possibly have voted on it
+ cantexist = {}
+ for item in items:
+ if (user, item) in res:
+ continue
+
+ last_vote_attr_name = 'last_vote_' + item.__class__.__name__
+ last_vote = getattr(user, last_vote_attr_name, None)
+ if not last_vote:
+ continue
+
+ try:
+ if last_vote < item._date:
+ res[(user, item)] = '0'
+ cantexist[prequeued_vote_key(user, item)] = '0'
+ except TypeError:
+ g.log.error("user %s has a broken %s? (%r)"
+ % (user._id, last_vote_attr_name, last_vote))
+ # accounts for broken last_vote properties
+ pass
+
+ # this is a bit dodgy, but should save us from having to reload
+ # all of the votes on pages they've already loaded as soon as they
+ # cast a new vote
+ if cantexist:
+ g.cache.set_multi(cantexist)
+
# now hit the vote db with the remainder
likes = Vote.likes(user, [i for i in items if (user, i) not in res])
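
Aside: the get_likes() shortcut above rests on one invariant: a user whose last recorded vote predates an item's creation cannot have voted on it. Isolated as a sketch:

from datetime import datetime, timedelta

def cant_have_voted(last_vote, item_date):
    # no vote is possible on an item created after the user's last vote
    return last_vote is not None and last_vote < item_date

now = datetime.now()
assert cant_have_voted(now - timedelta(days=1), now)
assert not cant_have_voted(now, now - timedelta(days=1))
assert not cant_have_voted(None, now)  # no recorded vote: can't shortcut
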
diff --git a/r2/r2/lib/db/tdb_sql.py b/r2/r2/lib/db/tdb_sql.py
index 246b1ef13..6e61490dc 100644
--- a/r2/r2/lib/db/tdb_sql.py
+++ b/r2/r2/lib/db/tdb_sql.py
@@ -330,7 +330,7 @@ def get_read_table(tables):
ip_weights.extend((ip, .01) for ip in no_connections)
#rebalance the weights
- total_weight = sum(w[1] for w in ip_weights)
+ total_weight = sum(w[1] for w in ip_weights) or 1
ip_weights = [(ip, weight / total_weight)
for ip, weight in ip_weights]
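
Aside: the `or 1` guard matters when every read slave is down and all weights are zero; rebalancing then divides by 1 instead of raising ZeroDivisionError, and the zero weights pass through unchanged. In isolation:

def rebalance(ip_weights):
    # `or 1` avoids dividing by zero when no database has any weight
    total = sum(weight for _, weight in ip_weights) or 1
    return [(ip, weight / float(total)) for ip, weight in ip_weights]

assert rebalance([('10.0.0.1', 0.0), ('10.0.0.2', 0.0)]) == \
       [('10.0.0.1', 0.0), ('10.0.0.2', 0.0)]
assert rebalance([('10.0.0.1', 1.0), ('10.0.0.2', 3.0)]) == \
       [('10.0.0.1', 0.25), ('10.0.0.2', 0.75)]
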
diff --git a/r2/r2/lib/db/thing.py b/r2/r2/lib/db/thing.py
index d662b3a47..02de55bd8 100644
--- a/r2/r2/lib/db/thing.py
+++ b/r2/r2/lib/db/thing.py
@@ -264,7 +264,7 @@ class DataThing(object):
prefix = thing_prefix(cls.__name__)
#write the data to the cache
- cache.set_multi(to_save, prefix)
+ cache.set_multi(to_save, prefix=prefix)
def _load(self):
self._load_multi(self)
@@ -708,7 +708,7 @@ def Relation(type1, type2, denorm1 = None, denorm2 = None):
self._name = 'un' + self._name
@classmethod
- def _fast_query(cls, thing1s, thing2s, name, data=True):
+ def _fast_query(cls, thing1s, thing2s, name, data=True, eager_load=True):
"""looks up all the relationships between thing1_ids and thing2_ids
and caches them"""
prefix = thing_prefix(cls.__name__)
@@ -738,7 +738,7 @@ def Relation(type1, type2, denorm1 = None, denorm2 = None):
q = cls._query(cls.c._thing1_id == t1_ids,
cls.c._thing2_id == t2_ids,
cls.c._name == names,
- eager_load = True,
+ eager_load = eager_load,
data = data)
rel_ids = {}
@@ -747,7 +747,7 @@ def Relation(type1, type2, denorm1 = None, denorm2 = None):
#relations with the same keys
#l = rel_ids.setdefault((rel._thing1_id, rel._thing2_id), [])
#l.append(rel._id)
- rel_ids[(rel._thing1._id, rel._thing2._id, rel._name)] = rel._id
+ rel_ids[(rel._thing1_id, rel._thing2_id, rel._name)] = rel._id
for p in pairs:
if p not in rel_ids:
@@ -1056,6 +1056,7 @@ class MergeCursor(MultiCursor):
return [p for p in pairs if not p[2]]
pairs = undone(safe_next(c) for c in cursors)
+
while pairs:
#only one query left, just dump it
if len(pairs) == 1:
@@ -1193,7 +1194,7 @@ def MultiRelation(name, *relations):
return Merge(queries)
@classmethod
- def _fast_query(cls, sub, obj, name, data=True):
+ def _fast_query(cls, sub, obj, name, data=True, eager_load=True):
#divide into types
def type_dict(items):
types = {}
@@ -1210,7 +1211,7 @@ def MultiRelation(name, *relations):
t1, t2 = types
if sub_dict.has_key(t1) and obj_dict.has_key(t2):
res.update(rel._fast_query(sub_dict[t1], obj_dict[t2], name,
- data = True))
+ data = data, eager_load=eager_load))
return res
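
Aside: switching the rel_ids key from rel._thing1._id to rel._thing1_id matters once eager_load is optional: the raw id columns are always present, while the hydrated ._thing1 object is only there when eager loading ran. A toy model of the distinction:

class Rel(object):
    def __init__(self, thing1_id, thing2_id, name):
        self._thing1_id = thing1_id   # raw id: always available
        self._thing2_id = thing2_id
        self._name = name
        self._thing1 = None           # hydrated object: only if eager-loaded

rels = [Rel(1, 2, 'friend'), Rel(1, 3, 'friend')]
# keying on the raw ids never forces a load of the related things
rel_ids = dict(((r._thing1_id, r._thing2_id, r._name), r) for r in rels)
assert (1, 3, 'friend') in rel_ids
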
diff --git a/r2/r2/lib/db/userrel.py b/r2/r2/lib/db/userrel.py
index 32ef0dce4..d7431c5b2 100644
--- a/r2/r2/lib/db/userrel.py
+++ b/r2/r2/lib/db/userrel.py
@@ -67,7 +67,8 @@ def UserRel(name, relation, disable_ids_fn = False, disable_reverse_ids_fn = Fal
@memoize(ids_fn_name)
def userrel_ids(self):
q = relation._query(relation.c._thing1_id == self._id,
- relation.c._name == name)
+ relation.c._name == name,
+ sort = "_date")
#removed set() here, shouldn't be required
return [r._thing2_id for r in q]
diff --git a/r2/r2/lib/emailer.py b/r2/r2/lib/emailer.py
index 192f1908f..f211f9bae 100644
--- a/r2/r2/lib/emailer.py
+++ b/r2/r2/lib/emailer.py
@@ -62,7 +62,7 @@ def verify_email(user, dest):
Award.take_away("verified_email", user)
emaillink = ('http://' + g.domain + '/verification/' + key
+ query_string(dict(dest=dest)))
- print "Generated email verification link: " + emaillink
+ g.log.debug("Generated email verification link: " + emaillink)
g.cache.set("email_verify_%s" %key, user._id, time=1800)
_system_email(user.email,
@@ -118,7 +118,7 @@ def send_queued_mail(test = False):
"""sends mail from the mail queue to smtplib for delivery. Also,
on successes, empties the mail queue and adds all emails to the
sent_mail list."""
- from r2.lib.pages import PasswordReset, Share, Mail_Opt, VerifyEmail, Promo_Email
+ from r2.lib.pages import PasswordReset, Share, Mail_Opt, VerifyEmail
now = datetime.datetime.now(g.tz)
if not c.site:
c.site = Default
@@ -164,17 +164,6 @@ def send_queued_mail(test = False):
elif email.kind == Email.Kind.OPTIN:
email.body = Mail_Opt(msg_hash = email.msg_hash,
leave = False).render(style = "email")
- elif email.kind in (Email.Kind.ACCEPT_PROMO,
- Email.Kind.REJECT_PROMO,
- Email.Kind.QUEUED_PROMO,
- Email.Kind.LIVE_PROMO,
- Email.Kind.BID_PROMO,
- Email.Kind.FINISHED_PROMO,
- Email.Kind.NEW_PROMO):
- email.body = Promo_Email(link = email.thing,
- kind = email.kind,
- body = email.body).render(style="email")
-
# handle unknown types here
elif not email.body:
email.set_sent(rejected = True)
@@ -208,8 +197,11 @@ def opt_in(msg_hash):
return email, removed
-def _promo_email(thing, kind, body = ""):
+def _promo_email(thing, kind, body = "", **kw):
+ from r2.lib.pages import Promo_Email
a = Account._byID(thing.author_id)
+ body = Promo_Email(link = thing, kind = kind,
+ body = body, **kw).render(style = "email")
return _system_email(a.email, body, kind, thing = thing,
reply_to = "selfservicesupport@reddit.com")
@@ -217,8 +209,9 @@ def _promo_email(thing, kind, body = ""):
def new_promo(thing):
return _promo_email(thing, Email.Kind.NEW_PROMO)
-def promo_bid(thing):
- return _promo_email(thing, Email.Kind.BID_PROMO)
+def promo_bid(thing, bid, start_date):
+ return _promo_email(thing, Email.Kind.BID_PROMO, bid = bid,
+ start_date = start_date)
def accept_promo(thing):
return _promo_email(thing, Email.Kind.ACCEPT_PROMO)
@@ -226,8 +219,9 @@ def accept_promo(thing):
def reject_promo(thing, reason = ""):
return _promo_email(thing, Email.Kind.REJECT_PROMO, reason)
-def queue_promo(thing):
- return _promo_email(thing, Email.Kind.QUEUED_PROMO)
+def queue_promo(thing, bid, trans_id):
+ return _promo_email(thing, Email.Kind.QUEUED_PROMO, bid = bid,
+ trans_id = trans_id)
def live_promo(thing):
return _promo_email(thing, Email.Kind.LIVE_PROMO)
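The net effect of these emailer hunks: promo bodies are now rendered by `_promo_email` at enqueue time, with kind-specific context (`bid`, `start_date`, `trans_id`) supplied by the callers, so `send_queued_mail` no longer needs a per-kind branch. A toy version of that refactor:

    # Render at enqueue time so the queue consumer is kind-agnostic.
    def enqueue_promo_email(queue, render, kind, **kw):
        queue.append({'kind': kind, 'body': render(kind=kind, **kw)})

    def send_queued(queue, send):
        for email in queue:
            send(email['body'])   # no per-kind special cases needed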
diff --git a/r2/r2/lib/jsontemplates.py b/r2/r2/lib/jsontemplates.py
index fb6708329..37a5ea3b9 100644
--- a/r2/r2/lib/jsontemplates.py
+++ b/r2/r2/lib/jsontemplates.py
@@ -231,20 +231,21 @@ class LinkJsonTemplate(ThingJsonTemplate):
num_comments = "num_comments",
subreddit = "subreddit",
subreddit_id = "subreddit_id",
+ is_self = "is_self",
permalink = "permalink"
)
def thing_attr(self, thing, attr):
- from r2.lib.scraper import scrapers
+ from r2.lib.scraper import get_media_embed
if attr == "media_embed":
if (thing.media_object and
not isinstance(thing.media_object, basestring)):
- scraper = scrapers[thing.media_object['type']]
- media_embed = scraper.media_embed(**thing.media_object)
- return dict(scrolling = media_embed.scrolling,
- width = media_embed.width,
- height = media_embed.height,
- content = media_embed.content)
+ media_embed = get_media_embed(thing.media_object)
+ if media_embed:
+ return dict(scrolling = media_embed.scrolling,
+ width = media_embed.width,
+ height = media_embed.height,
+ content = media_embed.content)
return dict()
elif attr == 'subreddit':
return thing.subreddit.name
@@ -275,7 +276,8 @@ class CommentJsonTemplate(ThingJsonTemplate):
likes = "likes",
author = "author",
link_id = "link_id",
- sr_id = "sr_id",
+ subreddit = "subreddit",
+ subreddit_id = "subreddit_id",
parent_id = "parent_id",
)
@@ -283,10 +285,10 @@ class CommentJsonTemplate(ThingJsonTemplate):
from r2.models import Comment, Link, Subreddit
if attr == 'link_id':
return make_fullname(Link, thing.link_id)
- elif attr == 'sr_id':
- if hasattr(thing, attr):
- return make_fullname(Subreddit, thing.sr_id)
- return None
+ elif attr == 'subreddit':
+ return thing.subreddit.name
+ elif attr == 'subreddit_id':
+ return thing.subreddit._fullname
elif attr == "parent_id":
if getattr(thing, "parent_id", None):
return make_fullname(Comment, thing.parent_id)
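`LinkJsonTemplate` (and `make_link_child` in pages.py below) now routes media objects through `get_media_embed` instead of indexing `scrapers` directly, so an unknown media type yields an empty result rather than a KeyError. A hedged sketch of what such a helper looks like; the real one lives in r2.lib.scraper, and the `scrapers` table is passed in here only to keep the sketch self-contained:

    def get_media_embed(media_object, scrapers):
        """Return a media embed for the stored object, or None if the
        scraper for its type is unknown."""
        scraper = scrapers.get(media_object['type'])
        if scraper is None:
            return None
        return scraper.media_embed(**media_object)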
diff --git a/r2/r2/lib/media.py b/r2/r2/lib/media.py
index 68a2c0ec1..6911e0e71 100644
--- a/r2/r2/lib/media.py
+++ b/r2/r2/lib/media.py
@@ -31,6 +31,7 @@ from r2.lib.scraper import make_scraper, str_to_image, image_to_str, prepare_ima
from r2.lib import amqp
from r2.lib.contrib.nymph import optimize_png
+import os
import tempfile
import traceback
@@ -40,19 +41,26 @@ log = g.log
def thumbnail_url(link):
"""Given a link, returns the url for its thumbnail based on its fullname"""
- return 'http:/%s%s.png' % (s3_thumbnail_bucket, link._fullname)
+ return 'http://%s/%s.png' % (s3_thumbnail_bucket, link._fullname)
def upload_thumb(link, image):
"""Given a link and an image, uploads the image to s3 into an image
based on the link's fullname"""
- f = tempfile.NamedTemporaryFile(suffix = '.png')
- image.save(f)
+ f = tempfile.NamedTemporaryFile(suffix = '.png', delete=False)
+ try:
+ image.save(f)
+ f.close()
+ g.log.debug("optimizing %s in %s" % (link._fullname,f.name))
+ optimize_png(f.name, g.png_optimizer)
+ contents = open(f.name).read()
- resource = s3_thumbnail_bucket + link._fullname + '.png'
- log.debug('uploading to s3: %s' % link._fullname)
- s3cp.send_file(optimize_png(f.name, g.png_optimizer),
- resource, 'image/png', 'public-read', None, False)
- log.debug('thumbnail %s: %s' % (link._fullname, thumbnail_url(link)))
+ s3fname = link._fullname + '.png'
+
+ log.debug('uploading to s3: %s' % link._fullname)
+ s3cp.send_file(g.s3_thumb_bucket, s3fname, contents, 'image/png', never_expire=True)
+ log.debug('thumbnail %s: %s' % (link._fullname, thumbnail_url(link)))
+ finally:
+ os.unlink(f.name)
def update_link(link, thumbnail, media_object):
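The `upload_thumb` rewrite switches to `NamedTemporaryFile(delete=False)` (available since Python 2.6) so the file can be closed before `optimize_png` operates on it by name and before it is re-read for the S3 upload, with `try/finally` guaranteeing cleanup. The same pattern stand-alone, where `tool` is any hypothetical command that rewrites a file in place:

    import os
    import subprocess
    import tempfile

    def process_via_tempfile(data, tool):
        f = tempfile.NamedTemporaryFile(suffix='.png', delete=False)
        try:
            f.write(data)
            f.close()                        # close before sharing the name
            subprocess.check_call([tool, f.name])
            return open(f.name, 'rb').read()
        finally:
            os.unlink(f.name)                # always remove the temp file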
diff --git a/r2/r2/lib/menus.py b/r2/r2/lib/menus.py
index 483a9a8f3..ce9e156e7 100644
--- a/r2/r2/lib/menus.py
+++ b/r2/r2/lib/menus.py
@@ -159,7 +159,8 @@ menu = MenuHandler(hot = _('hot'),
new_promo = _('create promotion'),
my_current_promos = _('my promoted links'),
current_promos = _('all promoted links'),
- future_promos = _('unapproved'),
+ future_promos = _('unseen'),
+ roadblock = _('roadblock'),
graph = _('analytics'),
live_promos = _('live'),
unpaid_promos = _('unpaid'),
@@ -310,8 +311,10 @@ class OffsiteButton(NavButton):
class SubredditButton(NavButton):
def __init__(self, sr):
+ from r2.models.subreddit import Mod
self.path = sr.path
- NavButton.__init__(self, sr.name, sr.path, False,
+ name = 'mod' if sr == Mod else sr.name
+ NavButton.__init__(self, name, sr.path, False,
isselected = (c.site == sr))
def build(self, base_path = ''):
diff --git a/r2/r2/lib/migrate.py b/r2/r2/lib/migrate.py
index 25a8d1fc2..a47b4322b 100644
--- a/r2/r2/lib/migrate.py
+++ b/r2/r2/lib/migrate.py
@@ -35,83 +35,6 @@ def add_allow_top_to_srs():
for sr in fetch_things2(q):
sr.allow_top = True; sr._commit()
-def convert_promoted():
- """
- should only need to be run once to update old style promoted links
- to the new style.
- """
- from r2.lib.utils import fetch_things2
- from r2.lib import authorize
-
- q = Link._query(Link.c.promoted == (True, False),
- sort = desc("_date"))
- sr_id = PromoteSR._id
- bid = 100
- with g.make_lock(promoted_lock_key):
- promoted = {}
- set_promoted({})
- for l in fetch_things2(q):
- print "updating:", l
- try:
- if not l._loaded: l._load()
- # move the promotion into the promo subreddit
- l.sr_id = sr_id
- # set it to accepted (since some of the update functions
- # check that it is not already promoted)
- l.promote_status = STATUS.accepted
- author = Account._byID(l.author_id)
- l.promote_trans_id = authorize.auth_transaction(bid, author, -1, l)
- l.promote_bid = bid
- l.maximum_clicks = None
- l.maximum_views = None
- # set the dates
- start = getattr(l, "promoted_on", l._date)
- until = getattr(l, "promote_until", None) or \
- (l._date + timedelta(1))
- l.promote_until = None
- update_promo_dates(l, start, until)
- # mark it as promoted if it was promoted when we got there
- if l.promoted and l.promote_until > datetime.now(g.tz):
- l.promote_status = STATUS.pending
- else:
- l.promote_status = STATUS.finished
-
- if not hasattr(l, "disable_comments"):
- l.disable_comments = False
- # add it to the auction list
- if l.promote_status == STATUS.pending and l._fullname not in promoted:
- promoted[l._fullname] = auction_weight(l)
- l._commit()
- except AttributeError:
- print "BAD THING:", l
- print promoted
- set_promoted(promoted)
- # run what is normally in a cron job to clear out finished promos
- #promote_promoted()
-
-def store_market():
-
- """
- create index ix_promote_date_actual_end on promote_date(actual_end);
- create index ix_promote_date_actual_start on promote_date(actual_start);
- create index ix_promote_date_start_date on promote_date(start_date);
- create index ix_promote_date_end_date on promote_date(end_date);
-
- alter table promote_date add column account_id bigint;
- create index ix_promote_date_account_id on promote_date(account_id);
- alter table promote_date add column bid real;
- alter table promote_date add column refund real;
-
- """
-
- for p in PromoteDates.query().all():
- l = Link._by_fullname(p.thing_name, True)
- if hasattr(l, "promote_bid") and hasattr(l, "author_id"):
- p.account_id = l.author_id
- p._commit()
- PromoteDates.update(l, l._date, l.promote_until)
- PromoteDates.update_bid(l)
-
def subscribe_to_blog_and_annoucements(filename):
import re
from time import sleep
@@ -207,6 +130,8 @@ def recompute_unread(min_date = None):
queries.get_unread_comments(a).update()
queries.get_unread_selfreply(a).update()
+
+
def pushup_permacache(verbosity=1000):
"""When putting cassandra into the permacache chain, we need to
push everything up into the rest of the chain, so this is
@@ -345,57 +270,54 @@ def add_byurl_prefix():
in old.iteritems())
g.permacache.set_multi(new)
-def _progress(it, verbosity=100, key=repr, estimate=None, persec=False):
- """An iterator that yields everything from `it', but prints progress
- information along the way, including time-estimates if
- possible"""
+# alter table bids DROP constraint bids_pkey;
+# alter table bids add column campaign integer;
+# update bids set campaign = 0;
+# alter table bids ADD primary key (transaction, campaign);
+def promote_v2():
+ # alter table bids add column campaign integer;
+ # update bids set campaign = 0;
+ from r2.models import Link, NotFound, PromoteDates, Bid
from datetime import datetime
- import sys
+ from pylons import g
+ for p in PromoteDates.query():
+ try:
+ l = Link._by_fullname(p.thing_name,
+ data = True, return_dict = False)
+ if not l:
+ raise NotFound, p.thing_name
- now = start = datetime.now()
- elapsed = start - start
+ # update the promote status
+ l.promoted = True
+ l.promote_status = getattr(l, "promote_status", STATUS.unseen)
+ l._date = datetime(*(list(p.start_date.timetuple()[:7]) + [g.tz]))
+ set_status(l, l.promote_status)
- print 'Starting at %s' % (start,)
+ # add new campaign
+ print (l, (p.start_date, p.end_date), p.bid, None)
+ if not p.bid:
+ print "no bid? ", l
+ p.bid = 20
+ new_campaign(l, (p.start_date, p.end_date), p.bid, None)
+ print "updated: %s (%s)" % (l, l._date)
- seen = 0
- for item in it:
- seen += 1
- if seen % verbosity == 0:
- now = datetime.now()
- elapsed = now - start
- elapsed_seconds = elapsed.days * 86400 + elapsed.seconds
+ except NotFound:
+ print "NotFound: %s" % p.thing_name
- if estimate:
- remaining = ((elapsed/seen)*estimate)-elapsed
- completion = now + remaining
- count_str = ('%d/%d %.2f%%'
- % (seen, estimate, float(seen)/estimate*100))
- estimate_str = (' (%s remaining; completion %s)'
- % (remaining, completion))
- else:
- count_str = '%d' % seen
- estimate_str = ''
+ print "updating campaigns"
+ for b in Bid.query():
+ l = Link._byID(int(b.thing_id))
+ print "updating: ", l
+ campaigns = getattr(l, "campaigns", {}).copy()
+ indx = b.campaign
+ if indx in campaigns:
+ sd, ed, bid, sr, trans_id = campaigns[indx]
+ campaigns[indx] = sd, ed, bid, sr, b.transaction
+ l.campaigns = campaigns
+ l._commit()
+ else:
+ print "no campaign information: ", l
- if key:
- key_str = ': %s' % key(item)
- else:
- key_str = ''
-
- if persec and elapsed_seconds > 0:
- persec_str = ' (%.2f/s)' % (seen/elapsed_seconds,)
- else:
- persec_str = ''
-
- sys.stdout.write('%s%s, %s%s%s\n'
- % (count_str, persec_str,
- elapsed, estimate_str, key_str))
- sys.stdout.flush()
- this_chunk = 0
- yield item
-
- now = datetime.now()
- elapsed = now - start
- print 'Processed %d items in %s..%s (%s)' % (seen, start, now, elapsed)
def shorten_byurl_keys():
"""We changed by_url keys from a format like
@@ -412,7 +334,7 @@ def shorten_byurl_keys():
from pylons import g
from r2.lib.utils import fetch_things2, in_chunks
from r2.lib.db.operators import desc
- from r2.lib.utils import base_url
+ from r2.lib.utils import base_url, progress
# from link.py
def old_by_url_key(url):
@@ -435,7 +357,7 @@ def shorten_byurl_keys():
sort=desc('_date'))
for links in (
in_chunks(
- _progress(
+ progress(
fetch_things2(l_q, verbosity),
key = lambda link: link._date,
verbosity=verbosity,
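The module-private `_progress` generator deleted above survives as `r2.lib.utils.progress`, used here through `in_chunks(progress(fetch_things2(...)))`. Its core idea, trimmed to a runnable minimum:

    import sys
    from datetime import datetime

    def progress(it, verbosity=100):
        """Yield everything from `it`, reporting a running count."""
        start = datetime.now()
        seen = 0
        for item in it:
            seen += 1
            if seen % verbosity == 0:
                sys.stdout.write('%d so far (%s elapsed)\n'
                                 % (seen, datetime.now() - start))
                sys.stdout.flush()
            yield item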
diff --git a/r2/r2/lib/organic.py b/r2/r2/lib/organic.py
index 970b7d545..cd5c63b2c 100644
--- a/r2/r2/lib/organic.py
+++ b/r2/r2/lib/organic.py
@@ -24,7 +24,6 @@ from r2.lib.memoize import memoize
from r2.lib.normalized_hot import get_hot
from r2.lib import count
from r2.lib.utils import UniqueIterator, timeago
-from r2.lib.promote import random_promoted
from pylons import c
@@ -33,65 +32,13 @@ from time import time
organic_lifetime = 5*60
organic_length = 30
+organic_max_length = 50
-# how many regular organic links should show between promoted ones
-promoted_every_n = 5
-
-def keep_link(link):
- return link.fresh
-
-def insert_promoted(link_names, sr_ids, logged_in):
- """
- Inserts promoted links into an existing organic list. Destructive
- on `link_names'
- """
- promoted_items = random_promoted()
-
- if not promoted_items:
- return
-
- # no point in running the builder over more promoted links than
- # we'll even use
- max_promoted = max(1,len(link_names)/promoted_every_n)
-
- # remove any that the user has acted on
- def keep(item):
- if c.user_is_loggedin and c.user._id == item.author_id:
- return True
- else:
- return item.keep_item(item)
-
- builder = IDBuilder(promoted_items, keep_fn = keep,
- skip = True, num = max_promoted)
- promoted_items = builder.get_items()[0]
-
- if not promoted_items:
- return
- # don't insert one at the head of the list 50% of the time for
- # logged in users, and 50% of the time for logged-off users when
- # the pool of promoted links is less than 3 (to avoid showing the
- # same promoted link to the same person too often)
- if (logged_in or len(promoted_items) < 3) and random.choice((True,False)):
- promoted_items.insert(0, None)
-
- # insert one promoted item for every N items
- for i, item in enumerate(promoted_items):
- pos = i * promoted_every_n + i
- if pos > len(link_names):
- break
- elif item is None:
- continue
- else:
- link_names.insert(pos, item._fullname)
-
-@memoize('cached_organic_links2', time = organic_lifetime)
-def cached_organic_links(user_id, langs):
- if user_id is None:
- sr_ids = Subreddit.default_subreddits()
- else:
- user = Account._byID(user_id, data=True)
- sr_ids = Subreddit.user_subreddits(user)
+def keep_fresh_links(item):
+ return (c.user_is_loggedin and c.user._id == item.author_id) or item.fresh
+@memoize('cached_organic_links', time = organic_lifetime)
+def cached_organic_links(*sr_ids):
sr_count = count.get_link_counts()
#only use links from reddits that you're subscribed to
link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys())
@@ -100,10 +47,10 @@ def cached_organic_links(user_id, langs):
if not link_names and g.debug:
q = All.get_links('new', 'all')
q._limit = 100 # this decomposes to a _query
- link_names = [x._fullname for x in q]
+ link_names = [x._fullname for x in q if x.promoted is None]
g.log.debug('Used inorganic links')
- #potentially add a up and coming link
+ #potentially add an up and coming link
if random.choice((True, False)) and sr_ids:
sr = Subreddit._byID(random.choice(sr_ids))
fnames = get_hot([sr], True)[0]
@@ -114,23 +61,6 @@ def cached_organic_links(user_id, langs):
new_item = random.choice(fnames[1:4])
link_names.insert(0, new_item)
- insert_promoted(link_names, sr_ids, user_id is not None)
-
- # remove any that the user has acted on
- builder = IDBuilder(link_names,
- skip = True, keep_fn = keep_link,
- num = organic_length)
- link_names = [ x._fullname for x in builder.get_items()[0] ]
-
- #if not logged in, don't reset the count. if we did that we might get in a
- #cycle where the cache will return the same link over and over
- if user_id:
- update_pos(0)
-
- # remove any duplicates caused by insert_promoted if the user is logged in
- if user_id:
- link_names = list(UniqueIterator(link_names))
-
return link_names
def organic_links(user):
@@ -140,20 +70,14 @@ def organic_links(user):
# make sure that these are sorted so the cache keys are constant
sr_ids.sort()
- if c.user_is_loggedin:
- links = cached_organic_links(user._id, None)
- else:
- links = cached_organic_links(None, c.content_langs)
+ # get the default subreddits if the user is not logged in
+ user_id = None if isinstance(user, FakeAccount) else user
+ sr_ids = Subreddit.user_subreddits(user, True)
- pos = organic_pos()
-
- # Make sure that links[pos] exists. Or, if links is [], at least set pos=0
- if not links:
- pos = 0
- elif pos != 0:
- pos = pos % len(links)
-
- return links, pos
+ # pass the cached function a sorted list so that we can guarantee
+ # cacheability
+ sr_ids.sort()
+ return cached_organic_links(*sr_ids)[:organic_max_length]
def update_pos(pos):
"Update the user's current position within the cached organic list."
diff --git a/r2/r2/lib/pages/pages.py b/r2/r2/lib/pages/pages.py
index 7835cf832..f010ad83a 100644
--- a/r2/r2/lib/pages/pages.py
+++ b/r2/r2/lib/pages/pages.py
@@ -22,8 +22,8 @@
from r2.lib.wrapped import Wrapped, Templated, CachedTemplate
from r2.models import Account, Default, make_feedurl
from r2.models import FakeSubreddit, Subreddit, Ad, AdSR
-from r2.models import Friends, All, Sub, NotFound, DomainSR
-from r2.models import Link, Printable, Trophy, bidding, PromoteDates
+from r2.models import Friends, All, Sub, NotFound, DomainSR, Random, Mod, RandomNSFW
+from r2.models import Link, Printable, Trophy, bidding, PromotionWeights
from r2.config import cache
from r2.lib.tracking import AdframeInfo
from r2.lib.jsonresponse import json_respond
@@ -47,10 +47,11 @@ from r2.lib.utils import link_duplicates, make_offset_date, to_csv, median
from r2.lib.utils import trunc_time
from r2.lib.template_helpers import add_sr, get_domain
from r2.lib.subreddit_search import popular_searches
-from r2.lib.scraper import scrapers
+from r2.lib.scraper import get_media_embed
from r2.lib.log import log_text
+from r2.lib.memoize import memoize
-import sys, random, datetime, locale, calendar, simplejson, re
+import sys, random, datetime, locale, calendar, simplejson, re, time
import graph, pycountry
from itertools import chain
from urllib import quote
@@ -277,12 +278,16 @@ class Reddit(Templated):
"""Sets the layout of the navigation topbar on a Reddit. The result
is a list of menus which will be rendered in order and
displayed at the top of the Reddit."""
- main_buttons = [NamedButton('hot', dest='', aliases=['/hot']),
- NamedButton('new'),
- NamedButton('controversial'),
- NamedButton('top'),
- NamedButton('saved', False)
- ]
+ if c.site == Friends:
+ main_buttons = [NamedButton('new', dest='', aliases=['/hot']),
+ NamedButton('comments')]
+ else:
+ main_buttons = [NamedButton('hot', dest='', aliases=['/hot']),
+ NamedButton('new'),
+ NamedButton('controversial'),
+ NamedButton('top'),
+ NamedButton('saved', False)
+ ]
more_buttons = []
@@ -705,17 +710,18 @@ class LinkInfoPage(Reddit):
self.link = self.link_listing.things[0]
link_title = ((self.link.title) if hasattr(self.link, 'title') else '')
- if comment:
- if comment._deleted and not c.user_is_admin:
- author = _("[deleted]")
- else:
- author = Account._byID(comment.author_id, data=True).name
- params = {'author' : author, 'title' : _force_unicode(link_title)}
- title = strings.permalink_title % params
- else:
- params = {'title':_force_unicode(link_title), 'site' : c.site.name}
- title = strings.link_info_title % params
+ # defaults whether or not there is a comment
+ params = {'title':_force_unicode(link_title), 'site' : c.site.name}
+ title = strings.link_info_title % params
+
+ # only modify the title if the comment/author are neither deleted nor spam
+ if comment and not comment._deleted and not comment._spam:
+ author = Account._byID(comment.author_id, data=True)
+
+ if not author._deleted and not author._spam:
+ params = {'author' : author.name, 'title' : _force_unicode(link_title)}
+ title = strings.permalink_title % params
self.subtitle = subtitle
@@ -903,17 +909,18 @@ class ProfilePage(Reddit):
def rightbox(self):
rb = Reddit.rightbox(self)
- rb.push(ProfileBar(self.user))
- if c.user_is_admin:
- from admin_pages import AdminSidebar
- rb.append(AdminSidebar(self.user))
tc = TrophyCase(self.user)
helplink = ( "/help/awards", _("what's this?") )
scb = SideContentBox(title=_("trophy case"),
helplink=helplink, content=[tc],
extra_class="trophy-area")
- rb.append(scb)
+
+ rb.push(scb)
+ if c.user_is_admin:
+ from admin_pages import AdminSidebar
+ rb.push(AdminSidebar(self.user))
+ rb.push(ProfileBar(self.user))
return rb
@@ -1026,7 +1033,7 @@ class SubredditTopBar(Templated):
return SubredditMenu(drop_down_buttons,
title = _('my reddits'),
type = 'srdrop')
-
+
def subscribed_reddits(self):
srs = [SubredditButton(sr) for sr in
sorted(self.my_reddits,
@@ -1036,28 +1043,44 @@ class SubredditTopBar(Templated):
]
return NavMenu(srs,
type='flatlist', separator = '-',
- _id = 'sr-bar')
+ css_class = 'sr-bar')
def popular_reddits(self, exclude=[]):
exclusions = set(exclude)
buttons = [SubredditButton(sr)
for sr in self.pop_reddits if sr not in exclusions]
-
+
return NavMenu(buttons,
type='flatlist', separator = '-',
- _id = 'sr-bar')
+ css_class = 'sr-bar', _id = 'sr-bar')
+ def special_reddits(self):
+ reddits = [All, Random]
+ if getattr(c.site, "over_18", False):
+ reddits.append(RandomNSFW)
+ if c.user_is_loggedin:
+ if c.user.friends:
+ reddits.append(Friends)
+ if c.show_mod_mail:
+ reddits.append(Mod)
+ return NavMenu([SubredditButton(sr) for sr in reddits],
+ type = 'flatlist', separator = '-',
+ css_class = 'sr-bar')
+
def sr_bar (self):
+ sep = ' | '
menus = []
+ menus.append(self.special_reddits())
+ menus.append(RawString(sep))
+
if not c.user_is_loggedin:
menus.append(self.popular_reddits())
else:
if len(self.my_reddits) > g.sr_dropdown_threshold:
- menus.append(self.my_reddits_dropdown())
+ menus = [self.my_reddits_dropdown()] + menus
menus.append(self.subscribed_reddits())
-
sep = ' – '
menus.append(RawString(sep))
@@ -1263,7 +1286,6 @@ class FrameToolbar(Wrapped):
query_string(submit_url_options))
else:
w.tblink = add_sr("/tb/"+w._id36)
-
w.upstyle = "mod" if w.likes else ""
w.downstyle = "mod" if w.likes is False else ""
if not c.user_is_loggedin:
@@ -1280,7 +1302,6 @@ class NewLink(Templated):
tabs = (('link', ('link-desc', 'url-field')),
('text', ('text-desc', 'text-field')))
all_fields = set(chain(*(parts for (tab, parts) in tabs)))
-
buttons = []
self.default_tabs = tabs[0][1]
self.default_tab = tabs[0][0]
@@ -1289,7 +1310,6 @@ class NewLink(Templated):
to_hide = ','.join('#' + p for p in all_fields if p not in parts)
onclick = "return select_form_tab(this, '%s', '%s');"
onclick = onclick % (to_show, to_hide)
-
if tab_name == self.default_tab:
self.default_show = to_show
self.default_hide = to_hide
@@ -1385,6 +1405,10 @@ class ButtonDemoPanel(Templated):
"""The page for showing the different styles of embedable voting buttons"""
pass
+class UpgradeButtons(Templated):
+ """The page for showing the different styles of embedable voting buttons"""
+ pass
+
class SelfServeBlurb(Templated):
pass
@@ -2010,6 +2034,9 @@ class Cnameframe(Templated):
class FrameBuster(Templated):
pass
+class SelfServiceOatmeal(Templated):
+ pass
+
class PromotePage(Reddit):
create_reddit_box = False
submit_box = False
@@ -2018,7 +2045,8 @@ class PromotePage(Reddit):
def __init__(self, title, nav_menus = None, *a, **kw):
buttons = [NamedButton('new_promo')]
- if c.user_is_admin:
+ if c.user_is_sponsor:
+ buttons.append(NamedButton('roadblock'))
buttons.append(NamedButton('current_promos', dest = ''))
else:
buttons.append(NamedButton('my_current_promos', dest = ''))
@@ -2045,12 +2073,13 @@ class PromoteLinkForm(Templated):
def __init__(self, sr = None, link = None, listing = '',
timedeltatext = '', *a, **kw):
bids = []
- if c.user_is_admin and link:
+ if c.user_is_sponsor and link:
+ self.author = Account._byID(link.author_id)
try:
bids = bidding.Bid.lookup(thing_id = link._id)
bids.sort(key = lambda x: x.date, reverse = True)
except NotFound:
- bids = []
+ pass
# reference "now" to what we use for promtions
now = promote.promo_datetime_now()
@@ -2060,22 +2089,70 @@ class PromoteLinkForm(Templated):
business_days = True) -
datetime.timedelta(1))
- if link:
- startdate = link._date
- enddate = link.promote_until
- else:
- startdate = mindate + datetime.timedelta(1)
- enddate = startdate + datetime.timedelta(1)
+ startdate = mindate + datetime.timedelta(1)
+ enddate = startdate + datetime.timedelta(3)
self.startdate = startdate.strftime("%m/%d/%Y")
self.enddate = enddate .strftime("%m/%d/%Y")
+
self.mindate = mindate .strftime("%m/%d/%Y")
- Templated.__init__(self, sr = sr, link = link,
- datefmt = datefmt, bids = bids,
- timedeltatext = timedeltatext,
- listing = listing,
- *a, **kw)
+ self.link = None
+ if link:
+ self.sr_searches = simplejson.dumps(popular_searches())
+ self.subreddits = (Subreddit.submit_sr_names(c.user) or
+ Subreddit.submit_sr_names(None))
+ self.default_sr = self.subreddits[0] if self.subreddits \
+ else g.default_sr
+ # have the promo code wrap the campaigns for rendering
+ self.link = promote.editable_add_props(link)
+
+ if not c.user_is_sponsor:
+ self.now = promote.promo_datetime_now().date()
+ start_date = promote.promo_datetime_now(offset = -14).date()
+ end_date = promote.promo_datetime_now(offset = 14).date()
+ self.promo_traffic = dict(load_traffic('day', 'promos'))
+ self.market, self.promo_counter = \
+ Promote_Graph.get_market(None, start_date, end_date)
+
+ Templated.__init__(self, sr = sr,
+ datefmt = datefmt,
+ timedeltatext = timedeltatext,
+ listing = listing, bids = bids,
+ *a, **kw)
+
+class PromoteLinkFormOld(PromoteLinkForm):
+ def __init__(self, **kw):
+ PromoteLinkForm.__init__(self, **kw)
+ self.bid = g.min_promote_bid
+ campaign = {}
+ if self.link:
+ campaign = self.link.campaigns[0]
+ self.startdate = campaign.start_date
+ self.enddate = campaign.end_date
+
+ self.bid = campaign.get("bid", g.min_promote_bid)
+ self.freebie = campaign.get("status",{}).get("free", False)
+ self.complete = campaign.get("status",{}).get("complete", False)
+ self.paid = campaign.get("status",{}).get("paid", False)
+
+class Roadblocks(Templated):
+ def __init__(self):
+ self.roadblocks = promote.get_roadblocks()
+ Templated.__init__(self)
+ # reference "now" to what we use for promotions
+ now = promote.promo_datetime_now()
+
+ startdate = now + datetime.timedelta(1)
+ enddate = startdate + datetime.timedelta(1)
+
+ self.startdate = startdate.strftime("%m/%d/%Y")
+ self.enddate = enddate .strftime("%m/%d/%Y")
+ self.sr_searches = simplejson.dumps(popular_searches())
+ self.subreddits = (Subreddit.submit_sr_names(c.user) or
+ Subreddit.submit_sr_names(None))
+ self.default_sr = self.subreddits[0] if self.subreddits \
+ else g.default_sr
class TabbedPane(Templated):
def __init__(self, tabs):
@@ -2100,22 +2177,54 @@ class LinkChild(object):
def content(self):
return ''
+def make_link_child(item):
+ link_child = None
+ editable = False
+
+ # if the item has a media_object, try to make a MediaEmbed for rendering
+ if item.media_object:
+ media_embed = None
+ if isinstance(item.media_object, basestring):
+ media_embed = item.media_object
+ else:
+ media_embed = get_media_embed(item.media_object)
+ if media_embed:
+ media_embed = MediaEmbed(media_domain = g.media_domain,
+ height = media_embed.height + 10,
+ width = media_embed.width + 10,
+ scrolling = media_embed.scrolling,
+ id36 = item._id36)
+ else:
+ g.log.debug("media_object without media_embed %s" % item)
+
+ if media_embed:
+ link_child = MediaChild(item, media_embed, load = True)
+
+ # if the item has selftext, add a selftext child
+ elif item.selftext:
+ expand = getattr(item, 'expand_children', False)
+ link_child = SelfTextChild(item, expand = expand,
+ nofollow = item.nofollow)
+ #draw the edit button if the contents are pre-expanded
+ editable = (expand and
+ item.author == c.user and
+ not item._deleted)
+
+ return link_child, editable
+
+
class MediaChild(LinkChild):
"""renders when the user hits the expando button to expand media
objects, like embedded videos"""
css_style = "video"
+ def __init__(self, link, content, **kw):
+ self._content = content
+ LinkChild.__init__(self, link, **kw)
def content(self):
- if isinstance(self.link.media_object, basestring):
- return self.link.media_object
-
- scraper = scrapers[self.link.media_object['type']]
- media_embed = scraper.media_embed(**self.link.media_object)
- return MediaEmbed(media_domain = g.media_domain,
- height = media_embed.height+10,
- width = media_embed.width+10,
- scrolling = media_embed.scrolling,
- id36 = self.link._id36).render()
+ if isinstance(self._content, basestring):
+ return self._content
+ return self._content.render()
class MediaEmbed(Templated):
"""The actual rendered iframe for a media child"""
@@ -2150,6 +2259,9 @@ class UserText(CachedTemplate):
if extra_css:
css_class += " " + extra_css
+ if text is None:
+ text = ''
+
CachedTemplate.__init__(self,
fullname = item._fullname if item else "",
text = text,
@@ -2183,26 +2295,32 @@ class PromotedTraffic(Traffic):
multiy format) and a table of the data.
"""
def __init__(self, thing):
+ # TODO: needs a fix for multiple campaigns
self.thing = thing
- d = thing._date.astimezone(g.tz) - promote.timezone_offset
- d = d.replace(minute = 0, second = 0, microsecond = 0)
- until = thing.promote_until - promote.timezone_offset
- now = datetime.datetime.now(g.tz)
+ d = until = None
+ self.traffic = []
+ if thing.campaigns:
+ d = min(sd.date() if isinstance(sd, datetime.datetime) else sd
+ for sd, ed, bid, sr, trans_id in thing.campaigns.values()
+ if trans_id)
+ until = max(ed.date() if isinstance(ed, datetime.datetime) else ed
+ for sd, ed, bid, sr, trans_id in thing.campaigns.values()
+ if trans_id)
+ now = datetime.datetime.now(g.tz).date()
- # the results are preliminary until 1 day after the promotion ends
- self.preliminary = (until + datetime.timedelta(1) > now)
+ # the results are preliminary until 1 day after the promotion ends
+ self.preliminary = (until + datetime.timedelta(1) > now)
+ self.traffic = load_traffic('hour', "thing", thing._fullname,
+ start_time = d, stop_time = until)
- self.traffic = load_traffic('hour', "thing", thing._fullname,
- start_time = d, stop_time = until)
-
- # load monthly totals if we have them, otherwise use the daily totals
- self.totals = load_traffic('month', "thing", thing._fullname)
- if not self.totals:
- self.totals = load_traffic('day', "thing", thing._fullname)
- # generate a list of
- # (uniq impressions, # impressions, uniq clicks, # clicks)
- if self.totals:
- self.totals = map(sum, zip(*zip(*self.totals)[1]))
+ # load monthly totals if we have them, otherwise use the daily totals
+ self.totals = load_traffic('month', "thing", thing._fullname)
+ if not self.totals:
+ self.totals = load_traffic('day', "thing", thing._fullname)
+ # generate a list of
+ # (uniq impressions, # impressions, uniq clicks, # clicks)
+ if self.totals:
+ self.totals = map(sum, zip(*zip(*self.totals)[1]))
imp = self.slice_traffic(self.traffic, 0, 1)
@@ -2213,21 +2331,20 @@ class PromotedTraffic(Traffic):
self.totals[1] = imp_total
imp_total = locale.format('%d', imp_total, True)
- chart = graph.LineGraph(imp)
- self.imp_graph = chart.google_chart(ylabels = ['uniques', 'total'],
- title = ("impressions (%s)" %
- imp_total))
-
+
+ self.imp_graph = TrafficGraph(imp[-72:], ylabels = ['uniques', 'total'],
+ title = ("recent impressions (%s total)" %
+ imp_total))
cli = self.slice_traffic(self.traffic, 2, 3)
cli_total = sum(x[2] for x in cli)
# ensure total consistency
if self.totals:
self.totals[3] = cli_total
cli_total = locale.format('%d', cli_total, True)
- chart = graph.LineGraph(cli)
- self.cli_graph = chart.google_chart(ylabels = ['uniques', 'total'],
- title = ("clicks (%s)" %
- cli_total))
+ self.cli_graph = TrafficGraph(cli[-72:], ylabels = ['uniques', 'total'],
+ title = ("recent clicks (%s total)" %
+ cli_total))
+
else:
self.imp_graph = self.cli_graph = None
@@ -2286,21 +2403,17 @@ class RedditTraffic(Traffic):
setattr(self, ival + "_data", data)
for name, indx, color in slices:
data2 = self.slice_traffic(data, *indx)
- chart = graph.LineGraph(data2, colors = [color, "B0B0B0"])
- setattr(self, name + "_" + ival + "_chart", chart)
+ setattr(self, name + "_" + ival + "_chart", data2)
title = "%s by %s" % (name, ival)
- res = chart.google_chart(ylabels = [name],
- multiy = False,
- title = title)
+ res = TrafficGraph(data2, colors = [color], title = title)
setattr(self, name + "_" + ival, res)
else:
self.has_data = True
if self.has_data:
imp_by_day = [[] for i in range(7)]
uni_by_day = [[] for i in range(7)]
- dates = self.uniques_day_chart.xdata
- uniques = self.uniques_day_chart.ydata[0]
- imps = self.impressions_day_chart.ydata[0]
+ dates, imps = zip(*self.impressions_day_chart)
+ dates, uniques = zip(*self.uniques_day_chart)
self.uniques_mean = sum(map(float, uniques))/len(uniques)
self.impressions_mean = sum(map(float, imps))/len(imps)
for i, d in enumerate(dates):
@@ -2400,6 +2513,42 @@ class RedditTraffic(Traffic):
"%5.2f%%" % f))
return res
+class TrafficGraph(Templated):
+ def __init__(self, data, width = 300, height = 175,
+ bar_fmt = True, colors = ("FF4500", "336699"), title = '',
+ ylabels = [], multiy = True):
+ # fallback on google charts
+ chart = graph.LineGraph(data[:72], colors = colors)
+ self.gc = chart.google_chart(ylabels = ylabels, multiy = multiy, title = title)
+
+ xdata = []
+ ydata = []
+ for d in data:
+ xdata.append(time.mktime(d[0].timetuple())*1000)
+ ydata.append(d[1:])
+ ydata = zip(*ydata)
+ self.colors = colors
+ self.title = title
+
+ if bar_fmt:
+ xdata = graph.DataSeries(xdata).toBarX()
+
+ if ydata and not isinstance(ydata[0], (list, tuple)):
+ if bar_fmt:
+ ydata = graph.DataSeries(ydata).toBarY()
+ self.data = [zip(xdata, ydata)]
+ else:
+ self.data = []
+ for ys in ydata:
+ if bar_fmt:
+ ys = graph.DataSeries(ys).toBarY()
+ self.data.append(zip(xdata, ys))
+
+ self.width = width
+ self.height = height
+ Templated.__init__(self)
+
+
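`TrafficGraph` reshapes rows of `(datetime, y1, y2, ...)` into one `[(timestamp_ms, y)]` series per column, converting dates to JavaScript-style millisecond timestamps via `time.mktime(...) * 1000` for the client-side charts (the Google chart is kept as a fallback, per the comment above). The conversion in isolation:

    import time
    from datetime import datetime

    def to_series(rows):
        """(datetime, y1, y2, ...) rows -> one [(ms, y)] list per column."""
        xdata = [time.mktime(r[0].timetuple()) * 1000 for r in rows]
        ycols = zip(*[r[1:] for r in rows])
        return [zip(xdata, ys) for ys in ycols]

    rows = [(datetime(2009, 12, 1), 10, 3),
            (datetime(2009, 12, 2), 12, 5)]
    uniques, totals = to_series(rows)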
class RedditAds(Templated):
def __init__(self, **kw):
self.sr_name = c.site.name
@@ -2423,48 +2572,87 @@ class RedditAds(Templated):
Templated.__init__(self, **kw)
class PaymentForm(Templated):
- def __init__(self, **kw):
+ def __init__(self, link, indx, **kw):
self.countries = pycountry.countries
+ self.link = promote.editable_add_props(link)
+ self.campaign = self.link.campaigns[indx]
+ self.indx = indx
Templated.__init__(self, **kw)
class Promote_Graph(Templated):
+
+ @classmethod
+ @memoize('get_market', time = 60)
+ def get_market(cls, user_id, start_date, end_date):
+ market = {}
+ promo_counter = {}
+ def callback(link, bid, bid_day, starti, endi, indx):
+ for i in xrange(starti, endi):
+ if user_id is None or link.author_id == user_id:
+ if (not promote.is_unpaid(link) and
+ not promote.is_rejected(link)):
+ market[i] = market.get(i, 0) + bid_day
+ promo_counter[i] = promo_counter.get(i, 0) + 1
+ cls.promo_iter(start_date, end_date, callback)
+ return market, promo_counter
+
+ @classmethod
+ def promo_iter(cls, start_date, end_date, callback):
+ size = (end_date - start_date).days
+ for link, indx, s, e in cls.get_current_promos(start_date, end_date):
+ if indx in link.campaigns:
+ sdate, edate, bid, sr, trans_id = link.campaigns[indx]
+ if isinstance(sdate, datetime.datetime):
+ sdate = sdate.date()
+ if isinstance(edate, datetime.datetime):
+ edate = edate.date()
+ starti = max((sdate - start_date).days, 0)
+ endi = min((edate - start_date).days, size)
+ bid_day = bid / max((edate - sdate).days, 1)
+ callback(link, bid, bid_day, starti, endi, indx)
+
+ @classmethod
+ def get_current_promos(cls, start_date, end_date):
+ # grab promoted links
+ # returns a list of (thing_id, campaign_idx, start, end)
+ promos = PromotionWeights.get_schedule(start_date, end_date)
+ # sort based on the start date
+ promos.sort(key = lambda x: x[2])
+
+ # wrap the links
+ links = wrap_links([p[0] for p in promos])
+ # remove rejected/unpaid promos
+ links = dict((l._fullname, l) for l in links.things
+ if promote.is_accepted(l) or promote.is_unapproved(l))
+ # filter promos accordingly
+ promos = [(links[thing_name], indx, s, e)
+ for thing_name, indx, s, e in promos
+ if links.has_key(thing_name)]
+
+ return promos
+
def __init__(self):
self.now = promote.promo_datetime_now()
- start_date = (self.now - datetime.timedelta(7)).date()
- end_date = (self.now + datetime.timedelta(7)).date()
+
+ start_date = promote.promo_datetime_now(offset = -7).date()
+ end_date = promote.promo_datetime_now(offset = 7).date()
size = (end_date - start_date).days
- # grab promoted links
- promos = PromoteDates.for_date_range(start_date, end_date)
- promos.sort(key = lambda x: x.start_date)
-
- # wrap the links
- links = wrap_links([p.thing_name for p in promos])
- # remove rejected/unpaid promos
- links = dict((l._fullname, l) for l in links.things
- if (l.promoted is not None and
- l.promote_status not in ( promote.STATUS.rejected,
- promote.STATUS.unpaid)) )
- # filter promos accordingly
- promos = filter(lambda p: links.has_key(p.thing_name), promos)
+ # these will be cached queries
+ market, promo_counter = self.get_market(None, start_date, end_date)
+ my_market = market
+ if not c.user_is_sponsor:
+ my_market = self.get_market(c.user._id, start_date, end_date)[0]
+ # determine the range of each link
promote_blocks = []
- market = {}
- my_market = {}
- promo_counter = {}
- for p in promos:
- starti = max((p.start_date - start_date).days, 0)
- endi = min((p.end_date - start_date).days, size)
- link = links[p.thing_name]
- bid_day = link.promote_bid/max((p.end_date - p.start_date).days, 1)
- for i in xrange(starti, endi):
- market[i] = market.get(i, 0) + bid_day
- if c.user_is_sponsor or link.author_id == c.user._id:
- my_market[i] = my_market.get(i, 0) + bid_day
- promo_counter[i] = promo_counter.get(i, 0) + 1
- if c.user_is_sponsor or link.author_id == c.user._id:
- promote_blocks.append( (link, starti, endi) )
+ def block_maker(link, bid, bid_day, starti, endi, indx):
+ if ((c.user_is_sponsor or link.author_id == c.user._id)
+ and not promote.is_rejected(link)
+ and not promote.is_unpaid(link)):
+ promote_blocks.append( (link, bid, starti, endi, indx) )
+ self.promo_iter(start_date, end_date, block_maker)
# now sort the promoted_blocks into the most contiguous chunks we can
sorted_blocks = []
@@ -2473,17 +2661,18 @@ class Promote_Graph(Templated):
while True:
sorted_blocks.append(cur)
# get the future items (sort will be preserved)
- future = filter(lambda x: x[1] >= cur[2], promote_blocks)
+ future = filter(lambda x: x[2] >= cur[3], promote_blocks)
if future:
# resort by date and give precedence to longest promo:
- cur = min(future, key = lambda x: (x[1], x[1]-x[2]))
+ cur = min(future, key = lambda x: (x[2], x[2]-x[3]))
promote_blocks.remove(cur)
else:
break
# load recent traffic as well:
self.recent = {}
- for k, v in load_summary("thing"):
+ #TODO
+ for k, v in []:#load_summary("thing"):
if k.startswith('t%d_' % Link._type_id):
self.recent[k] = v
@@ -2497,36 +2686,38 @@ class Promote_Graph(Templated):
# graphs of money
history = self.now - datetime.timedelta(60)
- pool = bidding.PromoteDates.bid_history(history)
+
+ pool =PromotionWeights.bid_history(promote.promo_datetime_now(offset=-60),
+ promote.promo_datetime_now(offset=2))
if pool:
# we want to generate a stacked line graph, so store the
# bids and the total including refunded amounts
- chart = graph.LineGraph([(d, b, r) for (d, b, r) in pool],
- colors = ("008800", "FF0000"))
total_sale = sum(b for (d, b, r) in pool)
total_refund = sum(r for (d, b, r) in pool)
- self.money_graph = chart.google_chart(
- ylabels = ['total ($)'],
- title = ("monthly sales ($%.2f total, $%.2f credits)" %
- (total_sale, total_refund)),
- multiy = False)
+
+ self.money_graph = TrafficGraph([(d, b, r) for (d, b, r) in pool],
+ colors = ("008800", "FF0000"),
+ ylabels = ['total ($)'],
+ title = ("monthly sales ($%.2f total, $%.2f credits)" %
+ (total_sale, total_refund)),
+ multiy = False)
history = self.now - datetime.timedelta(30)
- self.top_promoters = bidding.PromoteDates.top_promoters(history)
+ #TODO
+ self.top_promoters = []#bidding.PromoteDates.top_promoters(history)
else:
self.money_graph = None
self.top_promoters = []
# graphs of impressions and clicks
self.promo_traffic = load_traffic('day', 'promos')
- impressions = [(d, i) for (d, (i, k)) in self.promo_traffic]
+ impressions = [(d, i) for (d, (i, k)) in self.promo_traffic]
pool = dict((d, b+r) for (d, b, r) in pool)
if impressions:
- chart = graph.LineGraph(impressions)
- self.imp_graph = chart.google_chart(ylabels = ['total'],
- title = "impressions")
+ self.imp_graph = TrafficGraph(impressions, ylabels = ['total'],
+ title = "impressions")
clicks = [(d, k) for (d, (i, k)) in self.promo_traffic]
@@ -2539,27 +2730,22 @@ class Promote_Graph(Templated):
CTR = [(d, (100 * float(k) / i if i else 0))
for (d, (i, k)) in self.promo_traffic]
- chart = graph.LineGraph(clicks)
- self.cli_graph = chart.google_chart(ylabels = ['total'],
- title = "clicks")
-
+ self.cli_graph = TrafficGraph(clicks, ylabels = ['total'],
+ title = "clicks")
mean_CPM = sum(x[1] for x in CPM) * 1. / max(len(CPM), 1)
- chart = graph.LineGraph([(d, min(x, mean_CPM*2)) for d, x in CPM],
- colors = ["336699"])
- self.cpm_graph = chart.google_chart(ylabels = ['CPM ($)'],
- title = "cost per 1k impressions " +
- "($%.2f average)" % mean_CPM)
+ self.cpm_graph = TrafficGraph([(d, min(x, mean_CPM*2)) for d, x in CPM],
+ colors = ["336699"], ylabels = ['CPM ($)'],
+ title = "cost per 1k impressions " +
+ "($%.2f average)" % mean_CPM)
mean_CPC = sum(x[1] for x in CPC) * 1. / max(len(CPC), 1)
- chart = graph.LineGraph([(d, min(x, mean_CPC*2)) for d, x in CPC],
- colors = ["336699"])
- self.cpc_graph = chart.google_chart(ylabels = ['CPC ($0.01)'],
- title = "cost per click " +
- "($%.2f average)" % (mean_CPC/100.))
+ self.cpc_graph = TrafficGraph([(d, min(x, mean_CPC*2)) for d, x in CPC],
+ colors = ["336699"], ylabels = ['CPC ($0.01)'],
+ title = "cost per click " +
+ "($%.2f average)" % (mean_CPC/100.))
- chart = graph.LineGraph(CTR, colors = ["336699"])
- self.ctr_graph = chart.google_chart(ylabels = ['CTR (%)'],
- title = "click through rate")
+ self.ctr_graph = TrafficGraph(CTR, colors = ["336699"], ylabels = ['CTR (%)'],
+ title = "click through rate")
else:
self.imp_graph = self.cli_graph = None
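The graphs above use the conventional definitions: CTR is `100 * clicks / impressions` (percent, exactly as computed in this hunk), CPM is spend per thousand impressions, and CPC is tracked in $0.01 units (hence `mean_CPC / 100` in the title). A worked example under those definitions; the exact pairing of spend to traffic happens outside this hunk:

    def day_metrics(spent, impressions, clicks):
        ctr = 100.0 * clicks / impressions if impressions else 0  # percent
        cpm = 1000.0 * spent / impressions if impressions else 0  # $ per 1k
        cpc = 100.0 * spent / clicks if clicks else 0             # cents
        return ctr, cpm, cpc

    print day_metrics(20.0, 10000, 50)   # -> (0.5, 2.0, 40.0)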
diff --git a/r2/r2/lib/pages/things.py b/r2/r2/lib/pages/things.py
index dbb159690..ac372a91c 100644
--- a/r2/r2/lib/pages/things.py
+++ b/r2/r2/lib/pages/things.py
@@ -25,9 +25,10 @@ from r2.models import LinkListing, Link, PromotedLink
from r2.models import make_wrapper, IDBuilder, Thing
from r2.lib.utils import tup
from r2.lib.strings import Score
-from r2.lib.promote import promo_edit_url, promo_traffic_url
+from r2.lib.promote import *
from datetime import datetime
from pylons import c, g
+from pylons.i18n import _, ungettext
class PrintableButtons(Styled):
def __init__(self, style, thing,
@@ -35,11 +36,17 @@ class PrintableButtons(Styled):
show_distinguish = False,
show_indict = False, **kw):
show_report = show_report and c.user_is_loggedin
+
Styled.__init__(self, style = style,
+ thing = thing,
fullname = thing._fullname,
can_ban = thing.can_ban,
show_spam = thing.show_spam,
show_reports = thing.show_reports,
+ show_ignore = thing.show_reports or
+ (thing.reveal_trial_info and not thing.show_spam),
+ approval_checkmark = getattr(thing,
+ "approval_checkmark", None),
show_delete = show_delete,
show_report = show_report,
show_indict = show_indict,
@@ -59,7 +66,7 @@ class LinkButtons(PrintableButtons):
# do we show the report button?
show_report = not is_author and report
- if c.user_is_admin:
+ if c.user_is_admin and thing.promoted is None:
show_report = False
show_indict = True
else:
@@ -67,6 +74,10 @@ class LinkButtons(PrintableButtons):
# do we show the delete button?
show_delete = is_author and delete and not thing._deleted
+ # disable the delete button for live sponsored links
+ if (is_promoted(thing) and not c.user_is_sponsor):
+ show_delete = False
+
# do we show the distinguish button? among other things,
# we never want it to appear on link listings -- only
# comments pages
@@ -76,15 +87,12 @@ class LinkButtons(PrintableButtons):
kw = {}
if thing.promoted is not None:
now = datetime.now(g.tz)
- promotable = (thing._date <= now and thing.promote_until > now)
kw = dict(promo_url = promo_edit_url(thing),
- promote_bid = thing.promote_bid,
promote_status = getattr(thing, "promote_status", 0),
user_is_sponsor = c.user_is_sponsor,
- promotable = promotable,
traffic_url = promo_traffic_url(thing),
is_author = thing.is_author)
-
+
PrintableButtons.__init__(self, 'linkbuttons', thing,
# user existence and preferences
is_loggedin = c.user_is_loggedin,
@@ -163,16 +171,16 @@ def default_thing_wrapper(**params):
def wrap_links(links, wrapper = default_thing_wrapper(),
listing_cls = LinkListing,
num = None, show_nums = False, nextprev = False,
- num_margin = None, mid_margin = None):
+ num_margin = None, mid_margin = None, **kw):
links = tup(links)
if not all(isinstance(x, str) for x in links):
links = [x._fullname for x in links]
- b = IDBuilder(links, num = num, wrap = wrapper)
+ b = IDBuilder(links, num = num, wrap = wrapper, **kw)
l = listing_cls(b, nextprev = nextprev, show_nums = show_nums)
if num_margin is not None:
l.num_margin = num_margin
if mid_margin is not None:
l.mid_margin = mid_margin
return l.listing()
-
+
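With `**kw` now forwarded to `IDBuilder`, callers of `wrap_links` can thread builder options straight through, such as the `keep_fn`/`skip` arguments seen with `IDBuilder` in the organic.py hunks. An illustrative call (names hypothetical):

    listing = wrap_links(link_fullnames,
                         num = 10,
                         skip = True,                       # IDBuilder option
                         keep_fn = lambda x: not x._deleted)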
diff --git a/r2/r2/lib/promote.py b/r2/r2/lib/promote.py
index e81eb944b..abe5b9122 100644
--- a/r2/r2/lib/promote.py
+++ b/r2/r2/lib/promote.py
@@ -22,13 +22,18 @@
from __future__ import with_statement
from r2.models import *
+from r2.lib.wrapped import Wrapped
from r2.lib import authorize
from r2.lib import emailer, filters
from r2.lib.memoize import memoize
from r2.lib.template_helpers import get_domain
-from r2.lib.utils import Enum
+from r2.lib.utils import Enum, UniqueIterator
+from organic import keep_fresh_links
from pylons import g, c
from datetime import datetime, timedelta
+from r2.lib.db.queries import make_results, db_sort, add_queries, merge_results
+import itertools
+
import random
promoted_memo_lifetime = 30
@@ -38,15 +43,22 @@ promoted_lock_key = 'cached_promoted_links_lock2'
STATUS = Enum("unpaid", "unseen", "accepted", "rejected",
"pending", "promoted", "finished")
-PromoteSR = 'promos'
-try:
- PromoteSR = Subreddit._new(name = PromoteSR,
- title = "promoted links",
- author_id = -1,
- type = "public",
- ip = '0.0.0.0')
-except SubredditExists:
- PromoteSR = Subreddit._by_name(PromoteSR)
+CAMPAIGN = Enum("start", "end", "bid", "sr", "trans_id")
+
+@memoize("get_promote_srid")
+def get_promote_srid(name = 'promos'):
+ try:
+ sr = Subreddit._by_name(name)
+ except NotFound:
+ sr = Subreddit._new(name = name,
+ title = "promoted links",
+ # negative author_ids make this unlistable
+ author_id = -1,
+ type = "public",
+ ip = '0.0.0.0')
+ return sr._id
+
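A link's `campaigns` attribute (see `new_campaign` below) maps an integer index to `[start, end, bid, sr_name, trans_id]`, and the `CAMPAIGN` enum names those positions. A toy entry showing both access styles:

    from datetime import date

    campaign = [date(2009, 12, 1), date(2009, 12, 8), 500.0, 'pics', 0]
    sd, ed, bid, sr, trans_id = campaign        # positional unpacking
    # or by name, since Enum values are the positions 0..n-1:
    #   bid = campaign[CAMPAIGN.bid]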
+# attrs
def promo_traffic_url(l):
domain = get_domain(cname = False, subreddit = False)
@@ -56,6 +68,191 @@ def promo_edit_url(l):
domain = get_domain(cname = False, subreddit = False)
return "http://%s/promoted/edit_promo/%s" % (domain, l._id36)
+def pay_url(l, indx):
+ return "%spromoted/pay/%s/%d" % (g.payment_domain, l._id36, indx)
+
+# booleans
+
+def is_promo(link):
+ return (link and not link._deleted and link.promoted is not None
+ and hasattr(link, "promote_status"))
+
+def is_accepted(link):
+ return is_promo(link) and (link.promote_status != STATUS.rejected and
+ link.promote_status >= STATUS.accepted)
+
+def is_unpaid(link):
+ return is_promo(link) and link.promote_status == STATUS.unpaid
+
+def is_unapproved(link):
+ return is_promo(link) and link.promote_status <= STATUS.unseen
+
+def is_rejected(link):
+ return is_promo(link) and link.promote_status == STATUS.rejected
+
+def is_promoted(link):
+ return is_promo(link) and link.promote_status == STATUS.promoted
+
+# no references to promote_status below this function, pls
+def set_status(l, status, onchange = None):
+ # keep this out here. Useful for updating the queue if there is a bug
+ # and for initial migration
+ add_queries([_sponsored_link_query(None, l.author_id),
+ _sponsored_link_query(None),
+ _sponsored_link_query(status, l.author_id),
+ _sponsored_link_query(status)], insert_items = [l])
+
+ # no need to delete or commit if the status is unchanged
+ if status != getattr(l, "promote_status", None):
+ # new links won't even have a promote_status yet
+ if hasattr(l, "promote_status"):
+ add_queries([_sponsored_link_query(l.promote_status, l.author_id),
+ _sponsored_link_query(l.promote_status)],
+ delete_items = [l])
+ l.promote_status = status
+ l._commit()
+ if onchange:
+ onchange()
+
+# query queue updates below
+
+def _sponsored_link_query(status, author_id = None):
+ q = Link._query(Link.c.sr_id == get_promote_srid(),
+ Link.c._spam == (True, False),
+ Link.c._deleted == (True,False),
+ sort = db_sort('new'))
+ if status is not None:
+ q._filter(Link.c.promote_status == status)
+ if author_id is not None:
+ q._filter(Link.c.author_id == author_id)
+ return make_results(q)
+
+def get_unpaid_links(author_id = None):
+ return _sponsored_link_query(STATUS.unpaid, author_id = author_id)
+
+def get_unapproved_links(author_id = None):
+ return _sponsored_link_query(STATUS.unseen, author_id = author_id)
+
+def get_rejected_links(author_id = None):
+ return _sponsored_link_query(STATUS.rejected, author_id = author_id)
+
+def get_live_links(author_id = None):
+ return _sponsored_link_query(STATUS.promoted, author_id = author_id)
+
+def get_accepted_links(author_id = None):
+ return merge_results(_sponsored_link_query(STATUS.accepted,
+ author_id = author_id),
+ _sponsored_link_query(STATUS.pending,
+ author_id = author_id),
+ _sponsored_link_query(STATUS.finished,
+ author_id = author_id))
+
+def get_all_links(author_id = None):
+ return _sponsored_link_query(None, author_id = author_id)
+
+
+# subreddit roadblocking functions
+
+roadblock_prefix = "promotion_roadblock"
+def roadblock_key(sr_name, d):
+ return "%s-%s_%s" % (roadblock_prefix,
+ sr_name, d.strftime("%Y_%m_%d"))
+
+def roadblock_reddit(sr_name, start_date, end_date):
+ d = start_date
+ now = promo_datetime_now().date()
+ # set the expire to be 1 week after the roadblock end date
+ expire = ((end_date - now).days + 7) * 86400
+ while d < end_date:
+ g.hardcache.add(roadblock_key(sr_name, d),
+ "%s-%s" % (start_date.strftime("%Y_%m_%d"),
+ end_date.strftime("%Y_%m_%d")),
+ time = expire)
+ d += timedelta(1)
+
+def unroadblock_reddit(sr_name, start_date, end_date):
+ d = start_date
+ while d < end_date:
+ g.hardcache.delete(roadblock_key(sr_name, d))
+ d += timedelta(1)
+
+def is_roadblocked(sr_name, start_date, end_date):
+ d = start_date
+ while d < end_date:
+ res = g.hardcache.get(roadblock_key(sr_name, d))
+ if res:
+ start_date, end_date = res.split('-')
+ start_date = datetime.strptime(start_date, "%Y_%m_%d").date()
+ end_date = datetime.strptime(end_date, "%Y_%m_%d").date()
+ return (start_date, end_date)
+ d += timedelta(1)
+
+def get_roadblocks():
+ rbs = g.hardcache.backend.ids_by_category(roadblock_prefix)
+ by_sr = {}
+ for rb in rbs:
+ rb = rb.split('_')
+ date = datetime.strptime('_'.join(rb[1:]), "%Y_%m_%d").date()
+ by_sr.setdefault(rb[0], []).append((date, date + timedelta(1)))
+
+ blobs = []
+ for k, v in by_sr.iteritems():
+ for sd, ed in sorted(v):
+ if blobs and blobs[-1][0] == k and blobs[-1][-1] == sd:
+ blobs[-1] = (k, blobs[-1][1], ed)
+ else:
+ blobs.append((k, sd, ed))
+ blobs.sort(key = lambda x: x[1])
+ return blobs
+
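The roadblock store writes one hardcache key per blocked day, so membership tests are a straight day-by-day probe and `is_roadblocked` can answer with the full stored range. Illustrative use of the functions above (subreddit name and dates hypothetical):

    from datetime import date

    roadblock_reddit('pics', date(2009, 12, 1), date(2009, 12, 8))
    # any overlapping window reports the whole blocking range:
    print is_roadblocked('pics', date(2009, 12, 3), date(2009, 12, 4))
    # -> (datetime.date(2009, 12, 1), datetime.date(2009, 12, 8))
    unroadblock_reddit('pics', date(2009, 12, 1), date(2009, 12, 8))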
+# control functions
+
+class RenderableCampaign(object):
+ __slots__ = ["indx", "start_date", "end_date", "duration",
+ "bid", "sr", "status"]
+
+ def __init__(self, link, indx, raw_campaign, transaction):
+ sd, ed, bid, sr, trans_id = raw_campaign
+ self.indx = indx
+ self.start_date = sd.strftime("%m/%d/%Y")
+ self.end_date = ed.strftime("%m/%d/%Y")
+ ndays = (ed - sd).days
+ self.duration = strings.time_label % dict(num = ndays,
+ time = ungettext("day", "days", ndays))
+ self.bid = "%.2f" % bid
+ self.sr = sr
+
+ self.status = dict(paid = bool(transaction),
+ complete = False,
+ free = (trans_id < 0),
+ pay_url = pay_url(link, indx),
+ sponsor = c.user_is_sponsor)
+ if transaction:
+ if transaction.is_void():
+ self.status['paid'] = False
+ self.status['free'] = False
+ elif transaction.is_charged():
+ self.status['complete'] = True
+
+ def get(self, key, default):
+ return getattr(self, key, default)
+
+ def __iter__(self):
+ for s in self.__slots__:
+ yield getattr(self, s)
+
+def editable_add_props(l):
+ if not isinstance(l, Wrapped):
+ l = Wrapped(l)
+
+ l.bids = get_transactions(l)
+ l.campaigns = dict((indx, RenderableCampaign(l, indx,
+ campaign, l.bids.get(indx)))
+ for indx, campaign in
+ getattr(l, "campaigns", {}).iteritems())
+
+ return l
+
# These could be done with relationships, but that seems overkill as
# we never query based on user and only check per-thing
def is_traffic_viewer(thing, user):
@@ -98,420 +295,492 @@ def promotion_log(thing, text, commit = False):
if commit:
thing._commit()
return text
-
-def new_promotion(title, url, user, ip, promote_start, promote_until, bid,
- disable_comments = False,
- max_clicks = None, max_views = None):
+
+def new_promotion(title, url, user, ip):
"""
Creates a new promotion with the provided title, etc, and sets it
status to be 'unpaid'.
"""
- l = Link._submit(title, url, user, PromoteSR, ip)
+ sr = Subreddit._byID(get_promote_srid())
+ l = Link._submit(title, url, user, sr, ip)
l.promoted = True
- l.promote_until = None
- l.promote_status = STATUS.unpaid
- l.promote_trans_id = 0
- l.promote_bid = bid
- l.maximum_clicks = max_clicks
- l.maximum_views = max_views
- l.disable_comments = disable_comments
- update_promo_dates(l, promote_start, promote_until)
+ l.disable_comments = False
+ l.campaigns = {}
promotion_log(l, "promotion created")
l._commit()
+
+ # set the status of the link, populating the query queue
+ if c.user_is_sponsor or getattr(user, "trusted_sponsor", False):
+ set_status(l, STATUS.accepted)
+ else:
+ set_status(l, STATUS.unpaid)
+
# the user has posted a promotion, so enable the promote menu unless
# they have already opted out
if user.pref_show_promote is not False:
user.pref_show_promote = True
user._commit()
+
+ # notify of new promo
emailer.new_promo(l)
return l
-def update_promo_dates(thing, start_date, end_date, commit = True):
- if thing and thing.promote_status < STATUS.pending or c.user_is_admin:
- if (thing._date != start_date or
- thing.promote_until != end_date):
- promotion_log(thing, "duration updated (was %s -> %s)" %
- (thing._date, thing.promote_until))
- thing._date = start_date
- thing.promote_until = end_date
- PromoteDates.update(thing, start_date, end_date)
- if commit:
- thing._commit()
- return True
- return False
+def sponsor_wrapper(link):
+ w = Wrapped(link)
+ w.render_class = PromotedLink
+ w.rowstyle = "promoted link"
+ return w
-def update_promo_data(thing, title, url, commit = True):
- if thing and (thing.url != url or thing.title != title):
- if thing.title != title:
- promotion_log(thing, "title updated (was '%s')" %
- thing.title)
- if thing.url != url:
- promotion_log(thing, "url updated (was '%s')" %
- thing.url)
- old_url = thing.url
- thing.url = url
- thing.title = title
- if not c.user_is_sponsor:
- unapproved_promo(thing)
- thing.update_url_cache(old_url)
- if commit:
- thing._commit()
- return True
- return False
+def campaign_lock(link):
+ return "edit_promo_campaign_lock_" + str(link._id)
-def refund_promo(thing, user, refund):
- cur_refund = getattr(thing, "promo_refund", 0)
- refund = min(refund, thing.promote_bid - cur_refund)
- if refund > 0:
- thing.promo_refund = cur_refund + refund
- if authorize.refund_transaction(refund, user, thing.promote_trans_id):
- promotion_log(thing, "payment update: refunded '%.2f'" % refund)
- else:
- promotion_log(thing, "payment update: refund failed")
- if thing.promote_status in (STATUS.promoted, STATUS.finished):
- PromoteDates.update_bid(thing)
- thing._commit()
+def get_transactions(link):
+ # tuple of (transaction_id, key)
+ trans_tuples = [(v[CAMPAIGN.trans_id], k)
+ for k, v in getattr(link, "campaigns", {}).iteritems()
+ if v[CAMPAIGN.trans_id] != 0]
+ bids = authorize.get_transactions(*trans_tuples)
+ return dict((indx, bids.get((t, indx))) for t, indx in trans_tuples)
-def auth_paid_promo(thing, user, pay_id, bid):
+
+def new_campaign(link, dates, bid, sr):
+ with g.make_lock(campaign_lock(link)):
+ # get a copy of the attr so that it'll be
+ # marked as dirty on the next write.
+ campaigns = getattr(link, "campaigns", {}).copy()
+ # create a new index
+ indx = max(campaigns.keys() or [-1]) + 1
+ # add the campaign
+ # store the name not the reddit
+ sr = sr.name if sr else ""
+ campaigns[indx] = list(dates) + [bid, sr, 0]
+ PromotionWeights.add(link, indx, sr, dates[0], dates[1], bid)
+ link.campaigns = {}
+ link.campaigns = campaigns
+ link._commit()
+ return indx
+
+def free_campaign(link, index, user):
+ auth_campaign(link, index, user, -1)
+
+def edit_campaign(link, index, dates, bid, sr):
+ with g.make_lock(campaign_lock(link)):
+ campaigns = getattr(link, "campaigns", {}).copy()
+ if index in campaigns:
+ trans_id = campaigns[index][CAMPAIGN.trans_id]
+ prev_bid = campaigns[index][CAMPAIGN.bid]
+ # store the name not the reddit
+ sr = sr.name if sr else ""
+ campaigns[index] = list(dates) + [bid, sr, trans_id]
+ PromotionWeights.reschedule(link, index,
+ sr, dates[0], dates[1], bid)
+ link.campaigns = {}
+ link.campaigns = campaigns
+ link._commit()
+
+            #TODO cancel any existing charges if the bid has changed
+            if prev_bid != bid:
+                void_campaign(link, index, c.user)
+
+
+def delete_campaign(link, index):
+ with g.make_lock(campaign_lock(link)):
+ campaigns = getattr(link, "campaigns", {}).copy()
+ if index in campaigns:
+            PromotionWeights.delete_unfinished(link, index)
+            #TODO cancel any existing charges
+            # void before dropping the campaign: void_campaign re-reads
+            # link.campaigns, so the transaction can't be found once the
+            # index has been deleted and committed
+            void_campaign(link, index, c.user)
+            del campaigns[index]
+            link.campaigns = {}
+            link.campaigns = campaigns
+            link._commit()
+
+def void_campaign(link, index, user):
+ campaigns = getattr(link, "campaigns", {}).copy()
+ if index in campaigns:
+ sd, ed, bid, sr, trans_id = campaigns[index]
+ transactions = get_transactions(link)
+ if transactions.get(index):
+ # void the existing transaction
+ a = Account._byID(link.author_id)
+ authorize.void_transaction(a, trans_id, index)
+
+def auth_campaign(link, index, user, pay_id):
"""
- promotes a promotion from 'unpaid' to 'unseen'.
-
- In the case that bid already exists on the current promotion, the
- previous transaction is voided and repalced with the new bid.
+ for setting up a campaign as a real bid with authorize.net
"""
- if thing.promote_status == STATUS.finished:
- return
- elif (thing.promote_status > STATUS.unpaid and
- thing.promote_trans_id):
- # void the existing transaction
- authorize.void_transaction(user, thing.promote_trans_id)
+ with g.make_lock(campaign_lock(link)):
+ campaigns = getattr(link, "campaigns", {}).copy()
+ if index in campaigns:
+ # void any existing campaign
+ void_campaign(link, index, user)
- # create a new transaction and update the bid
- trans_id = authorize.auth_transaction(bid, user, pay_id, thing)
- thing.promote_bid = bid
-
- if trans_id is not None and int(trans_id) != 0:
- # we won't reset to unseen if already approved and the payment went ok
- promotion_log(thing, "updated payment and/or bid: SUCCESS (id: %s)"
- % trans_id)
- if trans_id < 0:
- promotion_log(thing, "FREEBIE")
- thing.promote_status = max(thing.promote_status, STATUS.unseen)
- thing.promote_trans_id = trans_id
- else:
- # something bad happend.
- promotion_log(thing, "updated payment and/or bid: FAILED")
- thing.promore_status = STATUS.unpaid
- thing.promote_trans_id = 0
- thing._commit()
+ sd, ed, bid, sr, trans_id = campaigns[index]
+ # create a new transaction and update the bid
+ test = 1 if g.debug else None
+ trans_id, reason = authorize.auth_transaction(bid, user,
+ pay_id, link,
+ index,
+ test = test)
+ if not reason and trans_id is not None and int(trans_id) != 0:
+ promotion_log(link, "updated payment and/or bid: "
+ "SUCCESS (id: %s)"
+ % trans_id)
+ if trans_id < 0:
+ promotion_log(link, "FREEBIE")
- emailer.promo_bid(thing)
- PromoteDates.update_bid(thing)
- return bool(trans_id)
+ set_status(link,
+ max(STATUS.unseen if trans_id else STATUS.unpaid,
+ link.promote_status))
+ # notify of campaign creation
+ # update the query queue
+ if user._id == link.author_id and trans_id > 0:
+ emailer.promo_bid(link, bid, sd)
-
-def unapproved_promo(thing):
- """
- revert status of a promoted link to unseen.
-
- NOTE: if the promotion is live, this has the side effect of
- bumping it from the live queue pending an admin's intervention to
- put it back in place.
- """
- # only reinforce pending if it hasn't been seen yet.
- if STATUS.unseen < thing.promote_status < STATUS.finished:
- promotion_log(thing, "status update: unapproved")
- unpromote(thing, status = STATUS.unseen)
-
-def accept_promo(thing):
- """
- Accept promotion and set its status as accepted if not already
- charged, else pending.
- """
- if thing.promote_status < STATUS.pending:
- bid = Bid.one(thing.promote_trans_id)
- if bid.status == Bid.STATUS.CHARGE:
- thing.promote_status = STATUS.pending
- # repromote if already promoted before
- if hasattr(thing, "promoted_on"):
- promote(thing)
else:
- emailer.queue_promo(thing)
- else:
- thing.promote_status = STATUS.accepted
- promotion_log(thing, "status update: accepted")
- emailer.accept_promo(thing)
- thing._commit()
+            # something bad happened.
+ promotion_log(link, "updated payment and/or bid: FAILED ('%s')"
+ % reason)
+ trans_id = 0
-def reject_promo(thing, reason = ""):
- """
- Reject promotion and set its status as rejected
+ campaigns[index] = sd, ed, bid, sr, trans_id
+ link.campaigns = {}
+ link.campaigns = campaigns
+ link._commit()
- Here, we use unpromote so that we can also remove a promotion from
- the queue if it has become promoted.
- """
- unpromote(thing, status = STATUS.rejected)
- promotion_log(thing, "status update: rejected. Reason: '%s'" % reason)
- emailer.reject_promo(thing, reason)
-
-def delete_promo(thing):
- """
- deleted promotions have to be specially dealt with. Reject the
- promo and void any associated transactions.
- """
- thing.promoted = False
- thing._deleted = True
- reject_promo(thing, reason = "The promotion was deleted by the user")
- if thing.promote_trans_id > 0:
- user = Account._byID(thing.author_id)
- authorize.void_transaction(user, thing.promote_trans_id)
-
-
-
-def pending_promo(thing):
- """
- For an accepted promotion within the proper time interval, charge
- the account of the user and set the new status as pending.
- """
- if thing.promote_status == STATUS.accepted and thing.promote_trans_id:
- user = Account._byID(thing.author_id)
- # TODO: check for charge failures/recharges, etc
- if authorize.charge_transaction(user, thing.promote_trans_id):
- promotion_log(thing, "status update: pending")
- thing.promote_status = STATUS.pending
- thing.promote_paid = thing.promote_bid
- thing._commit()
- emailer.queue_promo(thing)
- else:
- promotion_log(thing, "status update: charge failure")
- thing._commit()
- #TODO: email rejection?
-
-
-
-def promote(thing, batch = False):
- """
- Given a promotion with pending status, set the status to promoted
- and move it into the promoted queue.
- """
- if thing.promote_status == STATUS.pending:
- promotion_log(thing, "status update: live")
- PromoteDates.log_start(thing)
- thing.promoted_on = datetime.now(g.tz)
- thing.promote_status = STATUS.promoted
- thing._commit()
- emailer.live_promo(thing)
- if not batch:
- with g.make_lock(promoted_lock_key):
- promoted = get_promoted_direct()
- if thing._fullname not in promoted:
- promoted[thing._fullname] = auction_weight(thing)
- set_promoted(promoted)
-
-def unpromote(thing, batch = False, status = STATUS.finished):
- """
- unpromote a link with provided status, removing it from the
- current promotional queue.
- """
- if status == STATUS.finished:
- PromoteDates.log_end(thing)
- emailer.finished_promo(thing)
- thing.unpromoted_on = datetime.now(g.tz)
- promotion_log(thing, "status update: finished")
- thing.promote_status = status
- thing._commit()
- if not batch:
- with g.make_lock(promoted_lock_key):
- promoted = get_promoted_direct()
- if thing._fullname in promoted:
- del promoted[thing._fullname]
- set_promoted(promoted)
-
-# batch methods for moving promotions into the pending queue, and
-# setting status as pending.
+ return bool(trans_id), reason
+ return False, ""
# dates are referenced to UTC, while we want promos to change at (roughly)
# midnight eastern-US.
# TODO: make this a config parameter
timezone_offset = -5 # hours
timezone_offset = timedelta(0, timezone_offset * 3600)
+def promo_datetime_now(offset = None):
+ now = datetime.now(g.tz) + timezone_offset
+ if offset is not None:
+ now += timedelta(offset)
+ return now
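
Since timezone_offset above is a negative five-hour timedelta, promo_datetime_now() just shifts the server clock so that promo day boundaries land at roughly midnight eastern-US, and `offset` moves whole days from there. A worked example with a made-up clock:

    # if datetime.now(g.tz) == 2010-04-02 03:00 UTC:
    # promo_datetime_now()           -> 2010-04-01 22:00 (still April 1 for promos)
    # promo_datetime_now(offset = 1) -> 2010-04-02 22:00 (tomorrow's schedule)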
-def promo_datetime_now():
- return datetime.now(g.tz) + timezone_offset
-def generate_pending(date = None, test = False):
+
+def get_scheduled_campaign(link, offset = None):
"""
- Look-up links that are to be promoted on the provided date (the
- default is now plus one day) and set their status as pending if
- they have been accepted. This results in credit cards being charged.
+ returns the indices of the campaigns that (datewise) could be active.
"""
- date = date or (promo_datetime_now() + timedelta(1))
- links = Link._by_fullname([p.thing_name for p in
- PromoteDates.for_date(date)],
- data = True,
- return_dict = False)
- for l in links:
- if l._deleted and l.promote_status != STATUS.rejected:
- print "DELETING PROMO", l
- # deleted promos should never be made pending
- delete_promo(l)
- elif l.promote_status == STATUS.accepted:
- if test:
- print "Would have made pending: (%s, %s)" % \
- (l, l.make_permalink(None))
- else:
- pending_promo(l)
+ now = promo_datetime_now(offset = offset)
+ active = []
+ campaigns = getattr(link, "campaigns", {})
+ for indx in campaigns:
+ sd, ed, bid, sr, trans_id = campaigns[indx]
+ if sd <= now and ed >= now:
+ active.append(indx)
+ return active
-def promote_promoted(test = False):
+def accept_promotion(link):
"""
- make promotions that are no longer supposed to be active
- 'finished' and find all pending promotions that are supposed to be
- promoted and promote them.
+ Accepting is campaign agnostic. Accepting the ad just means that
+ it is allowed to run if payment has been processed.
+
+    If a campaign is able to run, this also requeues it.
"""
- from r2.lib.traffic import load_traffic
- with g.make_lock(promoted_lock_key):
- now = promo_datetime_now()
+ promotion_log(link, "status update: accepted")
+ # update the query queue
- promoted = Link._by_fullname(get_promoted_direct().keys(),
- data = True, return_dict = False)
- promos = {}
- for l in promoted:
- keep = True
- if l.promote_until < now:
- keep = False
- maximum_clicks = getattr(l, "maximum_clicks", None)
- maximum_views = getattr(l, "maximum_views", None)
- if maximum_clicks or maximum_views:
- # grab the traffic
- traffic = load_traffic("day", "thing", l._fullname)
- if traffic:
- # (unique impressions, number impressions,
- # unique clicks, number of clicks)
- traffic = [y for x, y in traffic]
- traffic = map(sum, zip(*traffic))
- uimp, nimp, ucli, ncli = traffic
- if maximum_clicks and maximum_clicks < ncli:
- keep = False
- if maximum_views and maximum_views < nimp:
- keep = False
+ set_status(link, STATUS.accepted)
+ now = promo_datetime_now(0)
+ if link._fullname in set(l.thing_name for l in
+ PromotionWeights.get_campaigns(now)):
+ promotion_log(link, "requeued")
+ #TODO: smarter would be nice, but this will have to do for now
+ make_daily_promotions()
+ emailer.accept_promo(link)
- if not keep:
- if test:
- print "Would have unpromoted: (%s, %s)" % \
- (l, l.make_permalink(None))
+def reject_promotion(link, reason = None):
+ promotion_log(link, "status update: rejected")
+ # update the query queue
+ set_status(link, STATUS.rejected)
+ # check to see if this link is a member of the current live list
+ links, weighted = get_live_promotions()
+ if link._fullname in links:
+ links.remove(link._fullname)
+ for k in list(weighted.keys()):
+ weighted[k] = [(lid, w) for lid, w in weighted[k]
+ if lid != link._fullname]
+ if not weighted[k]:
+ del weighted[k]
+ set_live_promotions((links, weighted))
+ promotion_log(link, "dequeued")
+ emailer.reject_promo(link, reason = reason)
+
+
+def unapprove_promotion(link):
+ promotion_log(link, "status update: unapproved")
+ # update the query queue
+ set_status(link, STATUS.unseen)
+    links, weights = get_live_promotions()
+
+def accepted_iter(func, offset = 0):
+ now = promo_datetime_now(offset = offset)
+ campaigns = PromotionWeights.get_campaigns(now)
+ # load the links that have campaigns coming up
+ links = Link._by_fullname(set(x.thing_name for x in campaigns),
+                              data = True, return_dict = True)
+ for x in campaigns:
+ l = links[x.thing_name]
+ if is_accepted(l):
+ # get the campaign of interest from the link
+ camp = getattr(l, "campaigns", {}).get(x.promo_idx)
+ # the transaction id is the last of the campaign tuple
+ if camp and camp[CAMPAIGN.trans_id]:
+ func(l, camp, x.promo_idx, x.weight)
+
+
+def charge_pending(offset = 1):
+ def _charge(l, camp, indx, weight):
+ user = Account._byID(l.author_id)
+ sd, ed, bid, sr, trans_id = camp
+ try:
+ if (not authorize.is_charged_transaction(trans_id, indx) and
+ authorize.charge_transaction(user, trans_id, indx)):
+ # TODO: probably not absolutely necessary
+ promotion_log(l, "status update: pending")
+ # update the query queue
+ if is_promoted(l):
+ emailer.queue_promo(l, bid, trans_id)
else:
- unpromote(l, batch = True)
-
- new_promos = Link._query(Link.c.promote_status == (STATUS.pending,
- STATUS.promoted),
- Link.c.promoted == True,
- data = True)
- for l in new_promos:
- if l.promote_until > now and l._date <= now:
- if test:
- print "Would have promoted: %s" % l
- else:
- promote(l, batch = True)
- promos[l._fullname] = auction_weight(l)
- elif l.promote_until <= now:
- if test:
- print "Would have unpromoted: (%s, %s)" % \
- (l, l.make_permalink(None))
- else:
- unpromote(l, batch = True)
-
- # remove unpaid promos that are scheduled to run on today or before
- unpaid_promos = Link._query(Link.c.promoted == True,
- Link.c.promote_status == STATUS.unpaid,
- Link.c._date < now,
- Link.c._deleted == False,
- data = True)
- for l in unpaid_promos:
- if test:
- print "Would have rejected: %s" % promo_edit_url(l)
- else:
- reject_promo(l, reason = "We're sorry, but this sponsored link was not set up for payment before the appointed date. Please add payment info and move the date into the future if you would like to resubmit. Also please feel free to email us at selfservicesupport@reddit.com if you believe this email is in error.")
+ set_status(l, STATUS.pending,
+ onchange = lambda: emailer.queue_promo(l, bid, trans_id) )
+ except:
+ print "Error on %s, campaign %s" % (l, indx)
+ accepted_iter(_charge, offset = offset)
- if test:
- print promos
+def get_scheduled(offset = 0):
+ """
+ gets a dictionary of sr -> list of (link, weight) for promotions
+ that should be live as of the day which is offset days from today.
+ """
+ by_sr = {}
+ def _promote(l, camp, indx, weight):
+ sd, ed, bid, sr, trans_id = camp
+ if authorize.is_charged_transaction(trans_id, indx):
+ by_sr.setdefault(sr, []).append((l, weight))
+ accepted_iter(_promote, offset = offset)
+ return by_sr
+
+
+def get_traffic_weights(srnames):
+ from r2.lib import traffic
+
+ # the weight is just the last 7 days of impressions (averaged)
+ def weigh(t, npoints = 7):
+ if t:
+ t = [y[1] for x, y in t[-npoints-1:-1]]
+ return max(float(sum(t)) / len(t), 1)
+ return 1
+
+ default_traffic = [weigh(traffic.load_traffic("day", "reddit", sr.name))
+ for sr in Subreddit.top_lang_srs('all', 10)]
+ default_traffic = (float(max(sum(default_traffic),1)) /
+ max(len(default_traffic), 1))
+
+ res = {}
+ for srname in srnames:
+ if srname:
+ res[srname] = (default_traffic /
+ weigh(traffic.load_traffic("day", "reddit", srname)) )
else:
- set_promoted(promos)
- return promos
+ res[srname] = 1
+ return res
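
The weight returned per subreddit is the ratio of the sitewide default traffic (the average over the top ten language reddits) to that subreddit's own trailing average, so smaller communities get proportionally larger weights, and the front page (srname == "") is pinned at 1. A worked example with assumed traffic numbers:

    # if the top-10 reddits average 1,000,000 impressions/day and
    # "programming" averages 200,000 over the same 7-day window:
    # res["programming"] == 1000000.0 / 200000 == 5.0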
-def auction_weight(link):
- duration = (link.promote_until - link._date).days
- return duration and link.promote_bid / duration
+def get_weighted_schedule(offset = 0):
+ by_sr = get_scheduled(offset = offset)
+ weight_dict = get_traffic_weights(by_sr.keys())
+ weighted = {}
+ links = set()
+ for sr_name, t_tuples in by_sr.iteritems():
+ weighted[sr_name] = []
+ for l, weight in t_tuples:
+ links.add(l._fullname)
+ weighted[sr_name].append((l._fullname,
+ weight * weight_dict[sr_name]))
+ return links, weighted
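
The pair built here, a set of fullnames plus a per-subreddit weight table, is the same shape that make_daily_promotions() stores via set_live_promotions() (after swapping subreddit names for ids). A sketch with made-up fullnames:

    # links    == set(["t3_aa", "t3_bb"])
    # weighted == {"programming": [("t3_aa", 5.0)],
    #              "": [("t3_bb", 1.0)]}   # "" is the front page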
-def set_promoted(link_names):
- # caller is assumed to execute me inside a lock if necessary
- g.permacache.set(promoted_memo_key, link_names)
+def promotion_key():
+ return "current_promotions"
- #update cache
- get_promoted(_update = True)
+def get_live_promotions():
+ return g.permacache.get(promotion_key()) or (set(), {})
-@memoize(promoted_memo_key, time = promoted_memo_lifetime)
-def get_promoted():
- # does not lock the list to return it, so (slightly) stale data
- # will be returned if called during an update rather than blocking
- return get_promoted_direct()
+def set_live_promotions(x):
+ return g.permacache.set(promotion_key(), x)
-def get_promoted_direct():
- return g.permacache.get(promoted_memo_key, {})
+def make_daily_promotions(offset = 0, test = False):
+ old_links = set([])
+ all_links, weighted = get_weighted_schedule(offset)
+ x = get_live_promotions()
+ if x:
+ old_links, old_weights = x
+ # links that need to be promoted
+ new_links = all_links - old_links
+        # links that are live now but no longer scheduled
+        old_links = old_links - all_links
+ else:
+        new_links = all_links
+
+ links = Link._by_fullname(new_links.union(old_links), data = True,
+ return_dict = True)
+ for l in old_links:
+ if is_promoted(links[l]):
+ if test:
+ print "unpromote", l
+ else:
+ # update the query queue
+ set_status(links[l], STATUS.finished,
+ onchange = lambda: emailer.finished_promo(links[l]))
+
+ for l in new_links:
+ if is_accepted(links[l]):
+ if test:
+ print "promote2", l
+ else:
+ # update the query queue
+ set_status(links[l], STATUS.promoted,
+ onchange = lambda: emailer.live_promo(links[l]))
+
+ # convert the weighted dict to use sr_ids which are more useful
+ srs = {"":""}
+ for srname in weighted.keys():
+ if srname:
+ srs[srname] = Subreddit._by_name(srname)._id
+ weighted = dict((srs[k], v) for k, v in weighted.iteritems())
+
+ if not test:
+ set_live_promotions((all_links, weighted))
+ else:
+ print (all_links, weighted)
-def get_promoted_slow():
- # to be used only by a human at a terminal
- with g.make_lock(promoted_lock_key):
- links = Link._query(Link.c.promote_status == STATUS.promoted,
- Link.c.promoted == True,
- data = True)
- link_names = dict((x._fullname, auction_weight(x)) for x in links)
+def get_promotion_list(user, site):
+ # site is specified, pick an ad from that site
+ if not isinstance(site, FakeSubreddit):
+ srids = set([site._id])
+ # site is Fake, user is not. Pick based on their subscriptions.
+ elif user and not isinstance(user, FakeAccount):
+ srids = set(Subreddit.reverse_subscriber_ids(user) + [""])
+ # both site and user are "fake" -- get the default subscription list
+ else:
+ srids = set(Subreddit.user_subreddits(None, True) + [""])
- set_promoted(link_names)
-
- return link_names
+ return get_promotions_cached(srids)
-def random_promoted():
+#@memoize('get_promotions_cached', time = 10 * 60)
+def get_promotions_cached(sites):
+ p = get_live_promotions()
+ if p:
+ links, promo_dict = p
+ available = {}
+ for k, links in promo_dict.iteritems():
+ if k in sites:
+ for l, w in links:
+ available[l] = available.get(l, 0) + w
+ # sort the available list by weight
+ links = available.keys()
+ links.sort(key = lambda x: -available[x])
+ norm = sum(available.values())
+ # return a sorted list of (link, norm_weight)
+ return [(l, available[l] / norm) for l in links]
+
+ return []
+
+def randomized_promotion_list(user, site):
+ promos = get_promotion_list(user, site)
+ # no promos, no problem
+ if not promos:
+ return []
+    # more than one: randomize
+ elif len(promos) > 1:
+ n = random.uniform(0, 1)
+ for i, (l, w) in enumerate(promos):
+ n -= w
+ if n < 0:
+ promos = promos[i:] + promos[:i]
+ break
+ # fall thru for the length 1 case here as well
+ return [l for l, w in promos]
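
Note that randomized_promotion_list() rotates rather than shuffles: a single uniform draw walks the weight-sorted list until the running weight total exceeds it, and the list restarts at that item, so each link leads with probability equal to its normalized weight while relative order is preserved. A worked example with assumed weights:

    # promos == [("a", 0.5), ("b", 0.3), ("c", 0.2)], n = random.uniform(0, 1)
    # n == 0.62: 0.62 - 0.5 >= 0; then 0.12 - 0.3 < 0, so rotate at "b"
    # result ["b", "c", "a"]; "a" leads 50% of draws, "b" 30%, "c" 20%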
+
+
+def insert_promoted(link_names, pos, promoted_every_n = 5):
"""
- return a list of the currently promoted items, randomly choosing
- the order of the list based on the bid-weighing.
+ Inserts promoted links into an existing organic list. Destructive
+ on `link_names'
"""
- bids = get_promoted()
- market = sum(bids.values())
- if market:
- # get a list of current promotions, sorted by their bid amount
- promo_list = bids.keys()
- # sort by bids and use the thing_id as the tie breaker (for
- # consistent sorting)
- promo_list.sort(key = lambda x: (bids[x], x), reverse = True)
- if len(bids) > 1:
- # pick a number, any number
- n = random.uniform(0, 1)
- for i, p in enumerate(promo_list):
- n -= bids[p] / market
- if n < 0:
- return promo_list[i:] + promo_list[:i]
- return promo_list
+ promoted_items = randomized_promotion_list(c.user, c.site)
+
+ if not promoted_items:
+ return link_names, pos
+
+ # no point in running the builder over more promoted links than
+ # we'll even use
+ max_promoted = max(1,len(link_names)/promoted_every_n)
+
+ builder = IDBuilder(promoted_items, keep_fn = keep_fresh_links,
+ skip = True)
+ promoted_items = builder.get_items()[0]
+
+ focus = None
+ if promoted_items:
+ focus = promoted_items[0]._fullname
+ # insert one promoted item for every N items
+ for i, item in enumerate(promoted_items):
+ p = i * (promoted_every_n + 1)
+ if p > len(link_names):
+ break
+ p += pos
+ if p > len(link_names):
+ p = p % len(link_names)
+
+ link_names.insert(p, item._fullname)
+
+ link_names = filter(None, link_names)
+ if focus:
+ try:
+ pos = link_names.index(focus)
+ except ValueError:
+ pass
+    # 50% of the time, don't insert a promoted link at the head of the
+    # list for logged-in users; do the same for logged-out users only
+    # when the pool of promoted links is smaller than 3 (to avoid
+    # showing the same promoted link to the same person too often)
+ if ((c.user_is_loggedin or len(promoted_items) < 3) and
+ random.choice((True,False))):
+ pos = (pos + 1) % len(link_names)
+
+ return list(UniqueIterator(link_names)), pos
+
+def benchmark_promoted(user, site, pos = 0, link_sample = 50, attempts = 100):
+ c.user = user
+ c.site = site
+ link_names = ["blah%s" % i for i in xrange(link_sample)]
+ res = {}
+ for i in xrange(attempts):
+ names, p = insert_promoted(link_names[::], pos)
+ name = names[p]
+ res[name] = res.get(name, 0) + 1
+ res = list(res.iteritems())
+ res.sort(key = lambda x : x[1], reverse = True)
+ expected = dict(get_promotion_list(user, site))
+ for l, v in res:
+ print "%s: %5.3f %3.5f" % (l,float(v)/attempts, expected.get(l, 0))
-def test_random_promoted(n = 1000):
- promos = get_promoted()
- market = sum(promos.values())
- if market:
- res = {}
- for i in xrange(n):
- key = random_promoted()[0]
- res[key] = res.get(key, 0) + 1
- print "%10s expected actual E/A" % "thing"
- print "------------------------------------"
- for k, v in promos.iteritems():
- expected = float(v) / market * 100
- actual = float(res.get(k, 0)) / n * 100
+def Run(offset = 0):
+ charge_pending(offset = offset + 1)
+ charge_pending(offset = offset)
+ make_daily_promotions(offset = offset)
- print "%10s %6.2f%% %6.2f%% %6.2f" % \
- (k, expected, actual, expected / actual if actual else 0)
diff --git a/r2/r2/lib/s3cp.py b/r2/r2/lib/s3cp.py
index 5ed85b436..b54ce2dc6 100644
--- a/r2/r2/lib/s3cp.py
+++ b/r2/r2/lib/s3cp.py
@@ -22,79 +22,32 @@
# CondeNet, Inc. All Rights Reserved.
################################################################################
-import base64, hmac, sha, os, sys, getopt
-from datetime import datetime
-from pylons import g,config
+import boto
+from boto.s3.connection import S3Connection
+from boto.s3.key import Key
+from pylons import g
KEY_ID = g.S3KEY_ID
SECRET_KEY = g.S3SECRET_KEY
+NEVER = 'Thu, 31 Dec 2037 23:59:59 GMT'
+
class S3Exception(Exception): pass
-def make_header(verb, date, amz_headers, resource, content_type):
- content_md5 = ''
+def send_file(bucketname, filename, content, content_type = 'text/plain', never_expire = False):
+ # this function is pretty low-traffic, but if we start using it a
+ # lot more we'll want to maintain a connection pool across the app
+ # rather than connecting on every invocation
- #amazon headers
- lower_head = dict((key.lower(), val)
- for key, val in amz_headers.iteritems())
- keys = lower_head.keys()
- keys.sort()
- amz_lst = ['%s:%s' % (key, lower_head[key]) for key in keys]
- amz_str = '\n'.join(amz_lst)
+ # TODO: add ACL support instead of always using public-read
- s = '\n'.join((verb,
- content_md5,
- content_type,
- date,
- amz_str,
- resource))
+ connection = S3Connection(KEY_ID, SECRET_KEY)
+ bucket = connection.get_bucket(bucketname)
+ k = bucket.new_key(filename)
- h = hmac.new(SECRET_KEY, s, sha)
- return base64.encodestring(h.digest()).strip()
-
-def send_file(filename, resource, content_type, acl, rate, meter):
- date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT")
- amz_headers = {'x-amz-acl': acl}
+ headers={'Content-Type': content_type}
+ if never_expire:
+ headers['Expires'] = NEVER
- auth_header = make_header('PUT', date, amz_headers, resource, content_type)
-
- params = ['-T', filename,
- '-H', 'x-amz-acl: %s' % amz_headers['x-amz-acl'],
- '-H', 'Authorization: AWS %s:%s' % (KEY_ID, auth_header),
- '-H', 'Date: %s' % date]
-
- if content_type:
- params.append('-H')
- params.append('Content-Type: %s' % content_type)
-
- if rate:
- params.append('--limit-rate')
- params.append(rate)
-
- if meter:
- params.append('-o')
- params.append('s3cp.output')
- else:
- params.append('-s')
-
- params.append('https://s3.amazonaws.com%s' % resource)
-
- exit_code = os.spawnlp(os.P_WAIT, 'curl', 'curl', *params)
- if exit_code:
- raise S3Exception(exit_code)
-
-
-if __name__ == '__main__':
- options = "a:c:l:m"
- try:
- opts, args = getopt.getopt(sys.argv[1:], options)
- except:
- sys.exit(2)
-
- opts = dict(opts)
-
- send_file(args[0], args[1],
- opts.get('-c', ''),
- opts.get('-a', 'private'),
- opts.get('-l'),
- opts.has_key('-m'))
+ k.set_contents_from_string(content, policy='public-read',
+ headers=headers)
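
For reference, a minimal sketch of a caller of the new boto-backed helper; the bucket and key names here are made up, only send_file()'s signature comes from the code above:

    from r2.lib import s3cp
    s3cp.send_file('your.bucket.here', 'thumbs/t3_xxxx.png', png_data,
                   content_type = 'image/png', never_expire = True)

Everything is uploaded with the public-read ACL until the TODO above is addressed.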
diff --git a/r2/r2/lib/scraper.py b/r2/r2/lib/scraper.py
index 78bc2740c..fec58a919 100644
--- a/r2/r2/lib/scraper.py
+++ b/r2/r2/lib/scraper.py
@@ -23,6 +23,7 @@
from pylons import g
from r2.lib import utils
from r2.lib.memoize import memoize
+import simplejson as json
from urllib2 import Request, HTTPError, URLError, urlopen
from httplib import InvalidURL
@@ -158,8 +159,8 @@ class MediaEmbed(object):
scrolling = False
def __init__(self, height, width, content, scrolling = False):
- self.height = height
- self.width = width
+ self.height = int(height)
+ self.width = int(width)
self.content = content
self.scrolling = scrolling
@@ -317,10 +318,11 @@ def youtube_in_google(google_url):
def make_scraper(url):
domain = utils.domain(url)
scraper = Scraper
- for suffix, cls in scrapers.iteritems():
- if domain.endswith(suffix):
- scraper = cls
- break
+ for suffix, clses in scrapers.iteritems():
+ for cls in clses:
+ if domain.endswith(suffix):
+ scraper = cls
+ break
#sometimes youtube scrapers masquerade as google scrapers
if scraper == GootubeScraper:
@@ -625,6 +627,216 @@ class CraigslistScraper(MediaScraper):
content = content,
scrolling = True)
+
+########## oembed rich-media scrapers ##########
+
+class OEmbed(Scraper):
+ """
+ Oembed Scraper
+ ==============
+ Tries to use the oembed standard to create a media object.
+
+ url_re: Regular Expression to match the incoming url against.
+ api_endpoint: Url of the api end point you are using.
+ api_params: Default Params to be sent with the outgoing request.
+ """
+ url_re = ''
+ api_endpoint = ''
+ api_params = {}
+
+ def __init__(self, url):
+ Scraper.__init__(self, url)
+ self.oembed = None
+
+ #Fallback to the scraper if the url doesn't match
+ if not self.url_re.match(self.url):
+ self.__class__ = Scraper
+
+ def __repr__(self):
+ return "%s(%r)" % (self.__class__.__name__, self.url)
+
+ def download(self):
+        self.api_params.update({'url': self.url})
+ query = urllib.urlencode(self.api_params)
+ api_url = "%s?%s" % (self.api_endpoint, query)
+
+ self.content_type, self.content = fetch_url(api_url)
+
+ #Either a 404 or 500.
+ if not self.content:
+ #raise ValueError('ISSUE CALLING %s' %api_url)
+ log.warn('oEmbed call (%s) failed to return content for %s'
+ %(api_url, self.url))
+ return None
+
+ try:
+ self.oembed = json.loads(self.content)
+ except ValueError, e:
+            log.error('oEmbed call (%s) returned invalid json for %s'
+ %(api_url, self.url))
+ return None
+
+ def image_urls(self):
+ #if the original url was an image, use that
+ if self.oembed and self.oembed.get('type') =='photo':
+ yield self.oembed.get('url')
+ elif self.oembed and self.oembed.get('thumbnail_url'):
+ yield self.oembed.get('thumbnail_url')
+
+ def largest_image_url(self):
+        #download the oEmbed data if we haven't fetched it yet
+ if not self.oembed:
+ self.download()
+
+ #if the original url was of the photo type
+ if self.oembed and self.oembed.get('type') =='photo':
+ return self.oembed.get('url')
+ elif self.oembed and self.oembed.get('thumbnail_url'):
+ return self.oembed.get('thumbnail_url')
+
+ def media_object(self):
+        #download the oEmbed data if we haven't fetched it yet
+ if not self.oembed:
+ self.download()
+
+ if self.oembed and self.oembed.get('type') in ['video', 'rich']:
+ for domain in self.domains:
+ if self.url.find(domain) > -1:
+ return dict(type=domain, oembed=self.oembed)
+ return None
+
+ @classmethod
+ def media_embed(cls, video_id = None, height = None, width = None, **kw):
+ content = None
+ oembed = kw.get('oembed')
+
+ # check if oembed is there and has html
+ if oembed and oembed.get('html'):
+ content = oembed.get('html')
+ if content and oembed.get('height') and oembed.get('width'):
+ return MediaEmbed(height = oembed['height'],
+ width = oembed['width'],
+ content = content)
+
+class EmbedlyOEmbed(OEmbed):
+ """
+ Embedly oEmbed Provider
+ =======================
+ documentation: http://api.embed.ly
+ """
+ domains = ['youtube.com', 'veoh.com', 'justin.tv', 'ustream.com',
+ 'qik.com', 'revision3.com', 'dailymotion.com', 'collegehumor.com',
+ 'twitvid.com', 'break.com', 'vids.myspace.com', 'metacafe.com',
+ 'blip.tv', 'video.google.com','revver.com', 'video.yahoo.com',
+ 'viddler.com', 'liveleak.com', 'animoto.com', 'yfrog.com',
+ 'tweetphoto.com', 'flickr.com', 'twitpic.com', 'imgur.com',
+ 'posterous.com', 'twitgoo.com', 'photobucket.com', 'phodroid.com',
+ 'xkcd.com', 'asofterword.com', 'qwantz.com', '23hq.com', 'hulu.com',
+ 'movieclips.com', 'crackle.com', 'fancast.com', 'funnyordie.com',
+ 'vimeo.com', 'ted.com', 'omnisio.com', 'nfb.ca', 'thedailyshow.com',
+ 'movies.yahoo.com', 'colbertnation.com', 'comedycentral.com',
+ 'theonion.com', 'wordpress.tv', 'traileraddict.com', 'soundcloud.com',
+ 'slideshare.net', 'scribd.com', 'screenr.com', '5min.com',
+ 'howcast.com', 'my.opera.com', 'escapistmagazine.com', ]
+
+ url_re = re.compile('^http://.+\.youtube\.com/watch.+|'+\
+ '^http://.+\.youtube\.com/v/.+|'+\
+ '^http://youtube\.com/watch.+|'+\
+ '^http://youtube\.com/v/.+|'+\
+ '^http://youtu\.be/.+|'+\
+ '^http://www\.veoh\.com/.*/watch/.+|'+\
+ '^http://www\.justin\.tv/clip/.+|'+\
+ '^http://www\.justin\.tv/.+|'+\
+ '^http://justin\.tv/clip/.+|'+\
+ '^http://justin\.tv/.+|'+\
+ '^http://www\.ustream\.tv/recorded/.+|'+\
+ '^http://www\.ustream\.tv/channel/.+|'+\
+ '^http://qik\.com/video/.+|'+\
+ '^http://qik\.com/.+|'+\
+ '^http://.*revision3\.com/.+|'+\
+ '^http://www.dailymotion\.com/video/.+|'+\
+ '^http://www.dailymotion\.com/.+/video/.+|'+\
+ '^http://dailymotion\.com/video/.+|'+\
+ '^http://dailymotion\.com/.+/video/.+|'+\
+ '^http://www\.collegehumor\.com/video:.+|'+\
+ '^http://www\.twitvid\.com/.+|'+\
+ '^http://www\.break\.com/.*/.+|'+\
+ '^http://vids\.myspace\.com/index\.cfm\?fuseaction=vids\.individual&videoid.+|'+\
+ '^http://www\.myspace\.com/index\.cfm\?fuseaction=.*&videoid.+|'+\
+ '^http://www\.metacafe\.com/watch/.+|'+\
+ '^http://blip\.tv/file/.+|'+\
+ '^http://.+\.blip\.tv/file/.+|'+\
+ '^http://video\.google\.com/videoplay\?.+|'+\
+ '^http://revver\.com/video/.+|'+\
+ '^http://www\.revver\.com/video/.+|'+\
+ '^http://video\.yahoo\.com/watch/.*/.+|'+\
+ '^http://video\.yahoo\.com/network/.+|'+\
+ '^http://.*viddler\.com/explore/.*/videos/.+|'+\
+ '^http://liveleak\.com/view\?.+|'+\
+ '^http://www\.liveleak\.com/view\?.+|'+\
+ '^http://animoto\.com/play/.+|'+\
+ '^http://yfrog\..*/.+|'+\
+ '^http://.+\.yfrog\..*/.+|'+\
+ '^http://tweetphoto\.com/.+|'+\
+ '^http://www\.flickr\.com/photos/.+|'+\
+ '^http://twitpic\.com/.+|'+\
+ '^http://.*imgur\.com/.+|'+\
+ '^http://.*\.posterous\.com/.+|'+\
+ '^http://twitgoo\.com/.+|'+\
+ '^http://i.*\.photobucket\.com/albums/.+|'+\
+ '^http://gi.*\.photobucket\.com/groups/.+|'+\
+ '^http://phodroid\.com/.*/.*/.+|'+\
+ '^http://xkcd\.com/.+|'+\
+ '^http://www\.asofterworld\.com/index\.php\?id=.+|'+\
+ '^http://www\.qwantz\.com/index\.php\?comic=.+|'+\
+ '^http://23hq\.com/.*/photo/.+|'+\
+ '^http://www\.23hq\.com/.*/photo/.+|'+\
+ '^http://www\.hulu\.com/watch/.+|'+\
+ '^http://movieclips\.com/watch/.*/.*/|'+\
+ '^http://movieclips\.com/watch/.*/.*/.*/.+|'+\
+ '^http://.*crackle\.com/c/.+|'+\
+ '^http://www\.fancast\.com/.*/videos|'+\
+ '^http://www\.funnyordie\.com/videos/.+|'+\
+ '^http://www\.vimeo\.com/groups/.*/videos/.+|'+\
+ '^http://www\.vimeo\.com/.+|'+\
+ '^http://vimeo\.com/groups/.*/videos/.+|'+\
+ '^http://vimeo\.com/.+|'+\
+ '^http://www\.ted\.com/.+|'+\
+ '^http://www\.omnisio\.com/.+|'+\
+ '^http://.*nfb\.ca/film/.+|'+\
+ '^http://www\.thedailyshow\.com/watch/.+|'+\
+ '^http://www\.thedailyshow\.com/full-episodes/.+|'+\
+ '^http://www\.thedailyshow\.com/collection/.*/.*/.+|'+\
+ '^http://movies\.yahoo\.com/.*movie/.*/video/.+|'+\
+ '^http://movies\.yahoo\.com/movie/.*/info|'+\
+ '^http://movies\.yahoo\.com/movie/.*/trailer|'+\
+ '^http://www\.colbertnation\.com/the-colbert-report-collections/.+|'+\
+ '^http://www\.colbertnation\.com/full-episodes/.+|'+\
+ '^http://www\.colbertnation\.com/the-colbert-report-videos/.+|'+\
+ '^http://www\.comedycentral\.com/videos/index\.jhtml\?.+|'+\
+ '^http://www\.theonion\.com/video/.+|'+\
+ '^http://theonion\.com/video/.+|'+\
+ '^http://wordpress\.tv/.*/.*/.*/.*/|'+\
+ '^http://www\.traileraddict\.com/trailer/.+|'+\
+ '^http://www\.traileraddict\.com/clip/.+|'+\
+ '^http://www\.traileraddict\.com/poster/.+|'+\
+ '^http://soundcloud\.com/.+|'+\
+ '^http://soundcloud\.com/.*/.+|'+\
+ '^http://soundcloud\.com/.*/sets/.+|'+\
+ '^http://soundcloud\.com/groups/.+|'+\
+ '^http://www\.slideshare\.net/.*/.+|'+\
+ '^http://.*\.scribd\.com/doc/.+|'+\
+ '^http://screenr\.com/.+|'+\
+ '^http://www\.5min\.com/Video/.+|'+\
+ '^http://www\.howcast\.com/videos/.+|'+\
+ '^http://my\.opera\.com/.*/albums/show\.dml\?id=.+|'+\
+ '^http://my\.opera\.com/.*/albums/showpic\.dml\?album=.+&picture=.+|'+\
+ '^http://escapistmagazine\.com/videos/.+|'+\
+ '^http://www\.escapistmagazine\.com/videos/.+', re.I
+ )
+ api_endpoint = 'http://api.embed.ly/v1/api/oembed'
+ api_params = {'format':'json', 'maxwidth':600 }
+
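
To make the flow above concrete: download() GETs api_endpoint with the target url appended, and a successful response for a video looks roughly like the abridged, hypothetical payload below; media_object() stores it on the link and media_embed() later wraps its html/height/width in a MediaEmbed.

    # GET http://api.embed.ly/v1/api/oembed?format=json&maxwidth=600&url=...
    # {"type": "video", "height": 360, "width": 600,
    #  "html": "<object ...></object>", "thumbnail_url": "http://..."}

The field names follow the oEmbed spec; besides type, html, height and width, only url and thumbnail_url are consulted (for thumbnails).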
class GenericScraper(MediaScraper):
"""a special scrapper not associated with any domains, used to
write media objects to links by hand"""
@@ -662,13 +874,14 @@ class YoutubeEmbedDeepScraper(DeepScraper):
youtube_id = self.youtube_url_re.match(movie_embed['src']).group(2)
youtube_url = 'http://www.youtube.com/watch?v=%s"' % youtube_id
log.debug('found youtube embed %s' % youtube_url)
- mo = YoutubeScraper(youtube_url).media_object()
+ mo = make_scraper(youtube_url).media_object()
mo['deep'] = scraper.url
return mo
#scrapers =:= dict(domain -> ScraperClass)
scrapers = {}
-for scraper in [ YoutubeScraper,
+for scraper in [ EmbedlyOEmbed,
+ YoutubeScraper,
MetacafeScraper,
GootubeScraper,
VimeoScraper,
@@ -686,14 +899,20 @@ for scraper in [ YoutubeScraper,
EscapistScraper,
JustintvScraper,
SoundcloudScraper,
- #CraigslistScraper,
+ CraigslistScraper,
GenericScraper,
]:
for domain in scraper.domains:
- scrapers[domain] = scraper
+ scrapers.setdefault(domain, []).append(scraper)
deepscrapers = [YoutubeEmbedDeepScraper]
+def get_media_embed(media_object):
+    for scraper in scrapers.get(media_object['type'], []):
+ res = scraper.media_embed(**media_object)
+ if res:
+ return res
+
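
A usage sketch: a media object stores its source domain under 'type', so the lookup walks every scraper registered for that domain in registration order (EmbedlyOEmbed first) and returns the first MediaEmbed produced. With a hypothetical stored object:

    mo = {"type": "youtube.com",
          "oembed": {"type": "video", "html": "<object ...></object>",
                     "height": 360, "width": 600}}
    embed = get_media_embed(mo)   # MediaEmbed via EmbedlyOEmbed.media_embed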
def convert_old_media_objects():
q = Link._query(Link.c.media_object is not None,
Link.c._date > whenever,
@@ -728,10 +947,10 @@ test_urls = [
'http://www.facebook.com/pages/Rick-Astley/5807213510?sid=c99aaf3888171e73668a38e0749ae12d', # regular thumbnail finder
'http://www.flickr.com/photos/septuagesima/317819584/', # thumbnail with image_src
- 'http://www.youtube.com/watch?v=Yu_moia-oVI',
+ #'http://www.youtube.com/watch?v=Yu_moia-oVI',
'http://www.metacafe.com/watch/sy-1473689248/rick_astley_never_gonna_give_you_up_official_music_video/',
'http://video.google.com/videoplay?docid=5908758151704698048',
- 'http://vimeo.com/4495451',
+ #'http://vimeo.com/4495451',
'http://www.break.com/usercontent/2008/11/Macy-s-Thankgiving-Day-Parade-Rick-Roll-611965.html',
'http://www.theonion.com/content/video/sony_releases_new_stupid_piece_of',
'http://www.collegehumor.com/video:1823712',
@@ -761,8 +980,291 @@ test_urls = [
'http://listen.grooveshark.com/#/song/Never_Gonna_Give_You_Up/12616328',
'http://tinysong.com/2WOJ', # also Grooveshark
-
- 'http://www.rickrolled.com/videos/video/rickrolld' # test the DeepScraper
+ 'http://www.slideshare.net/doina/happy-easter-from-holland-slideshare',
+ 'http://www.slideshare.net/stinson/easter-1284190',
+ 'http://www.slideshare.net/angelspascual/easter-events',
+ 'http://www.slideshare.net/sirrods/happy-easter-3626014',
+ 'http://www.slideshare.net/sirrods/happy-easter-wide-screen',
+ 'http://www.slideshare.net/carmen_serbanescu/easter-holiday',
+ 'http://www.slideshare.net/Lithuaniabook/easter-1255880',
+ 'http://www.slideshare.net/hues/easter-plants',
+ 'http://www.slideshare.net/Gospelman/passover-week',
+ 'http://www.slideshare.net/angelspascual/easter-around-the-world-1327542',
+ 'http://www.scribd.com/doc/13994900/Easter',
+ 'http://www.scribd.com/doc/27425714/Celebrating-Easter-ideas-for-adults-and-children',
+ 'http://www.scribd.com/doc/28010101/Easter-Foods-No-Name',
+ 'http://www.scribd.com/doc/28452730/Easter-Cards',
+ 'http://www.scribd.com/doc/19026714/The-Easter-Season',
+ 'http://www.scribd.com/doc/29183659/History-of-Easter',
+ 'http://www.scribd.com/doc/15632842/The-Last-Easter',
+ 'http://www.scribd.com/doc/28741860/The-Plain-Truth-About-Easter',
+ 'http://www.scribd.com/doc/23616250/4-27-08-ITS-EASTER-AGAIN-ORTHODOX-EASTER-by-vanderKOK',
+ 'http://screenr.com/t9d',
+ 'http://screenr.com/yLS',
+ 'http://screenr.com/gzS',
+ 'http://screenr.com/IwU',
+ 'http://screenr.com/FM7',
+ 'http://screenr.com/Ejg',
+ 'http://screenr.com/u4h',
+ 'http://screenr.com/QiN',
+ 'http://screenr.com/zts',
+ 'http://www.5min.com/Video/How-to-Decorate-Easter-Eggs-with-Decoupage-142076462',
+ 'http://www.5min.com/Video/How-to-Color-Easter-Eggs-Dye-142076281',
+ 'http://www.5min.com/Video/How-to-Make-an-Easter-Egg-Diorama-142076482',
+ 'http://www.5min.com/Video/How-to-Make-Sequined-Easter-Eggs-142076512',
+ 'http://www.5min.com/Video/How-to-Decorate-Wooden-Easter-Eggs-142076558',
+ 'http://www.5min.com/Video/How-to-Blow-out-an-Easter-Egg-142076367',
+ 'http://www.5min.com/Video/Learn-About-Easter-38363995',
+ 'http://www.howcast.com/videos/368909-Easter-Egg-Dying-How-To-Make-Ukrainian-Easter-Eggs',
+ 'http://www.howcast.com/videos/368911-Easter-Egg-Dying-How-To-Color-Easter-Eggs-With-Food-Dyes',
+ 'http://www.howcast.com/videos/368913-Easter-Egg-Dying-How-To-Make-Homemade-Easter-Egg-Dye',
+ 'http://www.howcast.com/videos/220110-The-Meaning-Of-Easter',
+ 'http://my.opera.com/nirvanka/albums/show.dml?id=519866',
+ 'http://img402.yfrog.com/i/mfe.jpg/',
+ 'http://img20.yfrog.com/i/dy6.jpg/',
+ 'http://img145.yfrog.com/i/4mu.mp4/',
+ 'http://img15.yfrog.com/i/mygreatmovie.mp4/',
+ 'http://img159.yfrog.com/i/500x5000401.jpg/',
+ 'http://tweetphoto.com/14784358',
+ 'http://tweetphoto.com/16044847',
+ 'http://tweetphoto.com/16718883',
+ 'http://tweetphoto.com/16451148',
+ 'http://tweetphoto.com/16133984',
+ 'http://tweetphoto.com/8069529',
+ 'http://tweetphoto.com/16207556',
+ 'http://tweetphoto.com/7448361',
+ 'http://tweetphoto.com/16069325',
+ 'http://tweetphoto.com/4791033',
+ 'http://www.flickr.com/photos/10349896@N08/4490293418/',
+ 'http://www.flickr.com/photos/mneylon/4483279051/',
+ 'http://www.flickr.com/photos/xstartxtodayx/4488996521/',
+ 'http://www.flickr.com/photos/mommyknows/4485313917/',
+ 'http://www.flickr.com/photos/29988430@N06/4487127638/',
+ 'http://www.flickr.com/photos/excomedia/4484159563/',
+ 'http://www.flickr.com/photos/sunnybrook100/4471526636/',
+ 'http://www.flickr.com/photos/jaimewalsh/4489497178/',
+ 'http://www.flickr.com/photos/29988430@N06/4486475549/',
+ 'http://www.flickr.com/photos/22695183@N08/4488681694/',
+ 'http://twitpic.com/1cnsf6',
+ 'http://twitpic.com/1cgtti',
+ 'http://twitpic.com/1coc0n',
+ 'http://twitpic.com/1cm8us',
+ 'http://twitpic.com/1cgks4',
+ 'http://imgur.com/6pLoN',
+ 'http://onegoodpenguin.posterous.com/golden-tee-live-2010-easter-egg',
+ 'http://adland.posterous.com/?tag=royaleastershowauckland',
+ 'http://apartmentliving.posterous.com/biggest-easter-egg-hunts-in-the-dc-area',
+ 'http://twitgoo.com/1as',
+ 'http://twitgoo.com/1p94',
+ 'http://twitgoo.com/4kg2',
+ 'http://twitgoo.com/6c9',
+ 'http://twitgoo.com/1w5',
+ 'http://twitgoo.com/6mu',
+ 'http://twitgoo.com/1w3',
+ 'http://twitgoo.com/1om',
+ 'http://twitgoo.com/1mh',
+ 'http://www.qwantz.com/index.php?comic=1686',
+ 'http://www.qwantz.com/index.php?comic=773',
+ 'http://www.qwantz.com/index.php?comic=1018',
+ 'http://www.qwantz.com/index.php?comic=1019',
+ 'http://www.23hq.com/mhg/photo/5498347',
+ 'http://www.23hq.com/Greetingdesignstudio/photo/5464607',
+ 'http://www.23hq.com/Greetingdesignstudio/photo/5464590',
+ 'http://www.23hq.com/Greetingdesignstudio/photo/5464605',
+ 'http://www.23hq.com/Greetingdesignstudio/photo/5464604',
+ 'http://www.23hq.com/dvilles2/photo/5443192',
+ 'http://www.23hq.com/Greetingdesignstudio/photo/5464606',
+ 'http://www.youtube.com/watch?v=gghKdx558Qg',
+ 'http://www.youtube.com/watch?v=yPid9BLQQcg',
+ 'http://www.youtube.com/watch?v=uEo2vboUYUk',
+ 'http://www.youtube.com/watch?v=geUhtoHbLu4',
+ 'http://www.youtube.com/watch?v=Zk7dDekYej0',
+ 'http://www.youtube.com/watch?v=Q3tgMosx_tI',
+ 'http://www.youtube.com/watch?v=s9P8_vgmLfs',
+ 'http://www.youtube.com/watch?v=1cmtN1meMmk',
+ 'http://www.youtube.com/watch?v=AVzj-U5Ihm0',
+ 'http://www.veoh.com/collection/easycookvideos/watch/v366931kcdgj7Hd',
+ 'http://www.veoh.com/collection/easycookvideos/watch/v366991zjpANrqc',
+ 'http://www.veoh.com/browse/videos/category/educational/watch/v7054535EZGFJqyX',
+ 'http://www.veoh.com/browse/videos/category/lifestyle/watch/v18155013XBBtnYwq',
+ 'http://www.justin.tv/easter7presents',
+ 'http://www.justin.tv/easterfraud',
+ 'http://www.justin.tv/cccog27909',
+ 'http://www.justin.tv/clip/6e8c18f7050',
+ 'http://www.justin.tv/venom24',
+ 'http://qik.com/video/1622287',
+ 'http://qik.com/video/1503735',
+ 'http://qik.com/video/40504',
+ 'http://qik.com/video/1445763',
+ 'http://qik.com/video/743285',
+ 'http://qik.com/video/1445299',
+ 'http://qik.com/video/1443200',
+ 'http://qik.com/video/1445889',
+ 'http://qik.com/video/174242',
+ 'http://qik.com/video/1444897',
+ 'http://revision3.com/hak5/DualCore',
+ 'http://revision3.com/popsiren/charm',
+ 'http://revision3.com/tekzilla/eyefinity',
+ 'http://revision3.com/diggnation/2005-10-06',
+ 'http://revision3.com/hak5/netcat-virtualization-wordpress/',
+ 'http://revision3.com/infected/forsaken',
+ 'http://revision3.com/hak5/purepwnage',
+ 'http://revision3.com/tekzilla/wowheadset',
+ 'http://www.dailymotion.com/video/xcstzd_greek-wallets-tighten-during-easter_news',
+ 'http://www.dailymotion.com/video/xcso4y_exclusive-easter-eggs-easter-basket_lifestyle',
+ 'http://www.dailymotion.com/video/x2sgkt_evil-easter-bunny',
+ 'http://www.dailymotion.com/video/xco7oc_invitation-to-2010-easter-services_news',
+ 'http://www.dailymotion.com/video/xcss6b_big-cat-easter_animals',
+ 'http://www.dailymotion.com/video/xcszw1_easter-bunny-visits-buenos-aires-zo_news',
+ 'http://www.dailymotion.com/video/xcsfvs_forecasters-warn-of-easter-misery_news',
+ 'http://www.collegehumor.com/video:1682246',
+ 'http://www.twitvid.com/D9997',
+ 'http://www.twitvid.com/902B9',
+ 'http://www.twitvid.com/C33F8',
+ 'http://www.twitvid.com/63F73',
+ 'http://www.twitvid.com/BC0BA',
+ 'http://www.twitvid.com/1C33C',
+ 'http://www.twitvid.com/8A8E2',
+ 'http://www.twitvid.com/51035',
+ 'http://www.twitvid.com/5C733',
+ 'http://www.break.com/game-trailers/game/just-cause-2/just-cause-2-lost-easter-egg?res=1',
+ 'http://www.break.com/usercontent/2010/3/10/easter-holiday-2009-slideshow-1775624',
+ 'http://www.break.com/index/a-very-sexy-easter-video.html',
+ 'http://www.break.com/usercontent/2010/3/11/this-video-features-gizzi-erskine-making-easter-cookies-1776089',
+ 'http://www.break.com/usercontent/2007/4/4/happy-easter-265717',
+ 'http://www.break.com/usercontent/2007/4/17/extreme-easter-egg-hunting-276064',
+ 'http://www.break.com/usercontent/2006/11/18/the-evil-easter-bunny-184789',
+ 'http://www.break.com/usercontent/2006/4/16/hoppy-easter-kitty-91040',
+ 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=104063637',
+ 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=104004674',
+ 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=103928002',
+ 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=103999188',
+ 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=103920940',
+ 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=103981831',
+ 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=104004673',
+ 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=104046456',
+ 'http://www.metacafe.com/watch/105023/the_easter_bunny/',
+ 'http://www.metacafe.com/watch/4376131/easter_lay/',
+ 'http://www.metacafe.com/watch/2245996/how_to_make_ukraine_easter_eggs/',
+ 'http://www.metacafe.com/watch/4374339/easter_eggs/',
+ 'http://www.metacafe.com/watch/2605860/filled_easter_baskets/',
+ 'http://www.metacafe.com/watch/2372088/easter_eggs/',
+ 'http://www.metacafe.com/watch/3043671/www_goodnews_ws_easter_island/',
+ 'http://www.metacafe.com/watch/1652057/easter_eggs/',
+ 'http://www.metacafe.com/watch/1173632/ultra_kawaii_easter_bunny_party/',
+ 'http://celluloidremix.blip.tv/file/3378272/',
+ 'http://blip.tv/file/449469',
+ 'http://blip.tv/file/199776',
+ 'http://blip.tv/file/766967',
+ 'http://blip.tv/file/770127',
+ 'http://blip.tv/file/854925',
+ 'http://www.blip.tv/file/22695?filename=Uncle_dale-THEEASTERBUNNYHATESYOU395.flv',
+ 'http://iofa.blip.tv/file/3412333/',
+ 'http://blip.tv/file/190393',
+ 'http://blip.tv/file/83152',
+ 'http://video.google.com/videoplay?docid=-5427138374898988918&q=easter+bunny&pl=true',
+ 'http://video.google.com/videoplay?docid=7785441737970480237',
+ 'http://video.google.com/videoplay?docid=2320995867449957036',
+ 'http://video.google.com/videoplay?docid=-2586684490991458032&q=peeps&pl=true',
+ 'http://video.google.com/videoplay?docid=5621139047118918034',
+ 'http://video.google.com/videoplay?docid=4232304376070958848',
+ 'http://video.google.com/videoplay?docid=-6612726032157145299',
+ 'http://video.google.com/videoplay?docid=4478549130377875994&hl=en',
+ 'http://video.google.com/videoplay?docid=9169278170240080877',
+ 'http://video.google.com/videoplay?docid=2551240967354893096',
+ 'http://video.yahoo.com/watch/7268801/18963438',
+ 'http://video.yahoo.com/watch/2224892/7014048',
+ 'http://video.yahoo.com/watch/7244748/18886014',
+ 'http://video.yahoo.com/watch/4656845/12448951',
+ 'http://video.yahoo.com/watch/363942/2249254',
+ 'http://video.yahoo.com/watch/2232968/7046348',
+ 'http://video.yahoo.com/watch/4530253/12135472',
+ 'http://video.yahoo.com/watch/2237137/7062908',
+ 'http://video.yahoo.com/watch/952841/3706424',
+ 'http://www.viddler.com/explore/BigAppleChannel/videos/113/',
+ 'http://www.viddler.com/explore/cheezburger/videos/379/',
+ 'http://www.viddler.com/explore/warnerbros/videos/350/',
+ 'http://www.viddler.com/explore/tvcgroup/videos/169/',
+ 'http://www.viddler.com/explore/thebrickshow/videos/12/',
+ 'http://www.liveleak.com/view?i=e0b_1239827917',
+ 'http://www.liveleak.com/view?i=715_1239490211',
+ 'http://www.liveleak.com/view?i=d30_1206233786&p=1',
+ 'http://www.liveleak.com/view?i=d91_1239548947',
+ 'http://www.liveleak.com/view?i=f58_1190741182',
+ 'http://www.liveleak.com/view?i=44e_1179885621&c=1',
+ 'http://www.liveleak.com/view?i=451_1188059885',
+ 'http://www.liveleak.com/view?i=3f5_1267456341&c=1',
+ 'http://www.hulu.com/watch/67313/howcast-how-to-make-braided-easter-bread',
+ 'http://www.hulu.com/watch/133583/access-hollywood-glees-matthew-morrison-on-touring-and-performing-for-president-obama',
+ 'http://www.hulu.com/watch/66319/saturday-night-live-easter-album',
+ 'http://www.hulu.com/watch/80229/explorer-end-of-easter-island',
+ 'http://www.hulu.com/watch/139020/nbc-today-show-lamb-and-ham-create-easter-feast',
+ 'http://www.hulu.com/watch/84272/rex-the-runt-easter-island',
+ 'http://www.hulu.com/watch/132203/everyday-italian-easter-pie',
+ 'http://www.hulu.com/watch/23349/nova-secrets-of-lost-empires-ii-easter-island',
+ 'http://movieclips.com/watch/dirty_harry_1971/do_you_feel_lucky_punk/',
+ 'http://movieclips.com/watch/napoleon_dynamite_2004/chatting_online_with_babes/',
+ 'http://movieclips.com/watch/dumb__dumber_1994/the_toilet_doesnt_flush/',
+ 'http://movieclips.com/watch/jaws_1975/youre_gonna_need_a_bigger_boat/',
+ 'http://movieclips.com/watch/napoleon_dynamite_2004/chatting_online_with_babes/61.495/75.413',
+ 'http://movieclips.com/watch/super_troopers_2001/the_cat_game/12.838/93.018',
+ 'http://movieclips.com/watch/this_is_spinal_tap_1984/these_go_to_eleven/79.703/129.713',
+ 'http://crackle.com/c/Originals/What_s_the_deal_with_Easter_candy_/2303243',
+ 'http://crackle.com/c/How_To/Dryer_Lint_Easter_Bunny_Trailer_Park_Craft/2223902',
+ 'http://crackle.com/c/How_To/Pagan_Origin_of_Easter_Easter_Egg_Rabbit_Playb_/2225124',
+ 'http://crackle.com/c/Funny/Happy_Easter/2225363',
+ 'http://crackle.com/c/Funny/Crazy_and_Hilarious_Easter_Egg_Hunt/2225737',
+ 'http://crackle.com/c/How_To/Learn_About_Greek_Orthodox_Easter/2262294',
+ 'http://crackle.com/c/How_To/How_to_Make_Ukraine_Easter_Eggs/2262274',
+ 'http://crackle.com/c/How_To/Symbolism_Of_Ukrainian_Easter_Eggs/2262267',
+ 'http://crackle.com/c/Funny/Easter_Retard/931976',
+ 'http://www.fancast.com/tv/It-s-the-Easter-Beagle,-Charlie-Brown/74789/1078053475/Peanuts:-Specials:-It-s-the-Easter-Beagle,-Charlie-Brown/videos',
+ 'http://www.fancast.com/movies/Easter-Parade/97802/687440525/Easter-Parade/videos',
+ 'http://www.fancast.com/tv/Saturday-Night-Live/10009/1083396482/Easter-Album/videos',
+ 'http://www.fancast.com/movies/The-Proposal/147176/1140660489/The-Proposal:-Easter-Egg-Hunt/videos',
+ 'http://www.funnyordie.com/videos/f6883f54ae/the-unsettling-ritualistic-origin-of-the-easter-bunny',
+ 'http://www.funnyordie.com/videos/3ccb03863e/easter-tail-keaster-bunny',
+ 'http://www.funnyordie.com/videos/17b1d36ad0/easter-bunny-from-leatherfink',
+ 'http://www.funnyordie.com/videos/0c55aa116d/easter-exposed-from-bryan-erwin',
+ 'http://www.funnyordie.com/videos/040dac4eff/easter-eggs',
+ 'http://vimeo.com/10446922',
+ 'http://vimeo.com/10642542',
+ 'http://www.vimeo.com/10664068',
+ 'http://vimeo.com/819176',
+ 'http://www.vimeo.com/10525353',
+ 'http://vimeo.com/10429123',
+ 'http://www.vimeo.com/10652053',
+ 'http://vimeo.com/10572216',
+ 'http://www.ted.com/talks/jared_diamond_on_why_societies_collapse.html',
+ 'http://www.ted.com/talks/nathan_myhrvold_on_archeology_animal_photography_bbq.html',
+ 'http://www.ted.com/talks/johnny_lee_demos_wii_remote_hacks.html',
+ 'http://www.ted.com/talks/robert_ballard_on_exploring_the_oceans.html',
+ 'http://www.omnisio.com/v/Z3QxbTUdjhG/wall-e-collection-of-videos',
+ 'http://www.omnisio.com/v/3ND6LTvdjhG/php-tutorial-4-login-form-updated',
+ 'http://www.thedailyshow.com/watch/thu-december-14-2000/intro---easter',
+ 'http://www.thedailyshow.com/watch/tue-april-18-2006/headlines---easter-charade',
+ 'http://www.thedailyshow.com/watch/tue-april-18-2006/egg-beaters',
+ 'http://www.thedailyshow.com/watch/tue-april-18-2006/moment-of-zen---scuba-diver-hiding-easter-eggs',
+ 'http://www.thedailyshow.com/watch/tue-april-7-2009/easter---passover-highlights',
+ 'http://www.thedailyshow.com/watch/tue-february-29-2000/headlines---leap-impact',
+ 'http://www.thedailyshow.com/watch/thu-march-1-2007/tomb-with-a-jew',
+ 'http://www.thedailyshow.com/watch/mon-april-24-2000/the-meaning-of-passover',
+ 'http://www.colbertnation.com/the-colbert-report-videos/268800/march-31-2010/easter-under-attack---peeps-display-update',
+ 'http://www.colbertnation.com/the-colbert-report-videos/268797/march-31-2010/intro---03-31-10',
+ 'http://www.colbertnation.com/full-episodes/wed-march-31-2010-craig-mullaney',
+ 'http://www.colbertnation.com/the-colbert-report-videos/60902/march-28-2006/the-word---easter-under-attack---marketing',
+ 'http://www.colbertnation.com/the-colbert-report-videos/83362/march-07-2007/easter-under-attack---bunny',
+ 'http://www.colbertnation.com/the-colbert-report-videos/61404/april-06-2006/easter-under-attack---recalled-eggs?videoId=61404',
+ 'http://www.colbertnation.com/the-colbert-report-videos/223957/april-06-2009/colbert-s-easter-parade',
+ 'http://www.colbertnation.com/the-colbert-report-videos/181772/march-28-2006/intro---3-28-06',
+ 'http://www.traileraddict.com/trailer/despicable-me/easter-greeting',
+ 'http://www.traileraddict.com/trailer/easter-parade/trailer',
+ 'http://www.traileraddict.com/clip/the-proposal/easter-egg-hunt',
+ 'http://www.traileraddict.com/trailer/despicable-me/international-teaser-trailer',
+ 'http://www.traileraddict.com/trailer/despicable-me/today-show-minions',
+ 'http://revver.com/video/263817/happy-easter/',
+ 'http://www.revver.com/video/1574939/easter-bunny-house/',
+ 'http://revver.com/video/771140/easter-08/',
]
def submit_all():
@@ -796,36 +1298,55 @@ def submit_all():
return links
+def test_real(nlinks):
+ from r2.models import Link, desc
+ from r2.lib.utils import fetch_things2
+
+ counter = 0
+ q = Link._query(sort = desc("_date"))
+
+ print "
"
+ for l in fetch_things2(q):
+ if counter > nlinks:
+ break
+ if not l.is_self:
+ h = make_scraper(l.url)
+ mo = h.media_object()
+ print "scraper: %s" % mo
+ if mo:
+ print get_media_embed(mo).content
+ counter +=1
+ print "
"
+
def test():
"""Take some example URLs and print out a nice pretty HTML table
    of their extracted thumbnails and media objects"""
- import sys
- from r2.lib.filters import websafe
-
print "
"
for url in test_urls:
- sys.stderr.write("%s\n" % url)
- print "
+                var legend = $('<div class="legend">' + table.replace('style="', 'style="position:absolute;' + pos +';') + '</div>').appendTo(placeholder);
+ if (options.legend.backgroundOpacity != 0.0) {
+ // put in the transparent background
+ // separately to avoid blended labels and
+ // label boxes
+ var c = options.legend.backgroundColor;
+ if (c == null) {
+ c = options.grid.backgroundColor;
+ if (c && typeof c == "string")
+ c = $.color.parse(c);
+ else
+ c = $.color.extract(legend, 'background-color');
+ c.a = 1;
+ c = c.toString();
+ }
+ var div = legend.children();
+                        $('<div style="position:absolute;width:' + div.width() + 'px;height:' + div.height() + 'px;' + pos +'background-color:' + c + ';"> </div>').prependTo(legend).css('opacity', options.legend.backgroundOpacity);
+ }
+ }
+ }
+
+
+ // interactive features
+
+ var highlights = [],
+ redrawTimeout = null;
+
+ // returns the data item the mouse is over, or null if none is found
+ function findNearbyItem(mouseX, mouseY, seriesFilter) {
+ var maxDistance = options.grid.mouseActiveRadius,
+ smallestDistance = maxDistance * maxDistance + 1,
+ item = null, foundPoint = false, i, j;
+
+ for (i = 0; i < series.length; ++i) {
+ if (!seriesFilter(series[i]))
+ continue;
+
+ var s = series[i],
+ axisx = s.xaxis,
+ axisy = s.yaxis,
+ points = s.datapoints.points,
+ ps = s.datapoints.pointsize,
+ mx = axisx.c2p(mouseX), // precompute some stuff to make the loop faster
+ my = axisy.c2p(mouseY),
+ maxx = maxDistance / axisx.scale,
+ maxy = maxDistance / axisy.scale;
+
+ if (s.lines.show || s.points.show) {
+ for (j = 0; j < points.length; j += ps) {
+ var x = points[j], y = points[j + 1];
+ if (x == null)
+ continue;
+
+ // For points and lines, the cursor must be within a
+ // certain distance to the data point
+ if (x - mx > maxx || x - mx < -maxx ||
+ y - my > maxy || y - my < -maxy)
+ continue;
+
+ // We have to calculate distances in pixels, not in
+ // data units, because the scales of the axes may be different
+ var dx = Math.abs(axisx.p2c(x) - mouseX),
+ dy = Math.abs(axisy.p2c(y) - mouseY),
+ dist = dx * dx + dy * dy; // we save the sqrt
+
+ // use <= to ensure last point takes precedence
+ // (last generally means on top of)
+ if (dist <= smallestDistance) {
+ smallestDistance = dist;
+ item = [i, j / ps];
+ }
+ }
+ }
+
+ if (s.bars.show && !item) { // no other point can be nearby
+ var barLeft = s.bars.align == "left" ? 0 : -s.bars.barWidth/2,
+ barRight = barLeft + s.bars.barWidth;
+
+ for (j = 0; j < points.length; j += ps) {
+ var x = points[j], y = points[j + 1], b = points[j + 2];
+ if (x == null)
+ continue;
+
+ // for a bar graph, the cursor must be inside the bar
+ if (series[i].bars.horizontal ?
+ (mx <= Math.max(b, x) && mx >= Math.min(b, x) &&
+ my >= y + barLeft && my <= y + barRight) :
+ (mx >= x + barLeft && mx <= x + barRight &&
+ my >= Math.min(b, y) && my <= Math.max(b, y)))
+ item = [i, j / ps];
+ }
+ }
+ }
+
+ if (item) {
+ i = item[0];
+ j = item[1];
+ ps = series[i].datapoints.pointsize;
+
+ return { datapoint: series[i].datapoints.points.slice(j * ps, (j + 1) * ps),
+ dataIndex: j,
+ series: series[i],
+ seriesIndex: i };
+ }
+
+ return null;
+ }
+
+ function onMouseMove(e) {
+ if (options.grid.hoverable)
+ triggerClickHoverEvent("plothover", e,
+ function (s) { return s["hoverable"] != false; });
+ }
+
+ function onClick(e) {
+ triggerClickHoverEvent("plotclick", e,
+ function (s) { return s["clickable"] != false; });
+ }
+
+ // trigger click or hover event (they send the same parameters
+ // so we share their code)
+ function triggerClickHoverEvent(eventname, event, seriesFilter) {
+ var offset = eventHolder.offset(),
+ pos = { pageX: event.pageX, pageY: event.pageY },
+ canvasX = event.pageX - offset.left - plotOffset.left,
+ canvasY = event.pageY - offset.top - plotOffset.top;
+
+ if (axes.xaxis.used)
+ pos.x = axes.xaxis.c2p(canvasX);
+ if (axes.yaxis.used)
+ pos.y = axes.yaxis.c2p(canvasY);
+ if (axes.x2axis.used)
+ pos.x2 = axes.x2axis.c2p(canvasX);
+ if (axes.y2axis.used)
+ pos.y2 = axes.y2axis.c2p(canvasY);
+
+ var item = findNearbyItem(canvasX, canvasY, seriesFilter);
+
+ if (item) {
+ // fill in mouse pos for any listeners out there
+ item.pageX = parseInt(item.series.xaxis.p2c(item.datapoint[0]) + offset.left + plotOffset.left);
+ item.pageY = parseInt(item.series.yaxis.p2c(item.datapoint[1]) + offset.top + plotOffset.top);
+ }
+
+ if (options.grid.autoHighlight) {
+ // clear auto-highlights
+ for (var i = 0; i < highlights.length; ++i) {
+ var h = highlights[i];
+ if (h.auto == eventname &&
+ !(item && h.series == item.series && h.point == item.datapoint))
+ unhighlight(h.series, h.point);
+ }
+
+ if (item)
+ highlight(item.series, item.datapoint, eventname);
+ }
+
+ placeholder.trigger(eventname, [ pos, item ]);
+ }
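For reference, a consumer of the "plothover"/"plotclick" events triggered above binds on the placeholder and receives pos and item, as set up in this function. A minimal sketch (the #placeholder and #tooltip elements are assumptions, not part of this patch):

$("#placeholder").bind("plothover", function(event, pos, item) {
    if (item) {
        // item.datapoint is the raw point, item.series the owning series,
        // item.pageX/pageY the page position filled in above
        $("#tooltip").text(item.series.label + ": " +
                           item.datapoint[0] + ", " + item.datapoint[1])
                     .css({top: item.pageY + 5, left: item.pageX + 5})
                     .show();
    } else {
        $("#tooltip").hide();
    }
});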
+
+ function triggerRedrawOverlay() {
+ if (!redrawTimeout)
+ redrawTimeout = setTimeout(drawOverlay, 30);
+ }
+
+ function drawOverlay() {
+ redrawTimeout = null;
+
+ // draw highlights
+ octx.save();
+ octx.clearRect(0, 0, canvasWidth, canvasHeight);
+ octx.translate(plotOffset.left, plotOffset.top);
+
+ var i, hi;
+ for (i = 0; i < highlights.length; ++i) {
+ hi = highlights[i];
+
+ if (hi.series.bars.show)
+ drawBarHighlight(hi.series, hi.point);
+ else
+ drawPointHighlight(hi.series, hi.point);
+ }
+ octx.restore();
+
+ executeHooks(hooks.drawOverlay, [octx]);
+ }
+
+ function highlight(s, point, auto) {
+ if (typeof s == "number")
+ s = series[s];
+
+ if (typeof point == "number")
+ point = s.data[point];
+
+ var i = indexOfHighlight(s, point);
+ if (i == -1) {
+ highlights.push({ series: s, point: point, auto: auto });
+
+ triggerRedrawOverlay();
+ }
+ else if (!auto)
+ highlights[i].auto = false;
+ }
+
+ function unhighlight(s, point) {
+ if (s == null && point == null) {
+ highlights = [];
+ triggerRedrawOverlay();
+ }
+
+ if (typeof s == "number")
+ s = series[s];
+
+ if (typeof point == "number")
+ point = s.data[point];
+
+ var i = indexOfHighlight(s, point);
+ if (i != -1) {
+ highlights.splice(i, 1);
+
+ triggerRedrawOverlay();
+ }
+ }
+
+ function indexOfHighlight(s, p) {
+ for (var i = 0; i < highlights.length; ++i) {
+ var h = highlights[i];
+ if (h.series == s && h.point[0] == p[0]
+ && h.point[1] == p[1])
+ return i;
+ }
+ return -1;
+ }
+
+ function drawPointHighlight(series, point) {
+ var x = point[0], y = point[1],
+ axisx = series.xaxis, axisy = series.yaxis;
+
+ if (x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max)
+ return;
+
+ var pointRadius = series.points.radius + series.points.lineWidth / 2;
+ octx.lineWidth = pointRadius;
+ octx.strokeStyle = $.color.parse(series.color).scale('a', 0.5).toString();
+ var radius = 1.5 * pointRadius;
+ octx.beginPath();
+ octx.arc(axisx.p2c(x), axisy.p2c(y), radius, 0, 2 * Math.PI, false);
+ octx.stroke();
+ }
+
+ function drawBarHighlight(series, point) {
+ octx.lineWidth = series.bars.lineWidth;
+ octx.strokeStyle = $.color.parse(series.color).scale('a', 0.5).toString();
+ var fillStyle = $.color.parse(series.color).scale('a', 0.5).toString();
+ var barLeft = series.bars.align == "left" ? 0 : -series.bars.barWidth/2;
+ drawBar(point[0], point[1], point[2] || 0, barLeft, barLeft + series.bars.barWidth,
+ 0, function () { return fillStyle; }, series.xaxis, series.yaxis, octx, series.bars.horizontal);
+ }
+
+ function getColorOrGradient(spec, bottom, top, defaultColor) {
+ if (typeof spec == "string")
+ return spec;
+ else {
+ // assume this is a gradient spec; IE currently only
+ // supports a simple vertical gradient properly, so that's
+ // what we support too
+ var gradient = ctx.createLinearGradient(0, top, 0, bottom);
+
+ for (var i = 0, l = spec.colors.length; i < l; ++i) {
+ var c = spec.colors[i];
+ if (typeof c != "string") {
+                    // derive the color in a temporary so the spec entry's
+                    // opacity is still readable after the brightness scale
+                    var co = $.color.parse(defaultColor).scale('rgb', c.brightness);
+                    co.a *= c.opacity;
+                    c = co.toString();
+ }
+ gradient.addColorStop(i / (l - 1), c);
+ }
+
+ return gradient;
+ }
+ }
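A sketch of the two spec shapes this helper accepts, via the grid.backgroundColor option referenced earlier: a plain CSS string, or a gradient object whose color stops may themselves be strings or brightness/opacity adjustments of the default color (the option values here are illustrative):

$.plot($("#placeholder"), data, {
    grid: {
        // vertical gradient from white down to a slightly darkened,
        // slightly transparent version of the default color
        backgroundColor: { colors: ["#ffffff", { brightness: 0.8, opacity: 0.9 }] }
    }
});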
+ }
+
+ $.plot = function(placeholder, data, options) {
+ var plot = new Plot($(placeholder), data, options, $.plot.plugins);
+ /*var t0 = new Date();
+ var t1 = new Date();
+ var tstr = "time used (msecs): " + (t1.getTime() - t0.getTime())
+ if (window.console)
+ console.log(tstr);
+ else
+ alert(tstr);*/
+ return plot;
+ };
+
+ $.plot.plugins = [];
+
+ // returns a string with the date d formatted according to fmt
+ $.plot.formatDate = function(d, fmt, monthNames) {
+ var leftPad = function(n) {
+ n = "" + n;
+ return n.length == 1 ? "0" + n : n;
+ };
+
+ var r = [];
+ var escape = false;
+ var hours = d.getUTCHours();
+ var isAM = hours < 12;
+ if (monthNames == null)
+ monthNames = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
+
+ if (fmt.search(/%p|%P/) != -1) {
+ if (hours > 12) {
+ hours = hours - 12;
+ } else if (hours == 0) {
+ hours = 12;
+ }
+ }
+ for (var i = 0; i < fmt.length; ++i) {
+ var c = fmt.charAt(i);
+
+ if (escape) {
+ switch (c) {
+ case 'h': c = "" + hours; break;
+ case 'H': c = leftPad(hours); break;
+ case 'M': c = leftPad(d.getUTCMinutes()); break;
+ case 'S': c = leftPad(d.getUTCSeconds()); break;
+ case 'd': c = "" + d.getUTCDate(); break;
+ case 'm': c = "" + (d.getUTCMonth() + 1); break;
+ case 'y': c = "" + d.getUTCFullYear(); break;
+ case 'b': c = "" + monthNames[d.getUTCMonth()]; break;
+ case 'p': c = (isAM) ? ("" + "am") : ("" + "pm"); break;
+ case 'P': c = (isAM) ? ("" + "AM") : ("" + "PM"); break;
+ }
+ r.push(c);
+ escape = false;
+ }
+ else {
+ if (c == "%")
+ escape = true;
+ else
+ r.push(c);
+ }
+ }
+ return r.join("");
+ };
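Worked examples against the format codes above (all fields are UTC; note that %m and %d are unpadded while %H, %M, and %S are zero-padded, and %P triggers 12-hour conversion):

var d = new Date(Date.UTC(2010, 0, 5, 14, 30, 7));
$.plot.formatDate(d, "%y-%m-%d %H:%M:%S");  // "2010-1-5 14:30:07"
$.plot.formatDate(d, "%b %d, %h:%M %P");    // "Jan 5, 2:30 PM"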
+
+ // round to nearby lower multiple of base
+ function floorInBase(n, base) {
+ return base * Math.floor(n / base);
+ }
+
+})(jQuery);
diff --git a/r2/r2/public/static/js/jquery.lazyload.js b/r2/r2/public/static/js/jquery.lazyload.js
new file mode 100644
index 000000000..276e9882b
--- /dev/null
+++ b/r2/r2/public/static/js/jquery.lazyload.js
@@ -0,0 +1,164 @@
+/*
+ * Lazy Load - jQuery plugin for lazy loading images
+ *
+ * Copyright (c) 2007-2009 Mika Tuupola
+ *
+ * Licensed under the MIT license:
+ * http://www.opensource.org/licenses/mit-license.php
+ *
+ * Project home:
+ * http://www.appelsiini.net/projects/lazyload
+ *
+ * Version: 1.5.0
+ *
+ */
+(function($) {
+
+ $.fn.lazyload = function(options) {
+ var settings = {
+ threshold : 0,
+ failurelimit : 0,
+ event : "scroll",
+ effect : "show",
+ container : window
+ };
+
+ if(options) {
+ $.extend(settings, options);
+ }
+
+ /* Fire one scroll event per scroll. Not one scroll event per image. */
+ var elements = this;
+ if ("scroll" == settings.event) {
+ $(settings.container).bind("scroll", function(event) {
+
+ var counter = 0;
+ elements.each(function() {
+ if ($.abovethetop(this, settings) ||
+ $.leftofbegin(this, settings)) {
+ /* Nothing. */
+ } else if (!$.belowthefold(this, settings) &&
+ !$.rightoffold(this, settings)) {
+ $(this).trigger("appear");
+ } else {
+ if (counter++ > settings.failurelimit) {
+ return false;
+ }
+ }
+ });
+ /* Remove image from array so it is not looped next time. */
+ var temp = $.grep(elements, function(element) {
+ return !element.loaded;
+ });
+ elements = $(temp);
+ });
+ }
+
+ this.each(function() {
+ var self = this;
+
+ /* Save original only if it is not defined in HTML. */
+ if (undefined == $(self).attr("original")) {
+ $(self).attr("original", $(self).attr("src"));
+ }
+
+ if ("scroll" != settings.event ||
+ undefined == $(self).attr("src") ||
+ settings.placeholder == $(self).attr("src") ||
+ ($.abovethetop(self, settings) ||
+ $.leftofbegin(self, settings) ||
+ $.belowthefold(self, settings) ||
+ $.rightoffold(self, settings) )) {
+
+ if (settings.placeholder) {
+ $(self).attr("src", settings.placeholder);
+ } else {
+ $(self).removeAttr("src");
+ }
+ self.loaded = false;
+ } else {
+ self.loaded = true;
+ }
+
+ /* When appear is triggered load original image. */
+ $(self).one("appear", function() {
+ if (!this.loaded) {
+                $("<img />")
+ .bind("load", function() {
+ $(self)
+ .hide()
+ .attr("src", $(self).attr("original"))
+ [settings.effect](settings.effectspeed);
+ self.loaded = true;
+ })
+ .attr("src", $(self).attr("original"));
+ };
+ });
+
+ /* When wanted event is triggered load original image */
+ /* by triggering appear. */
+ if ("scroll" != settings.event) {
+ $(self).bind(settings.event, function(event) {
+ if (!self.loaded) {
+ $(self).trigger("appear");
+ }
+ });
+ }
+ });
+
+ /* Force initial check if images should appear. */
+ $(settings.container).trigger(settings.event);
+
+ return this;
+
+ };
+
+ /* Convenience methods in jQuery namespace. */
+ /* Use as $.belowthefold(element, {threshold : 100, container : window}) */
+
+ $.belowthefold = function(element, settings) {
+ if (settings.container === undefined || settings.container === window) {
+ var fold = $(window).height() + $(window).scrollTop();
+ } else {
+ var fold = $(settings.container).offset().top + $(settings.container).height();
+ }
+ return fold <= $(element).offset().top - settings.threshold;
+ };
+
+ $.rightoffold = function(element, settings) {
+ if (settings.container === undefined || settings.container === window) {
+ var fold = $(window).width() + $(window).scrollLeft();
+ } else {
+ var fold = $(settings.container).offset().left + $(settings.container).width();
+ }
+ return fold <= $(element).offset().left - settings.threshold;
+ };
+
+ $.abovethetop = function(element, settings) {
+ if (settings.container === undefined || settings.container === window) {
+ var fold = $(window).scrollTop();
+ } else {
+ var fold = $(settings.container).offset().top;
+ }
+ return fold >= $(element).offset().top + settings.threshold + $(element).height();
+ };
+
+ $.leftofbegin = function(element, settings) {
+ if (settings.container === undefined || settings.container === window) {
+ var fold = $(window).scrollLeft();
+ } else {
+ var fold = $(settings.container).offset().left;
+ }
+ return fold >= $(element).offset().left + settings.threshold + $(element).width();
+ };
+ /* Custom selectors for your convenience. */
+ /* Use as $("img:below-the-fold").something() */
+
+ $.extend($.expr[':'], {
+ "below-the-fold" : "$.belowthefold(a, {threshold : 0, container: window})",
+ "above-the-fold" : "!$.belowthefold(a, {threshold : 0, container: window})",
+ "right-of-fold" : "$.rightoffold(a, {threshold : 0, container: window})",
+ "left-of-fold" : "!$.rightoffold(a, {threshold : 0, container: window})"
+ });
+
+})(jQuery);
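A minimal usage sketch of the plugin (reddit.js wires it up the same way further down in this patch; the img.lazy selector here is an assumption):

$("img.lazy").lazyload({
    threshold: 200,                     // start loading 200px before an image scrolls into view
    placeholder: "/static/nothing.png"  // temporary src until the real image loads
});

// the custom selectors defined above work independently of the plugin call:
$("img:below-the-fold").addClass("pending");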
diff --git a/r2/r2/public/static/js/jquery.reddit.js b/r2/r2/public/static/js/jquery.reddit.js
index 798e2ceb6..e287d3a45 100644
--- a/r2/r2/public/static/js/jquery.reddit.js
+++ b/r2/r2/public/static/js/jquery.reddit.js
@@ -310,9 +310,13 @@ $.fn.all_things_by_id = function() {
return this.thing().add( $.things(this.thing_id()) );
};
-$.fn.thing_id = function() {
+$.fn.thing_id = function(class_filter) {
+ class_filter = $.with_default(class_filter, "thing");
/* Returns the (reddit) ID of the current element's thing */
var t = (this.hasClass("thing")) ? this : this.thing();
+ if(class_filter != "thing") {
+ t = t.find("." + class_filter + ":first");
+ }
if(t.length) {
var id = $.grep(t.get(0).className.split(' '),
function(i) { return i.match(/^id-/); });
@@ -332,6 +336,15 @@ $.things = function() {
return $(sel);
};
+$.fn.same_author = function() {
+ var aid = $(this).thing_id("author");
+ var ids = [];
+ $(".author.id-" + aid).each(function() {
+ ids.push(".thing.id-" + $(this).thing_id());
+ });
+ return $(ids.join(", "));
+};
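A sketch of how same_author() chains from any element inside a thing; thing_id("author") climbs to the enclosing .thing and reads the id- class off its first .author child, so all of that author's rendered things can be collected at once (the highlight class here is hypothetical):

// from one comment, select every rendered thing by the same author
$(".thing .author:first").same_author().addClass("author-highlight");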
+
$.fn.things = function() {
/*
* try to find all things that occur below a given selector, like:
@@ -387,7 +400,7 @@ $.listing = function(name) {
var thing_init_func = function() { };
$.fn.set_thing_init = function(func) {
thing_init_func = func;
- $(this).find(".thing").each(function() { func(this) });
+ $(this).find(".thing:not(.stub)").each(function() { func(this) });
};
@@ -508,12 +521,19 @@ $.insert_things = function(things, append) {
});
};
-$.fn.delete_table_row = function() {
+$.fn.delete_table_row = function(callback) {
var tr = this.parents("tr:first").get(0);
var table = this.parents("table").get(0);
- $(tr).fadeOut(function() {
- table.deleteRow(tr.rowIndex);
- });
+ if(tr) {
+ $(tr).fadeOut(function() {
+ table.deleteRow(tr.rowIndex);
+ if(callback) {
+ callback();
+ }
+ });
+ } else if (callback) {
+ callback();
+ }
};
$.fn.insert_table_rows = function(rows, index) {
diff --git a/r2/r2/public/static/js/reddit.js b/r2/r2/public/static/js/reddit.js
index 48ff1c1d1..993e98a02 100644
--- a/r2/r2/public/static/js/reddit.js
+++ b/r2/r2/public/static/js/reddit.js
@@ -1206,6 +1206,30 @@ function fetch_parent(elem, parent_permalink, parent_id) {
return false;
}
+function big_mod_action(elem, dir) {
+ if ( ! elem.hasClass("pressed")) {
+ elem.addClass("pressed");
+
+ var thing_id = elem.thing_id();
+
+ d = {
+ id: thing_id
+ };
+
+ if (dir == -1) {
+ $.request("remove", d, null, true);
+ elem.siblings(".removed").show();
+ elem.siblings(".approved").hide();
+ } else if (dir == 1) {
+ $.request("approve", d, null, true);
+ elem.siblings(".removed").hide();
+ elem.siblings(".approved").show();
+ }
+ }
+ elem.siblings(".pretty-button").removeClass("pressed");
+ return false;
+}
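A hedged sketch of how remove/approve buttons could invoke this handler; the actual bindings live in the templates, which are not part of this hunk, so the button class names below are assumptions:

$(".pretty-button.remove-button").click(function() {
    return big_mod_action($(this), -1);  // fires the "remove" API request
});
$(".pretty-button.approve-button").click(function() {
    return big_mod_action($(this), 1);   // fires the "approve" API request
});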
+
function juryvote(elem, dir) {
var thing_id = elem.thing_id();
@@ -1231,7 +1255,10 @@ $(function() {
* and call it on all things currently rendered in the
* page. */
$("body").set_thing_init(updateEventHandlers);
-
+ $(".thumbnail img").lazyload({
+ threshold: 200,
+ placeholder: "/static/nothing.png"
+ });
/* Set up gray inputs and textareas to clear on focus */
$("textarea.gray, input.gray")
.focus( function() {
diff --git a/r2/r2/public/static/js/sponsored.js b/r2/r2/public/static/js/sponsored.js
index 8e9be4c00..c607067f0 100644
--- a/r2/r2/public/static/js/sponsored.js
+++ b/r2/r2/public/static/js/sponsored.js
@@ -3,13 +3,13 @@ function update_box(elem) {
};
function update_bid(elem) {
- var form = $(elem).parents(".pretty-form:first");
+ var form = $(elem).parents(".campaign");
var bid = parseFloat(form.find("*[name=bid]").val());
var ndays = ((Date.parse(form.find("*[name=enddate]").val()) -
Date.parse(form.find("*[name=startdate]").val())) / (86400*1000));
- $("#bid-field span.gray").html("[Current campaign totals " +
- "$" + (bid/ndays).toFixed(2) +
- " per day for " + ndays + " day(s)]");
+    $(".bid-info").html(" &rarr; " +
+ "$" + (bid/ndays).toFixed(2) +
+ " per day for " + ndays + " day(s)");
$("#duration span.gray")
.html( ndays == 1 ? "(1 day)" : "(" + ndays + " days)");
}
@@ -62,3 +62,266 @@ function attach_calendar(where, min_date_src, max_date_src, callback) {
$(this).siblings(".datepicker.inuse").addClass("active");
});
}
+
+function targeting_on(elem) {
+ $(elem).parents(".campaign").find(".targeting")
+ .find("*[name=sr]").attr("disabled", "").end().show();
+}
+
+function targeting_off(elem) {
+ $(elem).parents(".campaign").find(".targeting")
+ .find("*[name=sr]").attr("disabled", "disabled").end().hide();
+}
+
+(function($) {
+
+function get_flag_class(flags) {
+ var css_class = "";
+ if(flags.free) {
+ css_class += " free";
+ }
+ if(flags.complete) {
+ css_class += " complete";
+ }
+ else {
+ if(flags.sponsor) {
+ css_class += " sponsor";
+ }
+ if(flags.paid) {
+ css_class += " paid";
+ }
+ }
+    return css_class;
+}
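Worked examples of the precedence above ("complete" suppresses the sponsor/paid classes, while "free" is always appended):

get_flag_class({free: true, complete: true, sponsor: true});  // " free complete"
get_flag_class({sponsor: true, paid: true});                  // " sponsor paid"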
+
+$.new_campaign = function(indx, start_date, end_date, duration,
+ bid, targeting, flags) {
+ cancel_edit(function() {
+        var data =('<input type="hidden" name="indx" value="' + indx + '"/>' +
+                    '<input type="hidden" name="startdate" value="' + start_date + '"/>' +
+                    '<input type="hidden" name="enddate" value="' + end_date + '"/>' +
+                    '<input type="hidden" name="bid" value="' + bid + '"/>' +
+                    '<input type="hidden" name="targeting" value="' + targeting + '"/>');
+ if (flags && flags.pay_url) {
+            data += ('<input type="hidden" name="pay_url" value="' + flags.pay_url + '"/>');
+ }
+ var row = [start_date, end_date, duration, "$" + bid, targeting, data];
+ $(".existing-campaigns .error").hide();
+ var css_class = get_flag_class(flags);
+ $(".existing-campaigns table").show()
+ .insert_table_rows([{"id": "", "css_class": css_class,
+ "cells": row}], -1);
+ $.set_up_campaigns()
+ });
+ return $;
+};
+
+$.update_campaign = function(indx, start_date, end_date,
+ duration, bid, targeting, flags) {
+ cancel_edit(function() {
+ $(".existing-campaigns input[name=indx]")
+ .filter("*[value=" + (indx || '0') + "]")
+ .parents("tr").removeClass()
+ .addClass(get_flag_class(flags))
+ .children(":first").html(start_date)
+ .next().html(end_date)
+ .next().html(duration)
+ .next().html("$" + bid).removeClass()
+ .next().html(targeting)
+ .next()
+ .find("*[name=startdate]").val(start_date).end()
+ .find("*[name=enddate]").val(end_date).end()
+ .find("*[name=targeting]").val(targeting).end()
+ .find("*[name=bid]").val(bid).end()
+ .find("button, span").remove();
+ $.set_up_campaigns();
+ });
+};
+
+$.set_up_campaigns = function() {
+    var edit = "<button>edit</button>";
+    var del = "<button>delete</button>";
+    var pay = "<button>pay</button>";
+    var free = "<button>free</button>";
+    var repay = "<button>repay</button>";
+ $(".existing-campaigns tr").each(function() {
+ var tr = $(this);
+ var td = $(this).find("td:last");
+ var bid_td = $(this).find("td:first").next().next().next()
+ .addClass("bid");
+ if(td.length && ! td.children("button, span").length ) {
+ /* once paid, we shouldn't muck around with the campaign */
+ if(!tr.hasClass("complete")) {
+ if (tr.hasClass("sponsor") && !tr.hasClass("free")) {
+ $(bid_td).append($(free).addClass("free")
+ .click(function() { free_campaign(tr) }))
+ }
+ else if (!tr.hasClass("paid")) {
+ $(bid_td).prepend($(pay).addClass("pay")
+ .click(function() { pay_campaign(tr) }));
+ } else if (tr.hasClass("free")) {
+ $(bid_td).addClass("free paid")
+ .prepend("freebie");
+ } else {
+                    $(bid_td).addClass("paid")
+ .prepend($(repay).addClass("pay")
+ .click(function() { pay_campaign(tr) }));
+ }
+ var e = $(edit).addClass("edit")
+ .click(function() { edit_campaign(tr); });
+ var d = $(del).addClass("d")
+ .click(function() { del_campaign(tr); });
+ $(td).append(e).append(d);
+ }
+ else {
+ $(td).append("complete/live");
+ $(bid_td).addClass("paid")
+ }
+ }
+ });
+ return $;
+
+}
+
+}(jQuery));
+
+function detach_campaign_form() {
+ /* remove datepicker from fields */
+ $("#campaign").find(".datepicker").each(function() {
+ $(this).datepicker("destroy").siblings().unbind();
+ });
+
+ /* clone and remove original */
+ var orig = $("#campaign");
+ var campaign = orig.clone(true);
+ orig.remove();
+ return campaign;
+}
+
+function cancel_edit(callback) {
+ if($("#campaign").parents('tr:first').length) {
+ var tr = $("#campaign").parents("tr:first").prev();
+ /* copy the campaign element */
+ /* delete the original */
+ $("#campaign").fadeOut(function() {
+ $(this).parent('tr').prev().fadeIn();
+ var td = $(this).parent();
+ var campaign = detach_campaign_form();
+ td.delete_table_row(function() {
+ tr.fadeIn(function() {
+ $(".existing-campaigns").before(campaign);
+ campaign.hide();
+ if(callback) { callback(); }
+ });
+ });
+ });
+ } else {
+ if ($("#campaign:visible").length) {
+ $("#campaign").fadeOut(function() {
+ if(callback) {
+ callback();
+ }});
+ }
+ else if (callback) {
+ callback();
+ }
+ }
+}
+
+function del_campaign(elem) {
+ var indx = $(elem).find("*[name=indx]").val();
+ var link_id = $("#campaign").find("*[name=link_id]").val();
+ $.request("delete_campaign", {"indx": indx, "link_id": link_id},
+ null, true, "json", false);
+ $(elem).children(":first").delete_table_row();
+}
+
+
+function edit_campaign(elem) {
+ /* find the table row in question */
+ var tr = $(elem).get(0);
+
+ if ($("#campaign").parents('tr:first').get(0) != tr) {
+
+ cancel_edit(function() {
+
+ /* copy the campaign element */
+ var campaign = detach_campaign_form();
+
+ $(".existing-campaigns table")
+ .insert_table_rows([{"id": "edit-campaign-tr",
+ "css_class": "", "cells": [""]}],
+ tr.rowIndex + 1);
+ $("#edit-campaign-tr").children('td:first')
+ .attr("colspan", 6).append(campaign).end()
+ .prev().fadeOut(function() {
+ var data_tr = $(this);
+ var c = $("#campaign");
+ $.map(['startdate', 'enddate', 'bid', 'indx'],
+ function(i) {
+ i = "*[name=" + i + "]";
+ c.find(i).val(data_tr.find(i).val());
+ });
+ /* check if targeting is turned on */
+ var targeting = data_tr
+ .find("*[name=targeting]").val();
+ var radios=c.find("*[name=targeting]");
+ if (targeting) {
+ radios.filter("*[value=one]")
+ .attr("checked", "checked");
+ c.find("*[name=sr]").val(targeting).attr("disabled", "").end()
+ .find(".targeting").show();
+ }
+ else {
+ radios.filter("*[value=none]")
+ .attr("checked", "checked");
+ c.find("*[name=sr]").val("").attr("disabled", "disabled").end()
+ .find(".targeting").hide();
+ }
+ /* attach the dates to the date widgets */
+ init_startdate();
+ init_enddate();
+ c.find("button[name=edit]").show().end()
+ .find("button[name=create]").hide().end();
+ update_bid("*[name=bid]");
+ c.fadeIn();
+ } );
+ }
+ );
+ }
+}
+
+function create_campaign(elem) {
+    cancel_edit(function() {
+ init_startdate();
+ init_enddate();
+ $("#campaign")
+ .find("button[name=edit]").hide().end()
+ .find("button[name=create]").show().end()
+ .find("input[name=indx]").val('').end()
+ .find("input[name=sr]").val('').end()
+ .find("input[name=targeting][value=none]")
+ .attr("checked", "checked").end()
+ .find(".targeting").hide().end()
+ .find("*[name=sr]").val("").attr("disabled", "disabled").end()
+ .fadeIn();
+ update_bid("*[name=bid]");
+ });
+}
+
+function free_campaign(elem) {
+ var indx = $(elem).find("*[name=indx]").val();
+ var link_id = $("#campaign").find("*[name=link_id]").val();
+ $.request("freebie", {"indx": indx, "link_id": link_id},
+ null, true, "json", false);
+ $(elem).find(".free").fadeOut();
+ return false;
+}
+
+function pay_campaign(elem) {
+ $.redirect($(elem).find("input[name=pay_url]").val());
+}
\ No newline at end of file
diff --git a/r2/r2/public/static/noimage.png b/r2/r2/public/static/noimage.png
index c1556c738..9fc0124eb 100644
Binary files a/r2/r2/public/static/noimage.png and b/r2/r2/public/static/noimage.png differ
diff --git a/r2/r2/public/static/nothing.png b/r2/r2/public/static/nothing.png
new file mode 100644
index 000000000..8834edbc6
Binary files /dev/null and b/r2/r2/public/static/nothing.png differ
diff --git a/r2/r2/public/static/self_default.png b/r2/r2/public/static/self_default.png
new file mode 100644
index 000000000..2e9523a12
Binary files /dev/null and b/r2/r2/public/static/self_default.png differ
diff --git a/r2/r2/public/static/vid-collapsed.png b/r2/r2/public/static/vid-collapsed.png
index c8ab5f3f1..fd9e5aeaf 100644
Binary files a/r2/r2/public/static/vid-collapsed.png and b/r2/r2/public/static/vid-collapsed.png differ
diff --git a/r2/r2/public/static/vid-expanded.png b/r2/r2/public/static/vid-expanded.png
index f517ee974..55847e603 100644
Binary files a/r2/r2/public/static/vid-expanded.png and b/r2/r2/public/static/vid-expanded.png differ
diff --git a/r2/r2/templates/buttondemopanel.html b/r2/r2/templates/buttondemopanel.html
index 57cc7a21f..369415967 100644
--- a/r2/r2/templates/buttondemopanel.html
+++ b/r2/r2/templates/buttondemopanel.html
@@ -23,7 +23,7 @@
from r2.lib.template_helpers import get_domain
%>
-<% domain = get_domain(True) %>
+<% domain = get_domain(False) %>
${_("put %(site)s buttons on your site") % dict(site=c.site.name)}
@@ -90,6 +90,10 @@
${_("useful in places like blogs, where you want to link to the post's permalink")}
${drawoption('url','[URL]')}
+
- ${thing.a.promote_until.strftime(thing.datefmt)}
- %if thing.a.promote_until < datetime.now(g.tz):
- ${_('(this link has expired and is no longer being promoted)')}
- %endif
-
@@ -365,8 +421,9 @@
### originally in commentbutton
<%def name="comment_button(name, link_text, link,\
- a_class='', title='', newwindow = False)">
+ _sr_path = True, a_class='', title='', newwindow = False)">
${plain_link(link_text, link,
+ _sr_path = _sr_path,
_class=a_class, title=title,
target='_blank' if newwindow else '_parent')}
%def>
diff --git a/r2/r2/templates/promo_email.email b/r2/r2/templates/promo_email.email
index 1195afe7e..e0fe9b807 100644
--- a/r2/r2/templates/promo_email.email
+++ b/r2/r2/templates/promo_email.email
@@ -30,9 +30,9 @@
%>
%if thing.kind == Email.Kind.NEW_PROMO:
-This email is to confirm reddit.com's receipt of your submitted self-serve ad. To set up payment for your ad, please go here:
+This email is to confirm reddit.com's receipt of your submitted self-serve ad. At the moment you have no campaigns associated with this link. To set up a campaign and payment if you haven't already, please visit the link's edit page:
-${g.payment_domain}promoted/pay/${thing.link._id36}
+ ${edit_url}
Please note that we can't approve your ad until you have set up payment, and that your ad must be approved before it goes live on your selected dates.
@@ -43,13 +43,13 @@ If your ad is rejected your credit card will not be charged. Don't take it pers
http://www.reddit.com/help/selfservicepromotion
%elif thing.kind == Email.Kind.BID_PROMO:
-This email is to confirm that your bid of $${"%.2f" % thing.link.promote_bid} for a self-serve ad on reddit.com has been accepted. The credit card number you provided will be charged 24 hours prior to the date your self-serve ad is set to run.
+This email is to confirm that your bid of $${"%.2f" % thing.bid} for a self-serve ad on reddit.com has been accepted. The credit card number you provided will be charged 24 hours prior to the date your self-serve ad is set to run.
-Having second thoughts about your bid? Want to be sure you're outbidding the competition? You'll have until ${(thing.link._date - timedelta(1)).strftime("%Y-%m-%d")} to change your bid here:
+Having second thoughts about your bid? Want to be sure you're outbidding the competition? You'll have until ${(thing.start_date - timedelta(1)).strftime("%Y-%m-%d")} to change your bid here:
${edit_url}
%elif thing.kind == Email.Kind.ACCEPT_PROMO:
-This email is to confirm that your self-serve reddit.com ad has been approved by reddit! The credit card you provided will not be charged until 24 hours prior to the date you have set your ad to run. If you make any changes to your ad, they will have to be re-approved. Keep in mind that after we have charged your credit card you will not be able to change your ad.
+This email is to confirm that your self-serve reddit.com ad has been approved by reddit! The credit card you provided will not be charged until 24 hours prior to the date you have set your ad to run. If you make any changes to your ad, they will have to be re-approved.
It won't be long now until your ad is being displayed to hundreds of thousands of the Internet's finest surfers.
@@ -67,21 +67,21 @@ and we'll reconsider it for submission.
%elif thing.kind == Email.Kind.QUEUED_PROMO:
This email is to inform you that your self-serve ad on reddit.com is about to go live. Feel free to reply to this email if you have any questions.
-%if thing.link.promote_trans_id > 0:
+%if thing.trans_id > 0:
Your credit card has been successfully charged by reddit for the amount you bid. Please use this email as your receipt.
================================================================================
-TRANSACTION #${thing.link.promote_trans_id}
+TRANSACTION #${thing.trans_id}
DATE: ${datetime.now(g.tz).strftime("%Y-%m-%d")}
................................................................................
-AMOUNT CHARGED: $${"%.2f" % thing.link.promote_bid}
+AMOUNT CHARGED: $${"%.2f" % thing.bid}
SPONSORSHIP PERMALINK: ${thing.link.make_permalink_slow(force_domain = True)}
================================================================================
%else:
-Your promotion was a freebie in the amount of $${"%.2f" % thing.link.promote_bid}.
+Your promotion was a freebie in the amount of $${"%.2f" % thing.bid}.
%endif
%elif thing.kind == Email.Kind.LIVE_PROMO:
This email is to inform you that your self-serve ad on reddit.com is now live and can be found at the following link:
diff --git a/r2/r2/templates/promote_graph.html b/r2/r2/templates/promote_graph.html
index 09b3c9c54..86bb1eaab 100644
--- a/r2/r2/templates/promote_graph.html
+++ b/r2/r2/templates/promote_graph.html
@@ -20,7 +20,9 @@
## CondeNet, Inc. All Rights Reserved.
################################################################################
<%!
+ from r2.lib.template_helpers import static
import datetime, locale
+ from r2.lib import promote
def num(x):
return locale.format('%d', x, True)
def money(x):
@@ -28,6 +30,8 @@
%>
Sponsored link calendar
+
+
%if not c.user_is_sponsor:
Below is a calendar of your scheduled and completed promotions (if you have any, of course), along with some site-wide averages to use as a guide for setting up future promotions. These values are:
@@ -77,7 +81,7 @@
%else:
YOUR COMMIT
%endif
-
+
%for i in xrange(thing.total_size):
<%
@@ -125,10 +129,15 @@
<%
prev_end = 0
%>
-%for link, start, end in thing.promote_blocks:
+%for link, bid, start, end, indx in thing.promote_blocks:
<%
start += 1
end += 1
+ sr = ''
+ if indx in getattr(link, "campaigns", {}):
+ sr = link.campaigns[indx][promote.CAMPAIGN.sr]
+ if sr:
+ sr += ':'
%>
%if start != end:
%if prev_end > start:
@@ -142,10 +151,9 @@
%>
This dashboard allows you to easily place ads on reddit.
+ You can target a specific community or just run on the main
+ page.
+
+
To create a new campaign, simply set the start and end
+ date as well as the bid amount.
+
Targeting will let you specify whether the ad should run
+ site-wide or be targeted to a specific community. If you
+ choose to enable targeting, only users who are subscribed to
+ that reddit will see your ad. Your ad will also run on the
+ top of that community's front page.
+
You can rerun a sponsored link at any time, preserving
+ comment history and score. If you are running a large campaign
+ you only need to enter your ad's information once, and then
+ use the dashboard to copy your ad for each additional targeted
+      community.
+
+
+ More questions? Have a look at our FAQ or our walkthrough.
+
- %utils:line_field>
+
+
+
- <%utils:line_field title="${_('duration')}" id="duration-field">
- %if (thing.link and thing.link.promote_status >= STATUS.pending) and not c.user_is_sponsor:
-
- %else:
- %if not c.user_is_sponsor:
-
- %endif
+
+
+ <%
+ start_title = "Date when your sponsored link will start running. We start new campaigns at midnight UTC+5"
+ end_title = "Date when your sponsored link will end (at midnight UTC+5)"
+      targeting_title = "Name of the community that you are targeting. A blank entry here means that the ad is untargeted and will run site-wide"
+      newcamp_title = "Click to create a new campaign. To edit an existing campaign in the table below, click the 'edit' button."
+ %>
+
+%utils:line_field>
+
+%if thing.link:
+ <%utils:line_field title="${_('look and feel')}"
+                      description="images will be resized if larger than 70 x 70 pixels">
+
+ For correspondence, the email address of this author is
+ ${thing.author.email}.
+
To check with
authorize.net, use CustomerID
- t${Account._type_id}_${to36(thing.link.author_id)} when searching by batch.
+ ${thing.author._fullname} when searching by batch.
@@ -425,4 +496,4 @@
%endif
-
+
diff --git a/r2/r2/templates/reddit.html b/r2/r2/templates/reddit.html
index 7d8d38ce0..a134af488 100644
--- a/r2/r2/templates/reddit.html
+++ b/r2/r2/templates/reddit.html
@@ -22,6 +22,7 @@
<%!
from r2.lib.template_helpers import add_sr, static, join_urls, class_dict, get_domain
+ from r2.lib.filters import unsafe
from r2.lib.pages import SearchForm, ClickGadget, SideContentBox
from r2.lib import tracking
from pylons import request
@@ -62,10 +63,20 @@
%endif
%if c.allow_styles and c.site.stylesheet_contents:
-
+ <% inline_stylesheet = (
+ len(c.site.stylesheet_contents) < 1024
+ and '<' not in c.site.stylesheet_contents) %>
+ %if inline_stylesheet:
+ ## for very simple stylesheets, we can just include them inline
+
+ %else:
+
+ %endif
%endif
%if getattr(thing, "additional_css", None):
+
%def>
diff --git a/r2/r2/templates/redditfooter.html b/r2/r2/templates/redditfooter.html
index 64fa09378..79838da14 100644
--- a/r2/r2/templates/redditfooter.html
+++ b/r2/r2/templates/redditfooter.html
@@ -45,5 +45,7 @@
${_("(c) %(year)d Conde Nast Digital. All rights reserved.") % \
dict(year=datetime.datetime.now().timetuple()[0])}
+
REDDIT and the ALIEN Logo are registered trademarks of Advance Magazine
+Publishers Inc.
diff --git a/r2/r2/templates/redditheader.html b/r2/r2/templates/redditheader.html
index 51ab5cef9..bdeb33ecf 100644
--- a/r2/r2/templates/redditheader.html
+++ b/r2/r2/templates/redditheader.html
@@ -123,12 +123,10 @@
-
-
-
-
-
+
+
+
+
## Be careful when adding things here: If all users try to get the new images
## at once, they'll crush the static servers!
-
diff --git a/r2/r2/templates/reddittraffic.html b/r2/r2/templates/reddittraffic.html
index 158f04ac7..561571b5e 100644
--- a/r2/r2/templates/reddittraffic.html
+++ b/r2/r2/templates/reddittraffic.html
@@ -20,12 +20,15 @@
## CondeNet, Inc. All Rights Reserved.
################################################################################
<%!
+ from r2.lib.template_helpers import static
import locale
from r2.models.subreddit import DomainSR, FakeSubreddit
def num(x):
return locale.format('%d', x, True)
%>
+
+
<%def name="daily_summary()">
<%
thing.day_data = filter(None, thing.day_data)
@@ -105,48 +108,77 @@
Traffic for ${c.site.name}
+
+      Below are the traffic stats for your reddit. Each graph represents one of the following over the specified interval:
+
+
+
+ pageviews are all hits to ${c.site.name}, including both listing pages and comment pages.
+
+
+ uniques are the total number of unique visitors (IP and U/A combo) that generate the above pageviews. This is independent of whether or not they are logged in.
+
+
+      subscriptions is the number of new subscriptions generated in a given day. This number is less accurate than the first two metrics: though we can track new subscriptions, we have no way to track unsubscriptions (when a subscription is actually deleted from the db).
+
+
+
+ Note: there are a couple of places outside of your reddit where someone can click "subscribe", so it is possible (though unlikely) that the subscription count can exceed the unique count on a given day.
+
+
%if not thing.has_data:
${_("There doesn't seem to be any traffic data at the moment. Please check back later.")}
reddit has created a system where you can buy sponsored links on the
front page using our self-serve tool. Overall, we've been quite pleased to find a ridiculously successful (2% CTR to upwards of 10% CTR) means of advertising that doesn't diminish the user experience.
-
Even if your budget is only $20 a day, you can get in on the action. Here's how: we'll allow you to submit a sponsored link (set the title, upload an image), and set the start date and duration. Then, you can bid on how much you want to spend on that link (we've set a minimum of $20 per link per day). This bid is exactly how much you'll pay for the link once it starts running.
+
Even if your budget is only $20 a day, you can get in on the action. Here's how: we'll allow you to submit a sponsored link (set the title, upload an image), and set the start date and duration. Then, you can bid on how much you want to spend on that link (we've set a minimum of $20 per link per day, and $30/day for targeted ads). This bid is exactly how much you'll pay for the link once it starts running.
On each day, we tally up the total number of bids, and use the total to figure out how large a piece of the pie each sponsored link gets. For example, if you were to bid $20, and the total for the day is $200, you'll get 10% of sponsored link impressions for the day.
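The share arithmetic from that example, as a one-line sketch:

function impressionShare(myBid, totalBids) {
    return myBid / totalBids;  // impressionShare(20, 200) == 0.10, i.e. 10% of impressions
}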
diff --git a/r2/r2/templates/selfserviceoatmeal.html b/r2/r2/templates/selfserviceoatmeal.html
new file mode 100644
index 000000000..c9c281c16
--- /dev/null
+++ b/r2/r2/templates/selfserviceoatmeal.html
@@ -0,0 +1,40 @@
+## The contents of this file are subject to the Common Public Attribution
+## License Version 1.0. (the "License"); you may not use this file except in
+## compliance with the License. You may obtain a copy of the License at
+## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
+## License Version 1.1, but Sections 14 and 15 have been added to cover use of
+## software over a computer network and provide for limited attribution for the
+## Original Developer. In addition, Exhibit A has been modified to be consistent
+## with Exhibit B.
+##
+## Software distributed under the License is distributed on an "AS IS" basis,
+## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
+## the specific language governing rights and limitations under the License.
+##
+## The Original Code is Reddit.
+##
+## The Original Developer is the Initial Developer. The Initial Developer of
+## the Original Code is CondeNet, Inc.
+##
+## All portions of the code written by CondeNet are Copyright (c) 2006-2010
+## CondeNet, Inc. All Rights Reserved.
+################################################################################
+
+
diff --git a/r2/r2/templates/spotlightlisting.html b/r2/r2/templates/spotlightlisting.html
index 13e82be44..a066029f4 100644
--- a/r2/r2/templates/spotlightlisting.html
+++ b/r2/r2/templates/spotlightlisting.html
@@ -23,20 +23,16 @@
<%namespace file="printablebuttons.html" import="ynbutton"/>
<%
from r2.lib.template_helpers import static
- from r2.lib.promote import get_promoted
%>
<%
lookup = dict((t._fullname, t) for t in thing.things)
seen = set([])
- promoted = set(get_promoted())
- promoted = [o for o in thing.spotlight_links if o in promoted ]
%>
%for name in thing.spotlight_links:
%if name in seen:
@@ -71,9 +67,9 @@
- ${_("Sometimes reddit needs input from the community to train its spam filter. This is your chance to help.")}
+ ${_("Sometimes reddit needs to ask the community to help fight spam. This is your chance to take part in that.")}
-
+
${_("Click here for more info.")}
<%
if img is not None:
- img = "http:/%s%s_%d.png" % \
+ img = "http://%s/%s_%d.png" % \
(g.s3_thumb_bucket, c.site._fullname, img)
else:
img = "/static/kill.png"
diff --git a/r2/r2/templates/trafficgraph.html b/r2/r2/templates/trafficgraph.html
new file mode 100644
index 000000000..d6cabda49
--- /dev/null
+++ b/r2/r2/templates/trafficgraph.html
@@ -0,0 +1,74 @@
+## The contents of this file are subject to the Common Public Attribution
+## License Version 1.0. (the "License"); you may not use this file except in
+## compliance with the License. You may obtain a copy of the License at
+## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
+## License Version 1.1, but Sections 14 and 15 have been added to cover use of
+## software over a computer network and provide for limited attribution for the
+## Original Developer. In addition, Exhibit A has been modified to be consistent
+## with Exhibit B.
+##
+## Software distributed under the License is distributed on an "AS IS" basis,
+## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
+## the specific language governing rights and limitations under the License.
+##
+## The Original Code is Reddit.
+##
+## The Original Developer is the Initial Developer. The Initial Developer of
+## the Original Code is CondeNet, Inc.
+##
+## All portions of the code written by CondeNet are Copyright (c) 2006-2010
+## CondeNet, Inc. All Rights Reserved.
+################################################################################
+<%!
+ from random import random
+ import simplejson
+ %>
+
+
+<%
+ _id = str(random()).split('.')[-1]
+ %>
+
+
${thing.title}
+
+
+
+
+
diff --git a/r2/r2/templates/upgradebuttons.html b/r2/r2/templates/upgradebuttons.html
new file mode 100644
index 000000000..452cb6047
--- /dev/null
+++ b/r2/r2/templates/upgradebuttons.html
@@ -0,0 +1,149 @@
+## The contents of this file are subject to the Common Public Attribution
+## License Version 1.0. (the "License"); you may not use this file except in
+## compliance with the License. You may obtain a copy of the License at
+## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
+## License Version 1.1, but Sections 14 and 15 have been added to cover use of
+## software over a computer network and provide for limited attribution for the
+## Original Developer. In addition, Exhibit A has been modified to be consistent
+## with Exhibit B.
+##
+## Software distributed under the License is distributed on an "AS IS" basis,
+## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
+## the specific language governing rights and limitations under the License.
+##
+## The Original Code is Reddit.
+##
+## The Original Developer is the Initial Developer. The Initial Developer of
+## the Original Code is CondeNet, Inc.
+##
+## All portions of the code written by CondeNet are Copyright (c) 2006-2010
+## CondeNet, Inc. All Rights Reserved.
+################################################################################
+<%!
+ from r2.lib.template_helpers import get_domain
+ %>
+
+<% domain = get_domain(False) %>
+
+
+
You need to upgrade your buttons
+
+    You were redirected here because the site you came from has out-of-date buttons and was linking directly to our iframe contents rather than our JS (tsk tsk). If you are the site administrator, use the code snippets below to fix it.
+
+
+
${_('simple interactive button')}
+
${_('put this code on your page:')}
+
+ ${capture(draw_interactive,False)}
+
+
${_("and you'll get something like this:")}
+
+ ${draw_interactive(False)}
+
+
+
${_("more interactive buttons")}
+
+ %for x in xrange(1,4):
+ ${demo(capture(draw_interactive, x))}
+ %endfor
+
+
+
${_('interactive button advanced settings')}
+
+
+
+
${_("specify a url")}
+ ${_("useful in places like blogs, where you want to link to the post's permalink")}
+%def>
+
<%def name="percentage(slice, total)">
%if total is None or total == "" or total == 0 or slice is None or slice == "":
--
diff --git a/r2/r2/tests/testfile b/r2/r2/tests/testfile
new file mode 100644
index 000000000..e88600031
--- /dev/null
+++ b/r2/r2/tests/testfile
@@ -0,0 +1 @@
+testing 4
diff --git a/r2/setup.py b/r2/setup.py
index 3f245b4cc..3315d6ff7 100644
--- a/r2/setup.py
+++ b/r2/setup.py
@@ -75,13 +75,13 @@ except ImportError:
# we're using a custom build of pylibmc at the moment, so we need to
# be sure that we have the right version
-pylibmc_version = '1.0-reddit-01'
+pylibmc_version = '1.0-reddit-03'
try:
import pylibmc
assert pylibmc.__version__ == pylibmc_version
except (ImportError, AssertionError):
print "Installing pylibmc"
- easy_install(["http://github.com/downloads/ketralnis/pylibmc/pylibmc-1.0-reddit-01.tar.gz"])
+ easy_install(["http://github.com/downloads/ketralnis/pylibmc/pylibmc-1.0-reddit-03.tar.gz"])
filtermod = Extension('Cfilters',
sources = ['r2/lib/c/filters.c'])
@@ -116,6 +116,7 @@ setup(
#url="",
install_requires=["Routes<=1.8",
"Pylons<=0.9.6.2",
+ "boto",
"pytz",
"pycrypto",
"Babel>=0.9.1",