diff --git a/r2/example.ini b/r2/example.ini index 8f58545d4..033416b76 100644 --- a/r2/example.ini +++ b/r2/example.ini @@ -17,6 +17,7 @@ permacaches = 127.0.0.1:11211 rec_cache = 127.0.0.1:11311 tracker_url = adtracker_url = +clicktracker_url = main_db_name = reddit main_db_host = 127.0.0.1 @@ -74,6 +75,7 @@ domain = localhost domain_prefix = default_sr = localhost admins = +sponsors = page_cache_time = 30 static_path = /static/ useragent = Mozilla/5.0 (compatible; bot/1.0; ChangeMe) diff --git a/r2/r2/config/routing.py b/r2/r2/config/routing.py index 3441089c7..5f32b8b83 100644 --- a/r2/r2/config/routing.py +++ b/r2/r2/config/routing.py @@ -72,8 +72,7 @@ def make_map(global_conf={}, app_conf={}): mc('/feedback', controller='feedback', action='feedback') mc('/ad_inq', controller='feedback', action='ad_inq') - mc('/admin/promote', controller='admin', action='promote') - mc('/admin/unpromote', controller='admin', action='unpromote') + mc('/admin/:action', controller='admin') mc('/admin/i18n', controller='i18n', action='list') mc('/admin/i18n/:action', controller='i18n') @@ -102,6 +101,9 @@ def make_map(global_conf={}, app_conf={}): mc('/stylesheet', controller = 'front', action = 'stylesheet') mc('/frame', controller='front', action = 'frame') + mc('/promote/edit_promo/:link', controller='promote', action = 'edit_promo') + mc('/promote/:action', controller='promote') + mc('/', controller='hot', action='listing') listing_controllers = "hot|saved|toplinks|new|recommended|randomrising|comments" diff --git a/r2/r2/controllers/__init__.py b/r2/r2/controllers/__init__.py index dcc31e33c..96ed1b36f 100644 --- a/r2/r2/controllers/__init__.py +++ b/r2/r2/controllers/__init__.py @@ -43,6 +43,7 @@ from error import ErrorController from post import PostController from toolbar import ToolbarController from i18n import I18nController +from promotecontroller import PromoteController from querycontroller import QueryController diff --git a/r2/r2/controllers/admin.py b/r2/r2/controllers/admin.py index c542c3c3f..a9c54f0f3 100644 --- a/r2/r2/controllers/admin.py +++ b/r2/r2/controllers/admin.py @@ -26,30 +26,16 @@ from r2.controllers.validator import * from r2.lib.pages import * from r2.models import * -from r2.lib import organic - from pylons.i18n import _ def admin_profile_query(vuser, location, db_sort): return None -class AdminController(RedditController): - @validate(VAdmin(), - thing = VByName('fullname')) - @validate(VAdmin()) - def GET_promote(self): - current_list = organic.get_promoted() - - b = IDBuilder([ x._fullname for x in current_list]) - - render_list = b.get_items()[0] - - return AdminPage(content = Promote(render_list), - title = _('promote'), - nav_menus = []).render() - +class AdminController(RedditController): pass try: from r2admin.controllers.admin import * except ImportError: pass + + diff --git a/r2/r2/controllers/api.py b/r2/r2/controllers/api.py index e8d0996f0..041a92fec 100644 --- a/r2/r2/controllers/api.py +++ b/r2/r2/controllers/api.py @@ -30,7 +30,8 @@ from r2.models import * from r2.models.subreddit import Default as DefaultSR import r2.models.thing_changes as tc -from r2.lib.utils import get_title, sanitize_url, timeuntil, set_last_modified, query_string +from r2.lib.utils import get_title, sanitize_url, timeuntil, set_last_modified +from r2.lib.utils import query_string, to36, timefromnow from r2.lib.wrapped import Wrapped from r2.lib.pages import FriendList, ContributorList, ModList, \ BannedList, BoringPage, FormPage, NewLink, CssError, UploadedImage @@ -48,12 +49,15 @@ 
from r2.config import cache from r2.lib.jsonresponse import JsonResponse, Json from r2.lib.jsontemplates import api_type from r2.lib import cssfilter +from r2.lib.media import force_thumbnail, thumbnail_url from simplejson import dumps from datetime import datetime, timedelta from md5 import md5 -from r2.lib.organic import promote, unpromote + +from r2.lib.promote import promote, unpromote +from r2.controllers import promotecontroller def link_listing_by_url(url, count = None): try: @@ -904,7 +908,7 @@ class ApiController(RedditController): JS. """ - # default error list (default valuse will reset the errors in + # default error list (default values will reset the errors in # the response if no error is raised) errors = dict(BAD_CSS_NAME = "", IMAGE_ERROR = "") try: @@ -925,7 +929,7 @@ class ApiController(RedditController): except ValueError: # the add_image method will raise only on too many images errors['IMAGE_ERROR'] = ( - _("too many imags (you only get %d)") % g.max_sr_images) + _("too many images (you only get %d)") % g.max_sr_images) if any(errors.values()): return UploadedImage("", "", "", errors = errors).render() @@ -1315,14 +1319,97 @@ class ApiController(RedditController): c.user._commit() @Json - @validate(VAdmin(), + @validate(VSponsor(), thing = VByName('id')) def POST_promote(self, res, thing): promote(thing) @Json - @validate(VAdmin(), + @validate(VSponsor(), thing = VByName('id')) def POST_unpromote(self, res, thing): unpromote(thing) + @Json + @validate(VSponsor(), + link = VLink('link_id'), + title = VTitle('title'), + url = nop('url'), + sr = VSubmitSR('sr'), + subscribers_only = VBoolean('subscribers_only'), + disable_comments = VBoolean('disable_comments'), + disable_expire = VBoolean('disable_expire'), + timelimit = VBoolean('timelimit'), + timelimitlength = VInt('timelimitlength',1,1000), + timelimittype = VOneOf('timelimittype',['hours','days','weeks'])) + def POST_edit_promo(self, res, title, url, sr, subscribers_only, + disable_comments, + timelimit = None, timelimitlength = None, timelimittype = None, + disable_expire = None, + link = None): + if res._chk_error(errors.NO_TITLE): + res._focus('title') + elif res._chk_errors((errors.NO_URL,errors.BAD_URL)): + res._focus('url') + elif res._chk_error(errors.SUBREDDIT_NOEXIST): + res._focus('sr') + elif timelimit and res._chk_error(errors.BAD_NUMBER): + res._focus('timelimitlength') + elif link: + link.title = title + link.url = url + + link.promoted_subscribersonly = subscribers_only + link.disable_comments = disable_comments + + if disable_expire: + link.promote_until = None + elif timelimit and timelimitlength and timelimittype: + link.promote_until = timefromnow("%d %s" % (timelimitlength, timelimittype)) + + link._commit() + + res._redirect('/promote/edit_promo/%s' % to36(link._id)) + else: + link = Link(title = title, + url = url, + author_id = c.user._id, + sr_id = sr._id) + + if timelimit and timelimitlength and timelimittype: + promote_until = timefromnow("%d %s" % (timelimitlength, timelimittype)) + else: + promote_until = None + + link._commit() + + promote(link, subscribers_only = subscribers_only, + promote_until = promote_until, + disable_comments = disable_comments) + + res._redirect('/promote/edit_promo/%s' % to36(link._id)) + + def GET_link_thumb(self, *a, **kw): + """ + See GET_upload_sr_image for rationale + """ + return "nothing to see here." 
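# A minimal sketch (not part of the patch) of how the promote_until values
# above are computed.  POST_edit_promo formats its validated fields into an
# interval string, "%d %s" % (timelimitlength, timelimittype), and hands it
# to timefromnow(), newly imported from r2.lib.utils.  Assuming timefromnow
# simply offsets the current time by the named interval, it behaves like:
#
#     from datetime import datetime, timedelta
#
#     def timefromnow(interval):
#         # e.g. "2 weeks" -> now + timedelta(weeks = 2); the unit names
#         # match the VOneOf choices above: 'hours', 'days', 'weeks'
#         count, unit = interval.split()
#         return datetime.now() + timedelta(**{unit: int(count)})
#
# so a three-day promotion stores an absolute expiry that get_promoted() in
# r2/r2/lib/promote.py can later compare against the current time.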
+ + @validate(VSponsor(), + link = VByName('link_id'), + file = VLength('file',500*1024)) + def POST_link_thumb(self, link=None, file=None): + errors = dict(BAD_CSS_NAME = "", IMAGE_ERROR = "") + + try: + force_thumbnail(link, file) + except cssfilter.BadImage: + # if the image doesn't clean up nicely, abort + errors["IMAGE_ERROR"] = _("bad image") + + if any(errors.values()): + return UploadedImage("", "", "upload", errors = errors).render() + else: + return UploadedImage(_('saved'), thumbnail_url(link), "upload", + errors = errors).render() + diff --git a/r2/r2/controllers/error.py b/r2/r2/controllers/error.py index 9c97fe72a..be69d8aa4 100644 --- a/r2/r2/controllers/error.py +++ b/r2/r2/controllers/error.py @@ -160,7 +160,7 @@ def handle_awful_failure(fail_text): import sys s = sys.exc_info() # reraise the original error with the original stack trace - raise s[1], None, s[2] + raise s[1], None, s[2] try: # log the traceback, and flag the "path" as the error location import traceback diff --git a/r2/r2/controllers/errors.py b/r2/r2/controllers/errors.py index f360afef4..3ac7db3ec 100644 --- a/r2/r2/controllers/errors.py +++ b/r2/r2/controllers/errors.py @@ -50,7 +50,8 @@ error_list = dict(( ('INVALID_PREF', "that preference isn't valid"), ('BAD_NUMBER', _("that number isn't in the right range")), ('ALREADY_SUB', _("that link has already been submitted")), - ('SUBREDDIT_EXISTS', _('that subreddit already exists')), + ('SUBREDDIT_EXISTS', _('that reddit already exists')), + ('SUBREDDIT_NOEXIST', _('that reddit doesn\'t exist')), ('BAD_SR_NAME', _('that name isn\'t going to work')), ('RATELIMIT', _('you are trying to submit too fast. try again in %(time)s.')), ('EXPIRED', _('your session has expired')), diff --git a/r2/r2/controllers/listingcontroller.py b/r2/r2/controllers/listingcontroller.py index b3551a8c2..e3604c799 100644 --- a/r2/r2/controllers/listingcontroller.py +++ b/r2/r2/controllers/listingcontroller.py @@ -35,6 +35,7 @@ from r2.lib.strings import Score from r2.lib import organic from r2.lib.solrsearch import SearchQuery from r2.lib.utils import iters, check_cheating +from r2.lib.promote import promote_builder_wrapper from admin import admin_profile_query @@ -148,6 +149,7 @@ class ListingController(RedditController): if c.user.pref_compress and isinstance(thing, Link): thing.__class__ = LinkCompressed thing.score_fmt = Score.points + return Wrapped(thing) def GET_listing(self, **env): @@ -190,7 +192,7 @@ class HotController(FixListing, ListingController): disp_links = [o_links[(i + pos) % len(o_links)] for i in xrange(-2, l)] b = IDBuilder(disp_links, - wrap = self.builder_wrapper) + wrap = promote_builder_wrapper(self.builder_wrapper)) o = OrganicListing(b, org_links = o_links, visible_link = o_links[pos], @@ -198,14 +200,12 @@ class HotController(FixListing, ListingController): max_score = self.listing_obj.max_score).listing() if len(o.things) > 0: - # only pass through a listing if the link made it + # only pass through a listing if the links made it # through the builder in organic_links *and* ours organic.update_pos(pos+1, calculation_key) return o - return None - def query(self): #no need to worry when working from the cache diff --git a/r2/r2/controllers/promotecontroller.py b/r2/r2/controllers/promotecontroller.py new file mode 100644 index 000000000..206c47107 --- /dev/null +++ b/r2/r2/controllers/promotecontroller.py @@ -0,0 +1,65 @@ +# The contents of this file are subject to the Common Public Attribution +# License Version 1.0. 
(the "License"); you may not use this file except in +# compliance with the License. You may obtain a copy of the License at +# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public +# License Version 1.1, but Sections 14 and 15 have been added to cover use of +# software over a computer network and provide for limited attribution for the +# Original Developer. In addition, Exhibit A has been modified to be consistent +# with Exhibit B. +# +# Software distributed under the License is distributed on an "AS IS" basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for +# the specific language governing rights and limitations under the License. +# +# The Original Code is Reddit. +# +# The Original Developer is the Initial Developer. The Initial Developer of the +# Original Code is CondeNet, Inc. +# +# All portions of the code written by CondeNet are Copyright (c) 2006-2008 +# CondeNet, Inc. All Rights Reserved. +################################################################################ +from validator import * +from pylons.i18n import _ +from r2.models import * +from r2.lib.pages import * +from r2.lib.menus import * + +from r2.controllers.reddit_base import RedditController + +from r2.lib import promote + +class PromoteController(RedditController): + @validate(VSponsor()) + def GET_index(self): + return self.GET_current_promos() + + @validate(VSponsor()) + def GET_current_promos(self): + current_list = promote.get_promoted() + + b = IDBuilder([ x._fullname for x in current_list]) + + render_list = b.get_items()[0] + + page = PromotePage('current_promos', + content = PromotedLinks(render_list)) + + return page.render() + + @validate(VSponsor()) + def GET_new_promo(self): + page = PromotePage('new_promo', + content = PromoteLinkForm()) + return page.render() + + @validate(VSponsor(), + link = VLink('link')) + def GET_edit_promo(self, link): + sr = Subreddit._byID(link.sr_id) + + form = PromoteLinkForm(sr = sr, link = link) + page = PromotePage('new_promo', content = form) + + return page.render() + diff --git a/r2/r2/controllers/reddit_base.py b/r2/r2/controllers/reddit_base.py index f3086169e..1ccba649a 100644 --- a/r2/r2/controllers/reddit_base.py +++ b/r2/r2/controllers/reddit_base.py @@ -67,11 +67,8 @@ class Cookie(object): self.domain = g.domain def __repr__(self): - return ("Cookie(value=%s, expires=%s, domain=%s, dirty=%s)" - % (repr(self.value), - repr(self.expires), - repr(self.domain), - repr(self.dirty))) + return ("Cookie(value=%r, expires=%r, domain=%r, dirty=%r)" + % (self.value, self.expires, self.domain, self.dirty)) class UnloggedUser(FakeAccount): _cookie = 'options' @@ -445,6 +442,7 @@ class RedditController(BaseController): c.cookies = Cookies() try: for k,v in request.cookies.iteritems(): + # we can unquote even if it's not quoted c.cookies[k] = Cookie(value=unquote(v), dirty=False) except CookieError: #pylons or one of the associated retarded libraries can't @@ -476,6 +474,8 @@ class RedditController(BaseController): c.have_messages = c.user.msgtime c.user_is_admin = maybe_admin and c.user.name in g.admins + c.user_is_sponsor = c.user_is_admin or c.user.name in g.sponsors + c.over18 = over18() #set_browser_langs() diff --git a/r2/r2/controllers/validator/validator.py b/r2/r2/controllers/validator/validator.py index ae40478e0..5d191f3cc 100644 --- a/r2/r2/controllers/validator/validator.py +++ b/r2/r2/controllers/validator/validator.py @@ -341,6 +341,11 @@ class VAdmin(Validator): if not c.user_is_admin: abort(404, 
"page not found") +class VSponsor(Validator): + def run(self): + if not c.user_is_sponsor: + abort(403, 'forbidden') + class VSrModerator(Validator): def run(self): if not (c.user_is_loggedin and c.site.is_moderator(c.user) @@ -396,10 +401,16 @@ class VSubmitParent(Validator): class VSubmitSR(Validator): def run(self, sr_name): - sr = Subreddit._by_name(sr_name) - if not (c.user_is_loggedin and sr.can_submit(c.user)): + try: + sr = Subreddit._by_name(sr_name) + except NotFound: + c.errors.add(errors.SUBREDDIT_NOEXIST) + sr = None + + if sr and not (c.user_is_loggedin and sr.can_submit(c.user)): abort(403, "forbidden") - return sr + else: + return sr pass_rx = re.compile(r".{3,20}") @@ -466,8 +477,14 @@ class VUrl(VRequired): def run(self, url, sr = None): if sr is None and not isinstance(c.site, FakeSubreddit): sr = c.site + elif sr: + try: + sr = Subreddit._by_name(sr) + except NotFound: + c.errors.add(errors.SUBREDDIT_NOEXIST) + sr = None else: - sr = Subreddit._by_name(sr) if sr else None + sr = None if not url: return self.error(errors.NO_URL) diff --git a/r2/r2/lib/app_globals.py b/r2/r2/lib/app_globals.py index abd94237b..f830523a0 100644 --- a/r2/r2/lib/app_globals.py +++ b/r2/r2/lib/app_globals.py @@ -57,6 +57,7 @@ class Globals(object): 'rec_cache', 'permacaches', 'admins', + 'sponsors', 'monitored_servers', 'default_srs', 'agents', diff --git a/r2/r2/lib/filters.py b/r2/r2/lib/filters.py index e3baf9424..53e564bab 100644 --- a/r2/r2/lib/filters.py +++ b/r2/r2/lib/filters.py @@ -70,6 +70,9 @@ def _force_unicode(text): text = unicode(text) return text +def _force_utf8(text): + return str(_force_unicode(text).encode('utf8')) + def unsafe(text=''): return _Unsafe(_force_unicode(text)) diff --git a/r2/r2/lib/media.py b/r2/r2/lib/media.py index 297bdea12..68462866e 100644 --- a/r2/r2/lib/media.py +++ b/r2/r2/lib/media.py @@ -27,7 +27,7 @@ from r2.lib.workqueue import WorkQueue from r2.lib import s3cp from r2.lib.utils import timeago, fetch_things2 from r2.lib.db.operators import desc -from r2.lib.scraper import make_scraper +from r2.lib.scraper import make_scraper, str_to_image, image_to_str, prepare_image import tempfile from Queue import Queue @@ -115,3 +115,10 @@ def set_media(link): results = {} make_link_info_job(results, link, g.useragent)() update_link(link, *results[link]) + +def force_thumbnail(link, image_data): + image = str_to_image(image_data) + image = prepare_image(image) + upload_thumb(link, image) + update_link(link, thumbnail = True, media_object = None) + diff --git a/r2/r2/lib/menus.py b/r2/r2/lib/menus.py index 7063e87e0..6d6e96fcb 100644 --- a/r2/r2/lib/menus.py +++ b/r2/r2/lib/menus.py @@ -48,6 +48,7 @@ menu_selected=StringHandler(hot = _("what's hot"), controversial= _("most controversial"), saved = _("saved"), recommended = _("recommended"), + promote = _('promote'), ) # translation strings for every menu on the site @@ -57,7 +58,6 @@ menu = MenuHandler(hot = _('hot'), ups = _('ups'), downs = _('downs'), top = _('top'), - relevence = _('relevence'), more = _('more'), relevance = _('relevance'), controversial = _('controversial'), @@ -125,7 +125,7 @@ menu = MenuHandler(hot = _('hot'), mine = _("my reddits"), i18n = _("translate site"), - promote = _("promote"), + promoted = _("promoted"), reporters = _("reporters"), reports = _("reports"), reportedauth = _("reported authors"), @@ -140,6 +140,9 @@ menu = MenuHandler(hot = _('hot'), deleted = _("deleted"), reported = _("reported"), + promote = _('promote'), + new_promo = _('new promoted link'), + current_promos = 
_('promoted links'), ) class Styled(Wrapped): @@ -393,8 +396,8 @@ class CommentSortMenu(SortMenu): class SearchSortMenu(SortMenu): """Sort menu for search pages.""" - default = 'relevence' - mapping = dict(relevence = None, + default = 'relevance' + mapping = dict(relevance = None, hot = 'hot desc', new = 'date desc', old = 'date asc', diff --git a/r2/r2/lib/organic.py b/r2/r2/lib/organic.py index 165eaad1c..f72e28a1f 100644 --- a/r2/r2/lib/organic.py +++ b/r2/r2/lib/organic.py @@ -20,20 +20,21 @@ # CondeNet, Inc. All Rights Reserved. ################################################################################ from r2.models import * -from r2.lib.memoize import memoize, clear_memo +from r2.lib.memoize import memoize from r2.lib.normalized_hot import get_hot, only_recent from r2.lib import count -from r2.lib.utils import UniqueIterator, timeago, timefromnow -from r2.lib.db.operators import desc +from r2.lib.utils import UniqueIterator, timeago +from r2.lib.promote import get_promoted + +from pylons import c import random -from datetime import datetime +from time import time -from pylons import g - -# lifetime in seconds of organic listing in memcached organic_lifetime = 5*60 -promoted_memo_key = 'cached_promoted_links' + +# how many regular organic links should show between promoted ones +promoted_every_n = 5 def keep_link(link): return not any((link.likes != None, @@ -41,43 +42,10 @@ def keep_link(link): link.clicked, link.hidden)) -def promote(thing, subscribers_only = False): - thing.promoted = True - thing.promoted_on = datetime.now(g.tz) - thing.promote_until = timefromnow("1 day") - if subscribers_only: - thing.promoted_subscribersonly = True - thing._commit() - clear_memo(promoted_memo_key) - -def unpromote(thing): - thing.promoted = False - thing.unpromoted_on = datetime.now(g.tz) - thing._commit() - clear_memo(promoted_memo_key) - -def clean_promoted(): +def insert_promoted(link_names, sr_ids, logged_in): """ - Remove any stale promoted entries (should be run periodically to - keep the list small) - """ - p = get_promoted() - for x in p: - if datetime.now(g.tz) > x.promote_until: - unpromote(x) - clear_memo(promoted_memo_key) - -@memoize(promoted_memo_key, time = organic_lifetime) -def get_promoted(): - return [ x for x in Link._query(Link.c.promoted == True, - sort = desc('_date'), - data = True) - if x.promote_until > datetime.now(g.tz) ] - -def insert_promoted(link_names, subscribed_reddits): - """ - The oldest promoted link that c.user hasn't seen yet, and sets the - timestamp for their seen promotions in their cookie + Inserts promoted links into an existing organic list. 
Destructive + on `link_names' """ promoted_items = get_promoted() @@ -85,7 +53,7 @@ def insert_promoted(link_names, subscribed_reddits): return def my_keepfn(l): - if l.promoted_subscribersonly and l.sr_id not in subscribed_reddits: + if l.promoted_subscribersonly and l.sr_id not in sr_ids: return False else: return keep_link(l) @@ -101,11 +69,10 @@ def insert_promoted(link_names, subscribed_reddits): if not promoted_items: return - every_n = 5 - # don't insert one at the head of the list 50% of the time for # logged in users, and 50% of the time for logged-off users when - # the pool of promoted links is less than 3 + # the pool of promoted links is less than 3 (to avoid showing the + # same promoted link to the same person too often) if c.user_is_loggedin or len(promoted_items) < 3: skip_first = random.choice((True,False)) else: @@ -113,32 +80,25 @@ def insert_promoted(link_names, subscribed_reddits): # insert one promoted item for every N items for i, item in enumerate(promoted_items): - pos = i * every_n + pos = i * promoted_every_n if pos > len(link_names): break elif pos == 0 and skip_first: - # don't always show one for logged-in users continue else: link_names.insert(pos, promoted_items[i]._fullname) -@memoize('cached_organic_links_user', time = organic_lifetime) -def cached_organic_links(username): - if username: - user = Account._by_name(username) - else: - user = FakeAccount() - +@memoize('cached_organic_links', time = organic_lifetime) +def cached_organic_links(sr_ids, logged_in): sr_count = count.get_link_counts() - srs = Subreddit.user_subreddits(user) #only use links from reddits that you're subscribed to - link_names = filter(lambda n: sr_count[n][1] in srs, sr_count.keys()) + link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys()) link_names.sort(key = lambda n: sr_count[n][0]) #potentially add a up and coming link if random.choice((True, False)): - sr = Subreddit._byID(random.choice(srs)) + sr = Subreddit._byID(random.choice(sr_ids)) items = only_recent(get_hot(sr)) if items: if len(items) == 1: @@ -147,15 +107,17 @@ def cached_organic_links(username): new_item = random.choice(items[1:4]) link_names.insert(0, new_item._fullname) - insert_promoted(link_names, srs) + insert_promoted(link_names, sr_ids, logged_in) builder = IDBuilder(link_names, num = 30, skip = True, keep_fn = keep_link) links = builder.get_items()[0] - calculation_key = str(datetime.now(g.tz)) + calculation_key = str(time()) update_pos(0, calculation_key) + # in case of duplicates (inserted by the random up-and-coming link + # or a promoted link), return only the first ret = [l._fullname for l in UniqueIterator(links)] return (calculation_key, ret) @@ -163,8 +125,8 @@ def cached_organic_links(username): def organic_links(user): from r2.controllers.reddit_base import organic_pos - username = user.name if c.user_is_loggedin else None - cached_key, links = cached_organic_links(username) + sr_ids = Subreddit.user_subreddits(user) + cached_key, links = cached_organic_links(sr_ids, c.user_is_loggedin) cookie_key, pos = organic_pos() # pos will be 0 if it wasn't specified diff --git a/r2/r2/lib/pages/admin_pages.py b/r2/r2/lib/pages/admin_pages.py index 731847436..151d4e302 100644 --- a/r2/r2/lib/pages/admin_pages.py +++ b/r2/r2/lib/pages/admin_pages.py @@ -22,7 +22,7 @@ from pylons import c, g from r2.lib.wrapped import Wrapped from pages import Reddit -from r2.lib.menus import NavButton, NavMenu, menu +from r2.lib.menus import NamedButton, NavButton, menu, NavMenu class AdminSidebar(Wrapped): 
def __init__(self, user): @@ -43,8 +43,11 @@ class AdminPage(Reddit): def __init__(self, nav_menus = None, *a, **kw): #add admin options to the nav_menus if c.user_is_admin: - buttons = [NavButton(menu.i18n, "")] \ - if g.translator else [] + buttons = [] + + if g.translator: + buttons.append(NavButton(menu.i18n, "")) + admin_menu = NavMenu(buttons, title='show', base_path = '/admin', type="lightdrop") if nav_menus: diff --git a/r2/r2/lib/pages/pages.py b/r2/r2/lib/pages/pages.py index 8a44023aa..a07de7c47 100644 --- a/r2/r2/lib/pages/pages.py +++ b/r2/r2/lib/pages/pages.py @@ -29,13 +29,16 @@ from pylons import c, request, g from pylons.controllers.util import abort from r2.lib.captcha import get_iden -from r2.lib.filters import spaceCompress, _force_unicode +from r2.lib.filters import spaceCompress, _force_unicode, _force_utf8 from r2.lib.menus import NavButton, NamedButton, NavMenu, PageNameNav, JsButton, menu from r2.lib.strings import plurals, rand_strings, strings from r2.lib.utils import title_to_url, query_string, UrlParser from r2.lib.template_helpers import add_sr, get_domain +from r2.lib.promote import promote_builder_wrapper import sys +datefmt = _force_utf8(_('%d %b %Y')) + def get_captcha(): if not c.user_is_loggedin or c.user.needs_captcha(): return get_iden() @@ -219,10 +222,13 @@ class Reddit(Wrapped): more_buttons.append(NamedButton('saved', False)) more_buttons.append(NamedButton('recommended', False)) - if c.user_is_loggedin and c.user_is_admin: - more_buttons.append(NamedButton('admin')) - elif c.user_is_loggedin and c.site.is_moderator(c.user): - more_buttons.append(NavButton(menu.admin, 'about/edit')) + if c.user_is_admin: + more_buttons.append(NamedButton('admin')) + elif c.site.is_moderator(c.user): + more_buttons.append(NavButton(menu.admin, 'about/edit')) + + if c.user_is_sponsor: + more_buttons.append(NamedButton('promote')) toolbar = [NavMenu(main_buttons, type='tabmenu')] if more_buttons: @@ -409,7 +415,8 @@ class LinkInfoPage(Reddit): link_title = '', *a, **kw): # TODO: temp hack until we find place for builder_wrapper from r2.controllers.listingcontroller import ListingController - link_builder = IDBuilder(link._fullname, wrap = ListingController.builder_wrapper) + link_builder = IDBuilder(link._fullname, + wrap = promote_builder_wrapper(ListingController.builder_wrapper)) # link_listing will be the one-element listing at the top self.link_listing = LinkListing(link_builder, nextprev=False).listing() @@ -452,13 +459,17 @@ class LinkInfoPage(Reddit): def rightbox(self): rb = Reddit.rightbox(self) - rb.insert(1, LinkInfoBar(a = self.link)) + if not (self.link.promoted and not c.user_is_sponsor): + rb.insert(1, LinkInfoBar(a = self.link)) return rb class LinkInfoBar(Wrapped): """Right box for providing info about a link.""" def __init__(self, a = None): - Wrapped.__init__(self, a = a) + if a: + a = Wrapped(a) + + Wrapped.__init__(self, a = a, datefmt = datefmt) class EditReddit(Reddit): @@ -1109,7 +1120,34 @@ class Cnameframe(Wrapped): self.title = "" self.frame_target = None -class Promote(Wrapped): - def __init__(self, current_list, *k, **kw): +class PromotePage(Reddit): + create_reddit_box = False + submit_box = False + extension_handling = False + + def __init__(self, title, nav_menus = None, *a, **kw): + buttons = [NamedButton('current_promos'), + NamedButton('new_promo')] + + menu = NavMenu(buttons, title='show', base_path = '/promote', + type='flatlist', default = 'current_promos') + + if nav_menus: + nav_menus.insert(0, menu) + else: + nav_menus = 
[menu] + + Reddit.__init__(self, title, nav_menus = nav_menus, *a, **kw) + + +class PromotedLinks(Wrapped): + def __init__(self, current_list, *a, **kw): self.things = current_list - Wrapped.__init__(self, *k, **kw) + + Wrapped.__init__(self, *a, **kw) + +class PromoteLinkForm(Wrapped): + def __init__(self, sr = None, link = None, *a, **kw): + Wrapped.__init__(self, sr = sr, link = link, + datefmt = datefmt, *a, **kw) + diff --git a/r2/r2/lib/promote.py b/r2/r2/lib/promote.py new file mode 100644 index 000000000..a930cf6d0 --- /dev/null +++ b/r2/r2/lib/promote.py @@ -0,0 +1,101 @@ +# The contents of this file are subject to the Common Public Attribution +# License Version 1.0. (the "License"); you may not use this file except in +# compliance with the License. You may obtain a copy of the License at +# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public +# License Version 1.1, but Sections 14 and 15 have been added to cover use of +# software over a computer network and provide for limited attribution for the +# Original Developer. In addition, Exhibit A has been modified to be consistent +# with Exhibit B. +# +# Software distributed under the License is distributed on an "AS IS" basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for +# the specific language governing rights and limitations under the License. +# +# The Original Code is Reddit. +# +# The Original Developer is the Initial Developer. The Initial Developer of the +# Original Code is CondeNet, Inc. +# +# All portions of the code written by CondeNet are Copyright (c) 2006-2008 +# CondeNet, Inc. All Rights Reserved. +################################################################################ +from r2.models import * + +from r2.lib.utils import timefromnow +from r2.lib.memoize import clear_memo, memoize +from r2.lib.db.operators import desc + +from datetime import datetime +import random + +# time in seconds to retain cached list of promoted links; note that +# expired promotions are cleaned only this often +promoted_memo_lifetime = 60*60 +promoted_memo_key = 'cached_promoted_links' + +def promote(thing, subscribers_only = False, promote_until = None, + disable_comments = False): + + thing.promoted = True + thing.promoted_on = datetime.now(g.tz) + + if c.user: + thing.promoted_by = c.user._id + + if promote_until: + thing.promote_until = promote_until + + if disable_comments: + thing.disable_comments = True + + if subscribers_only: + thing.promoted_subscribersonly = True + + thing._commit() + clear_memo(promoted_memo_key) + +def unpromote(thing): + thing.promoted = False + thing.unpromoted_on = datetime.now(g.tz) + thing.promote_until = None + thing._commit() + clear_memo(promoted_memo_key) + +@memoize(promoted_memo_key, time = promoted_memo_lifetime) +def get_promoted_cached(): + """ + Returns all links that are promoted, and cleans up any that are + ready for automatic unpromotion + """ + links = Link._query(Link.c.promoted == True, + sort = desc('_date'), + data = True) + + # figure out which links have expired + expired_links = set(x for x in links + if x.promote_until + and x.promote_until < datetime.now(g.tz)) + + for x in expired_links: + g.log.debug('Unpromoting "%s"' % x.title) + unpromote(x) + + return [ x._fullname for x in links if x not in expired_links ] + +def get_promoted(): + fullnames = get_promoted_cached() + return Link._by_fullname(fullnames, data = True, return_dict = False) + +def promote_builder_wrapper(alternative_wrapper): + def wrapper(thing): + 
if isinstance(thing, Link) and thing.promoted: + thing.__class__ = PromotedLink + w = Wrapped(thing) + w.rowstyle = 'promoted link' + + return w + else: + return alternative_wrapper(thing) + return wrapper + + diff --git a/r2/r2/lib/scraper.py b/r2/r2/lib/scraper.py index 1ede38480..b56b9dd5b 100644 --- a/r2/r2/lib/scraper.py +++ b/r2/r2/lib/scraper.py @@ -48,6 +48,11 @@ def str_to_image(s): image = Image.open(s) return image +def prepare_image(image): + image = square_image(image) + image.thumbnail(thumbnail_size, Image.ANTIALIAS) + return image + def image_entropy(img): """calculate the entropy of an image""" hist = img.histogram() @@ -209,8 +214,7 @@ class Scraper: if image_str: image = str_to_image(image_str) try: - image = square_image(image) - image.thumbnail(thumbnail_size, Image.ANTIALIAS) + image = prepare_image(image) except IOError, e: #can't read interlaced PNGs, ignore if 'interlaced' in e.message: diff --git a/r2/r2/lib/tracking.py b/r2/r2/lib/tracking.py index e88a4a901..f37dcc84d 100644 --- a/r2/r2/lib/tracking.py +++ b/r2/r2/lib/tracking.py @@ -46,7 +46,7 @@ def pkcs5unpad(text, padlen = 8): return text def cipher(lv): - '''returns a pycrypto object used by encrypt and decrypt, with the key based on g.SECRET''' + '''returns a pycrypto object used by encrypt and decrypt, with the key based on g.tracking_secret''' key = g.tracking_secret return AES.new(key[:key_len], AES.MODE_CBC, lv[:key_len]) @@ -61,7 +61,6 @@ def encrypt(text): text = b64enc(cip.encrypt(pkcs5pad(text, key_len))) return quote_plus(randstr + text, safe='') - def decrypt(text): '''Inverts encrypt''' # we can unquote even if text is not quoted. @@ -81,7 +80,7 @@ def safe_str(text): if isinstance(text, unicode): return text.encode('utf8') except: - print "unicode encoding exception in safe_str" + g.log.error("unicode encoding exception in safe_str") return '' return text @@ -98,7 +97,7 @@ class Info(object): try: data = decrypt(text).split('|') except: - print "decryption failure on '%s'" % text + g.log.error("decryption failure on '%s'" % text) data = [] for i, d in enumerate(data): if i < len(self.__slots__): @@ -120,14 +119,14 @@ class Info(object): return cls(**kw).tracking_url() except Exception,e: - print e + g.log.error(e) try: randstr = ''.join(choice('1234567890abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ+') for x in xrange(pad_len)) return "%s?v=%s" % (cls.tracker_url, randstr) except: - print "fallback rendering failed as well" + g.log.error("fallback rendering failed as well") return "" class UserInfo(Info): @@ -140,14 +139,26 @@ class UserInfo(Info): self.site = safe_str(c.site.name if c.site else '') self.lang = safe_str(c.lang if c.lang else '') -class PromotedLinkInfo(Info): +class PromotedLinkInfo(Info): __slots__ = ['fullname'] tracker_url = g.adtracker_url - def init_defaults(self, fullname = None): + def init_defaults(self, fullname): self.fullname = fullname - +class PromotedLinkClickInfo(PromotedLinkInfo): + tracker_url = g.clicktracker_url + + def init_defaults(self, dest, **kw): + self.dest = dest + + return PromotedLinkInfo.init_defaults(self, **kw) + + def tracking_url(self): + s = PromotedLinkInfo.tracking_url(self) + '&url=' + self.dest + g.log.debug("generated %s" % s) + return s + def benchmark(n = 10000): """on my humble desktop machine, this gives ~150 microseconds per gen_url""" import time diff --git a/r2/r2/lib/utils/utils.py b/r2/r2/lib/utils/utils.py index 25e294b99..7453bac8b 100644 --- a/r2/r2/lib/utils/utils.py +++ b/r2/r2/lib/utils/utils.py @@ -490,7 +490,7 
@@ class UrlParser(object):
     @property
     def query_dict(self):
         """
-        Parses they params attribute of the original urlparse and
+        Parses the `params' attribute of the original urlparse and
         generates a dictionary where both the keys and values have
         been url_unescape'd.  Any updates or changes to the resulting
         dict will be reflected in the updated query params
@@ -998,11 +998,11 @@ def title_to_url(title, max_length = 50):
         title = title[:last_word]
     return title
 
-def debug_print(fn):
+def trace(fn):
     from pylons import g
-    def new_fn(*k,**kw):
-        ret = fn(*k,**kw)
-        g.log.debug("Fn: %s; k=%s; kw=%s\nRet: %s"
-                    % (fn,k,kw,ret))
+    def new_fn(*a,**kw):
+        ret = fn(*a,**kw)
+        g.log.debug("Fn: %s; a=%s; kw=%s\nRet: %s"
+                    % (fn,a,kw,ret))
         return ret
     return new_fn
diff --git a/r2/r2/models/builder.py b/r2/r2/models/builder.py
index 5bbed1038..0add85558 100644
--- a/r2/r2/models/builder.py
+++ b/r2/r2/models/builder.py
@@ -129,9 +129,11 @@ class Builder(object):
             compute_votes(w, item)
             w.score = w.upvotes - w.downvotes
-            w.rowstyle= 'even' if (count % 2) else "odd"
             w.deleted = item._deleted
 
+            w.rowstyle = w.rowstyle if hasattr(w,'rowstyle') else ''
+            w.rowstyle += ' ' + ('even' if (count % 2) else 'odd')
+
             count += 1
 
             # would have called it "reported", but that is already
diff --git a/r2/r2/models/link.py b/r2/r2/models/link.py
index 8f0a2a363..66d14a9d9 100644
--- a/r2/r2/models/link.py
+++ b/r2/r2/models/link.py
@@ -49,6 +49,9 @@ class Link(Thing, Printable):
                      has_thumbnail = False,
                      promoted = False,
                      promoted_subscribersonly = False,
+                     promote_until = None,
+                     promoted_by = None,
+                     disable_comments = False,
                      ip = '0.0.0.0')
 
     def __init__(self, *a, **kw):
@@ -234,6 +237,8 @@ class Link(Thing, Printable):
     def add_props(cls, user, wrapped):
         from r2.lib.count import incr_counts
         from r2.lib.media import thumbnail_url
+        from r2.lib.utils import timeago
+        from r2.lib.tracking import PromotedLinkClickInfo
 
         saved = Link._saved(user, wrapped) if user else {}
         hidden = Link._hidden(user, wrapped) if user else {}
@@ -241,8 +246,8 @@
         clicked = {}
 
         for item in wrapped:
-            show_media = (c.user.pref_media == 'on' or
+            show_media = (c.user.pref_media == 'on' or (item.promoted and item.has_thumbnail) or
                           (c.user.pref_media == 'subreddit' and
                            item.subreddit.show_media))
@@ -269,7 +274,34 @@
             if item.is_self:
                 item.url = item.make_permalink(item.subreddit,
                                                force_domain = True)
-
+            if c.user_is_admin:
+                item.hide_score = False
+            elif item.promoted:
+                item.hide_score = True
+            elif c.user == item.author:
+                item.hide_score = False
+            elif item._date < timeago("2 hours"):
+                item.hide_score = True
+            else:
+                item.hide_score = False
+
+            if c.user_is_loggedin and item.author._id == c.user._id:
+                item.nofollow = False
+            elif item.score <= 1 or item._spam or item.author._spam:
+                item.nofollow = True
+            else:
+                item.nofollow = False
+
+            if item.promoted and g.clicktracker_url:
+                # promoted links' clicks are tracked, so here we are
+                # changing its URL to be the tracking/redirecting URL.
+                # This can't be done in PromotedLink.add_props because
+                # we still want to track clicks for links that haven't
+                # been wrapped as PromotedLinks, as in regular
+                # listings
+                item.url = PromotedLinkClickInfo.gen_url(fullname = item._fullname,
+                                                         dest = item.url)
+
         if c.user_is_loggedin:
             incr_counts(wrapped)
 
@@ -281,6 +313,40 @@
         when possible.
""" return Subreddit._byID(self.sr_id, True, return_dict = False) +class PromotedLink(Link): + _nodb = True + + @classmethod + def add_props(cls, user, wrapped): + Link.add_props(user, wrapped) + + try: + if c.user_is_sponsor: + promoted_by_ids = set(x.promoted_by + for x in wrapped + if hasattr(x,'promoted_by')) + promoted_by_accounts = Account._byID(promoted_by_ids, + data=True) + else: + promoted_by_accounts = {} + + except NotFound: + # since this is just cosmetic, we can skip it altogether + # if one isn't found or is broken + promoted_by_accounts = {} + + for item in wrapped: + # these are potentially paid for placement + item.nofollow = True + + if item.promoted_by in promoted_by_accounts: + item.promoted_by_name = promoted_by_accounts[item.promoted_by].name + else: + # keep the template from trying to read it + item.promoted_by = None + + + class LinkCompressed(Link): _nodb = True diff --git a/r2/r2/models/populatedb.py b/r2/r2/models/populatedb.py index ecab3dad2..b81cbaa58 100644 --- a/r2/r2/models/populatedb.py +++ b/r2/r2/models/populatedb.py @@ -20,17 +20,39 @@ # CondeNet, Inc. All Rights Reserved. ################################################################################ from r2.models import * +from r2.lib import promote -def populate(sr_name = 'reddit.com', start_account = 1, sr_title = "reddit.com: what's new online"): +import random + +def populate(sr_name = 'reddit.com', sr_title = "reddit.com: what's new online", + num = 100): sr = Subreddit._new(name= sr_name, title = sr_title) sr._commit() - for i in range(start_account,(start_account + 4)): - name = 'test' + str(i) - password = name - user = register(name, password) - for j in range(1, 30): - link_id = str(i) + '_' + str(j) - url = 'http://google.com/?q=' + link_id - title = user.name + ' ' + link_id - Link._submit(title, url, user, sr, '127.0.0.1') + create_accounts(num) + create_links(num) + +def create_accounts(num): + chars = 'abcdefghijklmnopqrztuvwxyz' + for i in range(num): + name_ext = ''.join([ random.choice(chars) + for x + in range(int(random.uniform(1, 10))) ]) + name = 'test_' + name_ext + try: + register(name, name) + except AccountExists: + pass + +def create_links(num): + accounts = list(Account._query(limit = num, data = True)) + subreddits = list(Subreddit._query(limit = num, data = True)) + for i in range(num): + id = random.uniform(1,100) + title = url = 'http://google.com/?q=' + str(id) + user = random.choice(accounts) + sr = random.choice(subreddits) + l = Link._submit(title, url, user, sr, '127.0.0.1') + + if random.choice(([False] * 50) + [True]): + promote.promote(l) diff --git a/r2/r2/public/static/reddit.css b/r2/r2/public/static/reddit.css index 530e39fdc..02d311dd0 100644 --- a/r2/r2/public/static/reddit.css +++ b/r2/r2/public/static/reddit.css @@ -553,23 +553,24 @@ before enabling */ /* organic listing */ .organic-listing { - background-color: #F8F8F8; border: solid 1px #666666; - padding: 5px 0; + padding: 0; overflow: hidden; position: relative; _height: 60px; - _overflow: auto; + _overflow: hidden; } .organic-listing .link { + background-color: #F8F8F8; + padding: 5px 7em 10px 0; margin-bottom: 0px; - margin-right: 7em; - } +} .organic-listing .linkcompressed { + background-color: #F8F8F8; + padding: 5px 7em 10px 0; margin-bottom: 0px; - margin-right: 7em; } .organic-listing .nextprev { @@ -584,6 +585,25 @@ before enabling */ .organic-listing .nextprev img:hover { cursor: pointer; border: solid 1px #336699; } .organic-listing .nextprev img:active { margin: 6px 4px 1px 1px;} 
+.promoted {
+  background-color: #EFF7FF;
+  border: solid 1px;
+  padding: 5px 0 5px 0;
+  overflow: hidden;
+}
+
+.organic-listing .promoted {
+  background-color: #EFF7FF;
+  border: none;
+}
+
+.sponsored-tagline {
+  color: #808080;
+  bottom: 0;
+  margin: 0 5px 5px 0;
+  position: absolute;
+  right: 6.2em;
+}
 
 .infobar {
   background-color: #f6e69f;
@@ -1133,7 +1153,7 @@ a.star { text-decoration: none; color: #ff8b60 }
 }
 
 .details span { margin: 0 5px 0 5px; }
-.details .profline {
+.details th {
   text-align: right;
   padding-right: 5px;
   font-weight: bold;
@@ -1411,6 +1431,7 @@ a.star { text-decoration: none; color: #ff8b60 }
 .image-upload td, .image-upload th { vertical-align: top; }
 .image-upload span { padding-left: 5px; }
+.image-upload { display: inline; }
 
 ul#image-preview-list {
diff --git a/r2/r2/public/static/utils.js b/r2/r2/public/static/utils.js
index 4396fa291..42f830988 100644
--- a/r2/r2/public/static/utils.js
+++ b/r2/r2/public/static/utils.js
@@ -332,7 +332,7 @@ function completedUploadImage(status, img_src, name, errors) {
     }
     if(img_src) {
-        $('upload-image').reset();
+        $('image-upload').reset();
         hide('submit-header-img');
         if (!name) {
             $('header-img').src = img_src;
@@ -620,4 +620,4 @@ function show_hide_child(el, tagName, label) {
         el.innerHTML = 'view ' + label;
     }
 }
-
\ No newline at end of file
+
diff --git a/r2/r2/public/static/vote_piece.js b/r2/r2/public/static/vote_piece.js
index d2f290b57..dae1607b1 100644
--- a/r2/r2/public/static/vote_piece.js
+++ b/r2/r2/public/static/vote_piece.js
@@ -37,7 +37,8 @@ function readLCookie(nameEQ) {
         var c = ca[i];
         while(c.charAt(0)==' ') c=c.substring(1,c.length);
         if(c.indexOf(nameEQ)==0) {
-            return unescape(c.substring(nameEQ.length,c.length));
+            /* we can unescape even if it's not escaped */
+            return unescape(c.substring(nameEQ.length,c.length));
         }
     }
     return '';
diff --git a/r2/r2/templates/createsubreddit.html b/r2/r2/templates/createsubreddit.html
index 38b20ea4f..e910e3e7b 100644
--- a/r2/r2/templates/createsubreddit.html
+++ b/r2/r2/templates/createsubreddit.html
@@ -24,6 +24,7 @@
   from r2.lib.filters import keep_space
 %>
 <%namespace file="utils.html" import="error_field, language_tool, plain_link"/>
+<%namespace file="utils.html" import="image_upload"/>
diff --git a/r2/r2/templates/utils.html b/r2/r2/templates/utils.html
index 775e91858..f82064147 100644
--- a/r2/r2/templates/utils.html
+++ b/r2/r2/templates/utils.html
@@ -221,3 +221,44 @@ ${unsafe(txt)}
     style="${style}"
   %endif
+
+<%def name="checkbox(name, text, val)">
+  ${text}
+  <input type="checkbox" id="${name}" name="${name}" ${"checked='checked'" if val else ""} />
+</%def>
+
+<%def name="image_upload(post_target, current_image = None)">
+  <form action="${post_target}" method="post" id="image-upload"
+        class="image-upload" enctype="multipart/form-data">
+    <input type="file" name="file" />
+    <button type="submit">${_("upload")}</button>
+    <span class="status"></span>
+    <div>
+      header preview
+      <img id="header-img" src="${current_image}" alt="" />
+    </div>
+    %if caller:
+      ${caller.body()}
+    %endif
+  </form>
+</%def>
\ No newline at end of file
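A closing sketch of the promotion lifecycle this patch assembles, using only
names defined in the files above (illustrative; the caching and request
plumbing are simplified):

    from r2.lib import promote
    from r2.lib.utils import timefromnow

    # a sponsor fills in /promote/new_promo; the form posts to
    # ApiController.POST_edit_promo, which ultimately calls:
    promote.promote(link,
                    subscribers_only = False,
                    promote_until = timefromnow("7 days"),
                    disable_comments = False)

    # listings read promotions back through get_promoted(), whose memoized
    # helper also unpromotes anything whose promote_until has passed:
    current = promote.get_promoted()

    # organic.insert_promoted() then splices one promoted fullname into
    # every promoted_every_n (= 5) organic slots, and
    # promote_builder_wrapper() renders those rows with rowstyle
    # "promoted link" so the new .promoted CSS rules pick them up.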