mirror of https://github.com/reddit-archive/reddit.git
Allow a link to be forced into the organic link box for site-wide announcements
@@ -15,6 +15,7 @@ log_path =
 memcaches = 127.0.0.1:11211
 rec_cache = 127.0.0.1:11311
 tracker_url =
+adtracker_url =

 main_db_name = reddit
 main_db_host = 127.0.0.1
@@ -81,6 +82,7 @@ solr_url =

 SECRET = abcdefghijklmnopqrstuvwxyz0123456789
 MODSECRET = abcdefghijklmnopqrstuvwxyz0123456789
+tracking_secret = abcdefghijklmnopqrstuvwxyz0123456789
 ip_hash =
 S3KEY_ID = ABCDEFGHIJKLMNOP1234
 S3SECRET_KEY = aBcDeFgHiJkLmNoPqRsTuVwXyZ1234567890AbCd

@@ -70,6 +70,9 @@ def make_map(global_conf={}, app_conf={}):
     mc('/feedback', controller='feedback', action='feedback')
     mc('/ad_inq', controller='feedback', action='ad_inq')

+    mc('/admin/promote', controller='admin', action='promote')
+    mc('/admin/unpromote', controller='admin', action='unpromote')
+
     mc('/admin/i18n', controller='i18n', action='list')
     mc('/admin/i18n/:action', controller='i18n')
     mc('/admin/i18n/:action/:lang', controller='i18n')

@@ -53,3 +53,10 @@ except ImportError:
 from admin import AdminController

 from redirect import RedirectController
+
+try:
+    from r2admin.controllers.admin import *
+except ImportError:
+    pass
+
@@ -20,14 +20,31 @@
 # CondeNet, Inc. All Rights Reserved.
 ################################################################################
 from r2.controllers.reddit_base import RedditController
+from r2.controllers.reddit_base import base_listing

 from r2.controllers.validator import *
 from r2.lib.pages import *
 from r2.models import *

+from r2.lib import organic
+
 from pylons.i18n import _

 def admin_profile_query(vuser, location, db_sort):
     return None

 class AdminController(RedditController):
-    pass
-    @validate(VAdmin(),
-              thing = VByName('fullname'))
+    @validate(VAdmin())
+    def GET_promote(self):
+        current_list = organic.get_promoted()
+
+        b = IDBuilder([ x._fullname for x in current_list])
+
+        render_list = b.get_items()[0]
+
+        return AdminPage(content = Promote(render_list),
+                         title = _('promote'),
+                         nav_menus = []).render()
+
+try:
+    from r2admin.controllers.admin import *
+except ImportError:
+    pass
@@ -53,7 +53,7 @@ from simplejson import dumps

 from datetime import datetime, timedelta
 from md5 import md5
-from r2.lib.organic import update_pos
+from r2.lib.organic import promote, unpromote

 def link_listing_by_url(url, count = None):
     try:
@@ -1303,13 +1303,6 @@ class ApiController(RedditController):
         l.mid_margin = mid_margin
         res.object = res._thing(l.listing(), action = 'populate')

-    @Json
-    @validate(pos = VInt('pos', min = 0, max = 100))
-    def POST_update_pos(self, res, pos):
-        if pos is not None:
-            update_pos(c.user, pos)
-
-
     @Json
     @validate(VUser(),
               ui_elem = VOneOf('id', ('organic',)))
@@ -1320,4 +1313,15 @@ class ApiController(RedditController):
             setattr(c.user, "pref_" + ui_elem, False)
         c.user._commit()


+    @Json
+    @validate(VAdmin(),
+              thing = VByName('id'))
+    def POST_promote(self, res, thing):
+        promote(thing)
+
+    @Json
+    @validate(VAdmin(),
+              thing = VByName('id'))
+    def POST_unpromote(self, res, thing):
+        unpromote(thing)
@@ -180,7 +180,7 @@ class HotController(FixListing, ListingController):
     where = 'hot'

     def organic(self):
-        o_links, pos = organic.organic_links(c.user)
+        o_links, pos, calculation_key = organic.organic_links(c.user)
         if o_links:
             # get links in proximity to pos
             l = min(len(o_links) - 3, 8)
@@ -192,9 +192,16 @@ class HotController(FixListing, ListingController):
                               org_links = o_links,
                               visible_link = o_links[pos],
                               max_num = self.listing_obj.max_num,
-                              max_score = self.listing_obj.max_score)
-            organic.update_pos(c.user, (pos + 1) % len(o_links))
-            return o.listing()
+                              max_score = self.listing_obj.max_score).listing()
+
+            if len(o.things) > 0:
+                # only pass through a listing if the link made it
+                # through the builder in organic_links *and* ours
+                organic.update_pos(pos+1, calculation_key)
+
+                return o
+
+        return None

     def query(self):
@@ -218,7 +225,8 @@ class HotController(FixListing, ListingController):
     def content(self):
         # only send an organic listing for HTML rendering
         if (c.site == Default and c.render_style == "html"
-            and c.user_is_loggedin and c.user.pref_organic):
+            and (not c.user_is_loggedin
+                 or (c.user_is_loggedin and c.user.pref_organic))):
            org = self.organic()
            if org:
                return PaneStack([org, self.listing_obj], css_class='spacer')
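
The new guard admits all logged-out users as well as logged-in users who have pref_organic set. As a quick check (not part of the commit), the inner c.user_is_loggedin test is redundant, so the condition could be simplified without changing behavior:

# not part of the commit: verify (not a or (a and b)) == (not a or b)
for loggedin in (True, False):
    for pref in (True, False):
        assert (not loggedin or (loggedin and pref)) == (not loggedin or pref)
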
@@ -125,9 +125,9 @@ class PostController(ApiController):
             c.user._commit()
         else:
             ip_hash = sha.new(request.ip).hexdigest()
-            c.response.set_cookie('over18',
-                                  value = ip_hash,
-                                  domain = g.domain if not c.frameless_cname else None)
+            domain = g.domain if not c.frameless_cname else None
+            c.cookies.add('over18', value = ip_hash,
+                          domain = domain)
         return self.redirect(dest)
     else:
         return self.redirect('/')
@@ -26,6 +26,7 @@ from pylons.i18n import _
 from pylons.i18n.translation import LanguageError
 from r2.lib.base import BaseController, proxyurl
 from r2.lib import pages, utils, filters
+from r2.lib.utils import http_utils
 from r2.lib.cache import LocalCache
 import random as rand
 from r2.models.account import valid_cookie, FakeAccount
@@ -38,12 +39,37 @@ from r2.lib.template_helpers import add_sr
 from r2.lib.jsontemplates import api_type

 from copy import copy
 from datetime import datetime
 import sha, inspect, simplejson
+from urllib import quote, unquote

 from r2.lib.tracking import encrypt, decrypt

 NEVER = 'Thu, 31 Dec 2037 23:59:59 GMT'

+cache_affecting_cookies = ('reddit_first',)
+
+class Cookies(dict):
+    def add(self, name, *k, **kw):
+        self[name] = Cookie(*k, **kw)
+
+class Cookie(object):
+    def __init__(self, value, expires = None, domain = None, dirty = True):
+        self.value = value
+        self.expires = expires
+        self.dirty = dirty
+        if domain:
+            self.domain = domain
+        else:
+            self.domain = g.domain
+
+    def __repr__(self):
+        return ("Cookie(value=%s, expires=%s, domain=%s, dirty=%s)"
+                % (repr(self.value),
+                   repr(self.expires),
+                   repr(self.domain),
+                   repr(self.dirty)))

 class UnloggedUser(FakeAccount):
     _cookie = 'options'
     allowed_prefs = ('pref_content_langs', 'pref_lang')
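
This pair of classes is the heart of the cookie refactor: instead of calling c.response.set_cookie() at each call site, handlers write into a request-scoped jar, and only entries flagged dirty are emitted at the end of the request (see the "send cookies" hunk further down). A minimal, self-contained sketch of the pattern, with simplified names and no Pylons dependencies:

class Cookie(object):
    def __init__(self, value, expires=None, domain=None, dirty=True):
        self.value = value
        self.expires = expires
        self.domain = domain
        self.dirty = dirty          # new or modified cookies default to dirty

class Cookies(dict):
    def add(self, name, *a, **kw):
        self[name] = Cookie(*a, **kw)

def load(request_cookies):
    # cookies that arrived with the request are clean: nothing to send back
    jar = Cookies()
    for name, value in request_cookies.items():
        jar[name] = Cookie(value, dirty=False)
    return jar

def flush(jar, set_cookie):
    # only cookies the app actually touched produce Set-Cookie headers
    for name, cookie in jar.items():
        if cookie.dirty:
            set_cookie(name, cookie.value)

jar = load({'over18': 'abc123'})
jar.add('reddit_first', '{"firsttime": "first"}')
flush(jar, lambda n, v: print('Set-Cookie:', n, '=', v))  # emits only reddit_first
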
@@ -99,13 +125,17 @@ class UnloggedUser(FakeAccount):

 def read_user_cookie(name):
     uname = c.user.name if c.user_is_loggedin else ""
-    return request.cookies.get(uname + '_' + name) or ''
+    cookie_name = uname + '_' + name
+    if cookie_name in c.cookies:
+        return c.cookies[cookie_name].value
+    else:
+        return ''

 def set_user_cookie(name, val):
     uname = c.user.name if c.user_is_loggedin else ""
-    c.response.set_cookie(uname + '_' + name,
-                          value = val,
-                          domain = g.domain if not c.frameless_cname else None)
+    domain = g.domain if not c.frameless_cname else None
+    c.cookies[uname + '_' + name] = Cookie(value = val,
+                                           domain = domain)

 def read_click_cookie():
     if c.user_is_loggedin:
@@ -125,22 +155,59 @@ def read_mod_cookie():
         set_user_cookie('mod', '')

 def firsttime():
-    if not c.user_is_loggedin:
-        if not request.cookies.get("reddit_first"):
-            c.response.set_cookie("reddit_first", "first",
-                                  expires = NEVER,
-                                  domain = g.domain if not c.frameless_cname else None)
-        return True
-    return False
+    if get_redditfirst('firsttime'):
+        return False
+    else:
+        set_redditfirst('firsttime','first')
+        return True
+
+def get_redditfirst(key,default=None):
+    try:
+        cookie = simplejson.loads(c.cookies['reddit_first'].value)
+        return cookie[key]
+    except (ValueError,TypeError,KeyError),e:
+        # it's not a proper json dict, or the cookie isn't present, or
+        # the key isn't part of the cookie; we don't really want a
+        # broken cookie to propagate an exception up
+        return default
+
+def set_redditfirst(key,val):
+    try:
+        cookie = simplejson.loads(c.cookies['reddit_first'].value)
+        cookie[key] = val
+    except (ValueError,TypeError,KeyError),e:
+        # invalid JSON data; we'll just construct a new cookie
+        cookie = {key: val}
+
+    c.cookies['reddit_first'] = Cookie(simplejson.dumps(cookie),
+                                       expires = NEVER,
+                                       domain = g.domain)
+
+# this cookie is also accessed by organic.js, so changes to the format
+# will have to be made there as well
+organic_pos_key = 'organic_pos'
+def organic_pos():
+    "organic_pos() -> (calc_date = str(), pos = int())"
+    try:
+        d,p = get_redditfirst(organic_pos_key, (None,0))
+    except ValueError:
+        d,p = (None,0)
+    return d,p
+
+def set_organic_pos(key,pos):
+    "set_organic_pos(str(), int()) -> None"
+    set_redditfirst(organic_pos_key,[key,pos])


 def over18():
     if c.user.pref_over_18 or c.user_is_admin:
         return True

     else:
-        cookie = request.cookies.get('over18')
-        if cookie == sha.new(request.ip).hexdigest():
-            return True
+        if 'over18' in c.cookies:
+            cookie = c.cookies['over18'].value
+            if cookie == sha.new(request.ip).hexdigest():
+                return True

 def set_subreddit():
     sr_name=request.environ.get("subreddit", request.params.get('r'))
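
get_redditfirst and set_redditfirst multiplex several flags into the single reddit_first cookie as a JSON object, and deliberately swallow malformed data rather than raise on a bad cookie. A sketch of the round trip (the standard-library json stands in for simplejson, and `jar` is a plain dict standing in for c.cookies):

import json

jar = {}  # stand-in for c.cookies; maps cookie name -> string value

def get_first(key, default=None):
    try:
        return json.loads(jar['reddit_first'])[key]
    except (KeyError, TypeError, ValueError):
        return default  # missing cookie, bad JSON, or missing key: never raise

def set_first(key, val):
    try:
        d = json.loads(jar['reddit_first'])
        d[key] = val
    except (KeyError, TypeError, ValueError):
        d = {key: val}  # broken or absent cookie: start a fresh dict
    jar['reddit_first'] = json.dumps(d)

set_first('firsttime', 'first')
set_first('organic_pos', ['2008-06-18', 3])
assert get_first('organic_pos')[1] == 3
assert get_first('missing', 0) == 0
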
@@ -327,13 +394,19 @@ class RedditController(BaseController):
         return kw

     def request_key(self):
+        # note that this references the cookie at request time, not
+        # the current value of it
+        cookie_keys = []
+        for x in cache_affecting_cookies:
+            cookie_keys.append(request.cookies.get(x,''))
+
         key = ''.join((str(c.lang),
                        str(c.content_langs),
                        request.host,
                        str(c.cname),
                        str(request.fullpath),
                        str(c.firsttime),
-                       str(c.over18)))
+                       str(c.over18),
+                       ''.join(cookie_keys)))
         return key

     def cached_response(self):
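
Folding the cache-affecting cookies into request_key() partitions the page cache: with the organic box now rendered for logged-out users, two anonymous requests that differ only in their reddit_first cookie must not share a cached page. A reduced sketch of the idea:

cache_affecting_cookies = ('reddit_first',)

def request_key(lang, host, path, cookies):
    # identical requests that differ only in reddit_first cache separately
    cookie_part = ''.join(cookies.get(name, '') for name in cache_affecting_cookies)
    return '|'.join((lang, host, path, cookie_part))

assert request_key('en', 'reddit.com', '/', {}) != \
       request_key('en', 'reddit.com', '/', {'reddit_first': 'x'})
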
@@ -341,15 +414,12 @@ class RedditController(BaseController):

     @staticmethod
     def login(user, admin = False, rem = False):
-        c.response.set_cookie(g.login_cookie,
-                              value = user.make_cookie(admin = admin),
-                              domain = g.domain,
-                              expires = NEVER if rem else None)
+        c.cookies[g.login_cookie] = Cookie(value = user.make_cookie(admin = admin),
+                                           expires = NEVER if rem else None)

     @staticmethod
     def logout(admin = False):
-        c.response.set_cookie(g.login_cookie, value = '',
-                              domain = g.domain)
+        c.cookies[g.login_cookie] = Cookie(value='')

     def pre(self):
         g.cache.caches = (LocalCache(),) + g.cache.caches[1:]
@@ -357,10 +427,18 @@ class RedditController(BaseController):
         #check if user-agent needs a dose of rate-limiting
         ratelimit_agents()

+        # populate c.cookies
+        c.cookies = Cookies()
+        for k,v in request.cookies.iteritems():
+            c.cookies[k] = Cookie(value=unquote(v), dirty=False)
+
         c.response_wrappers = []
         c.errors = ErrorSet()
+        c.firsttime = firsttime()
         (c.user, maybe_admin) = \
-            valid_cookie(request.cookies.get(g.login_cookie))
+            valid_cookie(c.cookies[g.login_cookie].value
+                         if g.login_cookie in c.cookies
+                         else '')

         if c.user:
             c.user_is_loggedin = True
@@ -388,7 +466,6 @@ class RedditController(BaseController):
         set_iface_lang()
         set_content_lang()
         set_cnameframe()
-        c.firsttime = firsttime()

         # set some environmental variables in case we hit an abort
         if not isinstance(c.site, FakeSubreddit):
@@ -411,6 +488,16 @@ class RedditController(BaseController):
             response = c.response
             response.headers = r.headers
             response.content = r.content
+
+            for x in r.cookies.keys():
+                if x in cache_affecting_cookies:
+                    cookie = r.cookies[x]
+                    response.set_cookie(key = x,
+                                        value = cookie.value,
+                                        domain = cookie.get('domain',None),
+                                        expires = cookie.get('expires',None),
+                                        path = cookie.get('path',None))
+
             response.status_code = r.status_code
             request.environ['pylons.routes_dict']['action'] = 'cached_response'
             # make sure to carry over the content type
@@ -438,6 +525,17 @@ class RedditController(BaseController):
             response.headers['Cache-Control'] = 'no-cache'
             response.headers['Pragma'] = 'no-cache'

+        # send cookies
+        if not c.used_cache:
+            # if we used the cache, these cookies should be set by the
+            # cached response object instead
+            for k,v in c.cookies.iteritems():
+                if v.dirty:
+                    response.set_cookie(key = k,
+                                        value = quote(v.value),
+                                        domain = v.domain,
+                                        expires = v.expires)
+
         #return
         #set content cache
         if (g.page_cache_time
@@ -448,7 +546,7 @@ class RedditController(BaseController):
             config.cache.set(self.request_key(),
                              response,
                              g.page_cache_time)


     def check_modified(self, thing, action):
         if c.user_is_loggedin:
             return
@@ -457,7 +555,7 @@ class RedditController(BaseController):
         if date is True:
             abort(304, 'not modified')
         else:
-            c.response.headers['Last-Modified'] = utils.http_date_str(date)
+            c.response.headers['Last-Modified'] = http_utils.http_date_str(date)

     def abort404(self):
         abort(404, "not found")
@@ -178,7 +178,7 @@ embedopen = urllib2.OpenerDirector()
 embedopen.add_handler(EmbedHandler())

 def proxyurl(url):
-    cstrs = ['%s="%s"' % (k, v) for k, v in request.cookies.iteritems()]
+    cstrs = ['%s="%s"' % (k, v.value) for k, v in c.cookies.iteritems()]
     cookiestr = "; ".join(cstrs)
     headers = {"Cookie":cookiestr}

@@ -124,6 +124,7 @@ menu = MenuHandler(hot = _('hot'),
                    mine = _("my reddits"),

                    i18n = _("translate site"),
+                   promote = _("promote"),
                    reporters = _("reporters"),
                    reports = _("reports"),
                    reportedauth = _("reported authors"),

@@ -20,28 +20,114 @@
 # CondeNet, Inc. All Rights Reserved.
 ################################################################################
 from r2.models import *
-from r2.lib.memoize import memoize
+from r2.lib.memoize import memoize, clear_memo
 from r2.lib.normalized_hot import get_hot, only_recent
 from r2.lib import count
+from r2.lib.utils import UniqueIterator, timeago, timefromnow
+from r2.lib.db.operators import desc

 import random
+from datetime import datetime

 from pylons import g
 cache = g.cache

-def pos_key(user):
-    return 'organic_pos_' + user.name
-
 # lifetime in seconds of organic listing in memcached
 organic_lifetime = 5*60
+promoted_memo_key = 'cached_promoted_links'

 def keep_link(link):
     return not any((link.likes != None,
                     link.saved,
                     link.clicked,
                     link.hidden))

-@memoize('cached_organic_links', time = 300)
+def promote(thing, subscribers_only = False):
+    thing.promoted = True
+    thing.promoted_on = datetime.now(g.tz)
+    thing.promote_until = timefromnow("1 day")
+    if subscribers_only:
+        thing.promoted_subscribersonly = True
+    thing._commit()
+    clear_memo(promoted_memo_key)
+
+def unpromote(thing):
+    thing.promoted = False
+    thing.unpromoted_on = datetime.now(g.tz)
+    thing._commit()
+    clear_memo(promoted_memo_key)
+
+def clean_promoted():
+    """
+    Remove any stale promoted entries (should be run periodically to
+    keep the list small)
+    """
+    p = get_promoted()
+    for x in p:
+        if datetime.now(g.tz) > x.promote_until:
+            unpromote(x)
+    clear_memo(promoted_memo_key)
+
+@memoize(promoted_memo_key, time = organic_lifetime)
+def get_promoted():
+    return [ x for x in Link._query(Link.c.promoted == True,
+                                    sort = desc('_date'),
+                                    data = True)
+             if x.promote_until > datetime.now(g.tz) ]
+
+def insert_promoted(link_names, subscribed_reddits):
+    """
+    The oldest promoted link that c.user hasn't seen yet, and sets the
+    timestamp for their seen promotions in their cookie
+    """
+    promoted_items = get_promoted()
+
+    if not promoted_items:
+        return
+
+    def my_keepfn(l):
+        if l.promoted_subscribersonly and l.sr_id not in subscribed_reddits:
+            return False
+        else:
+            return keep_link(l)
+
+    # remove any that the user has acted on
+    builder = IDBuilder([ x._fullname for x in promoted_items ],
+                        skip = True, keep_fn = my_keepfn)
+    promoted_items = builder.get_items()[0]
+
+    # in the future, we may want to weight this sorting somehow
+    random.shuffle(promoted_items)
+
+    if not promoted_items:
+        return
+
+    every_n = 5
+
+    # don't insert one at the head of the list 50% of the time for
+    # logged in users, and 50% of the time for logged-off users when
+    # the pool of promoted links is less than 3
+    if c.user_is_loggedin or len(promoted_items) < 3:
+        skip_first = random.choice((True,False))
+    else:
+        skip_first = False
+
+    # insert one promoted item for every N items
+    for i, item in enumerate(promoted_items):
+        pos = i * every_n
+        if pos > len(link_names):
+            break
+        elif pos == 0 and skip_first:
+            # don't always show one for logged-in users
+            continue
+        else:
+            link_names.insert(pos, promoted_items[i]._fullname)

+@memoize('cached_organic_links', time = organic_lifetime)
 def cached_organic_links(username):
-    user = Account._by_name(username)
+    if username:
+        user = Account._by_name(username)
+    else:
+        user = FakeAccount()

     sr_count = count.get_link_counts()
     srs = Subreddit.user_subreddits(user)
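
get_promoted() is cached under promoted_memo_key for organic_lifetime seconds, and every write path (promote, unpromote, clean_promoted) calls clear_memo so admins see their change immediately instead of waiting out the TTL. A toy sketch of that memoize-with-explicit-invalidation pattern; the decorator below is a stand-in, not r2.lib.memoize:

import time

_memo = {}  # (key, args) -> (expiry, value)

def memoize(key, ttl):
    def deco(fn):
        def wrapper(*args):
            entry = _memo.get((key, args))
            if entry and entry[0] > time.time():
                return entry[1]
            value = fn(*args)
            _memo[(key, args)] = (time.time() + ttl, value)
            return value
        return wrapper
    return deco

def clear_memo(key, *args):
    # writes call this so readers never see stale data for the full TTL
    _memo.pop((key, args), None)

@memoize('cached_promoted_links', ttl=5 * 60)
def get_promoted():
    print('querying...')
    return ['link_1']

get_promoted(); get_promoted()          # one query, one cache hit
clear_memo('cached_promoted_links')     # e.g. after promote()/unpromote()
get_promoted()                          # queries again
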
@@ -61,16 +147,35 @@ def cached_organic_links(username):
         new_item = random.choice(items[1:4])
         link_names.insert(0, new_item._fullname)

+    insert_promoted(link_names, srs)
+
     builder = IDBuilder(link_names, num = 30, skip = True, keep_fn = keep_link)
     links = builder.get_items()[0]
-    cache.set(pos_key(user), 0)
-    return [l._fullname for l in links]
+
+    calculation_key = str(datetime.now(g.tz))
+
+    update_pos(0, calculation_key)
+
+    ret = [l._fullname for l in UniqueIterator(links)]
+
+    return (calculation_key, ret)

 def organic_links(user):
-    links = cached_organic_links(user.name)
-    pos = cache.get(pos_key(user)) or 0
-    return (links, pos)
+    from r2.controllers.reddit_base import organic_pos
+
+    username = user.name if c.user_is_loggedin else None
+    cached_key, links = cached_organic_links(username)
+
+    cookie_key, pos = organic_pos()
+    # pos will be 0 if it wasn't specified
+    if links and (cookie_key == cached_key):
+        # make sure that we're not running off the end of the list
+        pos = pos % len(links)
+
+    return links, pos, cached_key

-def update_pos(user, pos):
-    cache.set(pos_key(user), pos)
+def update_pos(pos, key):
+    "Update the user's current position within the cached organic list."
+    from r2.controllers import reddit_base
+
+    reddit_base.set_organic_pos(key, pos)
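
The calculation_key threads through everything: cached_organic_links stamps each generated list with a key, update_pos stores [key, pos] in the reddit_first cookie, and organic_links only honors the cookie's position when its key matches the current list's key, so a stale cursor can never index into a newer list. A compact sketch of that check (not reddit code, names hypothetical):

def resolve_pos(links, cached_key, cookie):
    """cookie is the [key, pos] pair from the 'organic_pos' entry, or None."""
    key, pos = cookie if cookie else (None, 0)
    if links and key == cached_key:
        # clamp: the list may have shrunk since the cookie was written
        return pos % len(links)
    return 0  # stale or missing position: start from the top of the new list

links = ['a', 'b', 'c']
assert resolve_pos(links, 'k1', ['k1', 5]) == 2  # same generation: position kept
assert resolve_pos(links, 'k2', ['k1', 5]) == 0  # list rebuilt: position reset
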
@@ -1074,7 +1074,7 @@ class DetailsPage(LinkInfoPage):
         # TODO: a better way?
         from admin_pages import Details
         return self.content_stack(self.link_listing, Details(link = self.link))


 class Cnameframe(Wrapped):
     """The frame page."""
     def __init__(self, original_path, subreddit, sub_domain):
@@ -1089,3 +1089,8 @@ class Cnameframe(Wrapped):
         else:
             self.title = ""
             self.frame_target = None
+
+class Promote(Wrapped):
+    def __init__(self, current_list, *k, **kw):
+        self.things = current_list
+        Wrapped.__init__(self, *k, **kw)
@@ -47,7 +47,7 @@ def pkcs5unpad(text, padlen = 8):

 def cipher(lv):
     '''returns a pycrypto object used by encrypt and decrypt, with the key based on g.SECRET'''
-    key = g.SECRET
+    key = g.tracking_secret
     return AES.new(key[:key_len], AES.MODE_CBC, lv[:key_len])

 def encrypt(text):
@@ -85,11 +85,12 @@ def safe_str(text):
         return ''
     return text

-class UserInfo():
+class Info(object):
     '''Class for generating and reading user tracker information.'''
-    __slots__ = ['name', 'site', 'lang']
+    __slots__ = []
+    tracker_url = ""

-    def __init__(self, text = ''):
+    def __init__(self, text = '', **kw):
         for s in self.__slots__:
             setattr(self, s, '')
@@ -103,33 +104,49 @@ class UserInfo():
             if i < len(self.__slots__):
                 setattr(self, self.__slots__[i], d)
         else:
-            self.name = safe_str(c.user.name if c.user_is_loggedin else '')
-            self.site = safe_str(c.site.name if c.site else '')
-            self.lang = safe_str(c.lang if c.lang else '')
+            self.init_defaults(**kw)
+
+    def init_defaults(self, **kw):
+        raise NotImplementedError

     def tracking_url(self):
         data = '|'.join(getattr(self, s) for s in self.__slots__)
         data = encrypt(data)
-        return "%s?v=%s" % (g.tracker_url, data)
+        return "%s?v=%s" % (self.tracker_url, data)

-def gen_url():
-    """function for safely creating a tracking url, trapping exceptions so as not to interfere with
-    the app"""
-    try:
-        return UserInfo().tracking_url()
-    except Exception, e:
-        print "error in gen_url!!!!!"
-        print e
-        try:
-            randstr = ''.join(choice('1234567890abcdefghijklmnopqrstuvwxyz' +
-                                     'ABCDEFGHIJKLMNOPQRSTUVWXYZ+')
-                              for x in xrange(pad_len))
-            return "%s?v=%s" % (g.tracker_url, randstr)
-        except:
-            print "fallback rendering failed as well"
-            return ""
+    @classmethod
+    def gen_url(cls, **kw):
+        try:
+            return cls(**kw).tracking_url()
+        except Exception,e:
+            print e
+            try:
+                randstr = ''.join(choice('1234567890abcdefghijklmnopqrstuvwxyz' +
+                                         'ABCDEFGHIJKLMNOPQRSTUVWXYZ+')
+                                  for x in xrange(pad_len))
+                return "%s?v=%s" % (cls.tracker_url, randstr)
+            except:
+                print "fallback rendering failed as well"
+                return ""
+
+class UserInfo(Info):
+    '''Class for generating and reading user tracker information.'''
+    __slots__ = ['name', 'site', 'lang']
+    tracker_url = g.tracker_url
+
+    def init_defaults(self):
+        self.name = safe_str(c.user.name if c.user_is_loggedin else '')
+        self.site = safe_str(c.site.name if c.site else '')
+        self.lang = safe_str(c.lang if c.lang else '')
+
+class PromotedLinkInfo(Info):
+    __slots__ = ['fullname']
+    tracker_url = g.adtracker_url
+
+    def init_defaults(self, fullname = None):
+        self.fullname = fullname


 def benchmark(n = 10000):
     """on my humble desktop machine, this gives ~150 microseconds per gen_url"""
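
The tracking refactor turns the old concrete UserInfo into a template-method base class: subclasses declare their payload fields in __slots__, point tracker_url at the right pixel endpoint, and fill the fields in init_defaults(); the base class serializes the fields into the ?v= query string (encrypted, in the real code). A simplified sketch with a hypothetical endpoint URL and no encryption:

class Info(object):
    __slots__ = []
    tracker_url = ''

    def __init__(self, **kw):
        self.init_defaults(**kw)

    def init_defaults(self, **kw):
        raise NotImplementedError  # each tracker subclass fills in its own fields

    def tracking_url(self):
        data = '|'.join(str(getattr(self, s)) for s in self.__slots__)
        return '%s?v=%s' % (self.tracker_url, data)  # real code encrypts `data`

class PromotedLinkInfo(Info):
    __slots__ = ['fullname']
    tracker_url = 'http://adtracker.example/pixel'  # stands in for g.adtracker_url

    def init_defaults(self, fullname=None):
        self.fullname = fullname

print(PromotedLinkInfo(fullname='t3_abc').tracking_url())
# http://adtracker.example/pixel?v=t3_abc
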
@@ -330,7 +330,6 @@ def sanitize_url(url, require_scheme = False):
         and '%' not in u.netloc):
         return url

-
 def timeago(interval):
     """Returns a datetime object corresponding to time 'interval' in
     the past. Interval is of the same form as is returned by
@@ -338,9 +337,17 @@ def timeago(interval):
     English (i.e., untranslated) and the format is

     [num] second|minute|hour|day|week|month|year(s)

     """
     from pylons import g
+    return datetime.now(g.tz) - timeinterval_fromstr(interval)
+
+def timefromnow(interval):
+    "The opposite of timeago"
+    from pylons import g
+    return datetime.now(g.tz) + timeinterval_fromstr(interval)
+
+def timeinterval_fromstr(interval):
+    "Used by timeago and timefromnow to generate timedeltas from friendly text"
     parts = interval.strip().split(' ')
     if len(parts) == 1:
         num = 1
@@ -360,7 +367,7 @@ def timeago(interval):
                 month = 60 * 60 * 24 * 30,
                 year = 60 * 60 * 24 * 365)[period]
     delta = num * d
-    return datetime.now(g.tz) - timedelta(0, delta)
+    return timedelta(0, delta)

 def timetext(delta, resultion = 1, bare=True):
     """
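
Splitting timeinterval_fromstr() out of timeago() lets the new timefromnow() (used by promote() for the one-day promote_until) reuse the same friendly-text parsing. A self-contained approximation of the parse, assuming the unit table shown in the hunk above:

from datetime import datetime, timedelta

UNIT_SECONDS = dict(second=1, minute=60, hour=3600, day=86400,
                    week=604800, month=2592000, year=31536000)

def timeinterval_fromstr(interval):
    parts = interval.strip().split(' ')
    if len(parts) == 1:
        num, period = 1, parts[0]          # "day" means "1 day"
    else:
        num, period = int(parts[0]), parts[1]
    return timedelta(seconds=num * UNIT_SECONDS[period.rstrip('s')])

def timefromnow(interval):
    return datetime.now() + timeinterval_fromstr(interval)

assert timeinterval_fromstr('1 day') == timedelta(days=1)
assert timeinterval_fromstr('2 hours') == timedelta(hours=2)
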
@@ -933,12 +940,16 @@ class IteratorChunker(object):
                 self.done=True
         return chunk

-def IteratorFilter(iterator, filter):
+def IteratorFilter(iterator, fn):
     for x in iterator:
-        if filter(x):
+        if fn(x):
             yield x

-def NoDupsIterator(iterator):
+def UniqueIterator(iterator):
+    """
+    Takes an iterator and returns an iterator that returns only the
+    first occurence of each entry
+    """
     so_far = set()
     def no_dups(x):
         if x in so_far:
@@ -48,6 +48,8 @@ class Link(Thing, Printable):
                        banned_before_moderator = False,
                        media_object = None,
                        has_thumbnail = False,
+                       promoted = False,
+                       promoted_subscribersonly = False,
                        ip = '0.0.0.0')

     def __init__(self, *a, **kw):
@@ -62,6 +62,29 @@ function _fire_and_shift(type) {
     };
 }

+function update_organic_pos(new_pos) {
+    var c = readLCookie('reddit_first');
+    if(c != '') {
+        try {
+            c = c.parseJSON();
+        } catch(e) {
+            c = '';
+        }
+    }
+
+    if(c == '') {
+        c = {};
+    }
+
+    if(c.organic_pos && c.organic_pos.length >= 2) {
+        c.organic_pos[1] = new_pos;
+    } else {
+        c.organic_pos = ['none', new_pos];
+    }
+
+    createLCookie('reddit_first', c.toJSONString());
+}
+
 OrganicListing.unhide = _fire_and_shift('unhide');
 OrganicListing.hide = _fire_and_shift('hide');
 OrganicListing.report = _fire_and_shift('report');
@@ -95,7 +118,7 @@ OrganicListing.prototype.change = function(dir) {
     if(this.listing.max_pos == pos)
         this.listing.update_pos = true;
     else if (this.listing.update_pos) {
-        redditRequest('update_pos', {pos: (pos+1) % len});
+        update_organic_pos((pos+1)%len);
         this.listing.max_pos = pos;
     }
 }
@@ -109,6 +132,14 @@ OrganicListing.prototype.change = function(dir) {
     c.fade("veryfast");
     add_to_aniframes(function() {
         c.hide();
+        if(n.$('promoted')) {
+            var i = new Image();
+            i.src = n.$('promoted').tracking_url.value;
+            // just setting i.src is enough to most browsers to
+            // download, but in Opera it's not, we'd need to
+            // uncomment this line to get the image in the DOM
+            /* n.$('promoted').appendChild(i); */
+        }
         n.show();
         n.set_opacity(0);
     }, 1);
@@ -129,3 +160,4 @@ function get_organic(next) {
     l.change(-1);
     return false;
 }
+
@@ -586,7 +586,7 @@ before enabling */
 .infobar {
     background-color: #f6e69f;
     padding: 5px 10px;
-    margin: 5px 310px 5px 5px;
+    margin: 5px 310px 5px 0px;
     border: 1px solid orange;
 }

@@ -13,7 +13,7 @@ function cookieName(name) {
     return (logged || '') + "_" + name;
 }

-function createCookie(name,value,days) {
+function createLCookie(name,value,days) {
     var domain = "; domain=" + cur_domain;
     if (days) {
         var date = new Date();
@@ -21,20 +21,30 @@ function createCookie(name,value,days) {
         var expires="; expires="+date.toGMTString();
     }
     else expires="";
-    document.cookie=cookieName(name)+"="+value+expires+domain+"; path=/";
+    document.cookie=name+"="+escape(value)+expires+domain+"; path=/";
 }

-function readCookie(name) {
-    var nameEQ=cookieName(name) + "=";
+function createCookie(name, value, days) {
+    return createLCookie(cookieName(name), value, days);
+}
+
+function readLCookie(nameEQ) {
+    nameEQ=nameEQ+'=';
     var ca=document.cookie.split(';');
     for(var i=0;i< ca.length;i++) {
         var c =ca[i];
         while(c.charAt(0)==' ') c=c.substring(1,c.length);
-        if(c.indexOf(nameEQ)==0) return c.substring(nameEQ.length,c.length);
+        if(c.indexOf(nameEQ)==0) {
+            return unescape(c.substring(nameEQ.length,c.length));
+        }
     }
     return '';
 }

+function readCookie(name) {
+    var nameEQ=cookieName(name) + "=";
+    return readLCookie(nameEQ);
+}
+
 /*function setModCookie(id, c) {
     createCookie("mod", readCookie("mod") + id + "=" + c + ":");
@@ -32,12 +32,14 @@
     <p>
       ${help}
     </p>
-    ${state_button("leave" + name, name, _("here"),
-                   "return deletetoggle(this, disable_ui);", "",
-                   fmt = _("Click %(here)s to disable this feature."),
-                   fmt_param = "here",
-                   question = _("are you sure?"),
-                   yes = _("yes"), no = _("no"))}
+    %if c.user_is_loggedin:
+      ${state_button("leave" + name, name, _("here"),
+                     "return deletetoggle(this, disable_ui);", "",
+                     fmt = _("Click %(here)s to disable this feature."),
+                     fmt_param = "here",
+                     question = _("are you sure?"),
+                     yes = _("yes"), no = _("no"))}
+    %endif
     ${state_button("nvm" + name, name, _("here"),
                    "hide('%s-help'); return false;" % name, "",
                    fmt = _("Click %(here)s to close help."),
@@ -23,6 +23,7 @@
 <%!
    from r2.models.subreddit import Default
    from r2.lib.template_helpers import get_domain
+   from r2.lib import tracking
 %>

 <%inherit file="printable.html"/>
@@ -81,6 +82,24 @@
       ${self.admintagline()}
     </ul>
     ${self.mediadiv()}
+    %if thing.promoted:
+      <%
+        tracking_url = tracking.PromotedLinkInfo.gen_url(fullname=thing._fullname)
+      %>
+      <form id="promoted_${thing._fullname}" method="GET"
+            action="about:blank" style="display: none;">
+        <input type="hidden" name="tracking_url" value="${tracking_url}" />
+      </form>
+      <script type="text/javascript">
+        /* see also OrganicListing.prototype.change()
+           dirty, dirty hack. But some people are into that
+        */
+        if(!"$display") {
+          var i = new Image();
+          i.src = "${tracking_url}";
+        }
+      </script>
+    %endif
 </%def>

 <%def name="subreddit()" buffered="True">
r2/r2/templates/promote.html (new file, 42 lines)
@@ -0,0 +1,42 @@
+## The contents of this file are subject to the Common Public Attribution
+## License Version 1.0. (the "License"); you may not use this file except in
+## compliance with the License. You may obtain a copy of the License at
+## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
+## License Version 1.1, but Sections 14 and 15 have been added to cover use of
+## software over a computer network and provide for limited attribution for the
+## Original Developer. In addition, Exhibit A has been modified to be consistent
+## with Exhibit B.
+##
+## Software distributed under the License is distributed on an "AS IS" basis,
+## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
+## the specific language governing rights and limitations under the License.
+##
+## The Original Code is Reddit.
+##
+## The Original Developer is the Initial Developer. The Initial Developer of
+## the Original Code is CondeNet, Inc.
+##
+## All portions of the code written by CondeNet are Copyright (c) 2006-2008
+## CondeNet, Inc. All Rights Reserved.
+################################################################################
+<%namespace file="utils.html" import="plain_link"/>
+<%namespace file="printable.html" import="state_button"/>
+
+
+<div>
+  <h1>${_('current promotions')}</h1>
+
+  <ul>
+    %for t in thing.things:
+      <li class="entry">
+        ${plain_link(t.title,t.permalink)}
+        <ul class="buttons" style="display: inline;">
+          <li>
+            ${state_button("unpromote", t._fullname, _("unpromote"), \
+              "return change_state(this, 'unpromote');",_("unpromoted"))}
+          </li>
+        </ul>
+      </li>
+    %endfor
+  </ul>
+</div>
@@ -44,7 +44,7 @@
       dict(year=datetime.datetime.now().timetuple()[0])}
   </p>
   %if g.tracker_url:
-    <img alt="" src="${tracking.gen_url()}"/>
+    <img alt="" src="${tracking.UserInfo.gen_url()}"/>
   %endif
   %if c.frameless_cname:
     <%