Mirror of https://github.com/reddit-archive/reddit.git, synced 2026-01-28 08:17:58 -05:00
added Simple Update Protocol (SUP) support for FriendFeed.
@@ -42,6 +42,8 @@ def make_map(global_conf={}, app_conf={}):
     mc('/over18', controller='post', action='over18')
 
     mc('/search', controller='front', action='search')
 
+    mc('/sup', controller='front', action='sup')
+
     mc('/about/:location', controller='front',
        action='editreddit', location = 'about')
 
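With the route above in place, the SUP document is served from /sup (or /sup.json via reddit's usual extension handling). A minimal sketch of fetching it, assuming a stock reddit host (Python 2; the host name is hypothetical):

import urllib2

resp = urllib2.urlopen('http://reddit.com/sup.json')
print resp.headers['expires']   # set by sup.set_expires_header(), below
print resp.read()               # the SUP JSON document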
@@ -41,15 +41,13 @@ from r2.lib.pages import FriendList, ContributorList, ModList, \
 from r2.lib.menus import CommentSortMenu
 from r2.lib.normalized_hot import expire_hot
 from r2.lib.captcha import get_iden
-from r2.lib import emailer
 from r2.lib.strings import strings
 from r2.lib.memoize import clear_memo
 from r2.lib.filters import _force_unicode, websafe_json
 from r2.lib.db import queries
-from r2.lib import cssfilter
-from r2.lib import tracking
 from r2.lib.media import force_thumbnail, thumbnail_url
 from r2.lib.comment_tree import add_comment, delete_comment
+from r2.lib import tracking, sup, cssfilter, emailer
 
 from simplejson import dumps
 
@@ -242,6 +240,9 @@ class ApiController(RedditController):
         set_last_modified(c.user, 'overview')
         set_last_modified(c.user, 'submitted')
         set_last_modified(c.user, 'liked')
 
+        #update sup listings
+        sup.add_update(c.user, 'submitted')
+
         # flag search indexer that something has changed
         tc.changed(l)
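For reference, add_update() (defined in the new r2/r2/lib/sup.py later in this commit) appends a ',sup_id:time36' pair to a cache bucket keyed by the current period. A rough illustration of what one bucket holds after a couple of updates (all values invented):

bucket_key = '1229040060'   # str(cur_time(SUP_PERIOD)) for the current minute
bucket_val = ',d41d8cd98f:kb3j2w,900150983c:kb3j2x'

# sup_json_cached() later parses this back into [sup_id, time36] pairs:
updates = [u.split(':') for u in bucket_val.split(',') if u]
# [['d41d8cd98f', 'kb3j2w'], ['900150983c', 'kb3j2x']]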
@@ -596,6 +597,9 @@ class ApiController(RedditController):
         set_last_modified(c.user, 'overview')
         set_last_modified(c.user, 'commented')
         set_last_modified(link, 'comments')
 
+        #update sup listings
+        sup.add_update(c.user, 'commented')
+
         #update the comment cache
         add_comment(item)
@@ -710,6 +714,12 @@ class ApiController(RedditController):
         set_last_modified(c.user, 'liked')
         set_last_modified(c.user, 'disliked')
 
+        #update sup listings
+        if dir:
+            sup.add_update(c.user, 'liked')
+        elif dir is False:
+            sup.add_update(c.user, 'disliked')
+
         if v.valid_thing:
             expire_hot(sr)
 
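The dir checked here appears to be the validated vote direction: True for an upvote, False for a downvote, and (presumably) None when a vote is cleared, so only real up/down votes publish a SUP update. The same mapping as a standalone sketch (function name hypothetical):

def sup_action_for_vote(dir):
    # mirrors the branch above; cleared votes map to no listing
    if dir:
        return 'liked'
    elif dir is False:
        return 'disliked'
    return None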
@@ -33,6 +33,8 @@ from r2.lib.emailer import has_opted_out, Email
 from r2.lib.db.operators import desc
 from r2.lib.strings import strings
 from r2.lib.solrsearch import RelatedSearchQuery, SubredditSearchQuery, LinkSearchQuery
 from r2.lib import jsontemplates
+from r2.lib import sup
 import r2.lib.db.thing as thing
 from listingcontroller import ListingController
 from pylons import c, request
@@ -456,10 +458,10 @@ class FrontController(RedditController):
         returns their user name"""
         c.response_content_type = 'text/plain'
         if c.user_is_loggedin:
-            return c.user.name
+            c.response.content = c.user.name
         else:
-            return ''
-
+            c.response.content = ''
+        return c.response
 
     @validate(VUser(),
               VSRSubmitPage(),
@@ -573,3 +575,14 @@ class FrontController(RedditController):
 
     def GET_catchall(self):
         return self.abort404()
+
+    def GET_sup(self):
+        #dont cache this, it's memoized elsewhere
+        c.used_cache = True
+        sup.set_expires_header()
+
+        if c.extension == 'json':
+            c.response.content = sup.sup_json()
+            return c.response
+        else:
+            return self.abort404()
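What GET_sup hands back is the output of sup.sup_json(), defined later in this commit. Decoded, the body looks roughly like this (field values invented for illustration):

{'updated_time': '2008-12-12T00:01:00Z',      # rfc3339_date_str(utcnow())
 'since_time':   '2008-12-12T00:00:00Z',      # start of the reported period
 'period':       60,                          # SUP_PERIOD, in seconds
 'updates':      [['d41d8cd98f', 'kb3j2w']]}  # [sup_id, base36 time] pairs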
@@ -35,6 +35,7 @@ from r2.lib.strings import Score
 from r2.lib import organic
 from r2.lib.solrsearch import SearchQuery
 from r2.lib.utils import iters, check_cheating
+from r2.lib import sup
 
 from admin import admin_profile_query
 
@@ -386,14 +387,17 @@ class UserController(ListingController):
 
         elif self.where == 'comments':
             self.check_modified(self.vuser, 'commented')
+            sup.set_sup_header(self.vuser, 'commented')
             q = queries.get_comments(self.vuser, 'new', 'all')
 
         elif self.where == 'submitted':
             self.check_modified(self.vuser, 'submitted')
+            sup.set_sup_header(self.vuser, 'submitted')
             q = queries.get_submitted(self.vuser, 'new', 'all')
 
         elif self.where in ('liked', 'disliked'):
             self.check_modified(self.vuser, self.where)
+            sup.set_sup_header(self.vuser, self.where)
             if self.where == 'liked':
                 q = queries.get_liked(self.vuser)
             else:
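With set_sup_header() on these listings, a consumer pairs a profile feed with its SUP id once, then watches /sup.json instead of re-polling the feed itself. A sketch (Python 2; URL and header value hypothetical):

import urllib2

resp = urllib2.urlopen('http://reddit.com/user/spez/submitted.json')
# header looks like 'http://reddit.com/sup.json#900150983c'
sup_url, sup_id = resp.headers['x-sup-id'].split('#')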
@@ -597,6 +597,3 @@ class RedditController(BaseController):
         merged = copy(request.get)
         merged.update(dict)
         return request.path + utils.query_string(merged)
-
-
-
@@ -104,7 +104,7 @@ class LocalCache(dict, CacheUtils):
         for k,v in keys.iteritems():
             self.set(prefix+str(k), v)
 
-    def add(self, key, val):
+    def add(self, key, val, time = 0):
         self._check_key(key)
         self.setdefault(key, val)
 
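The extra time argument exists so LocalCache.add() accepts the same call that sup.add_update() makes against g.cache, which may be backed by memcache or by this in-process dict; LocalCache simply ignores the TTL. A sketch:

cache = LocalCache()
cache.add('1229040060', '', time = 180)   # TTL honored only by memcache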
r2/r2/lib/sup.py (new file, 87 lines)
@@ -0,0 +1,87 @@
# The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is Reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of the
# Original Code is CondeNet, Inc.
#
# All portions of the code written by CondeNet are Copyright (c) 2006-2008
# CondeNet, Inc. All Rights Reserved.
################################################################################

from datetime import datetime
import time, md5

import simplejson

from r2.lib.utils import rfc3339_date_str, http_date_str, to36
from r2.lib.memoize import memoize
from pylons import g, c

SUP_PERIOD = 60

def cur_time(period):
    t = int(time.time())
    return t - t % period

def last_time(period):
    return cur_time(period) - period

def make_sup_id(user, action):
    sup_id = md5.new(user.name + action).hexdigest()
    #cause cool kids only use part of the hash
    return sup_id[:10]

def add_update(user, action):
    update_time = to36(int(time.time()))
    sup_id = make_sup_id(user, action)
    sup_update = ',%s:%s' % (sup_id, update_time)

    key = str(cur_time(SUP_PERIOD))
    g.cache.add(key, '', time = SUP_PERIOD * 3)
    g.cache.append(key, sup_update)

@memoize('set_sup_header', time = SUP_PERIOD)
def sup_json_cached(lt):
    update_time = datetime.utcnow()
    since_time = datetime.utcfromtimestamp(lt)

    updates = g.cache.get(str(lt))
    sup_updates = []
    if updates:
        sup_updates = [u.split(':') for u in updates.split(',') if u]

    json = simplejson.dumps({'updated_time' : rfc3339_date_str(update_time),
                             'since_time' : rfc3339_date_str(since_time),
                             'period' : SUP_PERIOD,
                             'updates' : sup_updates})
    return json

def sup_json():
    return sup_json_cached(last_time(SUP_PERIOD))

def set_sup_header(user, action):
    sup_id = make_sup_id(user, action)

    if g.domain_prefix:
        domain = g.domain_prefix + '.' + g.domain
    else:
        domain = g.domain

    c.response.headers['x-sup-id'] = 'http://%s/sup.json#%s' % (domain, sup_id)

def set_expires_header():
    seconds = cur_time(SUP_PERIOD) + SUP_PERIOD
    expire_time = datetime.fromtimestamp(seconds, g.tz)
    c.response.headers['expires'] = http_date_str(expire_time)
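Putting the pieces together: a minimal SUP consumer under this scheme polls /sup.json once per period and refetches only the feeds whose sup_id shows up in updates. A sketch (Python 2; URLs and tracked ids invented):

import time, urllib2, simplejson

tracked = {'900150983c': 'http://reddit.com/user/spez/submitted.json'}

while True:
    doc = simplejson.loads(urllib2.urlopen('http://reddit.com/sup.json').read())
    for sup_id, when in doc['updates']:
        if sup_id in tracked:
            print 'changed:', tracked[sup_id]   # go refetch this feed
    time.sleep(doc['period'])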
@@ -3,6 +3,7 @@ from datetime import datetime
 
 DATE_RFC822 = '%a, %d %b %Y %H:%M:%S %Z'
 DATE_RFC850 = '%A, %d-%b-%y %H:%M:%S %Z'
+DATE_RFC3339 = "%Y-%m-%dT%H:%M:%SZ"
 DATE_ANSI = '%a %b %d %H:%M:%S %Y'
 
 def read_http_date(date_str):
@@ -22,3 +23,6 @@ def read_http_date(date_str):
 def http_date_str(date):
     date = date.astimezone(pytz.timezone('GMT'))
     return date.strftime(DATE_RFC822)
+
+def rfc3339_date_str(date):
+    return date.strftime(DATE_RFC3339)
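A quick sanity check of the new formatter (sketch; format string copied from the constant above):

from datetime import datetime

DATE_RFC3339 = "%Y-%m-%dT%H:%M:%SZ"
print datetime(2008, 12, 12, 0, 1, 0).strftime(DATE_RFC3339)
# -> 2008-12-12T00:01:00Z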