mirror of https://github.com/reddit-archive/reddit.git (synced 2026-01-28 00:07:57 -05:00)

enable the query cache for profile pages
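
The hunks below all apply one read-side pattern in the listing controllers: when `g.use_query_cache` is enabled, a controller's `query()` pulls the precomputed `CachedResults` out of the cache with `fetch()`; otherwise it falls back to the raw database query object, optionally installing the result's `filter` as `prewrap_fn`. A minimal sketch of that branch follows; the stub `CachedResults` and `listing_query` below are illustrative stand-ins, not reddit's actual implementations.

```python
# Sketch only: CachedResults here is a stand-in for r2.lib.db.queries.CachedResults.
class CachedResults(object):
    def __init__(self, query, cached_fullnames):
        self.query = query              # the underlying (lazy) db query
        self.cached = cached_fullnames  # precomputed listing kept in the cache

    def fetch(self):
        # serve the listing straight from the query cache
        return list(self.cached)

    def filter(self, rel):
        # unwrap a relation row to the thing the listing should show
        return rel

def listing_query(cr, use_query_cache):
    """Mirrors SavedController.query() in the diff: cache hit vs. db fallback."""
    if use_query_cache:
        return cr.fetch()
    else:
        # fall back to the raw query; the caller may also use cr.filter as prewrap_fn
        return cr.query

if __name__ == '__main__':
    cr = CachedResults(query='<lazy db query>', cached_fullnames=['t3_aa', 't3_ab'])
    print(listing_query(cr, use_query_cache=True))    # ['t3_aa', 't3_ab']
    print(listing_query(cr, use_query_cache=False))   # <lazy db query>
```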
@@ -74,8 +74,8 @@ def make_map(global_conf={}, app_conf={}):
     mc('/admin/i18n/:action', controller='i18n')
     mc('/admin/i18n/:action/:lang', controller='i18n')

-    mc('/user/:username/:location', controller='front', action='user',
-       location='overview')
+    mc('/user/:username/:where', controller='user', action='listing',
+       where='overview')

     mc('/prefs/:location', controller='front',
        action='prefs', location='options')

@@ -30,6 +30,7 @@ from listingcontroller import MessageController
 from listingcontroller import RedditsController
 from listingcontroller import MyredditsController
 from listingcontroller import RandomrisingController
+from listingcontroller import UserController

 from feedback import FeedbackController
 from front import FrontController

@@ -173,6 +173,9 @@ class ApiController(RedditController):
             res._update('to', value='')
             res._update('subject', value='')
             res._update('message', value='')
+
+            if g.write_query_queue:
+                queries.new_message(m)
         else:
             res._update('success', innerHTML='')

@@ -630,6 +633,12 @@ class ApiController(RedditController):
             set_last_modified(c.user, 'commented')
             set_last_modified(link, 'comments')

+        #update the queries
+        if g.write_query_queue:
+            if is_message:
+                queries.new_message(item)
+            else:
+                queries.new_comment(item)

         #set the ratelimiter
         if should_ratelimit:

@@ -957,18 +966,18 @@ class ApiController(RedditController):
               VModhash(),
               thing = VByName('id'))
     def POST_save(self, res, thing):
-        user = c.user
-        thing._save(user)
-
+        thing._save(c.user)
+        if g.write_query_queue:
+            queries.new_savehide(c.user, 'save')

     @Json
     @validate(VUser(),
               VModhash(),
               thing = VByName('id'))
     def POST_unsave(self, res, thing):
-        user = c.user
-        thing._unsave(user)
-
+        thing._unsave(c.user)
+        if g.write_query_queue:
+            queries.new_savehide(c.user, 'save')

     @Json
     @validate(VUser(),

@@ -976,7 +985,8 @@ class ApiController(RedditController):
               thing = VByName('id'))
     def POST_hide(self, res, thing):
         thing._hide(c.user)
-
+        if g.write_query_queue:
+            queries.new_savehide(c.user, 'hide')

     @Json
     @validate(VUser(),

@@ -984,6 +994,9 @@ class ApiController(RedditController):
               thing = VByName('id'))
     def POST_unhide(self, res, thing):
         thing._unhide(c.user)
+        if g.write_query_queue:
+            queries.new_savehide(c.user, 'hide')
+

     @Json
     @validate(link = VByName('link_id'),

@@ -29,10 +29,13 @@ from r2.lib.rising import get_rising
 from r2.lib.wrapped import Wrapped
 from r2.lib.normalized_hot import normalized_hot, get_hot
 from r2.lib.recommendation import get_recommended
-from r2.lib.db.thing import Query
+from r2.lib.db.thing import Query, Merge, Relations
+from r2.lib.db import queries
 from r2.lib.strings import Score
 from r2.lib import organic
-from r2.lib.utils import iters
+from r2.lib.utils import iters, check_cheating
+
+from admin import admin_profile_query

 from pylons.i18n import _

@@ -67,6 +70,9 @@ class ListingController(RedditController):
     # class (probably a subclass of Reddit) to use to render the page.
     render_cls = Reddit

+    #extra parameters to send to the render_cls constructor
+    render_params = {}
+
     @property
     def menus(self):
         """list of menus underneat the header (e.g., sort, time, kind,

@@ -91,7 +97,8 @@ class ListingController(RedditController):
                               show_sidebar = self.show_sidebar,
                               nav_menus = self.menus,
                               title = self.title(),
-                              infotext = self.infotext).render()
+                              infotext = self.infotext,
+                              **self.render_params).render()
         return res

@@ -230,16 +237,18 @@ class HotController(FixListing, ListingController):
         return ListingController.GET_listing(self, **env)

 class SavedController(ListingController):
-    prewrap_fn = lambda self, x: x._thing2
     where = 'saved'
     skip = False
     title_text = _('saved')

     def query(self):
-        q = SaveHide._query(SaveHide.c._thing1_id == c.user._id,
-                            SaveHide.c._name == 'save',
-                            sort = desc('_date'),
-                            eager_load = True, thing_data = True)
+        q = queries.get_saved(c.user)
+        if g.use_query_cache:
+            q = q.fetch()
+        else:
+            self.prewrap_fn = q.filter
+            q = q.query
+
         return q

     @validate(VUser())

@@ -340,6 +349,96 @@ class RecommendedController(ListingController):
         self.sort = sort
         return ListingController.GET_listing(self, **env)

+class UserController(ListingController):
+    render_cls = ProfilePage
+    skip = False
+
+    def title(self):
+        titles = {'overview': _("overview for %(user)s"),
+                  'comments': _("comments by %(user)s"),
+                  'submitted': _("submitted by %(user)s"),
+                  'liked': _("liked by %(user)s"),
+                  'disliked': _("disliked by %(user)s"),
+                  'hidden': _("hidden by %(user)s")}
+        title = titles.get(self.where, _('profile for %(user)s')) \
+            % dict(user = self.vuser.name, site = c.site.name)
+        return title
+
+    def query(self):
+        q = None
+        if self.where == 'overview':
+            self.check_modified(self.vuser, 'overview')
+            if g.use_query_cache:
+                q = queries.get_overview(self.vuser, 'new', 'all')
+            else:
+                links = Link._query(Link.c.author_id == self.vuser._id,
+                                    Link.c._spam == (True, False))
+                comments = Comment._query(Comment.c.author_id == self.vuser._id,
+                                          Comment.c._spam == (True, False))
+                q = Merge((links, comments), sort = desc('_date'), data = True)
+        elif self.where == 'comments':
+            self.check_modified(self.vuser, 'commented')
+            q = queries.get_comments(self.vuser, 'new', 'all')
+        elif self.where == 'submitted':
+            self.check_modified(self.vuser, 'submitted')
+            q = queries.get_submitted(self.vuser, 'new', 'all')
+        elif self.where in ('liked', 'disliked'):
+            self.check_modified(self.vuser, self.where)
+            if self.where == 'liked':
+                q = queries.get_liked(self.vuser)
+            else:
+                q = queries.get_disliked(self.vuser)
+
+        elif self.where == 'hidden':
+            q = queries.get_hidden(self.vuser)
+
+        elif c.user_is_admin:
+            q, self.prewrap_fn = admin_profile_query(self.vuser,
+                                                     self.where,
+                                                     desc('_date'))
+
+        #QUERIES HACK
+        if isinstance(q, queries.CachedResults):
+            if g.use_query_cache:
+                q = q.fetch()
+            elif isinstance(q.query, Relations):
+                self.prewrap_fn = q.filter
+                q = q.query
+            else:
+                q = q.query
+
+        if q is None:
+            return self.abort404()
+
+        return q
+
+    @validate(vuser = VExistingUname('username'))
+    def GET_listing(self, where, vuser, **env):
+        self.where = where
+
+        # the validator will ensure that vuser is a valid account
+        if not vuser:
+            return self.abort404()
+
+        # hide spammers profile pages
+        if (not c.user_is_loggedin or
+            (c.user._id != vuser._id and not c.user_is_admin)) \
+            and vuser._spam:
+            return self.abort404()
+
+        if (where not in ('overview', 'submitted', 'comments')
+            and not votes_visible(vuser)):
+            return self.abort404()
+
+        check_cheating('user')
+
+        self.vuser = vuser
+        self.render_params = {'user' : vuser}
+        c.profilepage = True
+
+        return ListingController.GET_listing(self, **env)
+

 class MessageController(ListingController):
     show_sidebar = False
     render_cls = MessagePage

@@ -364,10 +463,13 @@ class MessageController(ListingController):

     def query(self):
         if self.where == 'inbox':
-            q = Inbox._query(Inbox.c._thing1_id == c.user._id,
-                             eager_load = True,
-                             thing_data = True)
-            self.prewrap_fn = lambda x: x._thing2
+            if g.use_query_cache:
+                q = queries.get_inbox(c.user)
+            else:
+                q = Inbox._query(Inbox.c._thing1_id == c.user._id,
+                                 eager_load = True,
+                                 thing_data = True)
+                self.prewrap_fn = lambda x: x._thing2

             #reset the inbox
             if c.have_messages:

@@ -375,16 +477,14 @@ class MessageController(ListingController):
                 c.user._commit()

         elif self.where == 'sent':
-            q = Message._query(Message.c.author_id == c.user._id,
-                               Message.c._spam == (True, False))
+            q = queries.get_sent(c.user)
+            if g.use_query_cache:
+                q = q.fetch()
+            else:
+                q = q.query

         q._sort = desc('_date')
         return q

-    def content(self):
-        self.page = self.listing_obj
-        return self.page
-
     @validate(VUser())
     def GET_listing(self, where, **env):
         self.where = where

@@ -41,7 +41,7 @@ class Globals(object):
                  'num_comments',
                  'max_comments',
                  'num_side_reddits',
-                 'num_precompute_workers',
+                 'num_query_queue_workers',
                  ]

     bool_props = ['debug', 'translator',

@@ -67,7 +67,8 @@ class CachedResults(object):
            item = self.filter(i)
            l = [item._fullname]
            for col in sort_cols:
-               attr = getattr(item, col)
+               #take the property of the original
+               attr = getattr(i, col)
                #convert dates to epochs to take less space
                if isinstance(attr, datetime):
                    attr = epoch_seconds(attr)

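The CachedResults hunk above stores, for each item, its fullname plus the values of the sort columns, reading each attribute from the original relation (`i`) rather than the filtered thing and converting datetimes to epoch seconds so the cached tuples stay small. A hedged sketch of that encoding step; the `epoch_seconds` helper and the column names used here are assumptions based only on the lines shown.

```python
from datetime import datetime
import calendar

def epoch_seconds(dt):
    # stand-in for r2's epoch_seconds helper: datetime -> seconds since the epoch
    return calendar.timegm(dt.utctimetuple())

def sort_tuple(fullname, thing, sort_cols):
    """Build the compact [fullname, col1, col2, ...] record cached per item."""
    l = [fullname]
    for col in sort_cols:
        attr = getattr(thing, col)
        # convert dates to epochs to take less space
        if isinstance(attr, datetime):
            attr = epoch_seconds(attr)
        l.append(attr)
    return l

class Thing(object):
    def __init__(self):
        self._date = datetime(2008, 6, 21)
        self._hot = 4.2

print(sort_tuple('t3_abc', Thing(), ['_hot', '_date']))
# ['t3_abc', 4.2, 1214006400]
```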
@@ -149,7 +150,9 @@ def user_rel_query(rel, user, name):
                   rel.c._t2_deleted == False,
                   rel.c._name == name,
                   sort = desc('_date'),
-                  eager_load = True)
+                  eager_load = True,
+                  #thing_data = True
+                  )

    return CachedResults(q, filter_thing2)

@@ -219,20 +222,18 @@ def add_queries(queries):
 ## actions to update the correct listings.
 def new_link(link):
     sr = Subreddit._byID(link.sr_id)
-    #author = Account._byID(link.author_id)
+    author = Account._byID(link.author_id)

     results = all_queries(get_links, sr, ('hot', 'new', 'old'), ['all'])
     results.extend(all_queries(get_links, sr, ('top', 'controversial'), db_times.keys()))
-    #results.extend(all_queries(get_submitted, author,
-    #                           db_sorts.keys(),
-    #                           db_times.keys()))
+    results.append(get_submitted(author, 'new', 'all'))

     add_queries(results)

 def new_comment(comment):
     author = Account._byID(comment.author_id)
-    results = all_queries(get_comments, author, db_sorts.keys(), db_times.keys())

+    results = [get_comments(author, 'new', 'all')]

     if hasattr(comment, 'parent_id'):
         parent = Comment._byID(comment.parent_id, data = True)
         parent_author = Account._byID(parent.author_id)

@@ -252,8 +253,8 @@ def new_vote(vote):
     results.extend(all_queries(get_links, sr, ('top', 'controversial'), db_times.keys()))

     #must update both because we don't know if it's a changed vote
-    #results.append(get_liked(user))
-    #results.append(get_disliked(user))
+    results.append(get_liked(user))
+    results.append(get_disliked(user))

     add_queries(results)

@@ -266,11 +267,10 @@ def new_message(message):

     add_queries(results)

-def new_savehide(savehide):
-    user = savehide._thing1
-    if savehide._name == 'save':
+def new_savehide(user, action):
+    if action == 'save':
         results = [get_saved(user)]
-    else:
+    elif action == 'hide':
         results = [get_hidden(user)]

     add_queries(results)

@@ -283,16 +283,40 @@ def add_all_srs():
         add_queries(all_queries(get_links, sr, ('top', 'controversial'), db_times.keys()))

 def add_all_users():
     """Adds every profile-page query for every user to the queue"""
     q = Account._query(sort = asc('_date'))
     for user in fetch_things2(q):
-        queries = []
-        queries.extend(all_queries(get_submitted, user, db_sorts.keys(), db_times.keys()))
-        queries.extend(all_queries(get_comments, user, db_sorts.keys(), db_times.keys()))
-        queries.append(get_inbox_messages(user))
-        queries.append(get_inbox_comments(user))
-        queries.append(get_saved(user))
-        queries.append(get_hidden(user))
-        queries.append(get_liked(user))
-        queries.append(get_disliked(user))
-        add_queries(queries)
+        results = [get_inbox_messages(user),
+                   get_inbox_comments(user),
+                   get_sent(user),
+                   get_liked(user),
+                   get_disliked(user),
+                   get_saved(user),
+                   get_hidden(user),
+                   get_submitted(user, 'new', 'all'),
+                   get_comments(user, 'new', 'all')]
+        add_queries(results)
+
+def compute_all_inboxes():
+    q = Account._query(sort = asc('_date'))
+    for user in fetch_things2(q):
+        get_inbox_messages(user).update()
+        get_inbox_comments(user).update()
+        get_sent(user).update()
+
+def compute_all_liked():
+    q = Account._query(sort = asc('_date'))
+    for user in fetch_things2(q):
+        get_liked(user).update()
+        get_disliked(user).update()
+
+def compute_all_saved():
+    q = Account._query(sort = asc('_date'))
+    for user in fetch_things2(q):
+        get_saved(user).update()
+        get_hidden(user).update()
+
+def compute_all_user_pages():
+    q = Account._query(sort = asc('_date'))
+    for user in fetch_things2(q):
+        get_submitted(user, 'new', 'all').update()
+        get_comments(user, 'new', 'all').update()

@@ -102,7 +102,7 @@ def run():
         #limit the total number of jobs in the WorkQueue. we don't
         #need to load the entire db queue right away (the db queue can
         #get quite large).
-        if wq.jobs.qsize() < 2 * num_workers:
+        if len(running) < 2 * num_workers:
            with running_lock:
                iden, pickled_cr = get_query()
                if pickled_cr is not None:

@@ -35,7 +35,7 @@ import logging
 log_format = logging.Formatter('sql: %(message)s')

 settings = storage()
-settings.DEBUG = False
+settings.DEBUG = g.debug
 settings.DB_CREATE_TABLES = True
 settings.DB_APP_NAME = 'reddit'
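
On the write side, the diff guards every mutation in the API controllers with `g.write_query_queue` before touching the cached listings, and it narrows `new_savehide` from taking a SaveHide relation to a plain `(user, action)` pair. A small sketch of that guard follows; the fake queue object and `post_save` wrapper are assumptions for illustration, not r2.lib.db.queries or the real controller code.

```python
# Sketch only: FakeQueries stands in for r2.lib.db.queries.
class FakeQueries(object):
    def __init__(self):
        self.refreshed = []

    def new_savehide(self, user, action):
        # after this commit the helper takes (user, action) rather than a SaveHide rel
        if action == 'save':
            self.refreshed.append((user, 'saved listing'))
        elif action == 'hide':
            self.refreshed.append((user, 'hidden listing'))

def post_save(thing, user, write_query_queue, queries):
    """Mirrors POST_save: perform the write, then refresh the cache only if enabled."""
    thing.setdefault('saved_by', []).append(user)   # stands in for thing._save(c.user)
    if write_query_queue:
        queries.new_savehide(user, 'save')

queries = FakeQueries()
post_save({}, 'alice', True, queries)
print(queries.refreshed)   # [('alice', 'saved listing')]
```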