mirror of https://github.com/reddit-archive/reddit.git
synced 2026-01-27 15:58:06 -05:00
refactored some query caching code. tried to remove any references to use_query_cache from listingcontroller.py
@@ -22,7 +22,7 @@
 from r2.controllers.reddit_base import RedditController

 def admin_profile_query(vuser, location, db_sort):
-    return None, None
+    return None

 class AdminController(RedditController):
     pass

@@ -176,128 +176,6 @@ class FrontController(RedditController):
                                   default=num_comments)],
                           infotext = infotext).render()
         return res
-
-
-    @base_listing
-    @validate(vuser = VExistingUname('username'),
-              location = nop('location', default = ''),
-              sort = VMenu('location', SortMenu),
-              time = VMenu('location', TimeMenu))
-    def GET_user(self, num, vuser, sort, time, after, reverse, count, location, **env):
-        """user profile pages"""
-
-        # the validator will ensure that vuser is a valid account
-        if not vuser:
-            return self.abort404()
-
-        # hide spammers profile pages
-        if (not c.user_is_loggedin or
-            (c.user._id != vuser._id and not c.user_is_admin)) \
-            and vuser._spam:
-            return self.abort404()
-
-        check_cheating('user')
-
-        content_pane = PaneStack()
-
-        # enable comments displaying with their titles when rendering
-        c.profilepage = True
-        listing = None
-
-        db_sort = SortMenu.operator(sort)
-        db_time = TimeMenu.operator(time)
-
-        # function for extracting the proper thing if query is a relation (see liked)
-        prewrap_fn = None
-
-        # default (nonexistent) query to trip an error on if location is unhandled
-        query = None
-
-        # build the sort menus for the space above the content
-        sortbar = [SortMenu(default = sort), TimeMenu(default = time)]
-
-        # overview page is a merge of comments and links
-        if location == 'overview':
-            self.check_modified(vuser, 'overview')
-            links = Link._query(Link.c.author_id == vuser._id,
-                                Link.c._spam == (True, False))
-            comments = Comment._query(Comment.c.author_id == vuser._id,
-                                      Comment.c._spam == (True, False))
-            query = thing.Merge((links, comments), sort = db_sort, data = True)
-
-        elif location == 'comments':
-            self.check_modified(vuser, 'commented')
-            query = Comment._query(Comment.c.author_id == vuser._id,
-                                   Comment.c._spam == (True, False),
-                                   sort = db_sort)
-
-        elif location == 'submitted':
-            self.check_modified(vuser, 'submitted')
-            query = Link._query(Link.c.author_id == vuser._id,
-                                Link.c._spam == (True, False),
-                                sort = db_sort)
-
-        # (dis)liked page: pull votes and extract thing2
-        elif ((location == 'liked' or location == 'disliked') and
-              votes_visible(vuser)):
-            self.check_modified(vuser, location)
-            rel = Vote.rel(vuser, Link)
-            query = rel._query(rel.c._thing1_id == vuser._id,
-                               rel.c._t2_deleted == False)
-            query._eager(True, True)
-
-            if location == 'liked':
-                query._filter(rel.c._name == '1')
-            else:
-                query._filter(rel.c._name == '-1')
-            sortbar = []
-            query._sort = desc('_date')
-            prewrap_fn = lambda x: x._thing2
-
-        # TODO: this should be handled with '' above once merges work
-        elif location == 'hidden' and votes_visible(vuser):
-            db_time = None
-            query = SaveHide._query(SaveHide.c._thing1_id == vuser._id,
-                                    SaveHide.c._name == 'hide',
-                                    eager_load = True,
-                                    thing_data = True)
-            sortbar = []
-            query._sort = desc('_date')
-            prewrap_fn = lambda x: x._thing2
-
-        # any admin pages live here.
-        elif c.user_is_admin:
-            db_time = None
-            query, prewrap_fn = admin_profile_query(vuser, location, db_sort)
-
-        if query is None:
-            return self.abort404()
-
-        if db_time:
-            query._filter(db_time)
-
-        builder = QueryBuilder(query, num = num, prewrap_fn = prewrap_fn,
-                               after = after, count = count, reverse = reverse,
-                               wrap = ListingController.builder_wrapper)
-        listing = LinkListing(builder)
-
-        if listing:
-            content_pane.append(listing.listing())
-
-        titles = {'': _("overview for %(user)s on %(site)s"),
-                  'comments': _("comments by %(user)s on %(site)s"),
-                  'submitted': _("submitted by %(user)s on %(site)s"),
-                  'liked': _("liked by %(user)s on %(site)s"),
-                  'disliked': _("disliked by %(user)s on %(site)s"),
-                  'hidden': _("hidden by %(user)s on %(site)s")}
-        title = titles.get(location, _('profile for %(user)s')) \
-               % dict(user = vuser.name, site = c.site.name)
-
-        return ProfilePage(vuser, title = title,
-                           nav_menus = sortbar,
-                           content = content_pane).render()
-

     @validate(VUser(),
               location = nop("location"))

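The 122 lines removed above are the whole of GET_user: the front controller stops assembling profile queries by hand, and the surviving UserController (the @@ -369,44 +358,28 @@ hunk later in this diff) asks the queries module instead. In miniature, restricted to the two call shapes actually visible in this diff:

# old: the controller built the raw query inline
comments = Comment._query(Comment.c.author_id == vuser._id,
                          Comment.c._spam == (True, False),
                          sort = db_sort)

# new: the controller asks the queries module, which decides whether to
# serve precomputed cached results or fall back to a live query
q = queries.get_comments(vuser, 'new', 'all')
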
@@ -50,10 +50,6 @@ class ListingController(RedditController):
     # toggles showing numbers
     show_nums = True

-    # for use with builder in build_listing. function applied to the
-    # elements of query() upon iteration
-    prewrap_fn = None
-
     # any text that should be shown on the top of the page
     infotext = None

@@ -118,6 +114,8 @@ class ListingController(RedditController):
             builder_cls = QueryBuilder
         elif isinstance(self.query_obj, iters):
             builder_cls = IDBuilder
+        elif isinstance(self.query_obj, queries.CachedResults):
+            builder_cls = IDBuilder

         b = builder_cls(self.query_obj,
                         num = self.num,

@@ -125,8 +123,8 @@ class ListingController(RedditController):
                         after = self.after,
                         count = self.count,
                         reverse = self.reverse,
-                        prewrap_fn = self.prewrap_fn,
                         wrap = self.builder_wrapper)

         return b

     def listing(self):

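Taken together, the two hunks above change how build_listing hands off to a builder: the builder class is now inferred from the shape of whatever query() returned, and prewrap_fn is no longer passed down because it rides on the query object itself (see the QueryBuilder hunk near the end of this diff). A condensed sketch of the dispatch, with stub classes standing in for the real ones; the first branch's test is not visible in the hunk, so a raw-Query check is assumed:

class Query(object): pass          # stand-in for r2.lib.db.thing.Query
class CachedResults(object): pass  # stand-in for r2.lib.db.queries.CachedResults
class QueryBuilder(object): pass
class IDBuilder(QueryBuilder): pass

iters = (list, tuple, set)

def pick_builder_cls(query_obj):
    if isinstance(query_obj, Query):            # live query: page via the db
        return QueryBuilder
    elif isinstance(query_obj, iters):          # plain sequence of fullnames
        return IDBuilder
    elif isinstance(query_obj, CachedResults):  # precomputed fullnames
        return IDBuilder
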
@@ -207,15 +205,13 @@ class HotController(FixListing, ListingController):
         if c.site == Default:
             user = c.user if c.user_is_loggedin else None
             sr_ids = Subreddit.user_subreddits(user)
-            links = normalized_hot(sr_ids)
-            return links
+            return normalized_hot(sr_ids)
         #if not using the query_cache we still want cached front pages
         elif (not g.use_query_cache
               and not isinstance(c.site, FakeSubreddit)
               and self.after is None
               and self.count == 0):
-            links = [l._fullname for l in get_hot(c.site)]
-            return links
+            return [l._fullname for l in get_hot(c.site)]
         else:
             return c.site.get_links('hot', 'all')

@@ -242,14 +238,7 @@ class SavedController(ListingController):
     title_text = _('saved')

     def query(self):
-        q = queries.get_saved(c.user)
-        if g.use_query_cache:
-            q = q.fetch()
-        else:
-            self.prewrap_fn = q.filter
-            q = q.query
-
-        return q
+        return queries.get_saved(c.user)

     @validate(VUser())
     def GET_listing(self, **env):

@@ -369,44 +358,28 @@ class UserController(ListingController):
         q = None
         if self.where == 'overview':
             self.check_modified(self.vuser, 'overview')
-            if g.use_query_cache:
-                q = queries.get_overview(self.vuser, 'new', 'all')
-            else:
-                links = Link._query(Link.c.author_id == self.vuser._id,
-                                    Link.c._spam == (True, False))
-                comments = Comment._query(Comment.c.author_id == self.vuser._id,
-                                          Comment.c._spam == (True, False))
-                q = Merge((links, comments), sort = desc('_date'), data = True)
+            q = queries.get_overview(self.vuser, 'new', 'all')

         elif self.where == 'comments':
             self.check_modified(self.vuser, 'commented')
             q = queries.get_comments(self.vuser, 'new', 'all')

         elif self.where == 'submitted':
             self.check_modified(self.vuser, 'submitted')
             q = queries.get_submitted(self.vuser, 'new', 'all')

         elif self.where in ('liked', 'disliked'):
             self.check_modified(self.vuser, self.where)
             if self.where == 'liked':
                 q = queries.get_liked(self.vuser)
             else:
                 q = queries.get_disliked(self.vuser)


         elif self.where == 'hidden':
             q = queries.get_hidden(self.vuser)

         elif c.user_is_admin:
-            q, self.prewrap_fn = admin_profile_query(self.vuser,
-                                                     self.where,
-                                                     desc('_date'))
-
-        #QUERIES HACK
-        if isinstance(q, queries.CachedResults):
-            if g.use_query_cache:
-                q = q.fetch()
-            elif isinstance(q.query, Relations):
-                self.prewrap_fn = q.filter
-                q = q.query
-            else:
-                q = q.query
+            q = admin_profile_query(self.vuser, self.where, desc('_date'))

         if q is None:
             return self.abort404()

@@ -464,13 +437,7 @@ class MessageController(ListingController):

     def query(self):
         if self.where == 'inbox':
-            if g.use_query_cache:
-                q = queries.get_inbox(c.user)
-            else:
-                q = Inbox._query(Inbox.c._thing1_id == c.user._id,
-                                 eager_load = True,
-                                 thing_data = True)
-                self.prewrap_fn = lambda x: x._thing2
+            q = queries.get_inbox(c.user)

             #reset the inbox
             if c.have_messages:

@@ -479,10 +446,6 @@ class MessageController(ListingController):

         elif self.where == 'sent':
             q = queries.get_sent(c.user)
-            if g.use_query_cache:
-                q = q.fetch()
-            else:
-                q = q.query

         return q

@@ -532,7 +495,6 @@ class RedditsController(ListingController):
         return ListingController.GET_listing(self, **env)

 class MyredditsController(ListingController):
-    prewrap_fn = lambda self, x: x._thing1
     render_cls = MySubredditsPage

     @property

@@ -554,6 +516,7 @@ class MyredditsController(ListingController):
                              sort = (desc('_t1_ups'), desc('_t1_date')),
                              eager_load = True,
                              thing_data = True)
+        reddits.prewrap_fn = lambda x: x._thing1
         return reddits

     def content(self):

@@ -1,6 +1,6 @@
 from r2.models import Account, Link, Comment, Vote, SaveHide
 from r2.models import Message, Inbox, Subreddit
-from r2.lib.db.thing import Thing
+from r2.lib.db.thing import Thing, Merge
 from r2.lib.db.operators import asc, desc, timeago
 from r2.lib.db import query_queue
 from r2.lib.db.sorts import epoch_seconds

@@ -42,7 +42,7 @@ def filter_thing2(x):
 class CachedResults(object):
     """Given a query returns a list-like object that will lazily look up
     the query from the persistent cache. """
-    def __init__(self, query, filter = filter_identity):
+    def __init__(self, query, filter):
         self.query = query
         self.query._limit = precompute_limit
         self.filter = filter

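Per its docstring, CachedResults wraps a query in a list-like object that is only looked up in the persistent cache when first consumed. A self-contained miniature of that pattern, with a plain dict standing in for reddit's query_cache (the real class stores tuples of fullname plus sort values and yields just the fullname):

fake_query_cache = {'iden': [('t3_a',), ('t3_b',)]}

class LazyCachedList(object):
    def __init__(self, iden):
        self.iden = iden
        self._fetched = False
        self.data = []

    def fetch(self):
        # idempotent: only the first call touches the cache
        if not self._fetched:
            self._fetched = True
            self.data = fake_query_cache.get(self.iden) or []

    def __iter__(self):
        self.fetch()
        for x in self.data:
            yield x[0]

print list(LazyCachedList('iden'))   # ['t3_a', 't3_b']
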
@@ -56,7 +56,6 @@ class CachedResults(object):
         if not self._fetched:
             self._fetched = True
             self.data = query_cache.get(self.iden) or []
-        return list(self)

     def make_item_tuple(self, item):
         """Given a single 'item' from the result of a query build the tuple

@@ -120,13 +119,12 @@ class CachedResults(object):
         return '<CachedResults %s %s>' % (self.query._rules, self.query._sort)

     def __iter__(self):
-        if not self._fetched:
-            self.fetch()
+        self.fetch()

         for x in self.data:
             yield x[0]

-def merge_results(*results):
+def merge_cached_results(*results):
     """Given two CachedResults, merges their lists based on the sorts of
     their queries."""
     if len(results) == 1:

@@ -154,13 +152,29 @@ def merge_results(*results):
     #all_items = Thing._by_fullname(all_items, return_dict = False)
     return [i[0] for i in sorted(all_items, cmp = thing_cmp)]

+def make_results(query, filter = filter_identity):
+    if g.use_query_cache:
+        return CachedResults(query, filter)
+    else:
+        query.prewrap_fn = filter
+        return query
+
+def merge_results(*results):
+    if g.use_query_cache:
+        return merge_cached_results(*results)
+    else:
+        m = Merge(results, sort = results[0]._sort)
+        #assume the prewrap_fn's all match
+        m.prewrap_fn = results[0].prewrap_fn
+        return m
+
 def get_links(sr, sort, time):
     """General link query for a subreddit."""
     q = Link._query(Link.c.sr_id == sr._id,
                     sort = db_sort(sort))
     if time != 'all':
         q._filter(db_times[time])
-    return CachedResults(q)
+    return make_results(q)

 def user_query(kind, user, sort, time):
     """General profile-page query."""

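make_results and the re-pointed merge_results are the pivot of this commit: every helper in this file now returns through one factory, so the g.use_query_cache decision is made exactly once instead of in every controller. A self-contained demo of that contract, with stand-ins for g and the query class; the branch logic is copied from the hunk above:

class G(object): use_query_cache = True
g = G()

filter_identity = lambda x: x

class FakeQuery(object):
    pass

class CachedResults(object):       # stand-in with the same constructor shape
    def __init__(self, query, filter):
        self.query, self.filter = query, filter

def make_results(query, filter = filter_identity):
    if g.use_query_cache:
        return CachedResults(query, filter)
    else:
        query.prewrap_fn = filter
        return query

q = FakeQuery()
assert isinstance(make_results(q), CachedResults)  # cache on: IDBuilder path
g.use_query_cache = False
r = make_results(q)                                # cache off: QueryBuilder path
assert r is q and r.prewrap_fn is filter_identity
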
@@ -169,7 +183,7 @@ def user_query(kind, user, sort, time):
                     sort = db_sort(sort))
     if time != 'all':
         q._filter(db_times[time])
-    return CachedResults(q)
+    return make_results(q)

 def get_comments(user, sort, time):
     return user_query(Comment, user, sort, time)

@@ -180,7 +194,7 @@ def get_submitted(user, sort, time):
 def get_overview(user, sort, time):
     return merge_results(get_comments(user, sort, time),
                          get_submitted(user, sort, time))


 def user_rel_query(rel, user, name):
     """General user relationship query."""
     q = rel._query(rel.c._thing1_id == user._id,

@@ -191,7 +205,7 @@ def user_rel_query(rel, user, name):
                    #thing_data = True
                    )

-    return CachedResults(q, filter_thing2)
+    return make_results(q, filter_thing2)

 vote_rel = Vote.rel(Account, Link)

@@ -223,7 +237,7 @@ def get_sent(user):
     q = Message._query(Message.c.author_id == user._id,
                        Message.c._spam == (True, False),
                        sort = desc('_date'))
-    return CachedResults(q)
+    return make_results(q)

 def add_queries(queries, insert_item = None, delete_item = None):
     """Adds multiple queries to the query queue. If insert_item or

"""Adds multiple queries to the query queue. If insert_item or
|
||||
@@ -231,6 +245,9 @@ def add_queries(queries, insert_item = None, delete_item = None):
|
||||
all."""
|
||||
def _add_queries():
|
||||
for q in queries:
|
||||
if not isinstance(q, CachedResults):
|
||||
continue
|
||||
|
||||
if insert_item and q.can_insert():
|
||||
q.insert(insert_item)
|
||||
elif delete_item and q.can_delete():
|
||||
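The guard added above exists because make_results can now hand back a plain query: with the cache off, nothing in it has can_insert/can_delete bookkeeping, so the precompute queue must simply skip it. A hypothetical caller mixing both shapes (user and sr stand for any account and subreddit):

qs = [get_sent(user),               # CachedResults when the cache is on,
      get_links(sr, 'new', 'all')]  # otherwise a raw Query
add_queries(qs)                     # raw queries are now silently skipped
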
@@ -361,6 +378,3 @@ def add_all_users():
     q = Account._query(sort = asc('_date'))
     for user in fetch_things2(q):
         update_user(user)
-
-
-

@@ -667,21 +667,6 @@ def Relation(type1, type2, denorm1 = None, denorm2 = None):

     return RelationCls

-#TODO can this be more efficient?
-class QueryAttr(object):
-    __slots__ = ('cols',)
-    def __init__(self, *cols):
-        self.cols = cols
-
-    def __eq__(self, other):
-        return (self, other)
-
-    def lookup(self, obj):
-        return reduce(getattr, self.cols, obj)
-
-    def __getattr__(self, attr):
-        return QueryAttr(*list(self.cols) + [attr])
-
 class Query(object):
     def __init__(self, kind, *rules, **kw):
         self._rules = []

@@ -767,11 +752,6 @@ class Query(object):
     def _count(self):
         return self._cursor().rowcount()

-    def __getattr__(self, attr):
-        if attr.startswith('__'):
-            raise AttributeError
-        else:
-            return QueryAttr(attr)

     def _filter(*a, **kw):
         raise NotImplementedError

@@ -1142,3 +1122,28 @@ def MultiRelation(name, *relations):
 #    def __init__(self, query1, query2, rule):
 #        MultiQuery.__init__(self, query1, query2)
 #        self._a = (rule[0].lookup, rule, rule[1].lookup)
+
+
+##used to be in class Query
+#    def __getattr__(self, attr):
+#        if attr.startswith('__'):
+#            raise AttributeError
+#        else:
+#            return QueryAttr(attr)
+
+##used to be in class Query
+#TODO can this be more efficient?
+# class QueryAttr(object):
+#     __slots__ = ('cols',)
+#     def __init__(self, *cols):
+#         self.cols = cols
+
+#     def __eq__(self, other):
+#         return (self, other)
+
+#     def lookup(self, obj):
+#         return reduce(getattr, self.cols, obj)
+
+#     def __getattr__(self, attr):
+#         return QueryAttr(*list(self.cols) + [attr])

@@ -40,17 +40,11 @@ def access_key(sr):
 def expire_key(sr):
     return sr.name + '_expire'

-def top_key(sr):
-    return sr.name + '_top'
-
 def expire_hot(sr):
     """Called when a subreddit should be recomputed: after a vote (hence,
     submit) or deletion."""
     cache.set(expire_key(sr), True)

-#def is_top_link(sr, link):
-#    return cache.get(top_key(sr)) == link._fullname
-
 def cached_query(query, sr):
     """Returns the results from running query. The results are cached and
     only recomputed after 'expire_delta'"""

@@ -79,11 +73,6 @@ def cached_query(query, sr):
     query._read_cache = read_cache
     res = list(query)

-    #set the #1 link so we can ignore it later. expire after TOP_CACHE
-    #just in case something happens and that sr doesn't update
-    #if res:
-    #    cache.set(top_key(sr), res[0]._fullname, TOP_CACHE)
-
     return res

 def get_hot(sr):

@@ -94,7 +83,7 @@ def get_hot(sr):
     if isinstance(q, Query):
         return cached_query(q, sr)
     else:
-        return Link._by_fullname(q[:150], return_dict = False)
+        return Link._by_fullname(list(q)[:150], return_dict = False)

 def only_recent(items):
     return filter(lambda l: l._date > utils.timeago('%d day' % g.HOT_PAGE_AGE),

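The list() wrapper above is needed because c.site.get_links (see the Subreddit hunk below) no longer returns a plain list: with the query cache on it returns a CachedResults, which is iterable via __iter__ but not sliceable, so the old q[:150] would raise a TypeError. list(q) normalizes every shape before the slice:

names = list(q)[:150]   # safe for lists, generators, and CachedResults alike
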
@@ -37,7 +37,8 @@ iters = (list, tuple, set)
 def tup(item, ret_is_single=False):
     """Forces casting of item to a tuple (for a list) or generates a
     single element tuple (for anything else)"""
-    if isinstance(item, iters):
+    #return true for iterables, except for strings, which is what we want
+    if hasattr(item, '__iter__'):
         return (item, False) if ret_is_single else item
     else:
         return ((item,), True) if ret_is_single else (item,)

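The new test leans on a Python 2 detail: str and unicode do not define __iter__ (string iteration falls back to __getitem__), so hasattr(item, '__iter__') still treats strings as single items while now also admitting generators and other iterables the old iters tuple missed, CachedResults among them (its tup() call site is in IDBuilder, next hunk). A quick behavior sketch:

print tup([1, 2])            # [1, 2]     (iterable: passed through)
print tup('abc')             # ('abc',)   (string: wrapped, not iterated)
print tup(x for x in (1,))   # the generator itself is passed through,
                             # which the old isinstance check would wrap
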
@@ -191,7 +191,11 @@ class QueryBuilder(Builder):
         self.start_count = kw.get('count', 0) or 0
         self.after = kw.get('after')
         self.reverse = kw.get('reverse')
-        self.prewrap_fn = kw.get('prewrap_fn')
+
+        self.prewrap_fn = None
+        if hasattr(query, 'prewrap_fn'):
+            self.prewrap_fn = query.prewrap_fn
+        #self.prewrap_fn = kw.get('prewrap_fn')

     def item_iter(self, a):
         """Iterates over the items returned by get_items"""

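This is the consumer side of make_results' cache-off branch: prewrap_fn is no longer threaded through keyword arguments but discovered on the query object itself, so whatever constructs the query (the queries module, merge_results, MyredditsController above) attaches the extraction function at the source. A self-contained miniature of the handoff:

class FakeQuery(object):
    pass

q = FakeQuery()
q.prewrap_fn = lambda rel: rel._thing2   # e.g. set by make_results' else-branch

# what QueryBuilder.__init__ now does:
prewrap_fn = None
if hasattr(q, 'prewrap_fn'):
    prewrap_fn = q.prewrap_fn
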
@@ -316,7 +320,7 @@ class QueryBuilder(Builder):

 class IDBuilder(QueryBuilder):
     def init_query(self):
-        names = self.names = copy(tup(self.query))
+        names = self.names = list(tup(self.query))

         if self.reverse:
             names.reverse()

@@ -230,11 +230,7 @@ class Subreddit(Thing, Printable):

     def get_links(self, sort, time):
         from r2.lib.db import queries
-        q = queries.get_links(self, sort, time)
-        if g.use_query_cache:
-            return q.fetch()
-        else:
-            return q.query
+        return queries.get_links(self, sort, time)

     @classmethod
     def add_props(cls, user, wrapped):

@@ -442,7 +438,7 @@ class DefaultSR(FakeSubreddit):
             results = []
             for sr in srs:
                 results.append(queries.get_links(sr, sort, time))
-            return queries.merge_results(*results)
+            return queries.merge_cached_results(*results)
         else:
             q = Link._query(Link.c.sr_id == sr_ids,
                             sort = queries.db_sort(sort))