diff --git a/r2/example.ini b/r2/example.ini
index 6af8e7ab3..beb0ac34e 100644
--- a/r2/example.ini
+++ b/r2/example.ini
@@ -41,6 +41,11 @@ email_db_host = 127.0.0.1
 email_db_user = reddit
 email_db_pass = password
 
+query_queue_db_name = query_queue
+query_queue_db_host = 127.0.0.1
+query_queue_db_user = reddit
+query_queue_db_pass = password
+
 ###
 # Other magic settings
 ###
@@ -48,6 +53,14 @@ email_db_pass = password
 timezone = UTC
 monitored_servers = localhost
 
+#query cache settings
+query_caches = 127.0.0.1:11211
+num_query_queue_workers =
+query_queue_worker =
+enable_doquery = False
+use_query_cache = False
+write_query_queue = False
+
 stylesheet = reddit.css
 stylesheet_rtl = reddit_rtl.css
 
diff --git a/r2/r2/controllers/api.py b/r2/r2/controllers/api.py
index 0611f195f..b67836a8b 100644
--- a/r2/r2/controllers/api.py
+++ b/r2/r2/controllers/api.py
@@ -705,19 +705,20 @@ class ApiController(RedditController):
         organic = vote_type == 'organic'
         v = Vote.vote(user, thing, dir, ip, spam, organic)
 
-        #update last modified
-        set_last_modified(c.user, 'liked')
-        set_last_modified(c.user, 'disliked')
+        #update relevant caches
+        if isinstance(thing, Link):
+            sr = thing.subreddit_slow
+            set_last_modified(c.user, 'liked')
+            set_last_modified(c.user, 'disliked')
 
-        #update the queries
-        if g.write_query_queue:
-            queries.new_vote(v)
+            if v.valid_thing:
+                expire_hot(sr)
+            if g.write_query_queue:
+                queries.new_vote(v)
 
         # flag search indexer that something has changed
         tc.changed(thing)
-
-
 
     @Json
     @validate(VUser(),
              VModhash(),
diff --git a/r2/r2/controllers/listingcontroller.py b/r2/r2/controllers/listingcontroller.py
index 0c2a15d49..f731b447a 100644
--- a/r2/r2/controllers/listingcontroller.py
+++ b/r2/r2/controllers/listingcontroller.py
@@ -194,7 +194,7 @@ class HotController(FixListing, ListingController):
     def query(self):
         #no need to worry when working from the cache
         if g.use_query_cache or c.site == Default:
-            fix_listing = False
+            self.fix_listing = False
 
         if c.site == Default:
             user = c.user if c.user_is_loggedin else None
diff --git a/r2/r2/lib/db/queries.py b/r2/r2/lib/db/queries.py
index 948158d27..54164ed25 100644
--- a/r2/r2/lib/db/queries.py
+++ b/r2/r2/lib/db/queries.py
@@ -12,11 +12,16 @@ from pylons import g
 query_cache = g.query_cache
 precompute_limit = 1000
 
-db_sorts = dict(hot = desc('_hot'),
-                new = desc('_date'),
-                top = desc('_score'),
-                controversial = desc('_controversy'),
-                old = asc('_date'))
+
+db_sorts = dict(hot = (desc, '_hot'),
+                new = (desc, '_date'),
+                top = (desc, '_score'),
+                controversial = (desc, '_controversy'),
+                old = (asc, '_date'))
+
+def db_sort(sort):
+    cls, col = db_sorts[sort]
+    return cls(col)
 
 db_times = dict(all = None,
                 hour = Thing.c._date >= timeago('1 hour'),
@@ -114,7 +119,7 @@ def merge_results(*results):
 def get_links(sr, sort, time):
     """General link query for a subreddit."""
     q = Link._query(Link.c.sr_id == sr._id,
-                    sort = db_sorts[sort])
+                    sort = db_sort(sort))
     if time != 'all':
         q._filter(db_times[time])
     return CachedResults(q)
@@ -123,7 +128,7 @@ def user_query(kind, user, sort, time):
     """General profile-page query."""
     q = kind._query(kind.c.author_id == user._id,
                     kind.c._spam == (True, False),
-                    sort = db_sorts[sort])
+                    sort = db_sort(sort))
     if time != 'all':
         q._filter(db_times[time])
     return CachedResults(q)
diff --git a/r2/r2/models/subreddit.py b/r2/r2/models/subreddit.py
index faebbaafd..b15ffc893 100644
--- a/r2/r2/models/subreddit.py
+++ b/r2/r2/models/subreddit.py
@@ -363,7 +363,7 @@ class FriendsSR(FakeSubreddit):
             raise UserRequiredException
 
         q = Link._query(self.c.author_id == c.user.friends,
-                        sort = queries.db_sorts[sort])
+                        sort = queries.db_sort(sort))
         if time != 'all':
             q._filter(queries.db_times[time])
         return q
@@ -381,7 +381,7 @@ class AllSR(FakeSubreddit):
     def get_links(self, sort, time):
        from r2.models import Link
        from r2.lib.db import queries
-        q = Link._query(sort = queries.db_sorts[sort])
+        q = Link._query(sort = queries.db_sort(sort))
        if time != 'all':
            q._filter(queries.db_times[time])
        return q
@@ -408,7 +408,7 @@ class DefaultSR(FakeSubreddit):
 
             return queries.merge_results(*results)
         else:
-            q = Link._query(sort = queries.db_sorts[sort])
+            q = Link._query(sort = queries.db_sort(sort))
             if time != 'all':
                 q._filter(queries.db_times[time])
             return q
diff --git a/r2/r2/models/vote.py b/r2/r2/models/vote.py
index 5b7360f69..bf85455cb 100644
--- a/r2/r2/models/vote.py
+++ b/r2/r2/models/vote.py
@@ -116,10 +116,6 @@ class Vote(MultiRelation('vote',
         if sub._id != obj.author_id:
             incr_counts([sr])
 
-        #expire the sr
-        if kind == 'link' and v.valid_thing:
-            expire_hot(sr)
-
         return v
 
     #TODO make this generic and put on multirelation?
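
A minimal usage sketch of the db_sort() helper this patch introduces in r2/r2/lib/db/queries.py, for reference alongside the hunks above. The asc/desc import path and the rationale comment are assumptions, not part of the patch:

    # Assumed import path for the sort operators referenced in queries.py.
    from r2.lib.db.operators import asc, desc

    # The patch stores (operator class, column name) pairs instead of
    # operator instances built once at module import time.
    db_sorts = dict(hot = (desc, '_hot'),
                    new = (desc, '_date'),
                    top = (desc, '_score'),
                    controversial = (desc, '_controversy'),
                    old = (asc, '_date'))

    def db_sort(sort):
        """Build a fresh sort operator for the given sort name."""
        cls, col = db_sorts[sort]
        return cls(col)

    # Callers switch from a dict lookup to a constructor call:
    #   old: Link._query(..., sort = db_sorts['hot'])
    #   new: Link._query(..., sort = db_sort('hot'))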