Fixed a sorting issue where the global sorts were being overwritten. Added the query_queue parameters to example.ini.

Steve Huffman
2008-08-22 13:09:27 -07:00
parent 33fd4e9684
commit 1861a04e45
6 changed files with 38 additions and 23 deletions

View File

@@ -41,6 +41,11 @@ email_db_host = 127.0.0.1
 email_db_user = reddit
 email_db_pass = password
+query_queue_db_name = query_queue
+query_queue_db_host = 127.0.0.1
+query_queue_db_user = reddit
+query_queue_db_pass = password
 ###
 # Other magic settings
 ###
@@ -48,6 +53,14 @@ email_db_pass = password
 timezone = UTC
 monitored_servers = localhost
+#query cache settings
+query_caches = 127.0.0.1:11211
+num_query_queue_workers =
+query_queue_worker =
+enable_doquery = False
+use_query_cache = False
+write_query_queue = False
 stylesheet = reddit.css
 stylesheet_rtl = reddit_rtl.css
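The two hunks above extend example.ini with the connection settings for the new query_queue database and with the switches that drive the query-cache and query-queue machinery (query_caches, num_query_queue_workers, query_queue_worker, enable_doquery, use_query_cache, write_query_queue); the worker values are deliberately left blank in the template and filled per install. As a minimal sketch, assuming the usual pylons pattern of copying ini keys onto the application-globals object g (FakeGlobals below is invented for illustration, not reddit's app_globals):

    # Illustration only: how string-valued ini flags like the ones above
    # typically become attributes on the pylons globals object `g`.
    class FakeGlobals(object):
        def __init__(self, config):
            for key in ('enable_doquery', 'use_query_cache', 'write_query_queue'):
                # booleans arrive as strings and need coercion
                setattr(self, key, config.get(key, '').lower() == 'true')
            # blank in the template; each deployment supplies its own value
            self.num_query_queue_workers = int(config.get('num_query_queue_workers') or 0)

    g = FakeGlobals({'use_query_cache': 'false',
                     'write_query_queue': 'true',
                     'num_query_queue_workers': '5'})
    assert g.write_query_queue and not g.use_query_cache
    assert g.num_query_queue_workers == 5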

View File

@@ -705,19 +705,20 @@ class ApiController(RedditController):
             organic = vote_type == 'organic'
             v = Vote.vote(user, thing, dir, ip, spam, organic)
-            #update last modified
-            set_last_modified(c.user, 'liked')
-            set_last_modified(c.user, 'disliked')
             #update relevant caches
             if isinstance(thing, Link):
                 sr = thing.subreddit_slow
+                set_last_modified(c.user, 'liked')
+                set_last_modified(c.user, 'disliked')
                 #update the queries
-                if g.write_query_queue:
-                    queries.new_vote(v)
+                if v.valid_thing:
+                    expire_hot(sr)
+                if g.write_query_queue:
+                    queries.new_vote(v)
             # flag search indexer that something has changed
             tc.changed(thing)
     @Json
     @validate(VUser(),
               VModhash(),
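In this hunk the liked/disliked bookkeeping, the hot-listing expiry, and the query-queue write are all gathered inside the isinstance(thing, Link) branch of the vote handler: the cached hot listing is expired only for a valid vote, and the vote is queued only when write_query_queue is enabled (the expire_hot call arrives here from the Vote model, removed in the last hunk of this commit). A runnable toy version of that ordering, using stub objects rather than r2's models:

    # Stubs only: record the order of the post-vote side effects on a link.
    events = []

    class Stub(object):
        def __init__(self, **kw):
            self.__dict__.update(kw)

    def set_last_modified(user, key):
        events.append(('last_modified', key))

    def expire_hot(sr):
        events.append(('expire_hot', sr.name))

    def new_vote(v):
        events.append(('new_vote', v._id))

    def after_link_vote(user, sr, v, write_query_queue):
        set_last_modified(user, 'liked')
        set_last_modified(user, 'disliked')
        if v.valid_thing:            # only real (non-spam) votes expire the cache
            expire_hot(sr)
        if write_query_queue:        # queue the vote for the precomputed listings
            new_vote(v)

    after_link_vote('bob', Stub(name='programming'),
                    Stub(_id=1, valid_thing=True), write_query_queue=True)
    assert [e[0] for e in events] == ['last_modified', 'last_modified',
                                      'expire_hot', 'new_vote']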

View File

@@ -194,7 +194,7 @@ class HotController(FixListing, ListingController):
     def query(self):
         #no need to worry when working from the cache
         if g.use_query_cache or c.site == Default:
-            fix_listing = False
+            self.fix_listing = False
         if c.site == Default:
             user = c.user if c.user_is_loggedin else None
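The one-line change above is a local-versus-attribute fix: the bare assignment fix_listing = False only bound a local name inside query(), so the flag that the FixListing machinery presumably consults never changed when results came from the query cache; self.fix_listing = False actually updates it. Toy classes (not the real controllers) showing the difference:

    class Listing(object):
        fix_listing = True

        def query_buggy(self):
            fix_listing = False          # new local name, discarded on return
            return fix_listing

        def query_fixed(self):
            self.fix_listing = False     # updates the attribute other code reads
            return self.fix_listing

    listing = Listing()
    listing.query_buggy()
    assert listing.fix_listing is True   # the flag never changed
    listing.query_fixed()
    assert listing.fix_listing is False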

View File

@@ -12,11 +12,16 @@ from pylons import g
 query_cache = g.query_cache
 precompute_limit = 1000
-db_sorts = dict(hot = desc('_hot'),
-                new = desc('_date'),
-                top = desc('_score'),
-                controversial = desc('_controversy'),
-                old = asc('_date'))
+db_sorts = dict(hot = (desc, '_hot'),
+                new = (desc, '_date'),
+                top = (desc, '_score'),
+                controversial = (desc, '_controversy'),
+                old = (asc, '_date'))
+
+def db_sort(sort):
+    cls, col = db_sorts[sort]
+    return cls(col)
 db_times = dict(all = None,
                 hour = Thing.c._date >= timeago('1 hour'),
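This is the fix named in the commit message. Previously db_sorts mapped each sort name to a single module-level sort object, so every query built from the dict shared, and evidently could overwrite, the same global instance; storing (constructor, column) pairs and building the sort inside db_sort() hands each query its own object. A toy demonstration with a stand-in Sort class instead of the real asc()/desc() objects:

    class Sort(object):
        def __init__(self, col):
            self.col = col

    # old style: one module-level instance per sort name
    old_db_sorts = dict(hot=Sort('_hot'))

    # new style: defer construction to a tiny factory
    new_db_sorts = dict(hot=(Sort, '_hot'))

    def db_sort(name):
        cls, col = new_db_sorts[name]
        return cls(col)

    s1, s2 = old_db_sorts['hot'], old_db_sorts['hot']
    s1.col = '_date'                # one query adjusts its sort...
    assert s2.col == '_date'        # ...and overwrites it for every other query

    s3, s4 = db_sort('hot'), db_sort('hot')
    s3.col = '_date'
    assert s4.col == '_hot'         # fresh objects, no cross-talk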
@@ -114,7 +119,7 @@ def merge_results(*results):
 def get_links(sr, sort, time):
     """General link query for a subreddit."""
     q = Link._query(Link.c.sr_id == sr._id,
-                    sort = db_sorts[sort])
+                    sort = db_sort(sort))
     if time != 'all':
         q._filter(db_times[time])
     return CachedResults(q)
@@ -123,7 +128,7 @@ def user_query(kind, user, sort, time):
     """General profile-page query."""
     q = kind._query(kind.c.author_id == user._id,
                     kind.c._spam == (True, False),
-                    sort = db_sorts[sort])
+                    sort = db_sort(sort))
     if time != 'all':
         q._filter(db_times[time])
     return CachedResults(q)
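The only change in these two helpers is swapping db_sorts[sort] for db_sort(sort); the surrounding query construction, including the time filter, is untouched. For readers new to that filter, a loose sketch of the db_times idea, with plain datetimes and callables standing in for timeago, Thing.c and _filter:

    from datetime import datetime, timedelta

    def timeago(**kw):
        # stand-in for reddit's string-based timeago('1 hour')
        return datetime.utcnow() - timedelta(**kw)

    db_times = dict(all=None,
                    hour=lambda date: date >= timeago(hours=1))

    def filter_by_time(dates, time):
        cutoff = db_times[time]
        if time != 'all':               # 'all' maps to None and is skipped
            return [d for d in dates if cutoff(d)]
        return dates

    now = datetime.utcnow()
    sample = [now, now - timedelta(days=2)]
    assert filter_by_time(sample, 'all') == sample
    assert filter_by_time(sample, 'hour') == [now]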

View File

@@ -363,7 +363,7 @@ class FriendsSR(FakeSubreddit):
             raise UserRequiredException
         q = Link._query(self.c.author_id == c.user.friends,
-                        sort = queries.db_sorts[sort])
+                        sort = queries.db_sort(sort))
         if time != 'all':
             q._filter(queries.db_times[time])
         return q
@@ -381,7 +381,7 @@ class AllSR(FakeSubreddit):
     def get_links(self, sort, time):
         from r2.models import Link
         from r2.lib.db import queries
-        q = Link._query(sort = queries.db_sorts[sort])
+        q = Link._query(sort = queries.db_sort(sort))
         if time != 'all':
             q._filter(queries.db_times[time])
         return q
@@ -408,7 +408,7 @@ class DefaultSR(FakeSubreddit):
             return queries.merge_results(*results)
         else:
-            q = Link._query(sort = queries.db_sorts[sort])
+            q = Link._query(sort = queries.db_sort(sort))
             if time != 'all':
                 q._filter(queries.db_times[time])
             return q
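These three hunks update the remaining call sites, FriendsSR, AllSR and DefaultSR, to the same factory. A self-contained sketch with stub classes (not the r2 models) of what such a get_links call site looks like after the change:

    class FakeSort(object):
        def __init__(self, col):
            self.col = col

    def db_sort(name):
        # fresh sort object on every call, mirroring queries.db_sort
        return FakeSort({'hot': '_hot', 'new': '_date'}[name])

    class FakeQuery(object):
        def __init__(self, sort):
            self.sort, self.rules = sort, []
        def _filter(self, rule):
            self.rules.append(rule)

    class AllSR(object):
        def get_links(self, sort, time):
            q = FakeQuery(sort=db_sort(sort))
            if time != 'all':
                q._filter(('since', time))
            return q

    sr = AllSR()
    q1, q2 = sr.get_links('hot', 'day'), sr.get_links('hot', 'all')
    assert q1.sort is not q2.sort      # independent sort objects per query
    assert q1.rules == [('since', 'day')] and q2.rules == []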

View File

@@ -116,10 +116,6 @@ class Vote(MultiRelation('vote',
             if sub._id != obj.author_id:
                 incr_counts([sr])
-            #expire the sr
-            if kind == 'link' and v.valid_thing:
-                expire_hot(sr)
         return v
     #TODO make this generic and put on multirelation?
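With the expiry moved into the API controller (second hunk of this commit), the Vote model no longer touches the listing cache at all; it just records the vote and returns it. A small sketch with stand-in classes, not r2.models.Vote, of the resulting split of responsibilities:

    expired = []

    def expire_hot(sr):
        expired.append(sr)

    class Vote(object):
        def __init__(self, valid_thing):
            self.valid_thing = valid_thing

        @classmethod
        def vote(cls, sub, obj, dir):
            # record the relation, run validity checks (elided) ...
            return cls(valid_thing=True)   # ... but touch no listing caches

    def controller_cast_vote(user, link, dir, sr):
        v = Vote.vote(user, link, dir)
        if v.valid_thing:
            expire_hot(sr)                 # the controller owns the expiry now
        return v

    controller_cast_vote('alice', 'some_link', 1, 'reddit.com')
    assert expired == ['reddit.com']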