Mirror of https://github.com/reddit-archive/reddit.git, synced 2026-01-26 15:28:37 -05:00.
Move /comments to the new query cache.
This is another attempt at doing so. We are now using the Leveled Compaction strategy for SubredditQueryCache and have drastically reduced gc_grace_seconds (to half an hour). This should ameliorate the tombstone-pileup issues we ran into last time.
This commit is contained in:
@@ -516,10 +516,10 @@ def user_query(kind, user_id, sort, time):
|
||||
q._filter(db_times[time])
|
||||
return make_results(q)
|
||||
|
||||
@cached_query(SubredditQueryCache)
def get_all_comments():
    """The master /comments page: every comment sitewide, newest first.

    Returns the raw Comment query; the @cached_query decorator wraps it
    into a cached query backed by SubredditQueryCache, so the old
    make_results()/q._filter() plumbing is no longer needed here.
    """
    # NOTE(review): the diff view fused the old and new bodies, leaving an
    # unreachable second return. This is the post-commit body ("Move
    # /comments to new query cache"); the pre-commit make_results() path
    # has been dropped.
    return Comment._query(sort=desc('_date'))
|
||||
|
||||
def get_sr_comments(sr):
    """Return the cached per-subreddit comments query for *sr*.

    Thin convenience wrapper: delegates to _get_sr_comments using the
    subreddit's fullname id.
    """
    sr_id = sr._id
    return _get_sr_comments(sr_id)
|
||||
@@ -757,7 +757,7 @@ def new_comment(comment, inbox_rels):
|
||||
if comment._deleted:
|
||||
job_key = "delete_items"
|
||||
job.append(get_sr_comments(sr))
|
||||
job.append(get_all_comments())
|
||||
m.delete(get_all_comments(), [comment])
|
||||
else:
|
||||
job_key = "insert_items"
|
||||
if comment._spam:
|
||||
@@ -1088,6 +1088,7 @@ def _common_del_ban(things):
|
||||
|
||||
|
||||
def unban(things, insert=True):
|
||||
query_cache_inserts = []
|
||||
query_cache_deletes = []
|
||||
|
||||
by_srid, srs = _by_srid(things)
|
||||
@@ -1125,7 +1126,8 @@ def unban(things, insert=True):
|
||||
query_cache_deletes.append([get_spam_links(sr), links])
|
||||
|
||||
if insert and comments:
|
||||
add_queries([get_all_comments(), get_sr_comments(sr)],
|
||||
query_cache_inserts.append((get_all_comments(), comments))
|
||||
add_queries([get_sr_comments(sr)],
|
||||
insert_items=comments)
|
||||
query_cache_deletes.append([get_spam_comments(sr), comments])
|
||||
|
||||
@@ -1137,6 +1139,9 @@ def unban(things, insert=True):
|
||||
query_cache_deletes.append([get_spam_filtered_comments(sr), comments])
|
||||
|
||||
with CachedQueryMutator() as m:
|
||||
for q, inserts in query_cache_inserts:
|
||||
m.insert(q, inserts)
|
||||
|
||||
for q, deletes in query_cache_deletes:
|
||||
m.delete(q, deletes)
|
||||
|
||||
@@ -1254,8 +1259,8 @@ def run_new_comments(limit=1000):
|
||||
fnames = [msg.body for msg in msgs]
|
||||
|
||||
comments = Comment._by_fullname(fnames, data=True, return_dict=False)
|
||||
add_queries([get_all_comments()],
|
||||
insert_items=comments)
|
||||
with CachedQueryMutator() as m:
|
||||
m.insert(get_all_comments(), comments)
|
||||
|
||||
bysrid = _by_srid(comments, False)
|
||||
for srid, sr_comments in bysrid.iteritems():
|
||||
|
||||
Reference in New Issue
Block a user