From 6a2a0210e1c32eff56b9ceddf18ba6c3178a45b3 Mon Sep 17 00:00:00 2001
From: Neil Williams
Date: Thu, 9 Aug 2012 17:06:50 -0700
Subject: [PATCH] Move /comments to new query cache.

This is another attempt at doing so. We're now using the Leveled
Compaction strategy for SubredditQueryCache and have drastically
reduced gc_grace_seconds (to half an hour). This should ameliorate
the tombstone pileup issues we ran into last time.
---
 r2/r2/lib/db/queries.py | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/r2/r2/lib/db/queries.py b/r2/r2/lib/db/queries.py
index 4b5ef4c01..811a894ae 100755
--- a/r2/r2/lib/db/queries.py
+++ b/r2/r2/lib/db/queries.py
@@ -516,10 +516,10 @@ def user_query(kind, user_id, sort, time):
     q._filter(db_times[time])
     return make_results(q)
 
+@cached_query(SubredditQueryCache)
 def get_all_comments():
     """the master /comments page"""
-    q = Comment._query(sort = desc('_date'))
-    return make_results(q)
+    return Comment._query(sort=desc('_date'))
 
 def get_sr_comments(sr):
     return _get_sr_comments(sr._id)
@@ -757,7 +757,7 @@ def new_comment(comment, inbox_rels):
         if comment._deleted:
             job_key = "delete_items"
             job.append(get_sr_comments(sr))
-            job.append(get_all_comments())
+            m.delete(get_all_comments(), [comment])
         else:
             job_key = "insert_items"
             if comment._spam:
@@ -1088,6 +1088,7 @@ def _common_del_ban(things):
 
 
 def unban(things, insert=True):
+    query_cache_inserts = []
     query_cache_deletes = []
 
     by_srid, srs = _by_srid(things)
@@ -1125,7 +1126,8 @@ def unban(things, insert=True):
             query_cache_deletes.append([get_spam_links(sr), links])
 
         if insert and comments:
-            add_queries([get_all_comments(), get_sr_comments(sr)],
+            query_cache_inserts.append((get_all_comments(), comments))
+            add_queries([get_sr_comments(sr)],
                         insert_items=comments)
             query_cache_deletes.append([get_spam_comments(sr), comments])
 
@@ -1137,6 +1139,9 @@ def unban(things, insert=True):
             query_cache_deletes.append([get_spam_filtered_comments(sr), comments])
 
     with CachedQueryMutator() as m:
+        for q, inserts in query_cache_inserts:
+            m.insert(q, inserts)
+
         for q, deletes in query_cache_deletes:
             m.delete(q, deletes)
 
@@ -1254,8 +1259,8 @@ def run_new_comments(limit=1000):
         fnames = [msg.body for msg in msgs]
         comments = Comment._by_fullname(fnames, data=True, return_dict=False)
 
-        add_queries([get_all_comments()],
-                    insert_items=comments)
+        with CachedQueryMutator() as m:
+            m.insert(get_all_comments(), comments)
 
         bysrid = _by_srid(comments, False)
         for srid, sr_comments in bysrid.iteritems():
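
Note (not part of the patch): the Cassandra side of this change -- switching
SubredditQueryCache to leveled compaction and cutting gc_grace_seconds to
half an hour -- lives outside queries.py, so it does not appear in the diff
above. Below is a minimal sketch of how those column-family settings might
be applied with pycassa's SystemManager; the server address, the keyspace
name ('reddit'), and the use of pycassa for this step are assumptions for
illustration, not taken from this patch.

    # Sketch of the schema change described in the commit message.
    # Server address and keyspace name are assumed, not from the patch.
    from pycassa.system_manager import SystemManager

    sys_mgr = SystemManager('localhost:9160')
    sys_mgr.alter_column_family(
        'reddit',                # keyspace name (assumed)
        'SubredditQueryCache',
        compaction_strategy='LeveledCompactionStrategy',
        gc_grace_seconds=1800,   # half an hour, per the commit message
    )
    sys_mgr.close()

Leveled compaction merges SSTables more aggressively than the default
size-tiered strategy, so the tombstones written by m.delete() calls are
purged soon after gc_grace_seconds expires instead of accumulating, which
is the pileup the commit message refers to. The trade-off of such a short
grace period is that a node that misses a delete must be repaired within
that half hour, or the deleted column can resurface.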