Revert "queries: Move /r/all/comments to new query cache (try 3)."

This reverts commit 13497f6f08.
Neil Williams
2012-12-14 12:52:19 -08:00
parent d6924c8bcc
commit 2ef7985618
2 changed files with 8 additions and 24 deletions


@@ -33,8 +33,7 @@ from r2.models.promo import PROMOTE_STATUS, get_promote_srid
 from r2.models.query_cache import (cached_query, merged_cached_query,
                                    CachedQuery, CachedQueryMutator,
                                    MergedCachedQuery)
-from r2.models.query_cache import (UserQueryCache, SubredditQueryCache,
-                                   HotQueryCache)
+from r2.models.query_cache import UserQueryCache, SubredditQueryCache
 from r2.models.query_cache import ThingTupleComparator
 from r2.models.last_modified import LastModified
 from r2.lib.utils import SimpleSillyStub
@@ -470,10 +469,10 @@ def user_query(kind, user_id, sort, time):
         q._filter(db_times[time])
     return make_results(q)
 
-@cached_query(HotQueryCache)
 def get_all_comments():
     """the master /comments page"""
-    return Comment._query(sort=desc('_date'))
+    q = Comment._query(sort = desc('_date'))
+    return make_results(q)
 
 def get_sr_comments(sr):
     return _get_sr_comments(sr._id)
@@ -840,7 +839,7 @@ def new_comment(comment, inbox_rels):
     if comment._deleted:
         job_key = "delete_items"
         job.append(get_sr_comments(sr))
-        m.delete(get_all_comments(), [comment])
+        job.append(get_all_comments())
     else:
         job_key = "insert_items"
         if comment._spam:
@@ -1166,7 +1165,6 @@ def _common_del_ban(things):
 
 def unban(things, insert=True):
-    query_cache_inserts = []
     query_cache_deletes = []
 
     by_srid, srs = _by_srid(things)
@@ -1204,8 +1202,8 @@ def unban(things, insert=True):
             query_cache_deletes.append([get_spam_links(sr), links])
 
         if insert and comments:
-            query_cache_inserts.append((get_all_comments(), comments))
-            add_queries([get_sr_comments(sr)], insert_items=comments)
+            add_queries([get_all_comments(), get_sr_comments(sr)],
+                        insert_items=comments)
             query_cache_deletes.append([get_spam_comments(sr), comments])
 
         if links:
@@ -1216,9 +1214,6 @@ def unban(things, insert=True):
             query_cache_deletes.append([get_spam_filtered_comments(sr), comments])
 
     with CachedQueryMutator() as m:
-        for q, inserts in query_cache_inserts:
-            m.insert(q, inserts)
-
         for q, deletes in query_cache_deletes:
             m.delete(q, deletes)
@@ -1324,8 +1319,8 @@ def run_new_comments(limit=1000):
         fnames = [msg.body for msg in msgs]
         comments = Comment._by_fullname(fnames, data=True, return_dict=False)
 
-        with CachedQueryMutator() as m:
-            m.insert(get_all_comments(), comments)
+        add_queries([get_all_comments()],
+                    insert_items=comments)
 
         bysrid = _by_srid(comments, False)
         for srid, sr_comments in bysrid.iteritems():


@@ -554,14 +554,3 @@ class UserQueryCache(_BaseQueryCache):
 class SubredditQueryCache(_BaseQueryCache):
     """A query cache column family for subreddit-keyed queries."""
     _use_db = True
-
-
-class HotQueryCache(_BaseQueryCache):
-    """A query cache for very hot single-key queries.
-
-    Some queries such as all_comments appear to cause rowcache related issues
-    due to the high volume of writes happening to the single row. This column
-    family is intended to house such queries. The row cache should be disabled
-    here to prevent these issues.
-    """
-    _use_db = True
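
Editor's note: the revert amounts to switching /r/all/comments between two update styles visible in the hunks above: the older add_queries() path, which applies changes to each cached query immediately, and the CachedQueryMutator path, which collects inserts and deletes and flushes them as a batch. The sketch below is a self-contained, in-memory toy of that batching pattern; add_queries, CachedQueryMutator, insert, and delete are names borrowed from the diff for illustration only, not reddit's actual implementations.

# Toy illustration of the two update styles this revert toggles between.
# The real code writes to Cassandra; here a plain dict stands in for the
# query cache so the example can run anywhere.

CACHE = {}  # query name -> list of item ids (stand-in for the query cache)


def add_queries(queries, insert_items=(), delete_items=()):
    """Old style: apply each query's changes immediately, one query at a time."""
    for q in queries:
        rows = CACHE.setdefault(q, [])
        for item in insert_items:
            if item not in rows:
                rows.append(item)
        for item in delete_items:
            if item in rows:
                rows.remove(item)


class CachedQueryMutator(object):
    """New style: collect inserts/deletes and flush them in one batch on exit."""

    def __init__(self):
        self.inserts = []
        self.deletes = []

    def insert(self, query, items):
        self.inserts.append((query, items))

    def delete(self, query, items):
        self.deletes.append((query, items))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        if exc_type is None:
            for query, items in self.inserts:
                add_queries([query], insert_items=items)
            for query, items in self.deletes:
                add_queries([query], delete_items=items)


if __name__ == "__main__":
    # old style, as restored by this revert
    add_queries(["all_comments"], insert_items=["c1", "c2"])

    # new style, as removed by this revert
    with CachedQueryMutator() as m:
        m.insert("all_comments", ["c3"])
        m.delete("all_comments", ["c1"])

    print(CACHE)  # {'all_comments': ['c2', 'c3']}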