cache comment listings in the permacache
@@ -51,6 +51,7 @@ from r2.lib.jsontemplates import api_type
from r2.lib import cssfilter
from r2.lib import tracking
from r2.lib.media import force_thumbnail, thumbnail_url
from r2.lib.comment_tree import add_comment, delete_comment

from simplejson import dumps

@@ -531,6 +532,7 @@ class ApiController(RedditController):
        #comments have special delete tasks
        elif isinstance(thing, Comment):
            thing._delete()
            delete_comment(thing)
            if g.use_query_cache:
                queries.new_comment(thing, None)

@@ -631,6 +633,9 @@ class ApiController(RedditController):
        set_last_modified(c.user, 'commented')
        set_last_modified(link, 'comments')

        #update the comment cache
        add_comment(item)

        #update the queries
        if g.write_query_queue:
            if is_message:

@@ -26,6 +26,7 @@ from datetime import timedelta
from r2.lib.cache import LocalCache, Memcache, CacheChain
from r2.lib.db.stats import QueryStats
from r2.lib.translation import _get_languages
from r2.lib.lock import make_lock_factory

class Globals(object):

@@ -111,6 +112,7 @@ class Globals(object):
        mc = Memcache(self.memcaches)
        self.cache = CacheChain((LocalCache(), mc))
        self.permacache = Memcache(self.permacaches)
        self.make_lock = make_lock_factory(mc)

        self.rec_cache = Memcache(self.rec_cache)

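The hunk above binds the lock factory to the memcache client once, so callers only ever pass a key (g.make_lock(key) in the new comment_tree module). A minimal, standalone sketch of that factory pattern; DictLock and the dict backend are stand-ins for illustration, not reddit code:

class DictLock(object):
    """Stand-in for r2.lib.lock.MemcacheLock; only the factory shape matters here."""
    def __init__(self, key, backend):
        self.key = key
        self.backend = backend
    def __enter__(self):
        self.backend[self.key] = True      # pretend to take the lock
    def __exit__(self, type, value, tb):
        self.backend.pop(self.key, None)   # release it

def make_lock_factory(backend):
    # bind the shared backend once; callers pass only a key, mirroring
    # self.make_lock = make_lock_factory(mc) in the hunk above
    def factory(key):
        return DictLock(key, backend)
    return factory

backend = {}
make_lock = make_lock_factory(backend)     # plays the role of g.make_lock
with make_lock('comment_lock_1234'):
    assert 'comment_lock_1234' in backend  # held inside the block
assert 'comment_lock_1234' not in backend  # released on exit
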
r2/r2/lib/comment_tree.py (new file, 129 lines)
@@ -0,0 +1,129 @@
# The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is Reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of the
# Original Code is CondeNet, Inc.
#
# All portions of the code written by CondeNet are Copyright (c) 2006-2008
# CondeNet, Inc. All Rights Reserved.
################################################################################
from __future__ import with_statement

from pylons import g

from r2.models import *

def comments_key(link_id):
    return 'comments_' + str(link_id)

def lock_key(link_id):
    return 'comment_lock_' + str(link_id)

def add_comment(comment):
    with g.make_lock(lock_key(comment.link_id)):
        add_comment_nolock(comment)

def add_comment_nolock(comment):
    cm_id = comment._id
    p_id = comment.parent_id if hasattr(comment, 'parent_id') else None
    link_id = comment.link_id

    cids, comment_tree, depth, num_children = link_comments(link_id)

    #add to comment list
    cids.append(comment._id)

    #add to tree
    comment_tree.setdefault(p_id, []).append(cm_id)

    #add to depth
    depth[cm_id] = depth[p_id] + 1 if p_id else 0

    #update children
    num_children[cm_id] = 0

    #dfs to find the list of parents for the new comment
    def find_parents(top = None):
        children = comment_tree.get(top, ())
        if cm_id in children:
            return []
        else:
            for c in children:
                parents = find_parents(c)
                if parents is not None:
                    parents.append(c)
                    return parents

    #if this comment had a parent, find the parent's parents
    if p_id:
        for p_id in find_parents():
            num_children[p_id] += 1

    g.permacache.set(comments_key(link_id),
                     (cids, comment_tree, depth, num_children))

def delete_comment(comment):
    #nothing really to do here, atm
    pass

def link_comments(link_id):
    key = comments_key(link_id)
    r = g.permacache.get(key)
    if r:
        return r
    else:
        with g.make_lock(lock_key(link_id)):
            r = load_link_comments(link_id)
            g.permacache.set(key, r)
        return r

def load_link_comments(link_id):
    q = Comment._query(Comment.c.link_id == link_id,
                       Comment.c._deleted == (True, False),
                       Comment.c._spam == (True, False),
                       data = True)
    comments = list(q)
    cids = [c._id for c in comments]

    #make a tree
    comment_tree = {}
    for cm in comments:
        p_id = cm.parent_id if hasattr(cm, 'parent_id') else None
        comment_tree.setdefault(p_id, []).append(cm._id)

    #calculate the depths
    depth = {}
    level = 0
    cur_level = comment_tree.get(None, ())
    while cur_level:
        next_level = []
        for cm_id in cur_level:
            depth[cm_id] = level
            next_level.extend(comment_tree.get(cm_id, ()))
        cur_level = next_level
        level += 1

    #calc the number of children
    num_children = {}
    for cm_id in cids:
        num = 0
        todo = [cm_id]
        while todo:
            more = comment_tree.get(todo.pop(0), ())
            num += len(more)
            todo.extend(more)
        num_children[cm_id] = num

    return cids, comment_tree, depth, num_children

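For each link, the permacache entry is the 4-tuple returned above: the flat list of comment ids, a parent-id to child-ids map (keyed by None for top-level comments), a per-comment depth map, and a per-comment descendant count. A standalone sketch with made-up integer ids, mirroring the bookkeeping add_comment_nolock performs when a reply arrives:

# Hypothetical ids; this mirrors the tuple cached under comments_key(link_id).
cids         = [1, 2, 3]               # all comment ids on the link
comment_tree = {None: [1, 3], 1: [2]}  # parent id -> child ids (None = top level)
depth        = {1: 0, 3: 0, 2: 1}      # comment id -> depth in the tree
num_children = {1: 1, 2: 0, 3: 0}      # comment id -> count of all descendants

# A new comment 4 replying to comment 2 is folded in the same way
# add_comment_nolock does, without re-querying every comment on the link:
new_id, parent_id = 4, 2
cids.append(new_id)
comment_tree.setdefault(parent_id, []).append(new_id)
depth[new_id] = depth[parent_id] + 1 if parent_id else 0
num_children[new_id] = 0
# every ancestor gains one descendant (find_parents walks this chain)
for ancestor in (2, 1):
    num_children[ancestor] += 1

assert comment_tree == {None: [1, 3], 1: [2], 2: [4]}
assert depth[4] == 2 and num_children[1] == 2
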
r2/r2/lib/lock.py (new file, 74 lines)
@@ -0,0 +1,74 @@
# The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is Reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of the
# Original Code is CondeNet, Inc.
#
# All portions of the code written by CondeNet are Copyright (c) 2006-2008
# CondeNet, Inc. All Rights Reserved.
################################################################################

from __future__ import with_statement
from time import sleep
from datetime import datetime

from pylons import c

class TimeoutExpired(Exception): pass

class MemcacheLock(object):
    """A simple global lock based on the memcache 'add' command. We
    attempt to grab a lock by 'adding' the lock name. If the response
    is True, we have the lock. If it's False, someone else has it."""

    def __init__(self, key, cache, time = 30, timeout = 30):
        self.key = key
        self.cache = cache
        self.time = time
        self.timeout = timeout
        self.have_lock = False

    def __enter__(self):
        start = datetime.now()

        if not c.locks:
            c.locks = {}

        #if this thread already has this lock, move on
        if c.locks.get(self.key):
            return

        #try and fetch the lock, looping until it's available
        while not self.cache.add(self.key, 1, time = self.time):
            if (datetime.now() - start).seconds > self.timeout:
                raise TimeoutExpired
            sleep(.1)

        #tell this thread we have this lock so we can avoid deadlocks
        #of requests for the same lock in the same thread
        c.locks[self.key] = True
        self.have_lock = True

    def __exit__(self, type, value, tb):
        #only release the lock if we gained it in the first place
        if self.have_lock:
            self.cache.delete(self.key)
            del c.locks[self.key]

def make_lock_factory(cache):
    def factory(key):
        return MemcacheLock(key, cache)
    return factory

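The lock depends only on two memcache operations: add, which succeeds only when the key is absent, and delete. A standalone sketch of those semantics with an in-memory stand-in for the memcache client and the per-request c.locks re-entrancy table omitted; FakeCache and SimpleCacheLock are illustrations, not part of the codebase:

from time import sleep
from datetime import datetime

class FakeCache(object):
    """In-memory stand-in for the memcache client (illustration only)."""
    def __init__(self):
        self.data = {}
    def add(self, key, val, time = 0):
        # memcached 'add': store only if the key is absent, else fail
        if key in self.data:
            return False
        self.data[key] = val
        return True
    def delete(self, key):
        self.data.pop(key, None)

class SimpleCacheLock(object):
    """MemcacheLock minus the per-request c.locks bookkeeping."""
    def __init__(self, key, cache, time = 30, timeout = 30):
        self.key, self.cache = key, cache
        self.time, self.timeout = time, timeout
    def __enter__(self):
        start = datetime.now()
        # spin until 'add' wins the key or the timeout expires
        while not self.cache.add(self.key, 1, time = self.time):
            if (datetime.now() - start).seconds > self.timeout:
                raise Exception('lock timeout')  # the real module raises TimeoutExpired
            sleep(.1)
    def __exit__(self, type, value, tb):
        self.cache.delete(self.key)

cache = FakeCache()
with SimpleCacheLock('comment_lock_1234', cache, timeout = 1):
    # a second holder cannot 'add' the same key while we hold it
    assert cache.add('comment_lock_1234', 1) is False
assert cache.add('comment_lock_1234', 1) is True   # free again after release
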
@@ -83,6 +83,7 @@ def get_promoted_cached():
    return [ x._fullname for x in links if x not in expired_links ]

def get_promoted():
    return ()
    return get_promoted_cached()

def promote_builder_wrapper(alternative_wrapper):

@@ -33,6 +33,7 @@ from r2.lib.wrapped import Wrapped
from r2.lib import utils
from r2.lib.db import operators
from r2.lib.cache import sgm
from r2.lib.comment_tree import link_comments

from copy import deepcopy, copy

@@ -416,51 +417,9 @@ class CommentBuilder(Builder):
        if hasattr(i, 'child'):
            for j in self.item_iter(i.child.things):
                yield j

    @staticmethod
    @memoize('builder.link_comments2')
    def link_comments(link_id):
        q = Comment._query(Comment.c.link_id == link_id,
                           Comment.c._deleted == (True, False),
                           Comment.c._spam == (True, False),
                           data = True)
        comments = list(q)
        cids = [c._id for c in comments]

        #make a tree
        comment_tree = {}
        for cm in comments:
            p_id = cm.parent_id if hasattr(cm, 'parent_id') else None
            comment_tree.setdefault(p_id, []).append(cm._id)

        #calculate the depths
        depth = {}
        level = 0
        cur_level = comment_tree.get(None, ())
        while cur_level:
            next_level = []
            for cm_id in cur_level:
                depth[cm_id] = level
                next_level.extend(comment_tree.get(cm_id, ()))
            cur_level = next_level
            level += 1

        #calc the number of children
        num_children = {}
        for cm_id in cids:
            num = 0
            todo = [cm_id]
            while todo:
                more = comment_tree.get(todo.pop(0), ())
                num += len(more)
                todo.extend(more)
            num_children[cm_id] = num

        return cids, comment_tree, depth, num_children

    def get_items(self, num, nested = True, starting_depth = 0):
        r = self.link_comments(self.link._id)
        r = link_comments(self.link._id)
        cids, comment_tree, depth, num_children = r
        if cids:
            comments = set(Comment._byID(cids, data = True,

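On the read side, CommentBuilder.get_items now unpacks the tuple returned by the new link_comments instead of the per-process memoized query removed above. A rough standalone sketch, with made-up ids, of how the cached tree and depth map describe a nested listing (the real builder applies its own sorting and wrapping):

cids         = [1, 2, 3, 4]
comment_tree = {None: [1, 3], 1: [2], 2: [4]}
depth        = {1: 0, 3: 0, 2: 1, 4: 2}

def render(parent = None):
    # walk parent -> children, indenting by the cached depth
    for cm_id in comment_tree.get(parent, ()):
        print('    ' * depth[cm_id] + 'comment %d' % cm_id)
        render(cm_id)

render()
# comment 1
#     comment 2
#         comment 4
# comment 3
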