* removed references to clear_memo

* add _update parameter to memoize function to force them to skip the cache
* make all reads after a write in the same request use the master db
This commit is contained in:
spez
2009-03-04 15:24:47 -08:00
parent 4d35ba17c2
commit e35b519699
15 changed files with 133 additions and 100 deletions

View File

@@ -42,7 +42,6 @@ from r2.lib.menus import CommentSortMenu
from r2.lib.normalized_hot import expire_hot
from r2.lib.captcha import get_iden
from r2.lib.strings import strings
from r2.lib.memoize import clear_memo
from r2.lib.filters import _force_unicode, websafe_json
from r2.lib.db import queries
from r2.lib.media import force_thumbnail, thumbnail_url
@@ -980,13 +979,16 @@ class ApiController(RedditController):
#editing an existing reddit
elif sr.is_moderator(c.user) or c.user_is_admin:
#assume sr existed, or was just built
clear_memo('subreddit._by_domain',
Subreddit, _force_unicode(sr.domain))
old_domain = sr.domain
for k, v in kw.iteritems():
setattr(sr, k, v)
sr._commit()
clear_memo('subreddit._by_domain',
Subreddit, _force_unicode(sr.domain))
#update the domain cache if the domain changed
if sr.domain != old_domain:
Subreddit._by_domain(old_domain, _update = True)
Subreddit._by_domain(sr.domain, _update = True)
# flag search indexer that something has changed
tc.changed(sr)

View File

@@ -667,7 +667,7 @@ class VMenu(Validator):
user_prefs[pref] = sort
c.user.sort_options = user_prefs
user = c.user
utils.worker.do(lambda: user._commit())
user._commit()
return sort

View File

@@ -30,7 +30,7 @@ from sqlalchemy.databases import postgres
from r2.lib.utils import storage, storify, iters, Results, tup, TransSet
from r2.lib.services import AppServiceMonitor
import operators
from pylons import g
from pylons import g, c
dbm = g.dbm
import logging
@@ -306,21 +306,33 @@ def get_read_table(tables):
best_ip = random.choice(all_ips)
return ips[best_ip]
def get_thing_write_table(type_id):
return get_write_table(types_id[type_id].tables)
def get_table(kind, action, tables):
if action == 'write':
#if this is a write, store the kind in the c.use_write_db dict
#so that all future requests use the write db
if not isinstance(c.use_write_db, dict):
c.use_write_db = {}
c.use_write_db[kind] = True
def get_thing_read_table(type_id):
return get_read_table(types_id[type_id].tables)
return get_write_table(tables)
elif action == 'read':
#check to see if we're supposed to use the write db again
if c.use_write_db and c.use_write_db.has_key(kind):
return get_write_table(tables)
else:
return get_read_table(tables)
def get_rel_write_table(rel_type_id):
return get_write_table(rel_types_id[rel_type_id].tables)
def get_thing_table(type_id, action = 'read' ):
return get_table('t' + str(type_id), action,
types_id[type_id].tables)
def get_rel_read_table(rel_type_id):
return get_read_table(rel_types_id[rel_type_id].tables)
def get_rel_table(rel_type_id, action = 'read'):
return get_table('r' + str(rel_type_id), action,
rel_types_id[rel_type_id].tables)
#TODO does the type actually exist?
def make_thing(type_id, ups, downs, date, deleted, spam, id=None):
table = get_thing_write_table(type_id)[0]
table = get_thing_table(type_id, action = 'write')[0]
params = dict(ups = ups, downs = downs,
date = date, deleted = deleted, spam = spam)
@@ -346,7 +358,7 @@ def make_thing(type_id, ups, downs, date, deleted, spam, id=None):
def set_thing_props(type_id, thing_id, **props):
table = get_thing_write_table(type_id)[0]
table = get_thing_table(type_id, action = 'write')[0]
if not props:
return
@@ -361,7 +373,7 @@ def set_thing_props(type_id, thing_id, **props):
do_update(table)
def incr_thing_prop(type_id, thing_id, prop, amount):
table = get_thing_write_table(type_id)[0]
table = get_thing_table(type_id, action = 'write')[0]
def do_update(t):
transactions.add_engine(t.engine)
@@ -376,7 +388,7 @@ class CreationError(Exception): pass
#TODO does the type exist?
#TODO do the things actually exist?
def make_relation(rel_type_id, thing1_id, thing2_id, name, date=None):
table = get_rel_write_table(rel_type_id)[0]
table = get_rel_table(rel_type_id, action = 'write')[0]
transactions.add_engine(table.engine)
if not date: date = datetime.now(g.tz)
@@ -394,7 +406,7 @@ def make_relation(rel_type_id, thing1_id, thing2_id, name, date=None):
def set_rel_props(rel_type_id, rel_id, **props):
t = get_rel_write_table(rel_type_id)[0]
t = get_rel_table(rel_type_id, action = 'write')[0]
if not props:
return
@@ -500,19 +512,19 @@ def get_data(table, thing_id):
return res
def set_thing_data(type_id, thing_id, **vals):
table = get_thing_write_table(type_id)[1]
table = get_thing_table(type_id, action = 'write')[1]
return set_data(table, type_id, thing_id, **vals)
def incr_thing_data(type_id, thing_id, prop, amount):
table = get_thing_write_table(type_id)[1]
table = get_thing_table(type_id, action = 'write')[1]
return incr_data_prop(table, type_id, thing_id, prop, amount)
def get_thing_data(type_id, thing_id):
table = get_thing_read_table(type_id)[1]
table = get_thing_table(type_id)[1]
return get_data(table, thing_id)
def get_thing(type_id, thing_id):
table = get_thing_read_table(type_id)[0]
table = get_thing_table(type_id)[0]
r, single = fetch_query(table, table.c.thing_id, thing_id)
#if single, only return one storage, otherwise make a dict
@@ -530,19 +542,19 @@ def get_thing(type_id, thing_id):
return res
def set_rel_data(rel_type_id, thing_id, **vals):
table = get_rel_write_table(rel_type_id)[3]
table = get_rel_table(rel_type_id, action = 'write')[3]
return set_data(table, rel_type_id, thing_id, **vals)
def incr_rel_data(rel_type_id, thing_id, prop, amount):
table = get_rel_write_table(rel_type_id)[3]
table = get_rel_table(rel_type_id, action = 'write')[3]
return incr_data_prop(table, rel_type_id, thing_id, prop, amount)
def get_rel_data(rel_type_id, rel_id):
table = get_rel_read_table(rel_type_id)[3]
table = get_rel_table(rel_type_id)[3]
return get_data(table, rel_id)
def get_rel(rel_type_id, rel_id):
r_table = get_rel_read_table(rel_type_id)[0]
r_table = get_rel_table(rel_type_id)[0]
r, single = fetch_query(r_table, r_table.c.rel_id, rel_id)
res = {} if not single else None
@@ -558,7 +570,7 @@ def get_rel(rel_type_id, rel_id):
return res
def del_rel(rel_type_id, rel_id):
tables = get_rel_write_table(rel_type_id)
tables = get_rel_table(rel_type_id, action = 'write')
table = tables[0]
data_table = tables[3]
@@ -675,7 +687,7 @@ def translate_thing_value(rval):
#will assume parameters start with a _ for consistency
def find_things(type_id, get_cols, sort, limit, constraints):
table = get_thing_read_table(type_id)[0]
table = get_thing_table(type_id)[0]
constraints = deepcopy(constraints)
s = sa.select([table.c.thing_id.label('thing_id')])
@@ -716,7 +728,7 @@ def translate_data_value(alias, op):
#TODO sort by data fields
#TODO sort by id wants thing_id
def find_data(type_id, get_cols, sort, limit, constraints):
t_table, d_table = get_thing_read_table(type_id)
t_table, d_table = get_thing_table(type_id)
constraints = deepcopy(constraints)
used_first = False
@@ -778,7 +790,7 @@ def find_data(type_id, get_cols, sort, limit, constraints):
def find_rels(rel_type_id, get_cols, sort, limit, constraints):
tables = get_rel_read_table(rel_type_id)
tables = get_rel_table(rel_type_id)
r_table, t1_table, t2_table, d_table = tables
constraints = deepcopy(constraints)

View File

@@ -19,13 +19,11 @@
# All portions of the code written by CondeNet are Copyright (c) 2006-2009
# CondeNet, Inc. All Rights Reserved.
################################################################################
from r2.lib.memoize import memoize, clear_memo
from r2.lib.memoize import memoize
def UserRel(name, relation):
all_memo_str = name + '.all_ids'
reverse_memo_str = name + 'reverse'
exists_name = 'is_' + name
exists_fn_name = 'is_' + name
def userrel_exists(self, user):
if not user:
return False
@@ -35,33 +33,41 @@ def UserRel(name, relation):
if r:
return r
add_fn_name = 'add_' + name
def userrel_add(self, user):
fn = getattr(self, exists_name)
fn = getattr(self, exists_fn_name)
if not fn(user):
s = relation(self, user, name)
s._commit()
clear_memo(all_memo_str, self)
clear_memo(reverse_memo_str, user)
#update caches
getattr(self, ids_fn_name)(_update = True)
getattr(self, reverse_ids_fn_name)(user, _update = True)
return s
remove_fn_name = 'remove_' + name
def userrel_remove(self, user):
fn = getattr(self, exists_name)
fn = getattr(self, exists_fn_name)
s = fn(user)
if s:
s._delete()
clear_memo(all_memo_str, self)
clear_memo(reverse_memo_str, user)
#update caches
getattr(self, ids_fn_name)(_update = True)
getattr(self, reverse_ids_fn_name)(user, _update = True)
return True
@memoize(all_memo_str)
ids_fn_name = name + '_ids'
@memoize(ids_fn_name)
def userrel_ids(self):
q = relation._query(relation.c._thing1_id == self._id,
relation.c._name == name)
#removed set() here, shouldn't be required
return [r._thing2_id for r in q]
reverse_ids_fn_name = 'reverse_' + name + '_ids'
@staticmethod
@memoize(reverse_memo_str)
@memoize(reverse_ids_fn_name)
def reverse_ids(user):
q = relation._query(relation.c._thing2_id == user._id,
relation.c._name == name)
@@ -69,11 +75,11 @@ def UserRel(name, relation):
class UR: pass
setattr(UR, 'is_' + name, userrel_exists)
setattr(UR, 'add_' + name, userrel_add)
setattr(UR, 'remove_' + name, userrel_remove)
setattr(UR, name + '_ids', userrel_ids)
setattr(UR, 'reverse_' + name + '_ids', reverse_ids)
setattr(UR, exists_fn_name, userrel_exists)
setattr(UR, add_fn_name, userrel_add)
setattr(UR, remove_fn_name, userrel_remove)
setattr(UR, ids_fn_name, userrel_ids)
setattr(UR, reverse_ids_fn_name, reverse_ids)
return UR

View File

@@ -30,7 +30,7 @@ from datetime import datetime
def find_tz():
q = Link._query(sort = desc('_hot'), limit = 1)
link = list(q)[0]
t = tdb_sql.get_thing_read_table(Link._type_id)[0]
t = tdb_sql.get_thing_table(Link._type_id)[0]
s = sa.select([sa.func.hot(t.c.ups, t.c.downs, t.c.date),
t.c.thing_id],

View File

@@ -28,9 +28,19 @@ def memoize(iden, time = 0):
def memoize_fn(fn):
from r2.lib.memoize import NoneResult
def new_fn(*a, **kw):
#if the keyword param _update == True, the cache will be
#overwritten no matter what
update = False
if kw.has_key('_update'):
update = kw['_update']
del kw['_update']
key = _make_key(iden, a, kw)
#print 'CHECKING', key
res = cache.get(key)
res = None if update else cache.get(key)
if res is None:
res = fn(*a, **kw)
if res is None:
@@ -63,7 +73,7 @@ def _make_key(iden, a, kw):
return (_conv(iden)
+ str([_conv(x) for x in a])
+ str(dict((_conv(x),_conv(y)) for (x,y) in kw.iteritems())))
+ str([(_conv(x),_conv(y)) for (x,y) in sorted(kw.iteritems())]))
@memoize('test')
def test(x, y):

View File

@@ -22,7 +22,7 @@
from __future__ import with_statement
from r2.models import *
from r2.lib.memoize import memoize, clear_memo
from r2.lib.memoize import memoize
from datetime import datetime
@@ -71,7 +71,8 @@ def set_promoted(link_names):
# caller is assumed to execute me inside a lock if necessary
g.permacache.set(promoted_memo_key, link_names)
clear_memo(promoted_memo_key)
#update cache
get_promoted(_update = True)
@memoize(promoted_memo_key, time = promoted_memo_lifetime)
def get_promoted():

View File

@@ -28,7 +28,7 @@ from pylons import g
cache = g.cache
def top_users():
tt, dt = tdb.get_thing_read_table(Account._type_id)
tt, dt = tdb.get_thing_table(Account._type_id)
karma = dt.alias()
@@ -44,7 +44,7 @@ def top_users():
def top_user_change(period = '1 day'):
rel = Vote.rel(Account, Link)
rt, account, link, dt = tdb.get_rel_read_table(rel._type_id)
rt, account, link, dt = tdb.get_rel_table(rel._type_id)
author = dt.alias()

View File

@@ -31,7 +31,7 @@ class Report(object):
def total_things(self, table_name, spam=None, all_time=None):
"""Return totals based on items in the thing tables."""
t = tdb_sql.get_thing_read_table(table_name)[0]
t = tdb_sql.get_thing_table(table_name)[0]
s = sa.select([sa.func.count(t.c.thing_id)])
if spam:
s.append_whereclause(t.c.spam==spam)
@@ -42,7 +42,7 @@ class Report(object):
def total_relation(self, table_name, key, value=None, all_time=None):
"""Return totals based on relationship data."""
tables = tdb_sql.get_rel_read_table('%s_account_link' % table_name)
tables = tdb_sql.get_rel_table('%s_account_link' % table_name)
t1, t2 = tables[0], tables[3]
s = sa.select([sa.func.count(t1.c.date)],
@@ -61,7 +61,7 @@ class Report(object):
def css_stats(self, val, all_time=None):
"""Create stats related to custom css and headers."""
t = tdb_sql.get_thing_read_table('subreddit')[1]
t = tdb_sql.get_thing_table('subreddit')[1]
s = sa.select([sa.func.count(t.c.key)], t.c.key == val)
return s.execute().fetchone()[0]

View File

@@ -781,12 +781,6 @@ class Worker:
worker = Worker()
def asynchronous(func):
def _asynchronous(*a, **kw):
f = lambda: func(*a, **kw)
worker.do(f)
return _asynchronous
def cols(lst, ncols):
"""divides a list into columns, and returns the
rows. e.g. cols('abcdef', 2) returns (('a', 'd'), ('b', 'e'), ('c',

View File

@@ -22,7 +22,7 @@
from r2.lib.db.thing import Thing, Relation, NotFound
from r2.lib.db.operators import lower
from r2.lib.db.userrel import UserRel
from r2.lib.memoize import memoize, clear_memo
from r2.lib.memoize import memoize
from r2.lib.utils import modhash, valid_hash, randstr
from pylons import g
@@ -167,9 +167,9 @@ class Account(Thing):
return l[0]._id
@classmethod
def _by_name(cls, name, allow_deleted = False):
def _by_name(cls, name, allow_deleted = False, _update = False):
#lower name here so there is only one cache
uid = cls._by_name_cache(name.lower(), allow_deleted)
uid = cls._by_name_cache(name.lower(), allow_deleted, _update = _update)
if uid:
return cls._byID(uid, True)
else:
@@ -182,7 +182,15 @@ class Account(Thing):
def delete(self):
self._deleted = True
self._commit()
clear_memo('account._by_name', Account, self.name.lower(), False)
#update caches
Account._by_name(self.name, allow_deleted = True, _update = True)
#we need to catch an exception here since it will have been
#recently deleted
try:
Account._by_name(self.name, _update = True)
except NotFound:
pass
#remove from friends lists
q = Friend._query(Friend.c._thing2_id == self._id,
@@ -289,7 +297,10 @@ def register(name, password):
password = passhash(name, password, True))
a._commit()
clear_memo('account._by_name', Account, name.lower(), False)
#clear the caches
Account._by_name(name, _update = True)
Account._by_name(name, allow_deleted = True, _update = True)
return a
class Friend(Relation(Account, Account)): pass

View File

@@ -26,7 +26,7 @@ from account import Account, DeletedUser
from subreddit import Subreddit
from printable import Printable
from r2.config import cache
from r2.lib.memoize import memoize, clear_memo
from r2.lib.memoize import memoize
from r2.lib import utils
from mako.filters import url_escape
from r2.lib.strings import strings, Score
@@ -413,9 +413,6 @@ class Comment(Thing, Printable):
if not c._spam or to.name in g.admins:
inbox_rel = Inbox._add(to, c, 'inbox')
#clear that cache
clear_memo('builder.link_comments2', link._id)
return (c, inbox_rel)
@property

View File

@@ -30,7 +30,7 @@ from r2.models.thing_changes import changed, index_str, create_table
from r2.lib.utils import Storage, timeago
from account import Account
from r2.lib.db.thing import Thing
from r2.lib.memoize import memoize, clear_memo
from r2.lib.memoize import memoize
from pylons import g
def mail_queue(metadata):
@@ -169,9 +169,10 @@ class EmailHandler(object):
o = self.opt_table
try:
o.insert().execute({o.c.email: email, o.c.msg_hash: msg_hash})
clear_memo('r2.models.mail_queue.has_opted_out',
email)
clear_memo('r2.models.mail_queue.opt_count')
#clear caches
has_opted_out(email, _update = True)
opt_count(_update = True)
return (email, True)
except sa.exceptions.SQLError:
return (email, False)
@@ -184,9 +185,10 @@ class EmailHandler(object):
o = self.opt_table
if self.has_opted_out(email):
sa.delete(o, o.c.email == email).execute()
clear_memo('r2.models.mail_queue.has_opted_out',
email)
clear_memo('r2.models.mail_queue.opt_count')
#clear caches
has_opted_out(email, _update = True)
opt_count(_update = True)
return (email, True)
else:
return (email, False)
@@ -377,7 +379,3 @@ def opt_count():
s = sa.select([sa.func.count(o.c.email)])
res = s.execute().fetchone()
return int(res[0])

View File

@@ -30,7 +30,7 @@ from r2.lib.utils import tup, Storage
from link import Link, Comment, Message, Subreddit
from account import Account
from vote import score_changes
from r2.lib.memoize import memoize, clear_memo
from r2.lib.memoize import memoize
from r2.config import cache
from r2.lib.cache import sgm
@@ -106,8 +106,7 @@ class Report(MultiRelation('report',
# clear memoizing around this thing's author
if not r._thing2._loaded: r._thing2._load()
if hasattr(r._thing2, "author_id"):
clear_memo('report._by_author', cls, r._thing2.author_id,
amount = a)
cls._by_author(r._thing2, amount = a, _update = True)
for t in (r._thing1, r._thing2):
thing_key = cls._cache_prefix(rel, t.__class__,
@@ -162,10 +161,10 @@ class Report(MultiRelation('report',
for types, rel in cls.rels.iteritems():
# grab the proper thing table
thing_type = types[0]
table, dtable = tdb.get_thing_read_table(thing_type._type_id)
table, dtable = tdb.get_thing_table(thing_type._type_id)
# and the proper relationship table
tables = tdb.get_rel_read_table(rel._type_id)
tables = tdb.get_rel_table(rel._type_id)
rel_table, rel_dtable = tables[0], tables[3]
where = [dtable.c.key == 'author_id',
@@ -182,9 +181,10 @@ class Report(MultiRelation('report',
return res
@classmethod
def _by_author(cls, author, amount = None):
def _by_author(cls, author, amount = None, _update = False):
res = []
rdict = cls._by_author_cache(author._id, amount = amount)
rdict = cls._by_author_cache(author._id, amount = amount,
_update = _update)
for types, rids in rdict.iteritems():
res.extend(cls.rels[types]._byID(rids, data=True,
return_dict = False))
@@ -427,7 +427,7 @@ def unreport(things, correct=False, auto = False, banned_by = ''):
def unreport_account(user, correct = True, types = (Link, Comment, Message),
auto = False, banned_by = ''):
for typ in types:
table, dtable = tdb.get_thing_read_table(typ._type_id)
table, dtable = tdb.get_thing_table(typ._type_id)
by_user_query = sa.and_(table.c.thing_id == dtable.c.thing_id,
dtable.c.key == 'author_id',

View File

@@ -29,7 +29,7 @@ from account import Account
from printable import Printable
from r2.lib.db.userrel import UserRel
from r2.lib.db.operators import lower, or_, and_, desc
from r2.lib.memoize import memoize, clear_memo
from r2.lib.memoize import memoize
from r2.lib.utils import tup
from r2.lib.strings import strings, Score
from r2.lib.filters import _force_unicode
@@ -74,8 +74,9 @@ class Subreddit(Thing, Printable):
ip = ip,
**kw)
sr._commit()
clear_memo('subreddit._by_name', Subreddit, name.lower())
clear_memo('subreddit.subreddits', Subreddit)
#clear cache
Subreddit._by_name(name, _update = True)
return sr
@classmethod
@@ -89,7 +90,7 @@ class Subreddit(Thing, Printable):
return l[0]._id
@classmethod
def _by_name(cls, name):
def _by_name(cls, name, _update = False):
#lower name here so there is only one cache
name = name.lower()
@@ -98,7 +99,7 @@ class Subreddit(Thing, Printable):
elif name == 'all':
return All
else:
sr_id = cls._by_name_cache(name)
sr_id = cls._by_name_cache(name, _update = _update)
if sr_id:
return cls._byID(sr_id, True)
else:
@@ -115,8 +116,9 @@ class Subreddit(Thing, Printable):
return l[0]._id
@classmethod
def _by_domain(cls, domain):
sr_id = cls._by_domain_cache(_force_unicode(domain).lower())
def _by_domain(cls, domain, _update = False):
sr_id = cls._by_domain_cache(_force_unicode(domain).lower(),
_update = _update)
if sr_id:
return cls._byID(sr_id, True)
else: