Moved database configuration from databases.py to the ini.
Added support for load-balancing database reads.
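For orientation before the diff: the per-database ini settings (main_db_name, main_db_host, ...) are replaced by a databases list plus one <name>_db line per database, and the thing/relation wiring moves from databases.py into db_table_* keys. A minimal standalone sketch of how one such line becomes engine parameters (it mirrors Globals.load_db_params further down; the ini line is taken from this commit, the variable names are illustrative only):

    line = "comment_db = newreddit, 10.96.53.76, ri, password"
    key, _, value = line.partition("=")
    db_params = ("name", "db_host", "db_user", "db_pass",
                 "pool_size", "max_overflow")
    params = dict(zip(db_params, (x.strip() for x in value.split(","))))
    # params == {"name": "newreddit", "db_host": "10.96.53.76",
    #            "db_user": "ri", "db_pass": "password"}
    # zip() simply drops the optional pool_size/max_overflow entries when
    # a line omits them, so get_engine's defaults apply.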
@@ -20,35 +20,49 @@ tracker_url =
adtracker_url =
clicktracker_url =

main_db_name = reddit
main_db_host = 127.0.0.1
main_db_user = reddit
main_db_pass = password
databases = main, comment, vote, change, email, neuter, query_queue

comment_db_name = reddit
comment_db_host = 127.0.0.1
comment_db_user = reddit
comment_db_pass = password
#db name, db host, db user, db pass (optional: pool size, max overflow)
main_db = newreddit, 10.96.53.79, ri, password
comment_db = newreddit, 10.96.53.76, ri, password
comment2_db = newreddit, 10.96.53.78, ri, password
vote_db = newreddit, 10.96.53.77, ri, password
change_db = changed, 10.96.53.216, ri, password
email_db = email, 10.96.53.216, ri, password
neuter_db = neuter, 10.96.53.216, ri, password
query_queue_db = query_queue, 10.96.53.220, ri, password

vote_db_name = reddit
vote_db_host = 127.0.0.1
vote_db_user = reddit
vote_db_pass = password
db_app_name = reddit
db_create_tables = True

change_db_name = changes
change_db_host = 127.0.0.1
change_db_user = reddit
change_db_pass = password
type_db = main
rel_type_db = main

email_db_name = email
email_db_host = 127.0.0.1
email_db_user = reddit
email_db_pass = password
db_table_link = thing, main, main
db_table_account = thing, main
db_table_message = thing, main

db_table_savehide = relation, account, link, main
db_table_click = relation, account, link, main

db_table_comment = thing, comment
db_table_subreddit = thing, comment

db_table_srmember = relation, subreddit, account, comment

db_table_friend = relation, account, account, comment

db_table_vote_account_link = relation, account, link, vote
db_table_vote_account_comment = relation, account, comment, vote

db_table_inbox_account_comment = relation, account, comment
db_table_inbox_account_message = relation, account, message, main

db_table_report_account_link = relation, account, link, main
db_table_report_account_comment = relation, account, comment, comment
db_table_report_account_message = relation, account, message, main
db_table_report_account_subreddit = relation, account, subreddit, main

query_queue_db_name = query_queue
query_queue_db_host = 127.0.0.1
query_queue_db_user = reddit
query_queue_db_pass = password

###
# Other magic settings
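The db_table_* grammar above, as parsed by Globals.load_db_params later in this diff:

    db_table_<name> = thing, <engine>[, <engine>...]
    db_table_<name> = relation, <type1>, <type2>, <engine>[, <engine>...]

where each <engine> names an entry of the databases list. Per the add_thing docstring in db_manager below, the first engine is the master and any further engines are read slaves; a line like db_table_link = thing, main, main lists the same pool twice, so link reads are split at random between two table handles on one database.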
@@ -1,98 +0,0 @@
# The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is Reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of the
# Original Code is CondeNet, Inc.
#
# All portions of the code written by CondeNet are Copyright (c) 2006-2008
# CondeNet, Inc. All Rights Reserved.
################################################################################
from r2.lib.manager import db_manager
from pylons import g
try:
    #TODO: move?
    from r2admin.config.databases import *
except:
    pass

tz = g.tz

dbm = db_manager.db_manager()

main_engine = db_manager.get_engine(g.main_db_name,
                                    db_host = g.main_db_host,
                                    db_user = g.main_db_user,
                                    db_pass = g.main_db_pass)

comment_engine = db_manager.get_engine(g.comment_db_name,
                                       db_host = g.comment_db_host,
                                       db_user = g.comment_db_user,
                                       db_pass = g.comment_db_pass)

vote_engine = db_manager.get_engine(g.vote_db_name,
                                    db_host = g.vote_db_host,
                                    db_user = g.vote_db_user,
                                    db_pass = g.vote_db_pass)

change_engine = db_manager.get_engine(g.change_db_name,
                                      db_host = g.change_db_host,
                                      db_user = g.change_db_user,
                                      db_pass = g.change_db_pass,
                                      pool_size = 2,
                                      max_overflow = 2)

email_engine = db_manager.get_engine(g.email_db_name,
                                     db_host = g.email_db_host,
                                     db_user = g.email_db_user,
                                     db_pass = g.email_db_pass,
                                     pool_size = 2,
                                     max_overflow = 2)

query_queue_engine = db_manager.get_engine(g.query_queue_db_name,
                                           db_host = g.query_queue_db_host,
                                           db_user = g.query_queue_db_user,
                                           db_pass = g.query_queue_db_pass,
                                           pool_size = 2,
                                           max_overflow = 2)

dbm.type_db = main_engine
dbm.relation_type_db = main_engine

dbm.thing('link', main_engine, main_engine)
dbm.thing('account', main_engine, main_engine)
dbm.thing('message', main_engine, main_engine)

dbm.relation('savehide', 'account', 'link', main_engine)
dbm.relation('click', 'account', 'link', main_engine)

dbm.thing('comment', comment_engine, comment_engine)

dbm.thing('subreddit', comment_engine, comment_engine)
dbm.relation('srmember', 'subreddit', 'account', comment_engine)

dbm.relation('friend', 'account', 'account', comment_engine)

dbm.relation('vote_account_link', 'account', 'link', vote_engine)
dbm.relation('vote_account_comment', 'account', 'comment', vote_engine)

dbm.relation('inbox_account_comment', 'account', 'comment', comment_engine)
dbm.relation('inbox_account_message', 'account', 'message', main_engine)

dbm.relation('report_account_link', 'account', 'link', main_engine)
dbm.relation('report_account_comment', 'account', 'comment', comment_engine)
dbm.relation('report_account_message', 'account', 'message', main_engine)
dbm.relation('report_account_subreddit', 'account', 'subreddit', main_engine)
@@ -27,6 +27,7 @@ from r2.lib.cache import LocalCache, Memcache, CacheChain
from r2.lib.db.stats import QueryStats
from r2.lib.translation import _get_languages
from r2.lib.lock import make_lock_factory
from r2.lib.manager import db_manager

class Globals(object):

@@ -52,7 +53,8 @@ class Globals(object):
                  'enable_doquery',
                  'use_query_cache',
                  'write_query_queue',
                  'css_killswitch']
                  'css_killswitch',
                  'db_create_tables']

    tuple_props = ['memcaches',
                   'rec_cache',
@@ -91,22 +93,15 @@ class Globals(object):

        """

        def to_bool(x):
            return (x.lower() == 'true') if x else None

        def to_iter(name, delim = ','):
            return (x.strip() for x in global_conf.get(name, '').split(delim))


        # slop over all variables to start with
        for k, v in global_conf.iteritems():
            if not k.startswith("_") and not hasattr(self, k):
                if k in self.int_props:
                    v = int(v)
                elif k in self.bool_props:
                    v = to_bool(v)
                    v = self.to_bool(v)
                elif k in self.tuple_props:
                    v = tuple(to_iter(k))
                    v = tuple(self.to_iter(v))
                setattr(self, k, v)

        # initialize caches
@@ -121,6 +116,9 @@ class Globals(object):
        # set default time zone if one is not set
        self.tz = pytz.timezone(global_conf.get('timezone'))

        #load the database info
        self.dbm = self.load_db_params(global_conf)

        #make a query cache
        self.stats_collector = QueryStats()

@@ -177,6 +175,43 @@ class Globals(object):
        self.reddit_host = socket.gethostname()
        self.reddit_pid = os.getpid()

    @staticmethod
    def to_bool(x):
        return (x.lower() == 'true') if x else None

    @staticmethod
    def to_iter(v, delim = ','):
        return (x.strip() for x in v.split(delim))

    def load_db_params(self, gc):
        databases = self.to_iter(gc['databases'])
        if not databases:
            return

        dbm = db_manager.db_manager()
        db_params = ('name', 'db_host', 'db_user', 'db_pass',
                     'pool_size', 'max_overflow')
        for db_name in databases:
            conf_params = self.to_iter(gc[db_name + '_db'])
            params = dict(zip(db_params, conf_params))
            dbm.engines[db_name] = db_manager.get_engine(**params)

        dbm.type_db = dbm.engines[gc['type_db']]
        dbm.relation_type_db = dbm.engines[gc['rel_type_db']]

        prefix = 'db_table_'
        for k, v in gc.iteritems():
            if k.startswith(prefix):
                params = list(self.to_iter(v))
                name = k[len(prefix):]
                kind = params[0]
                if kind == 'thing':
                    dbm.add_thing(name, [dbm.engines[n] for n in params[1:]])
                elif kind == 'relation':
                    dbm.add_relation(name, params[1], params[2],
                                     [dbm.engines[n] for n in params[3:]])
        return dbm

    def __del__(self):
        """
        Put any cleanup code to be run when the application finally exits
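A hedged usage sketch of load_db_params above (gc is the ini-derived global_conf dict; the engine objects are illustrative):

    # gc['databases'] == 'main, comment, ...'  -> one engine per name in dbm.engines
    # gc['main_db'] == 'newreddit, 10.96.53.79, ri, password'
    # gc['db_table_link'] == 'thing, main, main'
    #     -> dbm.add_thing('link', [engines['main'], engines['main']])
    # gc['db_table_savehide'] == 'relation, account, link, main'
    #     -> dbm.add_relation('savehide', 'account', 'link', [engines['main']])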
@@ -1,7 +1,6 @@
from __future__ import with_statement
from r2.lib.workqueue import WorkQueue
from r2.config.databases import query_queue_engine, tz
from r2.lib.db.tdb_sql import make_metadata, settings, create_table, index_str
from r2.lib.db.tdb_sql import make_metadata, create_table, index_str

import cPickle as pickle
from datetime import datetime
@@ -14,14 +13,16 @@ import sqlalchemy as sa
from sqlalchemy.exceptions import SQLError

from pylons import g
tz = g.tz

#the current
running = set()
running_lock = Lock()

def make_query_queue_table():
    metadata = make_metadata(query_queue_engine)
    table = sa.Table(settings.DB_APP_NAME + '_query_queue', metadata,
    engine = g.dbm.engines['query_queue']
    metadata = make_metadata(engine)
    table = sa.Table(g.db_app_name + '_query_queue', metadata,
                     sa.Column('iden', sa.String, primary_key = True),
                     sa.Column('query', sa.Binary),
                     sa.Column('date', sa.DateTime(timezone = True)))
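The same substitution repeats through the rest of the commit: module-level engine imports such as query_queue_engine become runtime lookups like g.dbm.engines['query_queue'], and settings.DB_APP_NAME becomes the ini-driven g.db_app_name, so both table naming and connection wiring now flow from the ini.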
@@ -21,9 +21,9 @@
################################################################################
from math import log
from datetime import datetime, timedelta
from r2.config.databases import tz
from pylons import g

epoch = datetime(1970, 1, 1, tzinfo = tz)
epoch = datetime(1970, 1, 1, tzinfo = g.tz)

def epoch_seconds(date):
    """Returns the number of seconds from the epoch to date. Should match
|
# All portions of the code written by CondeNet are Copyright (c) 2006-2008
# CondeNet, Inc. All Rights Reserved.
################################################################################
from r2.lib.utils import storage, storify, iters, Results, tup, TransSet
from r2.config.databases import dbm, tz
from pylons import g

import operators
import sqlalchemy as sa
from sqlalchemy.databases import postgres
from datetime import datetime
import cPickle as pickle

from copy import deepcopy
import random

import sqlalchemy as sa
from sqlalchemy.databases import postgres

from r2.lib.utils import storage, storify, iters, Results, tup, TransSet
import operators
from pylons import g
dbm = g.dbm

import logging
log_format = logging.Formatter('sql: %(message)s')

settings = storage()
settings.DEBUG = g.debug
settings.DB_CREATE_TABLES = True
settings.DB_APP_NAME = 'reddit'

max_val_len = 1000

transactions = TransSet()
@@ -47,12 +43,12 @@ BigInteger = postgres.PGBigInteger

def make_metadata(engine):
    metadata = sa.BoundMetaData(engine)
    metadata.engine.echo = settings.DEBUG
    metadata.engine.echo = g.debug
    return metadata

def create_table(table, index_commands=None):
    t = table
    if settings.DB_CREATE_TABLES:
    if g.db_create_tables:
        #@@hackish?
        if not t.engine.has_table(t.name):
            t.create(checkfirst = False)
@@ -104,13 +100,13 @@ def index_commands(table, type):
    return commands

def get_type_table(metadata):
    table = sa.Table(settings.DB_APP_NAME + '_type', metadata,
    table = sa.Table(g.db_app_name + '_type', metadata,
                     sa.Column('id', sa.Integer, primary_key = True),
                     sa.Column('name', sa.String, nullable = False))
    return table

def get_rel_type_table(metadata):
    table = sa.Table(settings.DB_APP_NAME + '_type_rel', metadata,
    table = sa.Table(g.db_app_name + '_type_rel', metadata,
                     sa.Column('id', sa.Integer, primary_key = True),
                     sa.Column('type1_id', sa.Integer, nullable = False),
                     sa.Column('type2_id', sa.Integer, nullable = False),
@@ -119,7 +115,7 @@ def get_rel_type_table(metadata):


def get_thing_table(metadata, name):
    table = sa.Table(settings.DB_APP_NAME + '_thing_' + name, metadata,
    table = sa.Table(g.db_app_name + '_thing_' + name, metadata,
                     sa.Column('thing_id', BigInteger, primary_key = True),
                     sa.Column('ups', sa.Integer, default = 0, nullable = False),
                     sa.Column('downs',
@@ -141,7 +137,7 @@ def get_thing_table(metadata, name):
    return table

def get_data_table(metadata, name):
    data_table = sa.Table(settings.DB_APP_NAME + '_data_' + name, metadata,
    data_table = sa.Table(g.db_app_name + '_data_' + name, metadata,
                          sa.Column('thing_id', BigInteger, nullable = False,
                                    primary_key = True),
                          sa.Column('key', sa.String, nullable = False,
@@ -151,7 +147,7 @@ def get_data_table(metadata, name):
    return data_table

def get_rel_table(metadata, name):
    rel_table = sa.Table(settings.DB_APP_NAME + '_rel_' + name, metadata,
    rel_table = sa.Table(g.db_app_name + '_rel_' + name, metadata,
                         sa.Column('rel_id', BigInteger, primary_key = True),
                         sa.Column('thing1_id', BigInteger, nullable = False),
                         sa.Column('thing2_id', BigInteger, nullable = False),
@@ -181,8 +177,6 @@ types_id = {}
types_name = {}
rel_types_id = {}
rel_types_name = {}
extra_thing_tables = {}
thing_engines = {}

def check_type(table, selector, insert_vals):
    #check for type in type table, create if not existent
@@ -196,41 +190,30 @@ def check_type(table, selector, insert_vals):

#make the thing tables
def build_thing_tables():
    for name, thing_engine, data_engine in dbm.things():
    for name, engines in dbm.things.iteritems():
        type_id = check_type(type_table,
                             type_table.c.name == name,
                             dict(name = name))

        thing_engines[name] = thing_engine
        tables = []
        for engine in engines:
            metadata = make_metadata(engine)

        #make thing table
        thing_table = get_thing_table(make_metadata(thing_engine), name)
        create_table(thing_table,
                     index_commands(thing_table, 'thing'))
            #make thing table
            thing_table = get_thing_table(metadata, name)
            create_table(thing_table,
                         index_commands(thing_table, 'thing'))

        #make data tables
        data_metadata = make_metadata(data_engine)
        data_table = get_data_table(data_metadata, name)
        create_table(data_table,
                     index_commands(data_table, 'data'))
            #make data tables
            data_table = get_data_table(metadata, name)
            create_table(data_table,
                         index_commands(data_table, 'data'))

            tables.append((thing_table, data_table))

        #do we need another table?
        if thing_engine == data_engine:
            data_thing_table = thing_table
        else:
            #we're in a different engine, but do we need to maintain the extra table?
            if dbm.extra_data.get(data_engine):
                data_thing_table = get_thing_table(data_metadata, 'data_' + name)
                extra_thing_tables.setdefault(type_id, set()).add(data_thing_table)
                create_table(data_thing_table,
                             index_commands(data_thing_table, 'thing'))
            else:
                data_thing_table = get_thing_table(data_metadata, name)

        thing = storage(type_id = type_id,
                        name = name,
                        thing_table = thing_table,
                        data_table = (data_table, data_thing_table))
                        tables = tables)

        types_id[type_id] = thing
        types_name[name] = thing
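Net effect of the rewritten loop, sketched with hypothetical engine names: a thing configured as "thing, main, main2" ends up with

    types_name[name].tables == [(thing_table_on_main,  data_table_on_main),
                                (thing_table_on_main2, data_table_on_main2)]

that is, a full thing/data table pair is created on every listed engine, and the old single thing_table/data_table attributes plus the extra_thing_tables bookkeeping go away.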
@@ -238,7 +221,7 @@ build_thing_tables()

#make relation tables
def build_rel_tables():
    for name, type1_name, type2_name, engine in dbm.relations():
    for name, (type1_name, type2_name, engines) in dbm.relations.iteritems():
        type1_id = types_name[type1_name].type_id
        type2_id = types_name[type2_name].type_id
        type_id = check_type(rel_type_table,
@@ -247,48 +230,36 @@ def build_rel_tables():
                             type1_id = type1_id,
                             type2_id = type2_id))

        metadata = make_metadata(engine)

        #relation table
        rel_table = get_rel_table(metadata, name)
        create_table(rel_table,
                     index_commands(rel_table, 'rel'))
        tables = []
        for engine in engines:
            metadata = make_metadata(engine)

        #make thing1 table if required
        if engine == thing_engines[type1_name]:
            rel_t1_table = types_name[type1_name].thing_table
        else:
            #need to maintain an extra thing table?
            if dbm.extra_thing1.get(engine):
                rel_t1_table = get_thing_table(metadata, 'rel_' + name + '_type1')
                create_table(rel_t1_table, index_commands(rel_t1_table, 'thing'))
                extra_thing_tables.setdefault(type_id, set()).add(rel_t1_table)
            else:
                rel_t1_table = get_thing_table(metadata, type1_name)
            #relation table
            rel_table = get_rel_table(metadata, name)
            create_table(rel_table, index_commands(rel_table, 'rel'))

        #make thing2 table if required
        if type1_id == type2_id:
            rel_t2_table = rel_t1_table
        elif engine == thing_engines[type2_name]:
            rel_t2_table = types_name[type2_name].thing_table
        else:
            if dbm.extra_thing2.get(engine):
                rel_t2_table = get_thing_table(metadata, 'rel_' + name + '_type2')
                create_table(rel_t2_table, index_commands(rel_t2_table, 'thing'))
                extra_thing_tables.setdefault(type_id, set()).add(rel_t2_table)
            #make thing tables
            rel_t1_table = get_thing_table(metadata, type1_name)
            if type1_name == type2_name:
                rel_t2_table = rel_t1_table
            else:
                rel_t2_table = get_thing_table(metadata, type2_name)

        #build the data
        rel_data_table = get_data_table(metadata, 'rel_' + name)
        create_table(rel_data_table,
                     index_commands(rel_data_table, 'data'))
            #build the data
            rel_data_table = get_data_table(metadata, 'rel_' + name)
            create_table(rel_data_table,
                         index_commands(rel_data_table, 'data'))

            tables.append((rel_table,
                           rel_t1_table,
                           rel_t2_table,
                           rel_data_table))

        rel = storage(type_id = type_id,
                      type1_id = type1_id,
                      type2_id = type2_id,
                      name = name,
                      rel_table = (rel_table, rel_t1_table, rel_t2_table, rel_data_table))
                      tables = tables)

        rel_types_id[type_id] = rel
        rel_types_name[name] = rel
@@ -300,9 +271,27 @@ def get_type_id(name):
def get_rel_type_id(name):
    return rel_types_name[name][0]

def get_write_table(tables):
    return tables[0]

def get_read_table(tables):
    return random.choice(tables)

def get_thing_write_table(type_id):
    return get_write_table(types_id[type_id].tables)

def get_thing_read_table(type_id):
    return get_read_table(types_id[type_id].tables)

def get_rel_write_table(rel_type_id):
    return get_write_table(rel_types_id[rel_type_id].tables)

def get_rel_read_table(rel_type_id):
    return get_read_table(rel_types_id[rel_type_id].tables)

#TODO does the type actually exist?
def make_thing(type_id, ups, downs, date, deleted, spam, id=None):
    table = types_id[type_id].thing_table
    table = get_thing_write_table(type_id)[0]

    params = dict(ups = ups, downs = downs,
                  date = date, deleted = deleted, spam = spam)
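These small helpers carry the whole read-balancing scheme: .tables is the per-engine list built above, writes always use tables[0] (the master set), and each read draws one set uniformly at random. An illustrative trace, assuming two configured engines:

    # tables == [(thing_master, data_master), (thing_slave, data_slave)]
    # get_write_table(tables) -> (thing_master, data_master)    # always
    # get_read_table(tables)  -> either pair, via random.choice

Runtime writes never fan out to the slave tables here; the design assumes replication keeps them in sync.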
@@ -319,9 +308,6 @@ def make_thing(type_id, ups, downs, date, deleted, spam, id=None):
    try:
        id = do_insert(table)
        params['thing_id'] = id
        for t in extra_thing_tables.get(type_id, ()):
            do_insert(t)

        return id
    except sa.exceptions.SQLError, e:
        if not 'IntegrityError' in e.message:
@@ -331,7 +317,7 @@ def make_thing(type_id, ups, downs, date, deleted, spam, id=None):


def set_thing_props(type_id, thing_id, **props):
    table = types_id[type_id].thing_table
    table = get_thing_write_table(type_id)[0]

    if not props:
        return
@@ -344,11 +330,9 @@ def set_thing_props(type_id, thing_id, **props):
        u.execute()

    do_update(table)
    for t in extra_thing_tables.get(type_id, ()):
        do_update(t)

def incr_thing_prop(type_id, thing_id, prop, amount):
    table = types_id[type_id].thing_table
    table = get_thing_write_table(type_id)[0]

    def do_update(t):
        transactions.add_engine(t.engine)
@@ -357,18 +341,16 @@ def incr_thing_prop(type_id, thing_id, prop, amount):
        u.execute()

    do_update(table)
    for t in extra_thing_tables.get(type_id, ()):
        do_update(t)

class CreationError(Exception): pass

#TODO does the type exist?
#TODO do the things actually exist?
def make_relation(rel_type_id, thing1_id, thing2_id, name, date=None):
    table = rel_types_id[rel_type_id].rel_table[0]
    table = get_rel_write_table(rel_type_id)[0]
    transactions.add_engine(table.engine)

    if not date: date = datetime.now(tz)
    if not date: date = datetime.now(g.tz)
    try:
        r = table.insert().execute(thing1_id = thing1_id,
                                   thing2_id = thing2_id,
@@ -383,7 +365,7 @@ def make_relation(rel_type_id, thing1_id, thing2_id, name, date=None):


def set_rel_props(rel_type_id, rel_id, **props):
    t = rel_types_id[rel_type_id].rel_table[0]
    t = get_rel_write_table(rel_type_id)[0]

    if not props:
        return
@@ -489,19 +471,19 @@ def get_data(table, thing_id):
    return res

def set_thing_data(type_id, thing_id, **vals):
    table = types_id[type_id].data_table[0]
    table = get_thing_write_table(type_id)[1]
    return set_data(table, type_id, thing_id, **vals)

def incr_thing_data(type_id, thing_id, prop, amount):
    table = types_id[type_id].data_table[0]
    table = get_thing_write_table(type_id)[1]
    return incr_data_prop(table, type_id, thing_id, prop, amount)

def get_thing_data(type_id, thing_id):
    table = types_id[type_id].data_table[0]
    table = get_thing_read_table(type_id)[1]
    return get_data(table, thing_id)

def get_thing(type_id, thing_id):
    table = types_id[type_id].thing_table
    table = get_thing_read_table(type_id)[0]
    r, single = fetch_query(table, table.c.thing_id, thing_id)

    #if single, only return one storage, otherwise make a dict
@@ -519,19 +501,19 @@ def get_thing(type_id, thing_id):
    return res

def set_rel_data(rel_type_id, thing_id, **vals):
    table = rel_types_id[rel_type_id].rel_table[3]
    table = get_rel_write_table(rel_type_id)[3]
    return set_data(table, rel_type_id, thing_id, **vals)

def incr_rel_data(rel_type_id, thing_id, prop, amount):
    table = rel_types_id[rel_type_id].rel_table[3]
    table = get_rel_write_table(rel_type_id)[3]
    return incr_data_prop(table, rel_type_id, thing_id, prop, amount)

def get_rel_data(rel_type_id, rel_id):
    table = rel_types_id[rel_type_id].rel_table[3]
    table = get_rel_read_table(rel_type_id)[3]
    return get_data(table, rel_id)

def get_rel(rel_type_id, rel_id):
    r_table = rel_types_id[rel_type_id].rel_table[0]
    r_table = get_rel_read_table(rel_type_id)[0]
    r, single = fetch_query(r_table, r_table.c.rel_id, rel_id)

    res = {} if not single else None
@@ -547,7 +529,7 @@ def get_rel(rel_type_id, rel_id):
    return res

def del_rel(rel_type_id, rel_id):
    tables = rel_types_id[rel_type_id].rel_table
    tables = get_rel_write_table(rel_type_id)
    table = tables[0]
    data_table = tables[3]

@@ -664,7 +646,7 @@ def translate_thing_value(rval):

#will assume parameters start with a _ for consistency
def find_things(type_id, get_cols, sort, limit, constraints):
    table = types_id[type_id].thing_table
    table = get_thing_read_table(type_id)[0]
    constraints = deepcopy(constraints)

    s = sa.select([table.c.thing_id.label('thing_id')])
@@ -705,7 +687,7 @@ def translate_data_value(alias, op):
#TODO sort by data fields
#TODO sort by id wants thing_id
def find_data(type_id, get_cols, sort, limit, constraints):
    d_table, t_table = types_id[type_id].data_table
    t_table, d_table = get_thing_read_table(type_id)
    constraints = deepcopy(constraints)

    used_first = False
@@ -767,7 +749,8 @@ def find_data(type_id, get_cols, sort, limit, constraints):


def find_rels(rel_type_id, get_cols, sort, limit, constraints):
    r_table, t1_table, t2_table, d_table = rel_types_id[rel_type_id].rel_table
    tables = get_rel_read_table(rel_type_id)
    r_table, t1_table, t2_table, d_table = tables
    constraints = deepcopy(constraints)

    t1_table, t2_table = t1_table.alias(), t2_table.alias()
@@ -837,3 +820,13 @@ def find_rels(rel_type_id, get_cols, sort, limit, constraints):

if logging.getLogger('sqlalchemy').handlers:
    logging.getLogger('sqlalchemy').handlers[0].formatter = log_format

#inconsistencies:

#relationships assume their thing and data tables are in the same
#database. things don't make that assumption. in practice thing/data
#tables always go together.
#
#we create thing tables for a relationship's things that aren't on the
#same database as the relationship, although they're never used in
#practice. we could remove a healthy chunk of code if we removed that.
@@ -22,17 +22,18 @@
#TODO byID use Things?
from __future__ import with_statement

import new, sys, sha
from datetime import datetime
from copy import copy, deepcopy

import operators
import tdb_sql as tdb
import sorts
from .. utils import iters, Results, tup, to36, Storage
from r2.config import cache
from r2.config.databases import tz
from r2.lib.cache import sgm
from pylons import g

import new, sys, sha
from datetime import datetime
from copy import copy, deepcopy

class NotFound(Exception): pass
CreationError = tdb.CreationError
@@ -406,7 +407,7 @@ class Thing(DataThing):
        self._created = True
        self._loaded = False

        if not date: date = datetime.now(tz)
        if not date: date = datetime.now(g.tz)

        self._ups = ups
        self._downs = downs
@@ -526,7 +527,7 @@ def Relation(type1, type2, denorm1 = None, denorm2 = None):
        self._created = True
        self._loaded = False

        if not date: date = datetime.now(tz)
        if not date: date = datetime.now(g.tz)


        #store the id, and temporarily store the actual object
@@ -30,7 +30,7 @@ from datetime import datetime
def find_tz():
    q = Link._query(sort = desc('_hot'), limit = 1)
    link = list(q)[0]
    t = tdb_sql.types_id[Link._type_id].thing_table
    t = tdb_sql.get_thing_read_table(Link._type_id)[0]

    s = sa.select([sa.func.hot(t.c.ups, t.c.downs, t.c.date),
                   t.c.thing_id],
@@ -21,7 +21,7 @@
################################################################################
import sqlalchemy as sa

def get_engine(name, db_host='', db_user='', db_pass='', pool_size = 1, max_overflow = 9):
def get_engine(name, db_host='', db_user='', db_pass='', pool_size = 5, max_overflow = 5):
    host = db_host if db_host else ''
    if db_user:
        if db_pass:
@@ -30,37 +30,21 @@ def get_engine(name, db_host='', db_user='', db_pass='', pool_size = 1, max_over
            host = "%s@%s" % (db_user, db_host)
    return sa.create_engine('postgres://%s/%s' % (host, name),
                            strategy='threadlocal',
                            pool_size = pool_size,
                            max_overflow = max_overflow)
                            pool_size = int(pool_size),
                            max_overflow = int(max_overflow))
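For concreteness, a hypothetical call with values from the ini above: get_engine('newreddit', db_host='10.96.53.79', db_user='ri', db_pass='password') yields a DSN of the form postgres://ri:password@10.96.53.79/newreddit (the user:password branch itself is elided from this hunk). The new int() casts matter because pool_size and max_overflow now arrive as ini strings via load_db_params instead of as Python ints from databases.py.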
class db_manager:
    def __init__(self):
        self.type_db = None
        self.relation_type_db = None
        self.thing_dbs = {}
        self.relation_dbs = {}
        self.things = {}
        self.relations = {}
        self.engines = {}

        self.extra_data = {}
        self.extra_thing1 = {}
        self.extra_thing2 = {}
    def add_thing(self, name, thing_dbs):
        """thing_dbs is a list of database engines. the first in the
        list is assumed to be the master, the rest are slaves."""
        self.things[name] = thing_dbs

    def thing(self, name, thing_db, data_db, need_extra = False):
        self.thing_dbs[name] = (thing_db, data_db)
        if need_extra:
            self.extra_data[data_db] = True

    def relation(self, name, type1, type2, relation_db,
                 need_extra1 = False, need_extra2 = False):
        self.relation_dbs[name] = (type1, type2, relation_db)
        if need_extra1:
            self.extra_thing1[relation_db] = True
        if need_extra2:
            self.extra_thing2[relation_db] = True

    #unused i guess
    def things(self):
        return [(name, d[0], d[1]) for name, d in self.thing_dbs.items()]

    def relations(self):
        return [(name, d[0], d[1], d[2])
                for name, d in self.relation_dbs.items()]
    def add_relation(self, name, type1, type2, relation_dbs):
        self.relations[name] = (type1, type2, relation_dbs)
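After loading, the registry shape is (values hypothetical):

    # dbm.engines   == {'main': <engine>, 'comment': <engine>, ...}
    # dbm.things    == {'link': [main, main], 'comment': [comment], ...}
    # dbm.relations == {'srmember': ('subreddit', 'account', [comment]), ...}

The thing/relation wiring that databases.py expressed as method calls is now pure data driven by the db_table_* ini keys.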
@@ -37,13 +37,14 @@ def get_recommended(userid, age = 2, sort='relevance', num_users=10):

    voter = Vote.rels[(Account, Link)]

    votertable = tdb.rel_types_id[voter._type_id].rel_table[0]
    tables = tdb.get_rel_type_table(voter._type_id)
    votertable = tables[0]
    acct_col = votertable.c.thing1_id
    link_col = votertable.c.thing2_id
    date_col = votertable.c.date
    count = sa.func.count(acct_col)

    linktable = tdb.rel_types_id[voter._type_id].rel_table[2]
    linktable = tables[2]
    # dlinktable, linktable = tdb.types_id[Link._type_id].data_table
    link_id_col = linktable.c.thing_id
@@ -28,8 +28,7 @@ from pylons import g
cache = g.cache

def top_users():
    type = tdb.types_id[Account._type_id]
    tt, dt = type.thing_table, type.data_table[0]
    tt, dt = tdb.get_thing_read_table(Account._type_id)

    karma = dt.alias()

@@ -45,8 +44,7 @@ def top_users():

def top_user_change(period = '1 day'):
    rel = Vote.rel(Account, Link)
    type = tdb.rel_types_id[rel._type_id]
    rt, account, link, dt = type.rel_table
    rt, account, link, dt = tdb.get_rel_read_table(rel._type_id)

    author = dt.alias()
@@ -31,7 +31,7 @@ class Report(object):

    def total_things(self, table_name, spam=None, all_time=None):
        """Return totals based on items in the thing tables."""
        t = tdb_sql.types_name[table_name]['thing_table']
        t = tdb_sql.get_thing_read_table(table_name)[0]
        s = sa.select([sa.func.count(t.c.thing_id)])
        if spam:
            s.append_whereclause(t.c.spam==spam)
@@ -42,8 +42,8 @@ class Report(object):

    def total_relation(self, table_name, key, value=None, all_time=None):
        """Return totals based on relationship data."""
        rel_table = tdb_sql.rel_types_name['%s_account_link' % table_name].rel_table
        t1, t2 = rel_table[0], rel_table[3]
        tables = tdb_sql.get_rel_read_table('%s_account_link' % table_name)
        t1, t2 = tables[0], tables[3]

        s = sa.select([sa.func.count(t1.c.date)],
                      sa.and_(t1.c.rel_id == t2.c.thing_id, t2.c.key == key))
@@ -61,7 +61,7 @@ class Report(object):

    def css_stats(self, val, all_time=None):
        """Create stats related to custom css and headers."""
        t = tdb_sql.types_name['subreddit'].data_table[0]
        t = tdb_sql.get_thing_read_table('subreddit')[1]
        s = sa.select([sa.func.count(t.c.key)], t.c.key == val)
        return s.execute().fetchone()[0]
@@ -65,5 +65,3 @@ try:
    from r2admin.models.admintools import *
except:
    pass
@@ -19,22 +19,22 @@
# All portions of the code written by CondeNet are Copyright (c) 2006-2008
# CondeNet, Inc. All Rights Reserved.
##############################################################################
from r2.config.databases import email_engine
from r2.lib.db.tdb_sql import make_metadata, settings
from sqlalchemy.databases.postgres import PGInet, PGBigInteger
from r2.models.thing_changes import changed, index_str, create_table
import sha, datetime
from email.MIMEText import MIMEText

import sqlalchemy as sa
import datetime
from sqlalchemy.databases.postgres import PGInet, PGBigInteger

from r2.lib.db.tdb_sql import make_metadata
from r2.models.thing_changes import changed, index_str, create_table
from r2.lib.utils import Storage, timeago
from account import Account
from r2.lib.db.thing import Thing
from email.MIMEText import MIMEText
import sha
from r2.lib.memoize import memoize, clear_memo

from pylons import g

def mail_queue(metadata):
    return sa.Table(settings.DB_APP_NAME + '_mail_queue', metadata,
    return sa.Table(g.db_app_name + '_mail_queue', metadata,
                    sa.Column("uid", sa.Integer,
                              sa.Sequence('queue_id_seq'), primary_key=True),

@@ -76,7 +76,7 @@ def mail_queue(metadata):
                    )

def sent_mail_table(metadata, name = 'sent_mail'):
    return sa.Table(settings.DB_APP_NAME + '_' + name, metadata,
    return sa.Table(g.db_app_name + '_' + name, metadata,
                    # tracking hash of the email
                    sa.Column('msg_hash', sa.String, primary_key=True),

@@ -111,7 +111,7 @@ def sent_mail_table(metadata, name = 'sent_mail'):


def opt_out(metadata):
    return sa.Table(settings.DB_APP_NAME + '_opt_out', metadata,
    return sa.Table(g.db_app_name + '_opt_out', metadata,
                    sa.Column('email', sa.String, primary_key = True),
                    # when added to the list
                    sa.Column('date',
@@ -124,7 +124,8 @@ def opt_out(metadata):

class EmailHandler(object):
    def __init__(self, force = False):
        self.metadata = make_metadata(email_engine)
        engine = g.dbm.engines['email']
        self.metadata = make_metadata(engine)
        self.queue_table = mail_queue(self.metadata)
        indices = [index_str(self.queue_table, "date", "date"),
                   index_str(self.queue_table, 'kind', 'kind')]
@@ -161,13 +161,12 @@ class Report(MultiRelation('report',
        res = {}
        for types, rel in cls.rels.iteritems():
            # grab the proper thing table
            thing_type = types[1]
            thing_dict = tdb.types_id[thing_type._type_id]
            dtable, table = thing_dict.data_table
            thing_type = types[0]
            table, dtable = tdb.get_thing_read_table(thing_type._type_id)

            # and the proper relationship table
            rel_table = tdb.rel_types_id[rel._type_id].rel_table[0]
            rel_dtable = tdb.rel_types_id[rel._type_id].rel_table[-1]
            tables = tdb.get_rel_read_table(rel._type_id)
            rel_table, rel_dtable = tables[0], tables[3]

            where = [dtable.c.key == 'author_id',
                     sa.func.substring(dtable.c.value, 1, 1000) == author_id,
@@ -431,8 +430,7 @@ def unreport(things, correct=False, auto = False, banned_by = ''):
def unreport_account(user, correct = True, types = (Link, Comment, Message),
                     auto = False, banned_by = ''):
    for typ in types:
        thing_dict = tdb.types_id[typ._type_id]
        dtable, table = thing_dict.data_table
        table, dtable = tdb.get_thing_read_table(typ._type_id)

        by_user_query = sa.and_(table.c.thing_id == dtable.c.thing_id,
                                dtable.c.key == 'author_id',
@@ -19,11 +19,12 @@
# All portions of the code written by CondeNet are Copyright (c) 2006-2008
# CondeNet, Inc. All Rights Reserved.
################################################################################
from r2.config.databases import change_engine
import sqlalchemy as sa
from r2.lib.db.tdb_sql import make_metadata, settings

from r2.lib.db.tdb_sql import make_metadata
from r2.lib.utils import worker

from pylons import g

def index_str(table, name, on, where = None):
    index_str = 'create index idx_%s_' % name
@@ -35,7 +36,7 @@ def index_str(table, name, on, where = None):

def create_table(table, index_commands=None, force = False):
    t = table
    if settings.DB_CREATE_TABLES:
    if g.db_create_tables:
        if not t.engine.has_table(t.name) or force:
            try:
                t.create(checkfirst = False)
@@ -47,7 +48,7 @@ def create_table(table, index_commands=None, force = False):
            except: pass

def change_table(metadata):
    return sa.Table(settings.DB_APP_NAME + '_changes', metadata,
    return sa.Table(g.db_app_name + '_changes', metadata,
                    sa.Column('fullname', sa.String, nullable=False,
                              primary_key = True),
                    sa.Column('thing_type', sa.Integer, nullable=False),
@@ -58,7 +59,8 @@ def change_table(metadata):
                    )

def make_change_tables(force = False):
    metadata = make_metadata(change_engine)
    engine = g.dbm.engines['change']
    metadata = make_metadata(engine)
    table = change_table(metadata)
    indices = [
        index_str(table, 'fullname', 'fullname'),

@@ -23,7 +23,6 @@ from r2.lib.db.thing import MultiRelation, Relation, thing_prefix, cache
from r2.lib.utils import tup, timeago
from r2.lib.db.operators import ip_network
from r2.lib.normalized_hot import expire_hot
from r2.config.databases import tz

from account import Account
from link import Link, Comment