# reddit/r2/example.ini
# (as of commit 347fb8bc5f, 2014-02-12 -- "SRMember: Start dual-writing to a
# dedicated cache pool.")

# DO NOT EDIT THIS FILE
# This is a base template. To apply changes to your
# reddit instance, create a "myreddit.update" config
# file, then run 'make ini'. 'make ini' will combine
# this template with the myreddit.update file and create a
# 'myreddit.ini'. ('myreddit.update' is just an example;
# any name will do - e.g., 'foo.update' will create
# 'foo.ini')
[secrets]
# the tokens in this section are base64 encoded
# general purpose secret
SECRET = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# secret for /prefs/feeds
FEEDSECRET = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# used for authenticating admin API calls w/o cookie
ADMINSECRET = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
# used to securely authenticate websocket requests to sutro
websocket = YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXowMTIzNDU2Nzg5
[DEFAULT]
############################################ SITE-SPECIFIC OPTIONS
#### Appearance
# the site's tagline, used in the title and description
short_description = open source is awesome
# default site interface language (two letter character code)
site_lang = en
# default header image url
default_header_url = reddit.com.header.png
#### Domains
# the domain that this app expects to be accessed on
domain = reddit.local
# the short domain (like redd.it)
shortdomain =
# if you use www for the old-timey feel, put it here
domain_prefix =
# subdomains that don't need special processing
reserved_subdomains = www, ssl
# subdomains that are not reddit instances
offsite_subdomains =
# https api endpoint (must be g.domain or a subdomain of g.domain)
https_endpoint =
# (secure) payment domain for self-serve ads
payment_domain = https://pay.reddit.local/
# base url where 300x250 ad units (sidebar) are hosted
ad_domain = http://reddit.local
# domain where sutro websocket server is hosted
websocket_host = %(domain)s
#### Accounts and Subreddits
# the user used for "system" operations and private messages
system_user = reddit
# the default subreddit for submissions
default_sr = reddit.com
# account used for default feedback messaging (can be /r/subreddit)
admin_message_acct = reddit
# subreddit used for DMCA takedowns
takedown_sr = _takedowns
# list of subreddits to auto-subscribe users to
automatic_reddits =
# special subreddit that only reddit gold subscribers can use
lounge_reddit =
# list of accounts with admin powers
admins = reddit
# accounts with special access to the ad system
sponsors =
# employees that aren't admins or sponsors (Free gilding, admin distinguish, traffic viewing, etc.)
employees =
#### Static Files
# if set, these are the domains used for static files served over http and https
# if not set, no domain will be specified and relative local URLs will be used instead
static_domain =
static_secure_domain =
# if this is true, append .gz to CSS and JS files served from the static domain
# this is for hosts that don't do on-the-fly gzipping (e.g. s3)
static_pre_gzipped = false
static_secure_pre_gzipped = false
# whether or not to put subreddit stylesheets in the media system or serve dynamically
subreddit_stylesheets_static = false
#### Ops
# if your webserver is a proxy running on a different instance (or host) within
# the same 10.0.0.0/8 network, have it set X-Forwarded-For and set this to true
trust_local_proxies = false
# hash for validating HTTP_TRUE_CLIENT_IP_HASH as sent by the CDN
ip_hash =
# Location (directory) for temp files for diff3 merging
# Empty will use python default for temp files
diff3_temp_location =
#### Analytics
# image to render to track pageviews
tracker_url = /static/pixel.png
# images to render to track sponsored links
adtracker_url = /static/pixel.png
# image to render to track the ad frame
adframetracker_url = /static/pixel.png
# redirector to bounce clicks off of on sponsored links for tracking
clicktracker_url = /static/pixel.png
# url to request to track interaction statistics
uitracker_url = /static/pixel.png
# domain to send tracking requests (see scripts/tracker.py)
fetch_trackers_url = http://reddit.local/fetch-trackers
# google analytics key
googleanalytics =
# secret used for signing information on the above tracking pixels
tracking_secret = abcdefghijklmnopqrstuvwxyz0123456789
#### Wiki Pages
wiki_page_privacy_policy = privacypolicy
wiki_page_user_agreement = useragreement
wiki_page_registration_info = registration_info
wiki_page_gold_bottlecaps = gold_bottlecaps
#### Feature toggles
disable_ads = false
disable_captcha = false
disable_ratelimit = false
disable_require_admin_otp = false
disable_wiki = false
############################################ DEBUG
# global debug flag -- displays pylons stacktrace rather than 500 page on error when true
# NOTE: a pylons stacktrace allows remote code execution. make sure this is false in prod.
debug = false
# enables/disables whitespace removal in rendered html
template_debug = false
# enables/disables compiled template caching and template file mtime checking
reload_templates = true
# use uncompressed static files (out of /static/js and /static/css)
# rather than compressed files out of /static (for development if true)
uncompressedJS = true
# enable/disable verbose logging of SQL queries
sqlprinting = false
# directory to write cProfile stats dumps to (disabled if not set)
profile_directory =
# template names to record render timings for
timed_templates = Reddit, Link, Comment, LinkListing, NestedListing, SubredditTopBar
############################################ LOGGING
# whether to print a "reddit app started" message at start
log_start = true
# enable/disable logging of exceptions and events via amqp/rabbitmq
amqp_logging = false
# exception reporter objects to give to ErrorMiddleware (see log.py)
error_reporters =
############################################ MEDIA STORAGE
# which backend provider to use for media (thumbnails, subreddit stylesheets,
# subreddit images, app icons). options are:
# s3 - use amazon s3
# filesystem - write to local filesystem
# or write your own!
media_provider = s3
# s3 provider
# your s3 credentials -- if these are left blank, we'll pass None to
# boto which will trigger it to look in various places, including instance
# metadata if on ec2, for credentials.
S3KEY_ID =
S3SECRET_KEY =
# May be one bucket, or many buckets separated by commas
s3_media_buckets =
# Store direct urls for images, rather than buckets
# For the bucket mybucket with the image helloworld.jpg the stored url would be:
# true: http://s3.amazonaws.com/mybucket/helloworld.jpg
# false: http://mybucket/helloworld.jpg
s3_media_direct = true
# filesystem provider configuration
media_fs_root =
media_fs_base_url_http =
media_fs_base_url_https =
media_domain = localhost
############################################ EMERGENCY MODES
# emergency measures: makes the site read only
read_only_mode = false
# a modified read only mode used for cache shown during heavy load 503s
heavy_load_mode = false
# override default site language for things like pirate day
lang_override =
# enable/disable automatic creation of database tables/column families
db_create_tables = true
# are we allowed to write to databases at all?
disallow_db_writes = false
# disable custom subreddit stylesheets
css_killswitch = false
############################################ SCRAPER
# user agent for the scraper
useragent = Mozilla/5.0 (compatible; redditbot/1.0; +http://www.reddit.com/feedback)
# Embedly API Key. if no key is provided, the scraper will only fetch thumbnails.
embedly_api_key =
############################################ QUOTAS
# rate limiter duration (minutes)
RATELIMIT = 10
# user agent substrings to hard-ratelimit to a number of requests per ten second period
# example: agents = googlebot:10, appengine:2
agents =
# ratelimits for various types of relations creatable in subreddits
sr_banned_quota = 10000
sr_moderator_invite_quota = 10000
sr_contributor_quota = 10000
sr_wikibanned_quota = 10000
sr_wikicontributor_quota = 10000
sr_quota_time = 7200
sr_invite_limit = 25
# delay before allowing a link to be shared
new_link_share_delay = 30 seconds
# max number of uploaded images per subreddit
max_sr_images = 50
############################################ THRESHOLDS
# minimum item score to be considered for quota baskets
QUOTA_THRESHOLD = 5
# if the user has positive total karma, their per-subreddit karma will default to this, else 0
MIN_UP_KARMA = 1
# karma needed to avoid per-subreddit submission ratelimits
MIN_RATE_LIMIT_KARMA = 10
MIN_RATE_LIMIT_COMMENT_KARMA = 1
# ages in days at which various actions are disallowed to preserve history
REPLY_AGE_LIMIT = 180
VOTE_AGE_LIMIT = 180
REPORT_AGE_LIMIT = 180
# minimum age of an account (in days) for the "create a subreddit" button to show
min_membership_create_community = 30
# maximum age (in days) of items eligible for display on normalized hot pages (frontpage, multis, etc.)
HOT_PAGE_AGE = 1000
# how long to consider links eligible for the rising page
rising_period = 12 hours
# default number of comments shown
num_comments = 100
# max number of comments to show at once
max_comments = 500
max_comments_gold = 2500
# number of subreddits to put in the defaults for unlogged users
num_default_reddits = 10
# how deep do we go into the top listing when fetching /random
num_serendipity = 250
# number of subscriptions a user needs to have before the "my subreddits"
# dropdown is shown
sr_dropdown_threshold = 15
# Conflate visits to a comment page that happen within this many
# seconds of each other (gold "new comments" feature)
comment_visits_period = 600
# Number of days to keep recent wiki revisions for
wiki_keep_recent_days = 7
# Max number of bytes for wiki pages
wiki_max_page_length_bytes = 262144
# Max wiki page name length
wiki_max_page_name_length = 128
# Max number of separators in a wiki page name
wiki_max_page_separators = 3
############################################ SEARCH
# endpoint for link search
CLOUDSEARCH_SEARCH_API =
# endpoint for link upload
CLOUDSEARCH_DOC_API =
# endpoint for subreddit search
CLOUDSEARCH_SUBREDDIT_SEARCH_API =
# endpoint for subreddit upload
CLOUDSEARCH_SUBREDDIT_DOC_API =
############################################ MEMCACHE
num_mc_clients = 5
# core memcache cluster, Things and various other stuff
memcaches = 127.0.0.1:11211
# caches used for @memoize decorator magic
memoizecaches = 127.0.0.1:11211
# hosts to store memcache-based locks on
lockcaches = 127.0.0.1:11211
# hosts to store rendered template fragments in
rendercaches = 127.0.0.1:11211
# hosts to store entire rendered pages in
pagecaches = 127.0.0.1:11211
# hosts that cache permacache cassandra data
permacache_memcaches = 127.0.0.1:11211
# hosts that cache srmember rels (subset of Thing data)
srmembercaches = 127.0.0.1:11211
# a local cache that's not globally consistent and can have stale data (optional)
stalecaches =
############################################ MISCELLANEOUS
# default localization for strings (when using python's locale.format)
# (mostly replaced by babel, this shouldn't be necessary to change)
locale = C
# storage timezone, should probably not be changed from UTC
timezone = UTC
# timezone for display of some data, deprecated
display_timezone = MST
# location of the static directory
static_path = /static/
# Just a list of words. Used by errorlog.py to make up names for new errors.
words_file = /usr/dict/words
# domains that we consider URLs case sensitive for repost detection purposes
case_sensitive_domains = i.imgur.com, youtube.com
############################################ AUTHENTICATION
# how to authenticate users. see r2/lib/authentication.py for options
authentication_provider = cookie
# the work factor for bcrypt, increment this every time computers double in
# speed. don't worry, changing this won't break old passwords
bcrypt_work_factor = 12
# name of the cookie to drop with login information
login_cookie = reddit_session
# name of the admin cookie
admin_cookie = reddit_admin
# name of the otp cookie
otp_cookie = reddit_otp
# the maximum life of an admin cookie (seconds)
ADMIN_COOKIE_TTL = 32400
# the maximum amount of idle time for an admin cookie (seconds)
ADMIN_COOKIE_MAX_IDLE = 900
# the maximum life of an otp cookie (seconds)
OTP_COOKIE_TTL = 604800
############################################ CASSANDRA
# cassandra hosts
cassandra_seeds = 127.0.0.1:9160
# number of connections to keep open to the cassandra ring
cassandra_pool_size = 5
# default read/write consistency levels for Cassandra
cassandra_rcl = ONE
cassandra_wcl = ONE
# name of default connection pool to use when _connection_pool not specified
cassandra_default_pool = main
############################################ AMQP
amqp_host = localhost:5672
amqp_user = reddit
amqp_pass = reddit
amqp_virtual_host = /
############################################ ZOOKEEPER
# zookeeper is optional at the moment
zookeeper_connection_string =
zookeeper_username =
zookeeper_password =
############################################ EMAIL
smtp_server = localhost
# where to send alerts for exceptions, etc.
nerds_email = nerds@reddit.com
# the "from" address for link share emails
share_reply = noreply@reddit.com
# where to send feedback comments
feedback_email = reddit@gmail.com
############################################ POSTGRES
db_user = reddit
db_pass = password
db_port = 5432
db_pool_size = 3
db_pool_overflow_size = 3
# list of all databases named in the subsequent table
# NOTE(review): comment2_db is defined in the table below but is not listed
# here -- confirm whether "comment2" should be included in this list
databases = main, comment, vote, email, authorize, award, hc, traffic
# fields: db name, db host, user, pass, port, conn, overflow_conn
main_db = reddit, 127.0.0.1, *, *, *, *, *
comment_db = reddit, 127.0.0.1, *, *, *, *, *
comment2_db = reddit, 127.0.0.1, *, *, *, *, *
vote_db = reddit, 127.0.0.1, *, *, *, *, *
email_db = reddit, 127.0.0.1, *, *, *, *, *
authorize_db = reddit, 127.0.0.1, *, *, *, *, *
award_db = reddit, 127.0.0.1, *, *, *, *, *
hc_db = reddit, 127.0.0.1, *, *, *, *, *
traffic_db = reddit, 127.0.0.1, *, *, *, *, *
hardcache_categories = *:hc:hc
# this setting will prefix all of the table names
db_app_name = reddit
type_db = main
rel_type_db = main
hardcache_db = main
# definitions of what each table is (probably shouldn't change in .update files)
# things require no extra info. relation is followed by the names of the related tables
db_table_link = thing
db_table_account = thing
db_table_message = thing
db_table_comment = thing
db_table_subreddit = thing
db_table_srmember = relation, subreddit, account
db_table_friend = relation, account, account
db_table_vote_account_link = relation, account, link
db_table_vote_account_comment = relation, account, comment
db_table_inbox_account_comment = relation, account, comment
db_table_inbox_account_message = relation, account, message
db_table_moderatorinbox = relation, subreddit, message
db_table_report_account_link = relation, account, link
db_table_report_account_comment = relation, account, comment
db_table_report_account_message = relation, account, message
db_table_report_account_subreddit = relation, account, subreddit
db_table_award = thing
db_table_trophy = relation, account, award
db_table_jury_account_link = relation, account, link
db_table_ad = thing
db_table_adsr = relation, ad, subreddit
db_table_flair = relation, subreddit, account
db_table_promocampaign = thing
# which servers to find each table on (likely to change in .update files)
# first server listed is assumed to be the master, all others are read-only slaves
# additionally, a "!avoid_master" flag may be added to specify that reads should use the slaves
db_servers_link = main, main
db_servers_account = main
db_servers_message = main
db_servers_comment = comment
db_servers_subreddit = comment
db_servers_srmember = comment
db_servers_friend = comment
db_servers_vote_account_link = vote
db_servers_vote_account_comment = vote
db_servers_inbox_account_comment = main
db_servers_inbox_account_message = main
db_servers_moderatorinbox = main
db_servers_report_account_link = main
db_servers_report_account_comment = comment
db_servers_report_account_message = main
db_servers_report_account_subreddit = main
db_servers_award = award
db_servers_trophy = award
db_servers_jury_account_link = main
db_servers_ad = main
db_servers_adsr = main
db_servers_flair = main
db_servers_promocampaign = main
############################################ GOLD
gold_month_price = 3.99
gold_year_price = 29.99
PAYPAL_SECRET =
PAYPAL_BUTTONID_ONETIME_BYMONTH =
PAYPAL_BUTTONID_ONETIME_BYYEAR =
PAYPAL_BUTTONID_AUTORENEW_BYMONTH =
PAYPAL_BUTTONID_AUTORENEW_BYYEAR =
PAYPAL_BUTTONID_CREDDITS_BYMONTH =
PAYPAL_BUTTONID_CREDDITS_BYYEAR =
STRIPE_PUBLIC_KEY =
STRIPE_SECRET_KEY =
STRIPE_MONTHLY_GOLD_PLAN =
STRIPE_YEARLY_GOLD_PLAN =
COINBASE_WEBHOOK_SECRET =
COINBASE_BUTTONID_ONETIME_1MO =
COINBASE_BUTTONID_ONETIME_2MO =
COINBASE_BUTTONID_ONETIME_3MO =
COINBASE_BUTTONID_ONETIME_4MO =
COINBASE_BUTTONID_ONETIME_5MO =
COINBASE_BUTTONID_ONETIME_6MO =
COINBASE_BUTTONID_ONETIME_7MO =
COINBASE_BUTTONID_ONETIME_8MO =
COINBASE_BUTTONID_ONETIME_9MO =
COINBASE_BUTTONID_ONETIME_10MO =
COINBASE_BUTTONID_ONETIME_11MO =
COINBASE_BUTTONID_ONETIME_1YR =
COINBASE_BUTTONID_ONETIME_2YR =
COINBASE_BUTTONID_ONETIME_3YR =
RG_SECRET =
STRIPE_WEBHOOK_SECRET =
############################################ SELF-SERVE ADS
selfserve_support_email = selfservesupport@mydomain.com
MAX_CAMPAIGNS_PER_LINK = 100
cpm_selfserve = 1.00
cpm_selfserve_geotarget = 0.25
# authorize.net credentials (blank authorizenetapi to disable)
authorizenetapi =
# authorizenetapi = https://api.authorize.net/xml/v1/request.api
authorizenetname =
authorizenetkey =
min_promote_bid = 20
max_promote_bid = 9999
min_promote_future = 2
max_promote_future = 93
############################################ TRAFFIC
TRAFFIC_ACCESS_KEY =
TRAFFIC_SECRET_KEY =
RAW_LOG_DIR =
PROCESSED_DIR =
AGGREGATE_DIR =
AWS_LOG_DIR =
TRAFFIC_SRC_DIR =
TRAFFIC_LOG_HOSTS =
############################################ PERFORMANCE / SCALING
# should we split link votes into separate queues based on subreddit id?
# this helps with lock contention but isn't necessary on smaller sites
shard_link_vote_queues = false
# should we split comment tree processing into shards based on the link id?
# this helps with lock contention but isn't necessary on smaller sites
shard_commentstree_queues = false
# chance of a write to the query cache triggering pruning. increasing this will
# potentially slow down writes, but will keep the size of cached queries in check better
querycache_prune_chance = 0.05
# time for the page cache (for logged-out users)
page_cache_time = 90
# time for the comment pane cache (for a subset of logged in users, see pages.py:CommentPane)
commentpane_cache_time = 120
[server:main]
use = egg:Paste#http
host = 0.0.0.0
port = %(http_port)s
[app:main]
use = egg:r2
# %(here)s is the directory containing the configuration file
cache_dir = %(here)s/data
filter-with = gzip
[filter:gzip]
use = egg:r2#gzip
compress_level = 6
min_size = 800
[loggers]
keys = root
[logger_root]
level = WARNING
handlers = console
[handlers]
keys = console
[handler_console]
class = StreamHandler
args = (sys.stdout,)
[formatters]
keys = reddit
[formatter_reddit]
format = %(message)s
# the following configuration section makes up the "live" config. if zookeeper
# is enabled, then this configuration will be found by the app in zookeeper. to
# write it to zookeeper, use the writer script: scripts/write_live_config.
[live_config]
# links that get their own infrastructure (comma-delimited list of id36s)
fastlane_links =
# a message placed in the infobar
announcement_message =
# an info message placed in the sidebar
sidebar_message =
# an info message placed in the sidebar for gold users
gold_sidebar_message =
# probability of the subreddit suggester showing up in the spotlight box
# for users that have at some point edited their subscriptions:
spotlight_interest_sub_p = 0.05
# and for users that have not ever subscribed:
spotlight_interest_nosub_p = 0.1
# map of comment tree version to how frequently it should be chosen relative to
# the others
comment_tree_version_weights = 1:1, 2:0
# enables/disables client side logging POSTs to /web/log/...
frontend_logging = true
# daily gold revenue goal (in pennies) for progress bar thing
gold_revenue_goal = 0
# sample multireddits (displayed when a user has no multis)
listing_chooser_sample_multis = /user/reddit/m/hello, /user/reddit/m/world
# multi of subreddits to share with gold users
listing_chooser_gold_multi = /user/reddit/m/gold
# subreddit showcasing new multireddits
listing_chooser_explore_sr =
# subreddits that help people discover more subreddits (used in explore tab)
discovery_srs =
# historical cost to run a reddit server
pennies_per_server_second = 1970/1/1:1