#
# r2 - Pylons development environment configuration
#
# The %(here)s variable will be replaced with the parent directory of this file
#
[DEFAULT]

# -- debug and configuration flags --

# global debug flag -- displays pylons stacktrace rather than 500 page on error when true
debug = true

# enables/disables template caching (for development)
template_debug = true

# use uncompressed static files (out of /static/js and /static/css)
# rather than compressed files out of /static (for development if true)
uncompressedJS = true

# enable/disable verbose SQL printing
sqlprinting = false

# enable/disable writing errors as they occur to a rabbit-mq queue
exception_logging = false

# whether to print a "reddit app started" message at start
log_start = true

# enable/disable logging for amqp/rabbitmq
amqp_logging = false

# emergency measures: makes the site read only
read_only_mode = false

# -- SECRETS! <-- update these first! --

# global secret
SECRET = abcdefghijklmnopqrstuvwxyz0123456789
# secret for making the modhash
MODSECRET = abcdefghijklmnopqrstuvwxyz0123456789
# secret for /prefs/feeds
FEEDSECRET = abcdefghijklmnopqrstuvwxyz0123456789

# -- important settings --

# the domain that this app serves itself up as
domain = localhost
# if you use www for the old-timey feel, put it here
domain_prefix =
# the user used for "system" operations and messages
system_user = reddit
# list of admin accounts
admins = reddit
# the default subreddit for submissions
default_sr = reddit.com
# time for the page cache (for unlogged in users)
page_cache_time = 90
# default localization for strings (when using python's locale.format)
locale = C
# default site language (two letter character code)
lang = en
# if your webserver is a proxy and on a different instance, use
# X-forwarded-for and set this to the webserver's IP
proxy_addr =
# hash for validating HTTP_TRUE_CLIENT_IP_HASH
ip_hash =
# timezone for storing
timezone = UTC
# timezone for the database
display_timezone = MST
# secret key for accessing /shutdown
shutdown_secret = 12345
# list of servers that the service monitor will care about
monitored_servers = reddit, localhost
# name of the cookie to drop with login information
login_cookie = reddit_session
# set to a path to enable per-request logging
log_path =
# fraction of requests to pass into the queue-based usage sampler
usage_sampling = 0.
# account used for default feedback messaging (can be #subreddit)
admin_message_acct = reddit

# -- caching options --

# data cache (used for caching Thing objects)
num_mc_clients = 5
memcaches = 127.0.0.1:11211
# render caches (the second is "remote" and the local is optional but in the same format)
local_rendercache =
rendercaches = 127.0.0.1:11211
# cache for storing service monitor information
servicecaches = 127.0.0.1:11211

# -- permacache options --
# permacache is memcaches -> cassandra -> memcachedb

# memcaches that sit in front of cassandra
permacache_memcaches = 127.0.0.1:11211
# cassandra hosts. one of these will be chosen at random by pycassa
cassandra_seeds =
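# Example only (not part of the stock values above): with a single local
# Cassandra node listening on the default Thrift port, the seed list would
# typically look like
#   cassandra_seeds = 127.0.0.1:9160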
# -- url cache options --
url_caches = 127.0.0.1:11211
# cassandra hosts. one of these will be chosen at random by pycassa
url_seeds =
# caches for storing number of times a link is rendered
incr_cache = udp:localhost:11211
incr_caches = localhost:11211
# recommender cache (deprecated)
rec_cache = 127.0.0.1:11211

## -- traffic tracking urls --

# image to render to track pageviews
tracker_url = /static/pixel.png
# images to render to track sponsored links
adtracker_url = /static/pixel.png
# image to render to track the ad frame
adframetracker_url = http://pixel.reddit.com/pixel/of_defenestration.png
# open redirector to bounce clicks off of on sponsored links for tracking
clicktracker_url = /static/pixel.png

# amqp
amqp_host = localhost:5672
amqp_user = reddit
amqp_pass = reddit
amqp_virtual_host = /

## -- database setup --

# list of all databases named in the subsequent table
databases = main, comment, vote, email, authorize, award, hc

#db name        db      host        user    pass
main_db = reddit, 127.0.0.1, reddit, password
comment_db = reddit, 127.0.0.1, reddit, password
comment2_db = reddit, 127.0.0.1, reddit, password
vote_db = reddit, 127.0.0.1, reddit, password
email_db = reddit, 127.0.0.1, reddit, password
authorize_db = reddit, 127.0.0.1, reddit, password
award_db = reddit, 127.0.0.1, reddit, password
hc_db = reddit, 127.0.0.1, reddit, password

# this setting will prefix all of the table names
db_app_name = reddit
# are we allowed to create tables?
db_create_tables = True

type_db = main
rel_type_db = main
hardcache_db = main

db_table_link = thing, main, main
db_table_account = thing, main
db_table_message = thing, main
db_table_savehide = relation, account, link, main
db_table_click = relation, account, link, main
db_table_comment = thing, comment
db_table_subreddit = thing, comment
db_table_srmember = relation, subreddit, account, comment
db_table_friend = relation, account, account, comment
db_table_vote_account_link = relation, account, link, vote
db_table_vote_account_comment = relation, account, comment, vote
db_table_inbox_account_comment = relation, account, comment, main
db_table_inbox_account_message = relation, account, message, main
db_table_moderatorinbox = relation, subreddit, message, main
db_table_report_account_link = relation, account, link, main
db_table_report_account_comment = relation, account, comment, comment
db_table_report_account_message = relation, account, message, main
db_table_report_account_subreddit = relation, account, subreddit, main
db_table_award = thing, award
db_table_trophy = relation, account, award, award
db_table_jury_account_link = relation, account, link, main
db_table_ad = thing, main
db_table_adsr = relation, ad, subreddit, main

disallow_db_writes = False
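# Note on the db_table_* format (inferred from the entries above, not an
# official reference): "thing" tables are declared as
#   db_table_<name> = thing, <db>[, <db> ...]
# and "relation" tables as
#   db_table_<name> = relation, <thing1>, <thing2>, <db>
# For example, a hypothetical relation tying accounts to subreddits and stored
# on the main database would read
#   db_table_example_account_subreddit = relation, account, subreddit, main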
## -- traffic analytics --

# google analytics token
googleanalytics =
# machine to get traffic metrics from
traffic_url = http://localhost:8010/tracker/pickle/
# secret used for talking to the traffic machine
tracking_secret = abcdefghijklmnopqrstuvwxyz0123456789

## -- Self-service sponsored link stuff --

# (secure) payment domain
payment_domain = http://pay.localhost/
ad_domain = http://localhost
sponsors =
# authorize.net credentials
authorizenetname =
authorizenetkey =
authorizenetapi = https://api.authorize.net/xml/v1/request.api
min_promote_bid = 20
max_promote_bid = 9999
min_promote_future = 2

# -- spreadshirt --
spreadshirt_url =
spreadshirt_vendor_id =
spreadshirt_min_font = 42
spreadshirt_max_width = 620
spreadshirt_test_font =

###
# Other magic settings
###

# list of cnames allowed to render as reddit.com without a frame
authorized_cnames =

# -- query cache settings --
num_query_queue_workers = 5
query_queue_worker = http://cslowe.local:8000
enable_doquery = True
use_query_cache = False
write_query_queue = True

# -- stylesheet editor --
# disable custom stylesheets
css_killswitch = False
# list of "trusted" domains that can be referenced in url()
allowed_css_linked_domains = static.reddit.com, www.reddit.com, reallystatic.reddit.com
# max number of uploaded images per subreddit
max_sr_images = 50
# default stylesheet and its rtl version
stylesheet = reddit.css
stylesheet_rtl = reddit-rtl.css
# location of the static directory
static_path = /static/

# -- translator UI --
# enable/disable access to the translation UI in /admin/i18n
translator = true
# reddit runs the translator UI on a different instance with a
# password to login. These settings are used when generating messages
# to translators to tell them their credentials
translator_username =
translator_password =

# subreddit used for DMCA takedowns
takedown_sr = _takedowns

# png compressor
png_optimizer = /usr/bin/env optipng

# bad words that should be *'d out
profanity_wordlist =

# which markdown backend to use (c = discount, py = markdown.py)
markdown_backend = c

# -- search --
# where is solr?
solr_url =
# how long do we cache search results (in seconds)
solr_cache_time = 300

# Just a list of words. Used by errlog.py to make up names for new errors.
words_file = /usr/dict/words

# -- media stuff --
# user agent for the scraper
useragent = Mozilla/5.0 (compatible; redditbot/1.0; +http://www.reddit.com/feedback)
# your s3 credentials
S3KEY_ID =
S3SECRET_KEY =
# s3 bucket
s3_thumb_bucket = test.your.domain.here
default_thumb = /static/noimage.png
self_thumb = /static/self_default.png
media_domain = localhost

# -- limits --
# rate limiter duration (minutes)
RATELIMIT = 10
# minimum display karma
MIN_UP_KARMA = 1
MIN_RATE_LIMIT_KARMA = 10
MIN_RATE_LIMIT_COMMENT_KARMA = 1
QUOTA_THRESHOLD = 5
# min amount of karma to edit
WIKI_KARMA = 100
# time in days
MODWINDOW = 2
HOT_PAGE_AGE = 1000

# -- display options --
# how long to consider links eligible for the rising page
rising_period = 12 hours
# max number of comments (default)
num_comments = 200
# max number of comments (if show all is selected)
max_comments = 500
# list of reddits to auto-subscribe users to
automatic_reddits =
# cutoff number of reddits to show unsubscribed users
num_default_reddits = 10
# how deep do we go into the top listing when fetching /random
num_serendipity = 250
sr_dropdown_threshold = 15
# user-agents to rate-limit
agents =

# -- email --
# smtp server
smtp_server = localhost
# delay before allowing a link to be shared
new_link_share_delay = 30 seconds
# alerter emails
nerds_email = nerds@reddit.com
# share reply email
share_reply = noreply@reddit.com
# feedback email
feedback_email = reddit@gmail.com

[server:main]
use = egg:Paste#http
host = 0.0.0.0
port = %(http_port)s

[app:main]
use = egg:r2
cache_dir = %(here)s/data
beaker.session_key = r2
beaker.session_secret = somesecret
#lang = en

# WARNING: *THE LINE BELOW MUST BE UNCOMMENTED ON A PRODUCTION ENVIRONMENT*
# Debug mode will enable the interactive debugging tool, allowing ANYONE to
# execute malicious code after an exception is raised.
#set debug = false
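# Example usage (assumed PasteScript setup; the config filename and port are
# placeholders, not defined in this file): the %(http_port)s variable in
# [server:main] is supplied on the command line, e.g.
#   paster serve --reload example.ini http_port=8080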