mirror of
https://github.com/reddit-archive/reddit.git
synced 2026-02-10 06:35:03 -05:00
* Two new 404 aliens
* Enable a comment-page cache when the user isn't the author of any of the comments and the page is a non-permalink; votes and friends are updated by JavaScript
Additions:
* descriptive example.ini
* create "trusted_sponsor" who get auto-accepted
* split off url cache from the permacache, with major clean-up to app_globals
* Move is_banned_domain() call into POST_submit() so it can take the sanitized url
* Redirect /r/asdfasdfasdf to the search page, not the create page.
* updates to draw_load.py
* orangered envelope goes to unread, not all.
* Remove "enable whitelisting" checkbox, and switch to new failiens
* Put verification link on email prefs page
Bugfixes:
* split off a minimal api controller for onload calls (which don't care about c.site or c.user, making them cheaper)
* Assume admins want to keep their cookies forever just like everyone else
* /randomrising bugfix
* JSON error page was rendering in html. it now returns "{ error: ${code} }"
36 lines
840 B
Python
Executable File
36 lines
840 B
Python
Executable File
#!/usr/bin/env python
|
|
|
|
import re, sys
|
|
|
|
# Matches an ini-style "key = value" line. The key may contain letters,
# digits, '-', '_' and internal spaces, but must end on a non-space
# character; surrounding whitespace around '=' is ignored.
# Raw string: '\s' in a plain string is an invalid escape in Python 3.
line_rx = re.compile(r'^([-_a-zA-Z0-9 ]*[-_a-zA-Z0-9]+)\s*=\s*(.*)')


def parse_line(line):
    """Parse one ini-style "key = value" line.

    Returns a (key, value) tuple when the line matches, or None for
    comments, section headers, blank lines, and anything else.
    """
    m = line_rx.match(line)
    if m:
        return m.groups()
    return None
|
|
|
|
def main(source_ini, update_ini):
    """Print source_ini to stdout, overriding values found in update_ini.

    Every "key = value" line in update_ini is collected; source_ini is then
    echoed line by line, with any line whose key appears in the update file
    rewritten as "key = new_value". Non-matching lines pass through
    stripped of surrounding whitespace.
    """
    # Read every key/value override from the update file.
    update_parts = {}
    with open(update_ini) as update_file:
        for line in update_file:
            parsed = parse_line(line)
            if parsed:
                update_parts[parsed[0]] = parsed[1]

    # Pass through the source file, substituting overridden values.
    with open(source_ini) as source_file:
        for line in source_file:
            line = line.strip()
            parsed = parse_line(line)
            # dict.has_key() was removed in Python 3; use the `in` operator.
            if parsed and parsed[0] in update_parts:
                line = '%s = %s' % (parsed[0], update_parts[parsed[0]])
            print(line)
|
if __name__ == '__main__':
    # Expect exactly two arguments: the source ini and the update ini.
    args = sys.argv
    if len(args) != 3:
        # print() with a single argument is valid in both Python 2 and 3;
        # the old `print '...'` statement is a syntax error in Python 3.
        print('usage: iniupdate.py [source] [update]')
        sys.exit(1)
    else:
        main(args[1], args[2])