diff --git a/r2/example.ini b/r2/example.ini
index 8eedb835c..f41c4b9c4 100644
--- a/r2/example.ini
+++ b/r2/example.ini
@@ -66,6 +66,7 @@ stylesheet_rtl = reddit_rtl.css
allowed_css_linked_domains = my.domain.com, my.otherdomain.com
css_killswitch = False
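+# maximum number of custom images a subreddit may upload for its stylesheet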
+max_sr_images = 20
login_cookie = reddit_session
domain = localhost
diff --git a/r2/r2/config/middleware.py b/r2/r2/config/middleware.py
index f6462c358..b0ed9be29 100644
--- a/r2/r2/config/middleware.py
+++ b/r2/r2/config/middleware.py
@@ -333,7 +333,7 @@ class LimitUploadSize(object):
or int(environ[cl_key]) > self.max_size):
r = Response()
r.status_code = 500
- r.content = 'request too big'
+            r.content = 'request too big'
return r(environ, start_response)
return self.app(environ, start_response)
diff --git a/r2/r2/controllers/api.py b/r2/r2/controllers/api.py
index 916813d24..02a6462f2 100644
--- a/r2/r2/controllers/api.py
+++ b/r2/r2/controllers/api.py
@@ -774,9 +774,13 @@ class ApiController(RedditController):
res._update('status', innerHTML = '')
res._update('validation-errors', innerHTML = '')
+ stylesheet_contents_parsed = parsed.cssText if parsed else ''
+ # if the css parsed, we're going to apply it (both preview & save)
+ if not report.errors:
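+            # escape quotes and newlines so the parsed CSS survives embedding in the JS call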
+ res._call('applyStylesheet("%s"); ' %
+ stylesheet_contents_parsed.replace('"', r"\"").replace("\n", r"\n").replace("\r", r"\r"))
if not report.errors and op == 'save':
stylesheet_contents_user = stylesheet_contents
- stylesheet_contents_parsed = parsed.cssText if parsed else ''
c.site.stylesheet_contents = stylesheet_contents_parsed
c.site.stylesheet_contents_user = stylesheet_contents_user
@@ -788,7 +792,6 @@ class ApiController(RedditController):
c.site._commit()
res._update('status', innerHTML = 'saved')
- res._call('applyStylesheetFromTextbox("stylesheet_contents");')
res._update('validation-errors', innerHTML = '')
elif op == 'preview':
@@ -820,36 +823,121 @@ class ApiController(RedditController):
return
cssfilter.rendered_comment('preview_comment',res,comments)
- @validate(VUser(),
+ @Json
+ @validate(VSrModerator(),
VModhash(),
- VRatelimit(rate_user = True,
- rate_ip = True,
- prefix = 'upload_reddit_img_'),
- file = VLength('file',length=1024*500),
- op = VOneOf('op',['upload','delete']))
- def POST_upload_header_img(self, file, op):
- if not c.site.can_change_stylesheet(c.user):
- return self.abort403()
-
+ name = VCssName('img_name'))
+ def POST_delete_sr_img(self, res, name):
+ """
+        Called upon a requested delete on /about/stylesheet.
+        Updates the site's image list, and causes the <li> which
+        wraps the image to be hidden.
+ """
+ # just in case we need to kill this feature from XSS
if g.css_killswitch:
return self.abort(403,'forbidden')
+ c.site.del_image(name)
+ c.site._commit()
+        # hide the image and its container
+ res._hide("img-li_%s" % name)
+ # reset the status
+ res._update('img-status', innerHTML = _("deleted"))
+
- if op == 'upload':
- try:
- cleaned = cssfilter.clean_image(file,'PNG')
- new_url = cssfilter.save_header_image(c.site, cleaned)
- except cssfilter.BadImage:
- return UploadedImage(_('bad image'),c.site.header,'upload').render()
-
- c.site.header = new_url
- c.site._commit()
-
- return UploadedImage(_('saved'),new_url,'upload').render()
- elif op == 'delete':
+ @Json
+ @validate(VSrModerator(),
+ VModhash())
+ def POST_delete_sr_header(self, res):
+ """
+        Called when the user requests that the header on a sr be reset.
+ """
+ # just in case we need to kill this feature from XSS
+ if g.css_killswitch:
+ return self.abort(403,'forbidden')
+ if c.site.header:
c.site.header = None
c.site._commit()
+ # reset the header image on the page
+ res._update('header-img', src = DefaultSR.header)
+ # hide the button which started this
+ res._hide ('delete-img')
+ # hide the preview box
+ res._hide ('img-preview-container')
+ # reset the status boxes
+ res._update('img-status', innerHTML = _("deleted"))
+ res._update('status', innerHTML = "")
+
- return UploadedImage(_('deleted'),DefaultSR.header,'delete').render()
+ def GET_upload_sr_img(self, *a, **kw):
+ """
+ Completely unnecessary method which exists because safari can
+ be dumb too. On page reload after an image has been posted in
+ safari, the iframe to which the request posted preserves the
+ URL of the POST, and safari attempts to execute a GET against
+ it. The iframe is hidden, so what it returns is completely
+ irrelevant.
+ """
+ return "nothing to see here."
+
+ @validate(VSrModerator(),
+ VModhash(),
+ file = VLength('file', length=1024*500),
+ name = VCssName("name"),
+ header = nop('header'))
+ def POST_upload_sr_img(self, file, header, name):
+ """
+ Called on /about/stylesheet when an image needs to be replaced
+ or uploaded, as well as on /about/edit for updating the
+ header. Unlike every other POST in this controller, this
+        method does not get called with Ajax but rather is called from
+        the original form POSTing to a hidden iFrame. Unfortunately, this
+        means the response needs to generate a page with a script tag
+ to fire the requisite updates to the parent document, and,
+ more importantly, that we can't use our normal toolkit for
+ passing those responses back.
+
+ The result of this function is a rendered UploadedImage()
+ object in charge of firing the completedUploadImage() call in
+ JS.
+ """
+
+        # default error list (default values will reset the errors in
+ # the response if no error is raised)
+ errors = dict(BAD_CSS_NAME = "", IMAGE_ERROR = "")
+ try:
+ cleaned = cssfilter.clean_image(file,'PNG')
+ if header:
+                num = None # there is one and only one header, and it is unnumbered
+ elif not name:
+ # error if the name wasn't specified or didn't satisfy
+ # the validator
+ errors['BAD_CSS_NAME'] = _("bad image name")
+ else:
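+                # reserve (or look up) a slot number for this label on the subreddit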
+ num = c.site.add_image(name, max_num = g.max_sr_images)
+ c.site._commit()
+
+ except cssfilter.BadImage:
+ # if the image doesn't clean up nicely, abort
+ errors["IMAGE_ERROR"] = _("bad image")
+ except ValueError:
+ # the add_image method will raise only on too many images
+ errors['IMAGE_ERROR'] = (
+                _("too many images (you only get %d)") % g.max_sr_images)
+
+ if any(errors.values()):
+ return UploadedImage("", "", "", errors = errors).render()
+ else:
+            # with the image num, save the image and upload it to s3. The
+ # header image will be of the form "${c.site._fullname}.png"
+ # while any other image will be ${c.site._fullname}_${num}.png
+ new_url = cssfilter.save_sr_image(c.site, cleaned, num = num)
+ if header:
+ c.site.header = new_url
+ c.site._commit()
+
+ return UploadedImage(_('saved'), new_url, name,
+ errors = errors).render()
+
@Json
@validate(VUser(),
diff --git a/r2/r2/controllers/validator/validator.py b/r2/r2/controllers/validator/validator.py
index 6ae3a94be..2fefb807e 100644
--- a/r2/r2/controllers/validator/validator.py
+++ b/r2/r2/controllers/validator/validator.py
@@ -511,7 +511,16 @@ class VInt(Validator):
except ValueError:
c.errors.add(errors.BAD_NUMBER)
-
+class VCssName(Validator):
+ """
+ returns a name iff it consists of alphanumeric characters and
+ possibly "-", and is below the length limit.
+ """
+ r_css_name = re.compile(r"^[a-zA-Z0-9\-]{1,100}$")
+ def run(self, name):
+ if name and self.r_css_name.match(name):
+ return name
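+        # otherwise fall through and return None, which callers treat as a missing/invalid name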
+
class VMenu(Validator):
def __init__(self, param, menu_cls, remember = True, **kw):
diff --git a/r2/r2/lib/app_globals.py b/r2/r2/lib/app_globals.py
index 2d77ab5a7..465f1aea4 100644
--- a/r2/r2/lib/app_globals.py
+++ b/r2/r2/lib/app_globals.py
@@ -42,6 +42,7 @@ class Globals(object):
'max_comments',
'num_side_reddits',
'num_query_queue_workers',
+ 'max_sr_images',
]
bool_props = ['debug', 'translator',
diff --git a/r2/r2/lib/cssfilter.py b/r2/r2/lib/cssfilter.py
index 60ecaadec..12db52227 100644
--- a/r2/r2/lib/cssfilter.py
+++ b/r2/r2/lib/cssfilter.py
@@ -22,10 +22,10 @@
from __future__ import with_statement
from r2.models import *
-from r2.lib.utils import sanitize_url, domain
+from r2.lib.utils import sanitize_url, domain, randstr
from r2.lib.strings import string_dict
-from pylons import g
+from pylons import g, c
from pylons.i18n import _
import re
@@ -152,11 +152,37 @@ class ValidationError(Exception):
obj = str(self.obj) if hasattr(self,'obj') else ''
return "ValidationError%s: %s (%s)" % (line, self.message, obj)
+# local urls should be in the static directory
local_urls = re.compile(r'^/static/[a-z./-]+$')
+# substitutable urls will be css-valid labels surrounded by "%%"
+custom_img_urls = re.compile(r'%%([a-zA-Z0-9\-]+)%%')
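+# e.g. the url "%%mylogo%%" matches, with group(1) == "mylogo"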
def valid_url(prop,value,report):
+ """
+ checks url(...) arguments in CSS, ensuring that the contents are
+ officially sanctioned. Sanctioned urls include:
+ * anything in /static/
+ * image labels %%..%% for images uploaded on /about/stylesheet
+ * urls with domains in g.allowed_css_linked_domains
+ """
url = value.getStringValue()
+ # local urls are allowed
if local_urls.match(url):
pass
+ # custom urls are allowed, but need to be transformed into a real path
+ elif custom_img_urls.match(url):
+ name = custom_img_urls.match(url).group(1)
+ # the label -> image number lookup is stored on the subreddit
+ if c.site.images.has_key(name):
+ num = c.site.images[name]
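+            # rewrite the label into the image's real S3 url; the random v= parameter defeats caching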
+ value._setCssText("url(http:/%s%s_%d.png?v=%s)"
+ % (g.s3_thumb_bucket, c.site._fullname, num,
+ randstr(36)))
+ else:
+ # unknown image label -> error
+ report.append(ValidationError(msgs['broken_url']
+ % dict(brokenurl = value.cssText),
+ value))
+ # allowed domains are ok
elif domain(url) in g.allowed_css_linked_domains:
pass
else:
@@ -367,7 +393,13 @@ def clean_image(data,format):
return ret
-def save_header_image(sr, data):
+def save_sr_image(sr, data, num = None):
+ """
+    uploads image data to s3 as a PNG and returns its new url. Urls
+ will be of the form:
+ http:/${g.s3_thumb_bucket}/${sr._fullname}[_${num}].png?v=${md5hash}
+ [Note: g.s3_thumb_bucket begins with a "/" so the above url is valid.]
+ """
import tempfile
from r2.lib import s3cp
from md5 import md5
@@ -379,12 +411,18 @@ def save_header_image(sr, data):
f.write(data)
f.flush()
- resource = g.s3_thumb_bucket + sr._fullname + '.png'
- s3cp.send_file(f.name, resource, 'image/png', 'public-read', None, False)
+ resource = g.s3_thumb_bucket + sr._fullname
+ if num is not None:
+ resource += '_' + str(num)
+ resource += '.png'
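+        # e.g. "<fullname>_3.png" for image number 3, or just "<fullname>.png" for the header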
+
+ s3cp.send_file(f.name, resource, 'image/png', 'public-read',
+ None, False)
finally:
f.close()
- return 'http:/%s%s.png?v=%s' % (g.s3_thumb_bucket, sr._fullname, hash)
+ return 'http:/%s%s?v=%s' % (g.s3_thumb_bucket,
+ resource.split('/')[-1], hash)
diff --git a/r2/r2/lib/pages/pages.py b/r2/r2/lib/pages/pages.py
index 6b5cf6020..1a5df83c3 100644
--- a/r2/r2/lib/pages/pages.py
+++ b/r2/r2/lib/pages/pages.py
@@ -684,9 +684,9 @@ class CssError(Wrapped):
class UploadedImage(Wrapped):
"The page rendered in the iframe during an upload of a header image"
- def __init__(self,status,img_src,op):
- Wrapped.__init__(self,
- status=status, img_src=img_src, op=op)
+ def __init__(self,status,img_src, name="", errors = {}):
+ self.errors = list(errors.iteritems())
+ Wrapped.__init__(self, status=status, img_src=img_src, name = name)
class Password(Wrapped):
"""Form encountered when 'recover password' is clicked in the LoginFormWide."""
diff --git a/r2/r2/lib/utils/utils.py b/r2/r2/lib/utils/utils.py
index 1b59ea4f0..3779fedfe 100644
--- a/r2/r2/lib/utils/utils.py
+++ b/r2/r2/lib/utils/utils.py
@@ -495,7 +495,6 @@ class UrlParser(object):
unquote_plus('='.join(p[1:])))
self._query_dict = dict(_split(p) for p in self.query.split('&')
if p)
-
return self._query_dict
def path_extension(self):
diff --git a/r2/r2/lib/wrapped.py b/r2/r2/lib/wrapped.py
index cb10b2c8c..57efcbcb5 100644
--- a/r2/r2/lib/wrapped.py
+++ b/r2/r2/lib/wrapped.py
@@ -66,7 +66,7 @@ class Wrapped(object):
for lookup in self.lookups:
try:
template = tpm.get(lookup, style, cache = not debug)
- except KeyError:
+ except AttributeError:
continue
else:
try:
diff --git a/r2/r2/models/subreddit.py b/r2/r2/models/subreddit.py
index 33c964d1b..425430cd8 100644
--- a/r2/r2/models/subreddit.py
+++ b/r2/r2/models/subreddit.py
@@ -44,6 +44,7 @@ class Subreddit(Thing, Printable):
firsttext = strings.firsttext,
header = os.path.join(g.static_path,
'base.reddit.com.header.png'),
+ images = {},
ad_file = os.path.join(g.static_path, 'ad_default.html'),
reported = 0,
valid_votes = 0,
@@ -69,6 +70,7 @@ class Subreddit(Thing, Printable):
clear_memo('subreddit.subreddits', Subreddit)
return sr
+
@classmethod
@memoize('subreddit._by_name')
def _by_name_cache(cls, name):
@@ -348,6 +350,69 @@ class Subreddit(Thing, Printable):
user = c.user if c.user_is_loggedin else None
return self.can_view(user)
+ def get_images(self):
+ """
+ Iterator over list of (name, image_num) pairs which have been
+ uploaded for custom styling of this subreddit.
+ """
+ for name, img_num in self.images.iteritems():
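+            # the "/empties/" bookkeeping key maps to a list, so only int values are real images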
+ if isinstance(img_num, int):
+ yield (name, img_num)
+
+ def add_image(self, name, max_num = None):
+ """
+ Adds an image to the subreddit's image list. The resulting
+ number of the image is returned. Note that image numbers are
+ non-sequential insofar as unused numbers in an existing range
+ will be populated before a number outside the range is
+        returned. Images deleted with del_image are pushed onto the
+ "/empties/" stack in the images dict, and those values are
+ pop'd until the stack is empty.
+
+ raises ValueError if the resulting number is >= max_num.
+
+        The Subreddit will be marked _dirty if a new image has been
+        added to its images list; no _commit is called, so the caller
+        must commit.
+ """
+ if not self.images.has_key(name):
+ # copy and blank out the images list to flag as _dirty
+ l = self.images
+ self.images = None
+ # initialize the /empties/ list
+ l.setdefault('/empties/', [])
+ try:
+ num = l['/empties/'].pop() # grab old number if we can
+ except IndexError:
+ num = len(l) - 1 # one less to account for /empties/ key
+ if max_num is not None and num >= max_num:
+ raise ValueError, "too many images"
+ # update the dictionary and rewrite to images attr
+ l[name] = num
+ self.images = l
+ else:
+ # we've seen the image before, so just return the existing num
+ num = self.images[name]
+ return num
+
+ def del_image(self, name):
+ """
+ Deletes an image from the images dictionary assuming an image
+ of that name is in the current dictionary. The freed up
+ number is pushed onto the /empties/ stack for later recycling
+ by add_image.
+
+        The Subreddit will be marked _dirty if an image has been
+        removed from its images list; no _commit is called, so the
+        caller must commit.
+ """
+ if self.images.has_key(name):
+ l = self.images
+ self.images = None
+ l.setdefault('/empties/', [])
+ # push the number on the empties list
+ l['/empties/'].append(l[name])
+ del l[name]
+ self.images = l
+
class FakeSubreddit(Subreddit):
over_18 = False
title = ''
diff --git a/r2/r2/public/static/reddit.css b/r2/r2/public/static/reddit.css
index aa3005abc..d44614f6c 100644
--- a/r2/r2/public/static/reddit.css
+++ b/r2/r2/public/static/reddit.css
@@ -1302,6 +1302,50 @@ a.star { text-decoration: none; color: #ff8b60 }
.stylesheet-customize-container { }
.stylesheet-customize-container textarea { margin: 0; padding: 0px; }
+.stylesheet-customize-container h2 { margin-top: 15px; margin-bottom: 10px; }
+
+.image-upload .new-image { margin-left: 20px }
+.image-upload table { margin-left: 20px; }
+.image-upload td,
+.image-upload th { vertical-align: top; }
+.image-upload span { padding-left: 5px; }
+
+
+ul#image-preview-list {
+ margin: 20px 320px 20px 20px;
+ font-size:larger;
+}
+ul#image-preview-list li {
+ padding-bottom: 10px;
+ margin-bottom: 20px;
+ vertical-align: top;
+ width: 45%;
+ height: 100px;
+ float: left;
+ position: relative;
+}
+
+ul#image-preview-list .preview {
+ width: 100px;
+ float: left;
+ display: block;
+ text-align: center;
+ max-height: 100px;
+ overflow: hidden;
+}
+ul#image-preview-list .preview img {
+ max-width: 100px;
+ padding: auto;
+}
+ul#image-preview-list .description {
+ vertical-align: top;
+ margin-left: 105px;
+}
+ul#image-preview-list .description pre {
+ display: inline;
+ padding: 5px;
+}
+
.sheets { margin-right: 315px; }
.sheets .col { float: left; }
@@ -1311,7 +1355,6 @@ a.star { text-decoration: none; color: #ff8b60 }
.sheets .btn { margin-left: 0px; margin-right: 5px; }
.sheets .btn.right { float: right; margin-right: 3px;}
-.stylesheet-customize-container h2 { margin-top: 15px; margin-bottom: 10px; }
#validation-errors {
margin-left: 40px;
@@ -1348,7 +1391,7 @@ a.star { text-decoration: none; color: #ff8b60 }
text-align: right;
}
-#header-img-preview-container {
+#img-preview-container {
border-width: .2em;
border-style: dashed;
border-color: lightgray;
diff --git a/r2/r2/public/static/reddit_piece.js b/r2/r2/public/static/reddit_piece.js
index ed1c30af8..40f88cfec 100644
--- a/r2/r2/public/static/reddit_piece.js
+++ b/r2/r2/public/static/reddit_piece.js
@@ -111,7 +111,7 @@ function untoggle(execute, parent, oldtext, type) {
if(execute) {
var form = parent.parentNode;
var uh = modhash; //global
- parent.innerHTML = form.executed.value;
+ parent.innerHTML = form.executed.value || oldtext;
if(type == 'del') {
post_form(form, type, function() {return ""});
diff --git a/r2/r2/public/static/utils.js b/r2/r2/public/static/utils.js
index acf8e6ec4..d6687bdf8 100644
--- a/r2/r2/public/static/utils.js
+++ b/r2/r2/public/static/utils.js
@@ -164,25 +164,6 @@ function tup(x) {
return x;
}
-function stylesheetSave(form, formID) {
- form.op.value = "save";
- return post_form(form, formID);
-}
-
-function stylesheetPreview(formID, textboxID) {
- var form = document.getElementById(formID);
- form.op.value = "preview";
- post_form(form, formID);
-
- applyStylesheetFromTextbox(textboxID);
-}
-
-function applyStylesheetFromTextbox(textboxID) {
- var textbox = document.getElementById(textboxID);
- var cssText = textbox.value;
- return applyStylesheet(cssText);
-}
-
function applyStylesheet(cssText) {
/* also referred to in the reddit.html template, for the name of the
stylesheet set for this reddit. These must be in sync, because
@@ -297,46 +278,100 @@ function gotoTextboxLine(textboxID, lineNo) {
}
}
-function uploadHeaderImage(status) {
- var form = $('upload-header-image');
- form.op.value = 'upload';
- $('img-status').innerHTML = status;
- show('img-status');
+function insertAtCursor(textbox, value) {
+ textbox = $(textbox);
+ var orig_pos = textbox.scrollTop;
- form.submit();
+ if (document.selection) { /* IE */
+ textbox.focus();
+ var sel = document.selection.createRange();
+ sel.text = value;
+ }
+ else if (textbox.selectionStart) {
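+        /* browsers with selectionStart (Gecko/WebKit): splice the value in at the caret */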
+ var prev_start = textbox.selectionStart;
+ textbox.value =
+ textbox.value.substring(0, textbox.selectionStart) +
+ value +
+ textbox.value.substring(textbox.selectionEnd, textbox.value.length);
+ prev_start += value.length;
+ textbox.setSelectionRange(prev_start, prev_start);
+ } else {
+ textbox.value += value;
+ }
- return false;
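+    /* restore the scroll position saved above so the textarea doesn't jump */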
+ if(textbox.scrollHeight) {
+ textbox.scrollTop = orig_pos;
+ }
+
+ textbox.focus();
}
-function deleteHeaderImage(status) {
- var form = $('upload-header-image');
- form.reset();
- form.op.value = 'delete';
+
+function upload_image(form, status) {
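+    /* flash the upload status; returning true lets the form submission proceed to its hidden iframe */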
$('img-status').innerHTML = status;
show('img-status');
-
- form.submit();
-
- return false;
+ return true;
}
-function completedUploadHeaderImage(status,img_src,op) {
+
+function completedUploadImage(status, img_src, name, errors) {
show('img-status');
$('img-status').innerHTML = status;
- $('upload-header-image').reset();
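+    /* errors is a list of [id, message] pairs from UploadedImage: show elements with messages, hide the rest */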
+ for(var i = 0; i < errors.length; i++) {
+ var e = $(errors[i][0]);
+ if( errors[i][1]) {
+ show(e);
+ e.innerHTML = errors[i][1];
+ }
+ else {
+ hide(e);
+ }
+
+ }
- $('header-img').src = img_src;
+ if(img_src) {
+ $('upload-image').reset();
+ hide('submit-header-img');
+ if (!name) {
+ $('header-img').src = img_src;
+ $('img-preview').src = img_src;
+ show('delete-img');
+ hide('submit-img');
+ show('img-preview-container');
+ } else {
+ var img = $("img-preview_" + name);
+ if(img) {
+                /* Because IE isn't smart enough to eval "!img" */
+ }
+ else {
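+                /* first upload under this name: clone the prototype <li>, re-id it, and register the label */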
+ var ul = $("image-preview-list");
+ var li = $("img-prototype").cloneNode(true);
+ li.id = "img-li_";
+ ul.appendChild(li);
+ re_id_node(li, ''+name);
+ var name_b = $("img_name_" + name);
+ if(name_b) {
+ name_b.innerHTML = name;
+ }
+ var label = $("img_url_" + name);
+ if(label) {
+ label.innerHTML = "url(%%" + name + "%%)";
+ }
+ img = $("img-preview_" + name);
- if(op == 'delete') {
- hide('delete-header-img');
- hide('header-img-preview-container');
- } else {
- $('header-img-preview').src = img_src;
- show('delete-header-img');
- hide('submit-header-img');
- show('header-img-preview-container');
+ var sel_list = $('old-names');
+ if (sel_list) {
+ var opt = document.createElement('option');
+ opt.innerHTML = name;
+ sel_list.appendChild(opt);
+ }
+ }
+ img.src = img_src;
+ $("img-preview-a_" + name).href = img_src;
+ show("img-li_" + name);
+ }
}
}
@@ -392,7 +427,7 @@ function handleResponse(action) {
if(r.call) {
var calls = r.call;
            for(var i=0; i < calls.length; i++) {
-
+
diff --git a/r2/r2/templates/createsubreddit.html b/r2/r2/templates/createsubreddit.html
index f79718739..292fbef54 100644
--- a/r2/r2/templates/createsubreddit.html
+++ b/r2/r2/templates/createsubreddit.html
@@ -275,13 +275,11 @@ function update_title() {
-