mirror of
https://github.com/reddit-archive/reddit.git
synced 2026-04-27 03:00:12 -04:00
Add unique hashed filename json mapping file for serving static files.
When make is run, unique filenames are generated for JavaScript and CSS files in /public/static/. Filenames are generated by appending a 64-bit truncated SHA-1 hash, in web-safe base64 format, before the extension. The filename mapping is stored in /public/static/names.json. Unique filenames are now inserted by the static() template helper. Hashed filenames can be served by Pylons, and hashed names are checked against the names.json mapping when requested.
This commit is contained in:
18
r2/Makefile
18
r2/Makefile
@@ -38,6 +38,7 @@ CAT=cat
|
||||
PYTHON=python2.6
|
||||
JS_COMPRESS = paster run standalone $(package)/lib/js.py -c "build_reddit_js()"
|
||||
CSS_COMPRESS = $(SED) -e 's/ \+/ /' -e 's/\/\*.*\*\///g' -e 's/: /:/' | grep -v "^ *$$"
|
||||
UPDATE_NAMES = $(PYTHON) $(package)/lib/static.py
|
||||
|
||||
# If the admin codebase is installed, get its path so that we can build ini
|
||||
# files against the primary production.ini
|
||||
@@ -52,11 +53,12 @@ CSSTARGETS = $(foreach css, $(css_targets), $(static_dir)/$(css))
|
||||
SPRITES = $(static_dir)/sprite.png
|
||||
MAINCSS = $(foreach css, $(main_css), $(static_dir)/$(css))
|
||||
RTLCSS = $(CSSTARGETS:.css=-rtl.css) $(MAINCSS:.css=-rtl.css)
|
||||
NAMES = $(static_dir)/names.json
|
||||
|
||||
PYX_FILES := $(shell find . -name \*.pyx)
|
||||
PYXSO_FILES := $(PYX_FILES:.pyx=.so)
|
||||
|
||||
MD5S = $(JSOUTPUTS) $(CSSTARGETS) $(MAINCSS) $(RTLCSS)
|
||||
NAMED = $(JSOUTPUTS) $(CSSTARGETS) $(MAINCSS) $(RTLCSS)
|
||||
|
||||
INIUPDATE = $(wildcard *.update)
|
||||
INIS = $(INIUPDATE:.update=.ini)
|
||||
@@ -71,14 +73,12 @@ else
|
||||
endif
|
||||
|
||||
|
||||
all: pyx static $(INIS)
|
||||
all: pyx static names $(INIS)
|
||||
|
||||
.PHONY: pyx js css rtl static md5 clean clean_static all
|
||||
.PHONY: pyx js css rtl static names clean clean_static all
|
||||
|
||||
md5:
|
||||
for name in $(MD5S) ; do \
|
||||
cat $$name | openssl md5 > $$name.md5 ; \
|
||||
done
|
||||
$(NAMES): static
|
||||
$(UPDATE_NAMES) $(NAMES) $(NAMED)
|
||||
|
||||
$(JSTARGETS): $(JSSOURCES)
|
||||
$(JS_COMPRESS)
|
||||
@@ -104,7 +104,7 @@ $(PYXSO_FILES): %.so : %.pyx
|
||||
|
||||
pyx: $(PYXSO_FILES)
|
||||
|
||||
static: js css rtl md5
|
||||
static: js css rtl
|
||||
|
||||
js: pyx $(JSTARGETS)
|
||||
|
||||
@@ -112,6 +112,8 @@ css: $(CSSTARGETS) $(MAINCSS)
|
||||
|
||||
rtl: $(RTLCSS)
|
||||
|
||||
names: $(NAMES)
|
||||
|
||||
clean: clean_static
|
||||
rm -f $(PYXSO_FILES)
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
"""Pylons middleware initialization"""
|
||||
from paste.cascade import Cascade
|
||||
from paste.registry import RegistryManager
|
||||
from paste.urlparser import StaticURLParser
|
||||
from paste.urlparser import URLParser, StaticURLParser
|
||||
from paste.deploy.converters import asbool
|
||||
from paste.gzipper import make_gzip_middleware
|
||||
|
||||
@@ -83,6 +83,25 @@ def error_mapper(code, message, environ, global_conf=None, **kw):
|
||||
url = '/error/document/?%s' % (urllib.urlencode(d))
|
||||
return url
|
||||
|
||||
class StaticURLHashMiddleware(URLParser):
    """Middleware for handling uniquely hashed static files.

    Filenames are matched against the g.static_names mapping, which is loaded
    from names.json in the static dir. If a valid unique static filename is
    requested, the hash is stripped from the filename before the request is
    passed through to the StaticURLParser ``app``.
    """

    def __init__(self, app):
        # NOTE(review): URLParser.__init__ is deliberately not invoked here;
        # this class only wraps ``app`` as plain WSGI middleware.
        self.app = app

    def __call__(self, environ, start_response):
        globals_obj = config['pylons.g']
        requested = environ.get('PATH_INFO', '')
        if requested.startswith(globals_obj.static_path):
            directory, basename = os.path.split(requested)
            # Map the hashed filename back to the real on-disk name, if known.
            original = globals_obj.static_names_rev.get(basename)
            if original is not None:
                environ['PATH_INFO'] = os.path.join(directory, original)
        return self.app(environ, start_response)
|
||||
|
||||
class DebugMiddleware(object):
|
||||
def __init__(self, app, keyword):
|
||||
self.app = app
|
||||
@@ -598,6 +617,7 @@ def make_app(global_conf, full_stack=True, **app_conf):
|
||||
# Static files
|
||||
javascripts_app = StaticJavascripts()
|
||||
static_app = StaticURLParser(config['pylons.paths']['static_files'])
|
||||
static_app = StaticURLHashMiddleware(static_app)
|
||||
app = Cascade([static_app, javascripts_app, app])
|
||||
|
||||
app = make_gzip_middleware(app, app_conf)
|
||||
|
||||
@@ -25,6 +25,7 @@ import pytz, os, logging, sys, socket, re, subprocess, random
|
||||
import signal
|
||||
from datetime import timedelta, datetime
|
||||
from urlparse import urlparse
|
||||
import json
|
||||
from pycassa.pool import ConnectionPool as PycassaConnectionPool
|
||||
from r2.lib.cache import LocalCache, SelfEmptyingCache
|
||||
from r2.lib.cache import CMemcache, StaleCacheChain
|
||||
@@ -278,18 +279,17 @@ class Globals(object):
|
||||
|
||||
self.secure_domains = set([urlparse(self.payment_domain).netloc])
|
||||
|
||||
# load the md5 hashes of files under static
|
||||
# load the unique hashed names of files under static
|
||||
static_files = os.path.join(self.paths.get('static_files'), 'static')
|
||||
self.static_md5 = {}
|
||||
if os.path.exists(static_files):
|
||||
for f in os.listdir(static_files):
|
||||
if f.endswith('.md5'):
|
||||
key = f[0:-4]
|
||||
f = os.path.join(static_files, f)
|
||||
with open(f, 'r') as handle:
|
||||
md5 = handle.read().strip('\n')
|
||||
self.static_md5[key] = md5
|
||||
|
||||
names_file_path = os.path.join(static_files, 'names.json')
|
||||
if os.path.exists(names_file_path):
|
||||
with open(names_file_path) as handle:
|
||||
self.static_names = json.load(handle)
|
||||
# Generate a reverse mapping dictionary so that we can check
|
||||
# unique filenames in StaticURLHashMiddleware
|
||||
self.static_names_rev = dict((v, k) for k, v in self.static_names.iteritems())
|
||||
else:
|
||||
self.static_names = {}
|
||||
|
||||
#set up the logging directory
|
||||
log_path = self.log_path
|
||||
|
||||
37
r2/r2/lib/static.py
Executable file
37
r2/r2/lib/static.py
Executable file
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env python
"""Generate unique, content-hashed names for static files.

Usage: static.py <names_file> [static file paths...]

The mapping of original name -> hashed name is written as JSON to
``names_file``; an existing mapping is updated in place.
"""
import sys
import os
import hashlib
import json
import base64


def generate_static_name(name, base=None, shorthash=None):
    """Generate a unique filename for ``name``.

    Unique filenames are generated by base64-encoding (web-safe alphabet,
    padding stripped) the first 64 bits of the file's SHA-1 digest and
    inserting that string into the filename just before the extension.

    ``base`` is the directory ``name`` is relative to.  ``shorthash``,
    when supplied, is used verbatim and the file is not read at all
    (previously the argument was accepted but silently ignored, and a
    falsy ``base`` left ``path`` undefined, raising NameError).
    """
    if shorthash is None:
        path = os.path.join(base, name) if base else name
        # Read as bytes and close the handle promptly; hashlib requires
        # bytes input under Python 3.
        with open(path, 'rb') as handle:
            digest = hashlib.sha1(handle.read()).digest()
        shorthash = base64.urlsafe_b64encode(digest[:8]).rstrip(b'=').decode('ascii')
    root, ext = os.path.splitext(name)
    return root + '.' + shorthash + ext


def update_static_names(names_file, files):
    """Generate a unique file name mapping for ``files`` and write it to a
    JSON file at ``names_file``, merging with any existing mapping there."""
    if os.path.exists(names_file):
        with open(names_file) as handle:
            names = json.load(handle)
    else:
        names = {}

    # Keys are stored relative to the directory holding names.json.
    base = os.path.dirname(names_file)
    for path in files:
        name = os.path.relpath(path, base)
        names[name] = generate_static_name(name, base)

    json_enc = json.JSONEncoder(indent=2, sort_keys=True)
    with open(names_file, 'w') as handle:
        handle.write(json_enc.encode(names))


if __name__ == "__main__":
    update_static_names(sys.argv[1], sys.argv[2:])
|
||||
@@ -31,7 +31,7 @@ import random
|
||||
from pylons import g, c
|
||||
from pylons.i18n import _, ungettext
|
||||
|
||||
def static(path):
    """
    Simple static file maintainer which automatically paths and
    versions files being served out of static.

    Filenames are rewritten to their unique content-hashed equivalents
    via the g.static_names mapping (loaded from names.json).  In
    development (g.uncompressedJS) a random query string is appended
    instead, to bust browser caches on every request.
    """
    dirname, fname = os.path.split(path)
    # drop any query string already attached to the filename
    fname = fname.split('?')[0]
    query = ""

    # if uncompressed, randomize a url query to bust caches
    if g.uncompressedJS:
        query = "?v=" + str(random.random()).split(".")[-1]

    # swap in the unique hashed filename when one is known
    if fname in g.static_names:
        fname = g.static_names[fname]
    path = os.path.join(dirname, fname)

    # don't mangle paths that already carry a directory component
    if dirname:
        return path + query

    # uncompressed js/css live under per-type subdirectories of static
    if g.uncompressedJS:
        extension = path.split(".")[1:]
        if extension and extension[-1] in ("js", "css"):
            return os.path.join(c.site.static_path, extension[-1], path) + query

    return os.path.join(c.site.static_path, path) + query
|
||||
|
||||
|
||||
external_resources = {
|
||||
|
||||
Reference in New Issue
Block a user