diff --git a/r2/example.ini b/r2/example.ini
index 4c174f937..775ce1b1b 100644
--- a/r2/example.ini
+++ b/r2/example.ini
@@ -19,8 +19,9 @@ rec_cache = 127.0.0.1:11311
tracker_url =
adtracker_url =
clicktracker_url =
+traffic_url =
-databases = main, comment, vote, change, email, neuter, query_queue
+databases = main, comment, vote, change, email, query_queue
#db name db host user, pass
main_db = newreddit, 127.0.0.1, ri, password
@@ -29,7 +30,6 @@ comment2_db = newreddit, 127.0.0.1, ri, password
vote_db = newreddit, 127.0.0.1, ri, password
change_db = changed, 127.0.0.1, ri, password
email_db = email, 127.0.0.1, ri, password
-neuter_db = neuter, 127.0.0.1, ri, password
query_queue_db = query_queue, 127.0.0.1, ri, password
db_app_name = reddit
@@ -55,7 +55,7 @@ db_table_friend = relation, account, account, comment
db_table_vote_account_link = relation, account, link, vote
db_table_vote_account_comment = relation, account, comment, vote
-db_table_inbox_account_comment = relation, account, comment
+db_table_inbox_account_comment = relation, account, comment, main
db_table_inbox_account_message = relation, account, message, main
db_table_report_account_link = relation, account, link, main
diff --git a/r2/r2/config/routing.py b/r2/r2/config/routing.py
index 7d6b6e492..18b5012b0 100644
--- a/r2/r2/config/routing.py
+++ b/r2/r2/config/routing.py
@@ -44,6 +44,7 @@ def make_map(global_conf={}, app_conf={}):
mc('/search', controller='front', action='search')
mc('/sup', controller='front', action='sup')
+ mc('/traffic', controller='front', action='site_traffic')
mc('/about/:location', controller='front',
action='editreddit', location = 'about')
@@ -96,6 +97,8 @@ def make_map(global_conf={}, app_conf={}):
action = 'related', title=None)
mc('/details/:article/:title', controller='front',
action = 'details', title=None)
+ mc('/traffic/:article/:title', controller='front',
+ action = 'traffic', title=None)
mc('/comments/:article/:title/:comment', controller='front',
action= 'comments', title=None, comment = None)
diff --git a/r2/r2/controllers/api.py b/r2/r2/controllers/api.py
index 62ae4844f..2bceaa6b0 100644
--- a/r2/r2/controllers/api.py
+++ b/r2/r2/controllers/api.py
@@ -1228,7 +1228,7 @@ class ApiController(RedditController):
@noresponse(VAdmin(),
tr = VTranslation("id"))
- def POST_enable_lang(self, lang):
+ def POST_enable_lang(self, tr):
if tr:
tr._is_enabled = True
diff --git a/r2/r2/controllers/front.py b/r2/r2/controllers/front.py
index 69b1e62a5..342712f48 100644
--- a/r2/r2/controllers/front.py
+++ b/r2/r2/controllers/front.py
@@ -290,6 +290,8 @@ class FrontController(RedditController):
# listing = LinkListing(builder)
# pane = listing.listing()
pane = InfoBar(message = "There doesn't seem to be anything here.")
+ elif c.user_is_admin and location == 'traffic':
+ pane = RedditTraffic()
else:
return self.abort404()
@@ -595,3 +597,17 @@ class FrontController(RedditController):
return c.response
else:
return self.abort404()
+
+
+ @validate(VAdmin(),
+ article = VLink('article'))
+ def GET_traffic(self, article):
+ res = LinkInfoPage(link = article,
+ comment = None,
+ content = PromotedTraffic(article)).render()
+ return res
+
+ @validate(VAdmin())
+ def GET_site_traffic(self):
+ return BoringPage("traffic",
+ content = RedditTraffic()).render()
diff --git a/r2/r2/controllers/reddit_base.py b/r2/r2/controllers/reddit_base.py
index 9ea4a44ba..b2eb7f260 100644
--- a/r2/r2/controllers/reddit_base.py
+++ b/r2/r2/controllers/reddit_base.py
@@ -40,7 +40,7 @@ from r2.lib.jsontemplates import api_type
from copy import copy
from Cookie import CookieError
from datetime import datetime
-import sha, simplejson
+import sha, simplejson, locale
from urllib import quote, unquote
from r2.lib.tracking import encrypt, decrypt
@@ -293,6 +293,10 @@ def set_host_lang():
c.host_lang = host_lang
def set_iface_lang():
+ # TODO: internationalize. This seems the best place to put this
+ # (used for formatting of large numbers to break them up with ",").
+ # unfortunately, not directly compatible with gettext
+ locale.setlocale(locale.LC_ALL, "en_US")
lang = ['en']
# GET param wins
if c.host_lang:
diff --git a/r2/r2/controllers/validator/validator.py b/r2/r2/controllers/validator/validator.py
index d33a04e7c..7c718ecaf 100644
--- a/r2/r2/controllers/validator/validator.py
+++ b/r2/r2/controllers/validator/validator.py
@@ -836,10 +836,10 @@ class VCnameDomain(Validator):
self.set_error(errors.BAD_CNAME)
class VTranslation(Validator):
- def run(self):
+ def run(self, param):
from r2.lib.translation import Translator
- if Translator.exists(self.param):
- return Translator(locale = self.param)
+ if Translator.exists(param):
+ return Translator(locale = param)
# NOTE: make sure *never* to have res check these are present
# otherwise, the response could contain reference to these errors...!
diff --git a/r2/r2/lib/menus.py b/r2/r2/lib/menus.py
index abb0d63a9..f79867301 100644
--- a/r2/r2/lib/menus.py
+++ b/r2/r2/lib/menus.py
@@ -113,6 +113,7 @@ menu = MenuHandler(hot = _('hot'),
# comments
related = _("related"),
details = _("details"),
+ traffic = _("traffic"),
# reddits
home = _("home"),
diff --git a/r2/r2/lib/organic.py b/r2/r2/lib/organic.py
index d9fc9bd17..b60c5fd4e 100644
--- a/r2/r2/lib/organic.py
+++ b/r2/r2/lib/organic.py
@@ -108,7 +108,7 @@ def cached_organic_links(user_id, langs):
link_names.sort(key = lambda n: sr_count[n][0])
#potentially add a up and coming link
- if random.choice((True, False)):
+ if random.choice((True, False)) and sr_ids:
sr = Subreddit._byID(random.choice(sr_ids))
items = only_recent(get_hot(sr))
if items:
diff --git a/r2/r2/lib/pages/graph.py b/r2/r2/lib/pages/graph.py
new file mode 100644
index 000000000..9af3f4150
--- /dev/null
+++ b/r2/r2/lib/pages/graph.py
@@ -0,0 +1,226 @@
+# The contents of this file are subject to the Common Public Attribution
+# License Version 1.0. (the "License"); you may not use this file except in
+# compliance with the License. You may obtain a copy of the License at
+# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
+# License Version 1.1, but Sections 14 and 15 have been added to cover use of
+# software over a computer network and provide for limited attribution for the
+# Original Developer. In addition, Exhibit A has been modified to be consistent
+# with Exhibit B.
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
+# the specific language governing rights and limitations under the License.
+#
+# The Original Code is Reddit.
+#
+# The Original Developer is the Initial Developer. The Initial Developer of the
+# Original Code is CondeNet, Inc.
+#
+# All portions of the code written by CondeNet are Copyright (c) 2006-2009
+# CondeNet, Inc. All Rights Reserved.
+################################################################################
+import math, datetime, locale
+
+def google_extended(n):
+ """Computes the google extended encoding of an int in [0, 4096)"""
+ numerals = ("ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ "abcdefghijklmnopqrstuvwxyz"
+ "0123456789-.")
+ base = len(numerals)
+ assert(0 <= n < base ** 2)
+ q, r = divmod(n, base)
+ return numerals[q] + numerals[r]
+
+def make_date_axis_labels(series):
+ """
+ assuming a uniform date series, generate a suitable axis label
+ """
+ _max = max(series)
+ _min = min(series)
+ delta = _max - _min
+ if delta < datetime.timedelta(0, 0.5 * 86400):
+ test = lambda cur, prev: cur.hour != prev.hour and cur.hour % 3 == 0
+ format = "%H:00"
+ elif delta < datetime.timedelta(2):
+ test = lambda cur, prev: cur.hour != prev.hour and cur.hour % 6 == 0
+ format = "%H:00"
+ elif delta < datetime.timedelta(7):
+ test = lambda cur, prev: cur.day != prev.day
+ format = "%d %b"
+ elif delta < datetime.timedelta(14):
+ test = lambda cur, prev: cur.day != prev.day and cur.day % 2 == 0
+ format = "%d %b"
+ elif delta < datetime.timedelta(30):
+ test = lambda cur, prev: (cur.day != prev.day) and cur.weekday() == 6
+ format = "%d %b"
+ else:
+ test = lambda cur, prev: (cur.month != prev.month)
+ format = "%b"
+ new_series = []
+ prev = None
+ for s in series:
+ if prev and test(s, prev):
+ new_series.append(s.strftime(format))
+ else:
+ new_series.append("")
+ prev = s
+ return new_series
+
+
+class DataSeries(list):
+ def __init__(self, data):
+ list.__init__(self, data)
+
+ def low_precision_max(self, precision = 2):
+ """
+ Compute the max of the data set, including at most 'precision'
+ units of decimal precision in the result (e.g., 9893 -> 9900 if
+ precision = 2)
+ """
+ _max = float(max(self))
+ if _max == 0:
+ return 0
+ scale = math.log10(_max)
+ scale = 10 ** (math.ceil(scale) - precision)
+ return math.ceil(_max / scale) * scale
+
+ def normalize(self, norm_max = 100, precision = 2, _max = None):
+ _min = min(self)
+ _max = _max or max(self)
+ if _min == _max:
+ return DataSeries(int(norm_max)/2.
+ for i in xrange(len(self)))
+ else:
+ return DataSeries(min(int(x * float(norm_max) / _max), norm_max -1)
+ for x in self)
+
+ def toBarY(self):
+ data = []
+ for i in xrange(len(self)):
+ data += [self[i], self[i]]
+ return DataSeries(data)
+
+ def toBarX(self):
+ delta = self[-1] - self[-2]
+ data = self.toBarY()
+ return DataSeries(data[1:] + [data[-1] + delta])
+
+ def is_regular(self):
+ return all(self[i] - self[i-1] == self[1] - self[0]
+ for i in xrange(1, len(self)))
+
+ def to_google_extended(self, precision = 1, _max = None):
+ if _max is None:
+ _max = self.low_precision_max(precision = precision)
+ norm_max = 4096
+ new = self.normalize(norm_max = norm_max, precision = precision,
+ _max = _max)
+ return _max, "".join(map(google_extended, new))
+
+class LineGraph(object):
+ """
+ General line chart class for plotting xy line graphs.
+
+ data is passed in as a series of tuples of the form (x, y_1, ...,
+ y_n) and converted to a single xdata DataSeries, and a list of
+ ydata DataSeries elements. The intention of this class is to be
+ able to handle multiple final plot representations, though
+ currently only google charts is available.
+
+ At some point, it also might make sense to connect this more
+ closely with numpy.
+
+ """
+ google_api = "http://chart.apis.google.com/chart"
+
+ def __init__(self, xydata, colors = ("FF4500", "336699"),
+ width = 350, height = 200):
+
+ series = zip(*xydata)
+
+ self.xdata = DataSeries(series[0])
+ self.ydata = map(DataSeries, series[1:])
+ self.width = width
+ self.height = height
+ self.colors = colors
+
+ def google_chart(self, multiy = True, ylabels = [], title = "",
+ bar_fmt = True):
+ xdata, ydata = self.xdata, self.ydata
+
+ # Bar format makes the line chart look like it is a series of
+ # contiguous bars without the boundary line between each bar.
+ if bar_fmt:
+ xdata = DataSeries(range(len(self.xdata))).toBarX()
+ ydata = [y.toBarY() for y in self.ydata]
+
+ # TODO: currently we are only supporting time series. Make general
+ xaxis = make_date_axis_labels(self.xdata)
+
+ # Convert x data into google extended text format
+ xmax, xdata = xdata.to_google_extended(_max = max(xdata))
+ ymax0 = None
+
+ # multiy <=> 2 y axes with independent scaling. not multiy
+ # means we need to know what the global max is over all y data
+ multiy = multiy and len(ydata) == 2
+ if not multiy:
+ ymax0 = max(y.low_precision_max() for y in ydata)
+
+ def make_labels(i, m, p = 4):
+ return (("%d:|" % i) +
+ '|'.join(locale.format('%d', i * m / p, True)
+ for i in range(p+1)))
+
+ # data stores a list of xy data strings in google's format
+ data = []
+ labels = []
+ for i in range(len(ydata)):
+ ymax, y = ydata[i].to_google_extended(_max = ymax0)
+ data.append(xdata + ',' + y)
+ if multiy:
+ labels.append(make_labels(i,ymax))
+ if not multiy:
+ labels.append(make_labels(0,ymax0))
+
+ if multiy:
+ labels.append('2:|' + '|'.join(xaxis))
+ axes = 'y,r,x'
+ if len(self.colors) > 1:
+ ycolor = "0,%s|1,%s" % (self.colors[0], self.colors[1])
+ else:
+ ycolor = ""
+ else:
+ labels.append('1:|' + '|'.join(xaxis))
+ axes = 'y,x'
+ ycolor = ""
+ if ylabels:
+ axes += ',t'
+ labels.append('%d:|' % (len(axes)/2) +
+ ('|' if multiy else ', ').join(ylabels))
+ if title:
+ axes += ',t'
+ labels.append('%d:||%s|' % (len(axes)/2, title))
+
+ if len(self.colors) >= len(self.ydata):
+ colors = ",".join(self.colors[:len(self.ydata)])
+ else:
+ colors = ""
+ args = dict(# chart type is xy
+ cht = 'lxy',
+ # chart size
+ chs = "%sx%s" % (self.width, self.height),
+ # which axes are labeled
+ chxt= axes,
+ # axis labels
+ chxl = '|'.join(labels),
+ # chart data is in extended format
+ chd = 'e:' + ','.join(data),
+ chco = colors,
+ chxs = ycolor
+ )
+
+ return (self.google_api +
+ '?' + '&'.join('%s=%s' % (k, v) for k, v in args.iteritems()))
+
+
diff --git a/r2/r2/lib/pages/pages.py b/r2/r2/lib/pages/pages.py
index 2cb556efe..10f635d0a 100644
--- a/r2/r2/lib/pages/pages.py
+++ b/r2/r2/lib/pages/pages.py
@@ -20,7 +20,7 @@
# CondeNet, Inc. All Rights Reserved.
################################################################################
from r2.lib.wrapped import Wrapped, NoTemplateFound
-from r2.models import IDBuilder, LinkListing, Account, Default, FakeSubreddit, Subreddit
+from r2.models import IDBuilder, LinkListing, Account, Default, FakeSubreddit, Subreddit, Friends, All, Sub, NotFound, DomainSR
from r2.config import cache
from r2.lib.jsonresponse import json_respond
from r2.lib.jsontemplates import is_api
@@ -28,6 +28,7 @@ from pylons.i18n import _
from pylons import c, request, g
from pylons.controllers.util import abort
+from r2.lib.traffic import load_traffic, load_summary
from r2.lib.captcha import get_iden
from r2.lib.filters import spaceCompress, _force_unicode, _force_utf8
from r2.lib.menus import NavButton, NamedButton, NavMenu, PageNameNav, JsButton
@@ -35,7 +36,8 @@ from r2.lib.menus import SubredditButton, SubredditMenu, menu
from r2.lib.strings import plurals, rand_strings, strings
from r2.lib.utils import title_to_url, query_string, UrlParser, to_js
from r2.lib.template_helpers import add_sr, get_domain
-import sys, random
+import sys, random, datetime, locale, calendar
+import graph
datefmt = _force_utf8(_('%d %b %Y'))
@@ -222,7 +224,10 @@ class Reddit(Wrapped):
if c.user_is_sponsor:
more_buttons.append(NamedButton('promote'))
-
+
+ if c.user_is_admin:
+ more_buttons.append(NamedButton('traffic'))
+
toolbar = [NavMenu(main_buttons, type='tabmenu')]
if more_buttons:
toolbar.append(NavMenu(more_buttons, title=menu.more, type='tabdrop'))
@@ -265,6 +270,8 @@ class SubredditInfoBar(Wrapped):
buttons.append(NamedButton('edit'))
buttons.extend([NavButton(menu.banusers, 'banned'),
NamedButton('spam')])
+ if c.user_is_admin:
+ buttons.append(NamedButton('traffic'))
return [NavMenu(buttons, type = "flatlist", base_path = "/about/")]
class SideBox(Wrapped):
@@ -450,6 +457,8 @@ class LinkInfoPage(Reddit):
if c.user_is_admin:
buttons += [info_button('details')]
+ if self.link.promoted is not None:
+ buttons += [info_button('traffic')]
toolbar = [NavMenu(buttons, base_path = "", type="tabmenu")]
@@ -1182,7 +1191,22 @@ class PromotePage(Reddit):
class PromotedLinks(Wrapped):
def __init__(self, current_list, *a, **kw):
self.things = current_list
+
+ self.recent = dict(load_summary("thing"))
+ if self.recent:
+ from r2.models import Link
+ # TODO: temp hack until we find place for builder_wrapper
+ from r2.controllers.listingcontroller import ListingController
+ builder = IDBuilder(self.recent.keys(),
+ wrap = ListingController.builder_wrapper)
+ link_listing = LinkListing(builder, nextprev=False).listing()
+
+ for t in link_listing.things:
+ self.recent[t._fullname].insert(0, t)
+
+ self.recent = self.recent.values()
+ self.recent.sort(key = lambda x: x[0]._date)
Wrapped.__init__(self, datefmt = datefmt, *a, **kw)
class PromoteLinkForm(Wrapped):
@@ -1193,3 +1217,156 @@ class PromoteLinkForm(Wrapped):
timedeltatext = timedeltatext,
listing = listing,
*a, **kw)
+
+class Traffic(Wrapped):
+ @staticmethod
+ def slice_traffic(traffic, *indices):
+ return [[a] + [b[i] for i in indices] for a, b in traffic]
+
+
+class PromotedTraffic(Traffic):
+ """
+ Traffic page for a promoted link, including 2 graphs (one for
+ impressions and one for clicks with uniques on each plotted in
+ multiy format) and a table of the data.
+ """
+ def __init__(self, thing):
+ self.thing = thing
+ d = thing._date.astimezone(g.tz)
+ d = d.replace(minute = 0, second = 0, microsecond = 0)
+
+ until = thing.promote_until
+ now = datetime.datetime.now(g.tz)
+ if not until:
+ until = d + datetime.timedelta(1)
+ if until > now:
+ until = now
+
+ self.traffic = load_traffic('hour', "thing", thing._fullname,
+ start_time = d, stop_time = until)
+
+ imp = self.slice_traffic(self.traffic, 0, 1)
+
+ if len(imp) > 2:
+ imp_total = locale.format('%d', sum(x[2] for x in imp), True)
+ chart = graph.LineGraph(imp)
+ self.imp_graph = chart.google_chart(ylabels = ['uniques', 'total'],
+ title = ("impressions (%s)" %
+ imp_total))
+
+ cli = self.slice_traffic(self.traffic, 2, 3)
+ cli_total = locale.format('%d', sum(x[2] for x in cli), True)
+ chart = graph.LineGraph(cli)
+ self.cli_graph = chart.google_chart(ylabels = ['uniques', 'total'],
+ multiy = False,
+ title = ("clicks (%s)" %
+ cli_total))
+ else:
+ self.imp_graph = self.cli_graph = None
+ Wrapped.__init__(self)
+
+class RedditTraffic(Traffic):
+ """
+ fetches hourly and daily traffic for the current reddit. If the
+ current reddit is a default subreddit, fetches the site-wide
+ uniques and includes monthly totals. In this latter case, getter
+ methods are available for computing breakdown of site traffic by
+ reddit.
+ """
+ def __init__(self):
+ self.has_data = False
+ ivals = ["hour", "day"]
+ if c.default_sr:
+ ivals.append("month")
+
+ for ival in ivals:
+ if c.default_sr:
+ data = load_traffic(ival, "total", "")
+ else:
+ data = load_traffic(ival, "reddit", c.site.name)
+ if not data:
+ break
+ slices = [("uniques", (0, 2) if c.site.domain else (0,),
+ "FF4500"),
+ ("impressions", (1, 3) if c.site.domain else (1,),
+ "336699")]
+ setattr(self, ival + "_data", data)
+ for name, indx, color in slices:
+ data2 = self.slice_traffic(data, *indx)
+ chart = graph.LineGraph(data2, colors = [color, "B0B0B0"])
+ setattr(self, name + "_" + ival + "_chart", chart)
+ title = "%s by %s" % (name, ival)
+ res = chart.google_chart(ylabels = [name],
+ multiy = False,
+ title = title)
+ setattr(self, name + "_" + ival, res)
+ else:
+ self.has_data = True
+ if self.has_data:
+ imp_by_day = [[] for i in range(7)]
+ uni_by_day = [[] for i in range(7)]
+ dates = self.uniques_day_chart.xdata
+ uniques = self.uniques_day_chart.ydata[0]
+ imps = self.impressions_day_chart.ydata[0]
+ self.uniques_mean = sum(map(float, uniques))/len(uniques)
+ self.impressions_mean = sum(map(float, imps))/len(imps)
+ for i, d in enumerate(dates):
+ imp_by_day[d.weekday()].append(float(imps[i]))
+ uni_by_day[d.weekday()].append(float(uniques[i]))
+ self.uniques_by_dow = [sum(x)/len(x) for x in uni_by_day]
+ self.impressions_by_dow = [sum(x)/len(x) for x in imp_by_day]
+ Wrapped.__init__(self)
+
+ def reddits_summary(self):
+ if c.default_sr:
+ data = map(list, load_summary("reddit"))
+ data.sort(key = lambda x: x[1][1], reverse = True)
+ for d in data:
+ name = d[0]
+ for sr in (Default, Friends, All, Sub):
+ if name == sr.name:
+ name = sr
+ break
+ else:
+ try:
+ name = Subreddit._by_name(name)
+ except NotFound:
+ name = DomainSR(name)
+ d[0] = name
+ return data
+ return []
+
+ def monthly_summary(self):
+ """
+ Convenience method b/c it is bad form to do this much math
+ inside of a template.
+ """
+ res = []
+ if c.default_sr:
+ data = self.month_data
+ for x, (date, d) in enumerate(data):
+ res.append([("date", date.strftime("%Y-%m")),
+ ("", locale.format("%d", d[0], True)),
+ ("", locale.format("%d", d[1], True))])
+ last_d = data[x-1][1] if x else None
+ for i in range(2):
+ if x == 0:
+ res[-1].append(("",""))
+ elif x == len(data) - 1:
+ # project based on traffic so far
+ # totals are going to be up to yesterday
+ month_len = calendar.monthrange(date.year,
+ date.month)[1]
+ yday = (datetime.datetime.utcnow()
+ -datetime.timedelta(1)).day
+ scaled = float(d[i] * month_len) / yday
+ res[-1].append(("gray",
+ locale.format("%d", scaled, True)))
+ elif last_d and d[i] and last_d[i]:
+ f = 100 * (float(d[i])/last_d[i] - 1)
+
+ res[-1].append(("up" if f > 0 else "down",
+ "%5.2f%%" % f))
+ return res
+
+
diff --git a/r2/r2/lib/traffic.py b/r2/r2/lib/traffic.py
new file mode 100644
index 000000000..df9a64055
--- /dev/null
+++ b/r2/r2/lib/traffic.py
@@ -0,0 +1,72 @@
+# The contents of this file are subject to the Common Public Attribution
+# License Version 1.0. (the "License"); you may not use this file except in
+# compliance with the License. You may obtain a copy of the License at
+# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
+# License Version 1.1, but Sections 14 and 15 have been added to cover use of
+# software over a computer network and provide for limited attribution for the
+# Original Developer. In addition, Exhibit A has been modified to be consistent
+# with Exhibit B.
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
+# the specific language governing rights and limitations under the License.
+#
+# The Original Code is Reddit.
+#
+# The Original Developer is the Initial Developer. The Initial Developer of the
+# Original Code is CondeNet, Inc.
+#
+# All portions of the code written by CondeNet are Copyright (c) 2006-2009
+# CondeNet, Inc. All Rights Reserved.
+################################################################################
+from httplib import HTTPConnection
+from urlparse import urlparse
+from cPickle import loads
+from utils import query_string
+import os, socket
+from pylons import g
+from r2.lib.memoize import memoize, clear_memo
+
+def load_traffic_uncached(interval, what, iden,
+ start_time = None, stop_time = None,
+ npoints = None):
+ """
+ Fetches pickled traffic from the traffic server and returns it as a list.
+ On connection failure (or no data) returns an empty list.
+ """
+ def format_date(d):
+ if d.tzinfo is None:
+ d = d.replace(tzinfo = g.tz)
+ else:
+ d = d.astimezone(g.tz)
+ return ":".join(map(str, d.timetuple()[:6]))
+
+ traffic_url = os.path.join(g.traffic_url, interval, what, iden)
+ args = {}
+ if start_time:
+ args['start_time'] = format_date(start_time)
+ if stop_time:
+ args['stop_time'] = format_date(stop_time)
+ if npoints:
+ args['n'] = npoints
+ u = urlparse(traffic_url)
+ try:
+ conn = HTTPConnection(u.hostname, u.port)
+ conn.request("GET", u.path + query_string(args))
+ res = conn.getresponse()
+ res = loads(res.read()) if res.status == 200 else []
+ conn.close()
+ return res
+ except socket.error:
+ return []
+
+@memoize("cached_traffic", time = 60)
+def load_traffic(interval, what, iden,
+ start_time = None, stop_time = None,
+ npoints = None):
+ return load_traffic_uncached(interval, what, iden,
+ start_time = start_time, stop_time = stop_time,
+ npoints = npoints)
+
+def load_summary(what, interval = "month", npoints = 50):
+ return load_traffic(interval, "summary", what, npoints = npoints)
diff --git a/r2/r2/models/link.py b/r2/r2/models/link.py
index 9574b21c9..32d7229b2 100644
--- a/r2/r2/models/link.py
+++ b/r2/r2/models/link.py
@@ -47,7 +47,7 @@ class Link(Thing, Printable):
banned_before_moderator = False,
media_object = None,
has_thumbnail = False,
- promoted = False,
+ promoted = None,
promoted_subscribersonly = False,
promote_until = None,
promoted_by = None,
diff --git a/r2/r2/public/static/css/reddit.css b/r2/r2/public/static/css/reddit.css
index b08b4a3ae..45add90fd 100644
--- a/r2/r2/public/static/css/reddit.css
+++ b/r2/r2/public/static/css/reddit.css
@@ -1799,4 +1799,26 @@ ul#image-preview-list .description pre {
.subscription-box h1{ text-align: center; }
-.toggle.deltranslator-button { display: inline; }
\ No newline at end of file
+.toggle.deltranslator-button { display: inline; }
+
+.traffic-table {margin: 10px 20px; }
+.traffic-table a:hover { text-decoration: underline; }
+.traffic-table th { font-weight: bold; text-align: center;}
+.traffic-table th,
+.traffic-table td { padding: 0 5px; }
+.traffic-table td { text-align: right; }
+
+.traffic-table td.up { color: #FF8B60; }
+.traffic-table td.down { color: #336699; }
+.traffic-table td.gray { color: gray; font-style: italic; }
+
+.traffic-table tr.max { border: 2px solid #FF8B60; }
+.traffic-table tr.min { border: 2px solid #336699; }
+.traffic-table tr.odd { background-color: #E0E0E0; }
+.traffic-table tr.mean { font-style: italic; border-top: 1px solid; }
+
+.traffic-graph {
+ padding: 10px;
+ border: 1px solid #B0B0B0;
+ margin-left: 10px;
+ margin-bottom: 10px; }
\ No newline at end of file
diff --git a/r2/r2/templates/promotedlinks.html b/r2/r2/templates/promotedlinks.html
index 36126b54c..175d062a7 100644
--- a/r2/r2/templates/promotedlinks.html
+++ b/r2/r2/templates/promotedlinks.html
@@ -47,4 +47,37 @@
%endfor
+
+ %if thing.recent:
+
${_('promotions this month')}
+
+
+
+ | Impressions |
+ Clicks |
+ |
+ |
+
+
+ | unique |
+ total |
+ unique |
+ total |
+ points |
+ title |
+
+ %for link, uimp, nimp, ucli, ncli in thing.recent:
+
+ | ${uimp} |
+ ${nimp} |
+ ${ucli} |
+ ${ncli} |
+ ${link._ups - link._downs} |
+
+ ${link.title}
+ |
+
+ %endfor
+
+ %endif
diff --git a/r2/r2/templates/promotedtraffic.html b/r2/r2/templates/promotedtraffic.html
new file mode 100644
index 000000000..1c4958555
--- /dev/null
+++ b/r2/r2/templates/promotedtraffic.html
@@ -0,0 +1,55 @@
+## The contents of this file are subject to the Common Public Attribution
+## License Version 1.0. (the "License"); you may not use this file except in
+## compliance with the License. You may obtain a copy of the License at
+## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
+## License Version 1.1, but Sections 14 and 15 have been added to cover use of
+## software over a computer network and provide for limited attribution for the
+## Original Developer. In addition, Exhibit A has been modified to be consistent
+## with Exhibit B.
+##
+## Software distributed under the License is distributed on an "AS IS" basis,
+## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
+## the specific language governing rights and limitations under the License.
+##
+## The Original Code is Reddit.
+##
+## The Original Developer is the Initial Developer. The Initial Developer of
+## the Original Code is CondeNet, Inc.
+##
+## All portions of the code written by CondeNet are Copyright (c) 2006-2009
+## CondeNet, Inc. All Rights Reserved.
+################################################################################
+
+%if thing.traffic:
+ %if thing.imp_graph:
+
+ %endif
+ %if thing.cli_graph:
+
+ %endif
+
+
+ | date |
+ Impressions |
+ Clicks |
+
+
+ |
+ unique |
+ total |
+ unique |
+ total |
+
+ %for date, (uimp, nimp, ucli, ncli) in thing.traffic:
+
+ | ${date.strftime("%Y-%m-%d %H:%M")} |
+ ${uimp} |
+ ${nimp} |
+ ${ucli} |
+ ${ncli} |
+
+ %endfor
+
+%endif
diff --git a/r2/r2/templates/reddittraffic.html b/r2/r2/templates/reddittraffic.html
new file mode 100644
index 000000000..33433c3ef
--- /dev/null
+++ b/r2/r2/templates/reddittraffic.html
@@ -0,0 +1,204 @@
+## The contents of this file are subject to the Common Public Attribution
+## License Version 1.0. (the "License"); you may not use this file except in
+## compliance with the License. You may obtain a copy of the License at
+## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
+## License Version 1.1, but Sections 14 and 15 have been added to cover use of
+## software over a computer network and provide for limited attribution for the
+## Original Developer. In addition, Exhibit A has been modified to be consistent
+## with Exhibit B.
+##
+## Software distributed under the License is distributed on an "AS IS" basis,
+## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
+## the specific language governing rights and limitations under the License.
+##
+## The Original Code is Reddit.
+##
+## The Original Developer is the Initial Developer. The Initial Developer of
+## the Original Code is CondeNet, Inc.
+##
+## All portions of the code written by CondeNet are Copyright (c) 2006-2009
+## CondeNet, Inc. All Rights Reserved.
+################################################################################
+<%!
+ import locale
+ from r2.models.subreddit import DomainSR, FakeSubreddit
+ def num(x):
+ return locale.format('%d', x, True)
+ %>
+
+Traffic for ${c.site.name}
+
+
+
+%if not thing.has_data:
+
+ ${_("There doesn't seem to be any traffic data at the moment. Please check back later.")}
+
+%else:
+
+
+
+
+
+
+
+
+
+ %if c.default_sr:
+
+
+ %endif
+
+ <%
+ umin = min(data[0] for date, data in filter(None, thing.day_data))
+ umax = max(data[0] for date, data in filter(None, thing.day_data))
+ %>
+
+
+
+
+ | Weekly summary |
+
+
+ | ${_("date")} |
+ ${_("uniques")} |
+ ${_("impressions")} |
+
+ <%
+ dow = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday",
+ "Saturday", "Sunday"]
+ %>
+ %for i, (uni, imp) in enumerate(zip(thing.uniques_by_dow, thing.impressions_by_dow)):
+
+ | ${dow[i]} |
+ ${num(int(uni))} |
+ ${num(int(imp))} |
+ ${bars(uni, imp)}
+
+ %endfor
+
+ | ${_("Daily Mean")} |
+ ${num(int(thing.uniques_mean))} |
+ ${num(int(thing.impressions_mean))} |
+
+
+
+
+
+ %if c.site.domain:
+ |
+ total |
+ ${c.site.domain} |
+
+ | ${_("date")} |
+ ${_("uniques")} |
+ ${_("impressions")} |
+ ${_("uniques")} |
+ ${_("impressions")} |
+ %else:
+ ${_("date")} |
+ ${_("uniques")} |
+ ${_("impressions")} |
+ %endif
+
+ %for x, (date, data) in enumerate(reversed(thing.day_data)):
+ ${date.strftime("%Y-%m-%d")}
+ %for i in xrange(4 if c.site.domain else 2):
+ | ${num(data[i]) if data[i] else "-"} |
+ %endfor
+ ${bars(data[0], data[1])}
+
+ %endfor
+
+
+
+ %if c.default_sr:
+ <% data = thing.monthly_summary() %>
+
+
+ | Monthly data |
+
+
+ | ${_("date")} |
+ ${_("uniques")} |
+ ${_("impressions")} |
+
+ %for i, d in enumerate(reversed(data)):
+
+ %for cls, x in d:
+ | ${x} |
+ %endfor
+
+ %endfor
+
+
+
+
+ |
+ total |
+ ${_("cnamed")} |
+
+
+ | ${_("date")} |
+ ${_("uniques")} |
+ ${_("impressions")} |
+ ${_("uniques")} |
+ ${_("impressions")} |
+ ${_("cname")} |
+
+ %for i, (sr, d) in enumerate(thing.reddits_summary()):
+
+
+ |
+ %if isinstance(sr, DomainSR):
+ [domain:${sr.name}]
+ %elif isinstance(sr, FakeSubreddit):
+ [meta:${sr.name}]
+ %else:
+ ${sr.name}
+ %endif
+
+ |
+ %for x in d:
+ ${num(x) if x else "--"} |
+ %endfor
+
+ %if not isinstance(sr, FakeSubreddit) and sr.domain:
+ ${sr.domain}
+ %endif
+ |
+
+ %endfor
+
+ %endif
+%endif
+
+
+
+<%def name="bars(uni, imp)">
+ <%
+ wid_uni = int(50 * min(2, float(uni)/thing.uniques_mean))
+ wid_imp = int(50 * min(2, float(imp)/thing.impressions_mean))
+ %>
+
+
+
+
+
+ |
+%def>
+