mirror of https://github.com/reddit-archive/reddit.git
example.ini bugfixes and first trials of site traffic integration
@@ -19,8 +19,9 @@ rec_cache = 127.0.0.1:11311
 tracker_url =
 adtracker_url =
 clicktracker_url =
+traffic_url =

-databases = main, comment, vote, change, email, neuter, query_queue
+databases = main, comment, vote, change, email, query_queue

 #db name db host user, pass
 main_db = newreddit, 127.0.0.1, ri, password
@@ -29,7 +30,6 @@ comment2_db = newreddit, 127.0.0.1, ri, password
 vote_db = newreddit, 127.0.0.1, ri, password
 change_db = changed, 127.0.0.1, ri, password
 email_db = email, 127.0.0.1, ri, password
-neuter_db = neuter, 127.0.0.1, ri, password
 query_queue_db = query_queue, 127.0.0.1, ri, password

 db_app_name = reddit
@@ -55,7 +55,7 @@ db_table_friend = relation, account, account, comment
 db_table_vote_account_link = relation, account, link, vote
 db_table_vote_account_comment = relation, account, comment, vote

-db_table_inbox_account_comment = relation, account, comment
+db_table_inbox_account_comment = relation, account, comment, main
 db_table_inbox_account_message = relation, account, message, main

 db_table_report_account_link = relation, account, link, main
@@ -44,6 +44,7 @@ def make_map(global_conf={}, app_conf={}):
    mc('/search', controller='front', action='search')

    mc('/sup', controller='front', action='sup')
+   mc('/traffic', controller='front', action='site_traffic')

    mc('/about/:location', controller='front',
       action='editreddit', location = 'about')
@@ -96,6 +97,8 @@ def make_map(global_conf={}, app_conf={}):
       action = 'related', title=None)
    mc('/details/:article/:title', controller='front',
       action = 'details', title=None)
+   mc('/traffic/:article/:title', controller='front',
+      action = 'traffic', title=None)
    mc('/comments/:article/:title/:comment', controller='front',
       action= 'comments', title=None, comment = None)
@@ -1228,7 +1228,7 @@ class ApiController(RedditController):

    @noresponse(VAdmin(),
                tr = VTranslation("id"))
-   def POST_enable_lang(self, lang):
+   def POST_enable_lang(self, tr):
        if tr:
            tr._is_enabled = True
@@ -290,6 +290,8 @@ class FrontController(RedditController):
            # listing = LinkListing(builder)
            # pane = listing.listing()
            pane = InfoBar(message = "There doesn't seem to be anything here.")
+       elif c.user_is_admin and location == 'traffic':
+           pane = RedditTraffic()
        else:
            return self.abort404()
@@ -595,3 +597,17 @@ class FrontController(RedditController):
            return c.response
        else:
            return self.abort404()
+
+   @validate(VAdmin(),
+             article = VLink('article'))
+   def GET_traffic(self, article):
+       res = LinkInfoPage(link = article,
+                          comment = None,
+                          content = PromotedTraffic(article)).render()
+       return res
+
+   @validate(VAdmin())
+   def GET_site_traffic(self):
+       return BoringPage("traffic",
+                         content = RedditTraffic()).render()
@@ -40,7 +40,7 @@ from r2.lib.jsontemplates import api_type
from copy import copy
from Cookie import CookieError
from datetime import datetime
-import sha, simplejson
+import sha, simplejson, locale
from urllib import quote, unquote

from r2.lib.tracking import encrypt, decrypt
@@ -293,6 +293,10 @@ def set_host_lang():
        c.host_lang = host_lang

def set_iface_lang():
+   # TODO: internationalize. This seems the best place to put this
+   # (used for formatting of large numbers to break them up with ",").
+   # unfortunately, not directly compatible with gettext
+   locale.setlocale(locale.LC_ALL, "en_US")
    lang = ['en']
    # GET param wins
    if c.host_lang:
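As a side note on the setlocale call added above, a minimal sketch (assuming an en_US locale is installed; the numbers are made up and this snippet is not part of the commit) of the comma grouping it enables for the traffic pages:

    import locale

    locale.setlocale(locale.LC_ALL, "en_US")
    # grouping=True inserts thousands separators, e.g. for impression totals
    print locale.format('%d', 1234567, True)    # -> 1,234,567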
@@ -836,10 +836,10 @@ class VCnameDomain(Validator):
            self.set_error(errors.BAD_CNAME)

class VTranslation(Validator):
-   def run(self):
+   def run(self, param):
        from r2.lib.translation import Translator
-       if Translator.exists(self.param):
-           return Translator(locale = self.param)
+       if Translator.exists(param):
+           return Translator(locale = param)

# NOTE: make sure *never* to have res check these are present
# otherwise, the response could contain reference to these errors...!
@@ -113,6 +113,7 @@ menu = MenuHandler(hot = _('hot'),
                   # comments
                   related = _("related"),
                   details = _("details"),
+                  traffic = _("traffic"),

                   # reddits
                   home = _("home"),
@@ -108,7 +108,7 @@ def cached_organic_links(user_id, langs):
    link_names.sort(key = lambda n: sr_count[n][0])

    # potentially add an up and coming link
-   if random.choice((True, False)):
+   if random.choice((True, False)) and sr_ids:
        sr = Subreddit._byID(random.choice(sr_ids))
        items = only_recent(get_hot(sr))
        if items:
r2/r2/lib/pages/graph.py (new file, 226 lines)
@@ -0,0 +1,226 @@
# The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is Reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of the
# Original Code is CondeNet, Inc.
#
# All portions of the code written by CondeNet are Copyright (c) 2006-2009
# CondeNet, Inc. All Rights Reserved.
################################################################################
import math, datetime, locale

def google_extended(n):
    """Computes the google extended encoding of an int in [0, 4096)"""
    numerals = ("ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                "abcdefghijklmnopqrstuvwxyz"
                "0123456789-.")
    base = len(numerals)
    assert(0 <= n < base ** 2)
    q, r = divmod(n, base)
    return numerals[q] + numerals[r]
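For orientation, a tiny check of the encoding above (illustrative only, not part of the commit): with 64 numerals, every value in [0, 4096) maps to a two-character code.

    # q, r = divmod(n, 64); the code is numerals[q] followed by numerals[r]
    print google_extended(0)       # -> 'AA'
    print google_extended(4095)    # -> '..'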
def make_date_axis_labels(series):
    """
    assuming a uniform date series, generate a suitable axis label
    """
    _max = max(series)
    _min = min(series)
    delta = _max - _min
    if delta < datetime.timedelta(0, 0.5 * 86400):
        test = lambda cur, prev: cur.hour != prev.hour and cur.hour % 3 == 0
        format = "%H:00"
    elif delta < datetime.timedelta(2):
        test = lambda cur, prev: cur.hour != prev.hour and cur.hour % 6 == 0
        format = "%H:00"
    elif delta < datetime.timedelta(7):
        test = lambda cur, prev: cur.day != prev.day
        format = "%d %b"
    elif delta < datetime.timedelta(14):
        test = lambda cur, prev: cur.day != prev.day and cur.day % 2 == 0
        format = "%d %b"
    elif delta < datetime.timedelta(30):
        test = lambda cur, prev: (cur.day != prev.day) and cur.weekday() == 6
        format = "%d %b"
    else:
        test = lambda cur, prev: (cur.month != prev.month)
        format = "%b"
    new_series = []
    prev = None
    for s in series:
        if prev and test(s, prev):
            new_series.append(s.strftime(format))
        else:
            new_series.append("")
        prev = s
    return new_series
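A small illustration of the label thinning above (illustrative dates, not part of the commit): at daily spacing with under a week of data, every day boundary after the first point gets a "%d %b" label.

    import datetime

    days = [datetime.datetime(2009, 1, 1) + datetime.timedelta(i) for i in range(4)]
    print make_date_axis_labels(days)    # -> ['', '02 Jan', '03 Jan', '04 Jan']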

class DataSeries(list):
    def __init__(self, data):
        list.__init__(self, data)

    def low_precision_max(self, precision = 2):
        """
        Compute the max of the data set, including at most 'precision'
        units of decimal precision in the result (e.g., 9893 -> 9900 if
        precision = 2)
        """
        _max = float(max(self))
        if _max == 0:
            return 0
        scale = math.log10(_max)
        scale = 10 ** (math.ceil(scale) - precision)
        return math.ceil(_max / scale) * scale

    def normalize(self, norm_max = 100, precision = 2, _max = None):
        _min = min(self)
        _max = _max or max(self)
        if _min == _max:
            return DataSeries(int(norm_max)/2.
                              for i in xrange(len(self)))
        else:
            return DataSeries(min(int(x * float(norm_max) / _max), norm_max -1)
                              for x in self)

    def toBarY(self):
        data = []
        for i in xrange(len(self)):
            data += [self[i], self[i]]
        return DataSeries(data)

    def toBarX(self):
        delta = self[-1] - self[-2]
        data = self.toBarY()
        return DataSeries(data[1:] + [data[-1] + delta])

    def is_regular(self):
        return all(self[i] - self[i-1] == self[1] - self[0]
                   for i in xrange(1, len(self)))

    def to_google_extended(self, precision = 1, _max = None):
        if _max is None:
            _max = self.low_precision_max(precision = precision)
        norm_max = 4096
        new = self.normalize(norm_max = norm_max, precision = precision,
                             _max = _max)
        return _max, "".join(map(google_extended, new))
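A worked example for the two methods above (made-up numbers, not part of the commit): the rounded-up maximum and the extended-format string, two characters per data point.

    s = DataSeries([120, 9893, 430])
    print s.low_precision_max()          # 9893 rounds up to 9900.0 (precision = 2)
    _max, enc = s.to_google_extended()   # default precision = 1, so _max is 10000.0
    print _max, len(enc)                 # -> 10000.0 6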

class LineGraph(object):
    """
    General line chart class for plotting xy line graphs.

    data is passed in as a series of tuples of the form (x, y_1, ...,
    y_n) and converted to a single xdata DataSeries, and a list of
    ydata DataSeries elements. The intention of this class is to be
    able to handle multiple final plot representations, though
    currently only google charts is available.

    At some point, it also might make sense to connect this more
    closely with numpy.
    """
    google_api = "http://chart.apis.google.com/chart"

    def __init__(self, xydata, colors = ("FF4500", "336699"),
                 width = 350, height = 200):

        series = zip(*xydata)

        self.xdata = DataSeries(series[0])
        self.ydata = map(DataSeries, series[1:])
        self.width = width
        self.height = height
        self.colors = colors

    def google_chart(self, multiy = True, ylabels = [], title = "",
                     bar_fmt = True):
        xdata, ydata = self.xdata, self.ydata

        # Bar format makes the line chart look like it is a series of
        # contiguous bars without the boundary line between each bar.
        if bar_fmt:
            xdata = DataSeries(range(len(self.xdata))).toBarX()
            ydata = [y.toBarY() for y in self.ydata]

        # TODO: currently we are only supporting time series. Make general
        xaxis = make_date_axis_labels(self.xdata)

        # Convert x data into google extended text format
        xmax, xdata = xdata.to_google_extended(_max = max(xdata))
        ymax0 = None

        # multiy <=> 2 y axes with independent scaling. not multiy
        # means we need to know what the global max is over all y data
        multiy = multiy and len(ydata) == 2
        if not multiy:
            ymax0 = max(y.low_precision_max() for y in ydata)

        def make_labels(i, m, p = 4):
            return (("%d:|" % i) +
                    '|'.join(locale.format('%d', i * m / p, True)
                             for i in range(p+1)))

        # data stores a list of xy data strings in google's format
        data = []
        labels = []
        for i in range(len(ydata)):
            ymax, y = ydata[i].to_google_extended(_max = ymax0)
            data.append(xdata + ',' + y)
            if multiy:
                labels.append(make_labels(i,ymax))
        if not multiy:
            labels.append(make_labels(0,ymax0))

        if multiy:
            labels.append('2:|' + '|'.join(xaxis))
            axes = 'y,r,x'
            if len(self.colors) > 1:
                ycolor = "0,%s|1,%s" % (self.colors[0], self.colors[1])
            else:
                ycolor = ""
        else:
            labels.append('1:|' + '|'.join(xaxis))
            axes = 'y,x'
            ycolor = ""
        if ylabels:
            axes += ',t'
            labels.append('%d:|' % (len(axes)/2) +
                          ('|' if multiy else ', ').join(ylabels))
        if title:
            axes += ',t'
            labels.append('%d:||%s|' % (len(axes)/2, title))

        if len(self.colors) >= len(self.ydata):
            colors = ",".join(self.colors[:len(self.ydata)])
        else:
            colors = ""
        args = dict(# chart type is xy
                    cht = 'lxy',
                    # chart size
                    chs = "%sx%s" % (self.width, self.height),
                    # which axes are labeled
                    chxt= axes,
                    # axis labels
                    chxl = '|'.join(labels),
                    # chart data is in extended format
                    chd = 'e:' + ','.join(data),
                    chco = colors,
                    chxs = ycolor
                    )

        return (self.google_api +
                '?' + '&'.join('%s=%s' % (k, v) for k, v in args.iteritems()))
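A minimal usage sketch for the class above (hypothetical numbers, not part of the commit): hourly (time, uniques, impressions) rows in, a Google Chart URL out.

    import datetime

    start = datetime.datetime(2009, 1, 1)
    rows = [(start + datetime.timedelta(hours = i), 100 + 10 * i, 900 + 50 * i)
            for i in range(24)]
    chart = LineGraph(rows)
    url = chart.google_chart(ylabels = ['uniques', 'total'], title = 'impressions')
    # url points at chart.apis.google.com and is meant for an <img src="..."> tag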
@@ -20,7 +20,7 @@
# CondeNet, Inc. All Rights Reserved.
################################################################################
from r2.lib.wrapped import Wrapped, NoTemplateFound
-from r2.models import IDBuilder, LinkListing, Account, Default, FakeSubreddit, Subreddit
+from r2.models import IDBuilder, LinkListing, Account, Default, FakeSubreddit, Subreddit, Friends, All, Sub, NotFound, DomainSR
from r2.config import cache
from r2.lib.jsonresponse import json_respond
from r2.lib.jsontemplates import is_api
@@ -28,6 +28,7 @@ from pylons.i18n import _
from pylons import c, request, g
from pylons.controllers.util import abort

+from r2.lib.traffic import load_traffic, load_summary
from r2.lib.captcha import get_iden
from r2.lib.filters import spaceCompress, _force_unicode, _force_utf8
from r2.lib.menus import NavButton, NamedButton, NavMenu, PageNameNav, JsButton
@@ -35,7 +36,8 @@ from r2.lib.menus import SubredditButton, SubredditMenu, menu
from r2.lib.strings import plurals, rand_strings, strings
from r2.lib.utils import title_to_url, query_string, UrlParser, to_js
from r2.lib.template_helpers import add_sr, get_domain
-import sys, random
+import sys, random, datetime, locale, calendar
+import graph

datefmt = _force_utf8(_('%d %b %Y'))
@@ -222,7 +224,10 @@ class Reddit(Wrapped):

        if c.user_is_sponsor:
            more_buttons.append(NamedButton('promote'))

+       if c.user_is_admin:
+           more_buttons.append(NamedButton('traffic'))
+
        toolbar = [NavMenu(main_buttons, type='tabmenu')]
        if more_buttons:
            toolbar.append(NavMenu(more_buttons, title=menu.more, type='tabdrop'))
@@ -265,6 +270,8 @@ class SubredditInfoBar(Wrapped):
            buttons.append(NamedButton('edit'))
            buttons.extend([NavButton(menu.banusers, 'banned'),
                            NamedButton('spam')])
+       if c.user_is_admin:
+           buttons.append(NamedButton('traffic'))
        return [NavMenu(buttons, type = "flatlist", base_path = "/about/")]

class SideBox(Wrapped):
@@ -450,6 +457,8 @@ class LinkInfoPage(Reddit):

        if c.user_is_admin:
            buttons += [info_button('details')]
+           if self.link.promoted is not None:
+               buttons += [info_button('traffic')]

        toolbar = [NavMenu(buttons, base_path = "", type="tabmenu")]
@@ -1182,7 +1191,22 @@ class PromotePage(Reddit):
class PromotedLinks(Wrapped):
    def __init__(self, current_list, *a, **kw):
        self.things = current_list

+       self.recent = dict(load_summary("thing"))
+
+       if self.recent:
+           from r2.models import Link
+           # TODO: temp hack until we find place for builder_wrapper
+           from r2.controllers.listingcontroller import ListingController
+           builder = IDBuilder(self.recent.keys(),
+                               wrap = ListingController.builder_wrapper)
+           link_listing = LinkListing(builder, nextprev=False).listing()
+
+           for t in link_listing.things:
+               self.recent[t._fullname].insert(0, t)
+
+           self.recent = self.recent.values()
+           self.recent.sort(key = lambda x: x[0]._date)
        Wrapped.__init__(self, datefmt = datefmt, *a, **kw)

class PromoteLinkForm(Wrapped):
@@ -1193,3 +1217,156 @@ class PromoteLinkForm(Wrapped):
                         timedeltatext = timedeltatext,
                         listing = listing,
                         *a, **kw)

class Traffic(Wrapped):
    @staticmethod
    def slice_traffic(traffic, *indices):
        return [[a] + [b[i] for i in indices] for a, b in traffic]
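A quick illustration of slice_traffic (made-up numbers, not part of the commit): it keeps the date and pulls the requested columns out of each traffic row; the column layout shown is the one assumed by the callers below.

    import datetime

    d1 = datetime.datetime(2009, 1, 1)
    d2 = d1 + datetime.timedelta(hours = 1)
    # assumed row layout: (date, [unique imps, total imps, unique clicks, total clicks])
    rows = [(d1, [10, 200, 3, 40]), (d2, [12, 230, 4, 55])]
    print Traffic.slice_traffic(rows, 0, 1)   # -> [[d1, 10, 200], [d2, 12, 230]]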

class PromotedTraffic(Traffic):
    """
    Traffic page for a promoted link, including 2 graphs (one for
    impressions and one for clicks with uniques on each plotted in
    multiy format) and a table of the data.
    """
    def __init__(self, thing):
        self.thing = thing
        d = thing._date.astimezone(g.tz)
        d = d.replace(minute = 0, second = 0, microsecond = 0)

        until = thing.promote_until
        now = datetime.datetime.now(g.tz)
        if not until:
            until = d + datetime.timedelta(1)
        if until > now:
            until = now

        self.traffic = load_traffic('hour', "thing", thing._fullname,
                                    start_time = d, stop_time = until)

        imp = self.slice_traffic(self.traffic, 0, 1)

        if len(imp) > 2:
            imp_total = locale.format('%d', sum(x[2] for x in imp), True)
            chart = graph.LineGraph(imp)
            self.imp_graph = chart.google_chart(ylabels = ['uniques', 'total'],
                                                title = ("impressions (%s)" %
                                                         imp_total))

            cli = self.slice_traffic(self.traffic, 2, 3)
            cli_total = locale.format('%d', sum(x[2] for x in cli), True)
            chart = graph.LineGraph(cli)
            self.cli_graph = chart.google_chart(ylabels = ['uniques', 'total'],
                                                multiy = False,
                                                title = ("clicks (%s)" %
                                                         cli_total))
        else:
            self.imp_graph = self.cli_graph = None
        Wrapped.__init__(self)

class RedditTraffic(Traffic):
    """
    fetches hourly and daily traffic for the current reddit. If the
    current reddit is a default subreddit, fetches the site-wide
    uniques and includes monthly totals. In this latter case, getter
    methods are available for computing breakdown of site traffic by
    reddit.
    """
    def __init__(self):
        self.has_data = False
        ivals = ["hour", "day"]
        if c.default_sr:
            ivals.append("month")

        for ival in ivals:
            if c.default_sr:
                data = load_traffic(ival, "total", "")
            else:
                data = load_traffic(ival, "reddit", c.site.name)
            if not data:
                break
            slices = [("uniques", (0, 2) if c.site.domain else (0,),
                       "FF4500"),
                      ("impressions", (1, 3) if c.site.domain else (1,),
                       "336699")]
            setattr(self, ival + "_data", data)
            for name, indx, color in slices:
                data2 = self.slice_traffic(data, *indx)
                chart = graph.LineGraph(data2, colors = [color, "B0B0B0"])
                setattr(self, name + "_" + ival + "_chart", chart)
                title = "%s by %s" % (name, ival)
                res = chart.google_chart(ylabels = [name],
                                         multiy = False,
                                         title = title)
                setattr(self, name + "_" + ival, res)
        else:
            self.has_data = True
        if self.has_data:
            imp_by_day = [[] for i in range(7)]
            uni_by_day = [[] for i in range(7)]
            dates = self.uniques_day_chart.xdata
            uniques = self.uniques_day_chart.ydata[0]
            imps = self.impressions_day_chart.ydata[0]
            self.uniques_mean = sum(map(float, uniques))/len(uniques)
            self.impressions_mean = sum(map(float, imps))/len(imps)
            for i, d in enumerate(dates):
                imp_by_day[d.weekday()].append(float(imps[i]))
                uni_by_day[d.weekday()].append(float(uniques[i]))
            self.uniques_by_dow = [sum(x)/len(x) for x in uni_by_day]
            self.impressions_by_dow = [sum(x)/len(x) for x in imp_by_day]
        Wrapped.__init__(self)

    def reddits_summary(self):
        if c.default_sr:
            data = map(list, load_summary("reddit"))
            data.sort(key = lambda x: x[1][1], reverse = True)
            for d in data:
                name = d[0]
                for sr in (Default, Friends, All, Sub):
                    if name == sr.name:
                        name = sr
                        break
                else:
                    try:
                        name = Subreddit._by_name(name)
                    except NotFound:
                        name = DomainSR(name)
                d[0] = name
            return data
        return res

    def monthly_summary(self):
        """
        Convenience method b/c it is bad form to do this much math
        inside of a template.
        """
        res = []
        if c.default_sr:
            data = self.month_data
            for x, (date, d) in enumerate(data):
                res.append([("date", date.strftime("%Y-%m")),
                            ("", locale.format("%d", d[0], True)),
                            ("", locale.format("%d", d[1], True))])
                last_d = data[x-1][1] if x else None
                for i in range(2):
                    if x == 0:
                        res[-1].append(("",""))
                    elif x == len(data) - 1:
                        # project based on traffic so far
                        # totals are going to be up to yesterday
                        month_len = calendar.monthrange(date.year,
                                                        date.month)[1]
                        yday = (datetime.datetime.utcnow()
                                -datetime.timedelta(1)).day
                        scaled = float(d[i] * month_len) / yday
                        res[-1].append(("gray",
                                        locale.format("%d", scaled, True)))
                    elif last_d and d[i] and last_d[i]:
                        f = 100 * (float(d[i])/last_d[i] - 1)

                        res[-1].append(("up" if f > 0 else "down",
                                        "%5.2f%%" % f))
        return res
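To make the projection branch above concrete (made-up numbers, not part of the commit): month-to-date totals run through yesterday, so the current month is scaled up to a full-month estimate.

    import calendar, datetime

    # on April 16th the totals cover 15 days; 300,000 uniques so far
    month_len = calendar.monthrange(2009, 4)[1]                          # 30
    yday = (datetime.datetime(2009, 4, 16) - datetime.timedelta(1)).day  # 15
    print float(300000 * month_len) / yday                               # -> 600000.0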
r2/r2/lib/traffic.py (new file, 72 lines)
@@ -0,0 +1,72 @@
# The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is Reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of the
# Original Code is CondeNet, Inc.
#
# All portions of the code written by CondeNet are Copyright (c) 2006-2009
# CondeNet, Inc. All Rights Reserved.
################################################################################
from httplib import HTTPConnection
from urlparse import urlparse
from cPickle import loads
from utils import query_string
import os, socket
from pylons import g
from r2.lib.memoize import memoize, clear_memo

def load_traffic_uncached(interval, what, iden,
                          start_time = None, stop_time = None,
                          npoints = None):
    """
    Fetches pickled traffic from the traffic server and returns it as a list.
    On connection failure (or no data) returns an empty list.
    """
    def format_date(d):
        if d.tzinfo is None:
            d = d.replace(tzinfo = g.tz)
        else:
            d = d.astimezone(g.tz)
        return ":".join(map(str, d.timetuple()[:6]))

    traffic_url = os.path.join(g.traffic_url, interval, what, iden)
    args = {}
    if start_time:
        args['start_time'] = format_date(start_time)
    if stop_time:
        args['stop_time'] = format_date(stop_time)
    if npoints:
        args['n'] = npoints
    u = urlparse(traffic_url)
    try:
        conn = HTTPConnection(u.hostname, u.port)
        conn.request("GET", u.path + query_string(args))
        res = conn.getresponse()
        res = loads(res.read()) if res.status == 200 else []
        conn.close()
        return res
    except socket.error:
        return []

@memoize("cached_traffic", time = 60)
def load_traffic(interval, what, iden,
                 start_time = None, stop_time = None,
                 npoints = None):
    return load_traffic_uncached(interval, what, iden,
                                 start_time = start_time, stop_time = stop_time,
                                 npoints = npoints)

def load_summary(what, interval = "month", npoints = 50):
    return load_traffic(interval, "summary", what, npoints = npoints)
@@ -47,7 +47,7 @@ class Link(Thing, Printable):
                       banned_before_moderator = False,
                       media_object = None,
                       has_thumbnail = False,
-                      promoted = False,
+                      promoted = None,
                       promoted_subscribersonly = False,
                       promote_until = None,
                       promoted_by = None,
@@ -1799,4 +1799,26 @@ ul#image-preview-list .description pre {

.subscription-box h1{ text-align: center; }

.toggle.deltranslator-button { display: inline; }

.traffic-table {margin: 10px 20px; }
.traffic-table a:hover { text-decoration: underline; }
.traffic-table th { font-weight: bold; text-align: center;}
.traffic-table th,
.traffic-table td { padding: 0 5px; }
.traffic-table td { text-align: right; }

.traffic-table td.up { color: #FF8B60; }
.traffic-table td.down { color: #336699; }
.traffic-table td.gray { color: gray; font-style: italic; }

.traffic-table tr.max { border: 2px solid #FF8B60; }
.traffic-table tr.min { border: 2px solid #336699; }
.traffic-table tr.odd { background-color: #E0E0E0; }
.traffic-table tr.mean { font-style: italic; border-top: 1px solid; }

.traffic-graph {
    padding: 10px;
    border: 1px solid #B0B0B0;
    margin-left: 10px;
    margin-bottom: 10px; }
@@ -47,4 +47,37 @@
      </li>
    %endfor
  </ul>

  %if thing.recent:
    <h1>${_('promotions this month')}</h1>

    <table class="traffic-table">
      <tr>
        <th colspan="2">Impressions</th>
        <th colspan="2">Clicks</th>
        <th></th>
        <th></th>
      </tr>
      <tr>
        <th>unique</th>
        <th>total</th>
        <th>unique</th>
        <th>total</th>
        <th>points</th>
        <th>title</th>
      </tr>
      %for link, uimp, nimp, ucli, ncli in thing.recent:
        <tr>
          <td>${uimp}</td>
          <td>${nimp}</td>
          <td>${ucli}</td>
          <td>${ncli}</td>
          <td>${link._ups - link._downs}</td>
          <td>
            <a class="title" href="${link.permalink}">${link.title}</a>
          </td>
        </tr>
      %endfor
    </table>
  %endif
</div>
r2/r2/templates/promotedtraffic.html (new file, 55 lines)
@@ -0,0 +1,55 @@
## The contents of this file are subject to the Common Public Attribution
## License Version 1.0. (the "License"); you may not use this file except in
## compliance with the License. You may obtain a copy of the License at
## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
## License Version 1.1, but Sections 14 and 15 have been added to cover use of
## software over a computer network and provide for limited attribution for the
## Original Developer. In addition, Exhibit A has been modified to be consistent
## with Exhibit B.
##
## Software distributed under the License is distributed on an "AS IS" basis,
## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
## the specific language governing rights and limitations under the License.
##
## The Original Code is Reddit.
##
## The Original Developer is the Initial Developer. The Initial Developer of
## the Original Code is CondeNet, Inc.
##
## All portions of the code written by CondeNet are Copyright (c) 2006-2009
## CondeNet, Inc. All Rights Reserved.
################################################################################

%if thing.traffic:
  %if thing.imp_graph:
    <img class="traffic-graph" src="${thing.imp_graph}"
         alt="impressions graph"/>
  %endif
  %if thing.cli_graph:
    <img class="traffic-graph" src="${thing.cli_graph}"
         alt="click graph"/>
  %endif
  <table class="traffic-table">
    <tr>
      <th>date</th>
      <th colspan="2">Impressions</th>
      <th colspan="2">Clicks</th>
    </tr>
    <tr>
      <th></th>
      <th>unique</th>
      <th>total</th>
      <th>unique</th>
      <th>total</th>
    </tr>
    %for date, (uimp, nimp, ucli, ncli) in thing.traffic:
      <tr>
        <td>${date.strftime("%Y-%m-%d %H:%M")}</td>
        <td>${uimp}</td>
        <td>${nimp}</td>
        <td>${ucli}</td>
        <td>${ncli}</td>
      </tr>
    %endfor
  </table>
%endif
r2/r2/templates/reddittraffic.html (new file, 204 lines)
@@ -0,0 +1,204 @@
## The contents of this file are subject to the Common Public Attribution
## License Version 1.0. (the "License"); you may not use this file except in
## compliance with the License. You may obtain a copy of the License at
## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
## License Version 1.1, but Sections 14 and 15 have been added to cover use of
## software over a computer network and provide for limited attribution for the
## Original Developer. In addition, Exhibit A has been modified to be consistent
## with Exhibit B.
##
## Software distributed under the License is distributed on an "AS IS" basis,
## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
## the specific language governing rights and limitations under the License.
##
## The Original Code is Reddit.
##
## The Original Developer is the Initial Developer. The Initial Developer of
## the Original Code is CondeNet, Inc.
##
## All portions of the code written by CondeNet are Copyright (c) 2006-2009
## CondeNet, Inc. All Rights Reserved.
################################################################################
<%!
   import locale
   from r2.models.subreddit import DomainSR, FakeSubreddit
   def num(x):
       return locale.format('%d', x, True)
%>

<h1>Traffic for ${c.site.name}</h1>

%if not thing.has_data:
  <p class="error">
    ${_("There doesn't seem to be any traffic data at the moment. Please check back later.")}
  </p>
%else:

  <img class="traffic-graph" alt='hourly uniques'
       src="${thing.uniques_hour}"/>
  <img class="traffic-graph" alt='hourly impressions'
       src="${thing.impressions_hour}"/>
  <br/>
  <img class="traffic-graph" alt='daily uniques'
       src="${thing.uniques_day}"/>
  <img class="traffic-graph" alt='daily impressions'
       src="${thing.impressions_day}"/>

  <br/>

  %if c.default_sr:
    <img class="traffic-graph" alt='monthly uniques'
         style="clear:left"
         src="${thing.uniques_month}"/>
    <img class="traffic-graph" alt='monthly impressions'
         src="${thing.impressions_month}"/>
  %endif

  <%
     umin = min(data[0] for date, data in filter(None, thing.day_data))
     umax = max(data[0] for date, data in filter(None, thing.day_data))
  %>

  <div style="float:left">
    <table class="traffic-table">
      <tr>
        <th colspan="3">Weekly summary</th>
      </tr>
      <tr>
        <th>${_("date")}</th>
        <th>${_("uniques")}</th>
        <th>${_("impressions")}</th>
      </tr>
      <%
         dow = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday",
                "Saturday", "Sunday"]
      %>
      %for i, (uni, imp) in enumerate(zip(thing.uniques_by_dow, thing.impressions_by_dow)):
        <tr class="${'odd' if i % 2 else 'even'}">
          <td>${dow[i]}</td>
          <td>${num(int(uni))}</td>
          <td>${num(int(imp))}</td>
          ${bars(uni, imp)}
        </tr>
      %endfor
      <tr class="mean">
        <td>${_("Daily Mean")}</td>
        <td>${num(int(thing.uniques_mean))}</td>
        <td>${num(int(thing.impressions_mean))}</td>
      </tr>
    </table>

    <table class="traffic-table">
      <tr>
        %if c.site.domain:
          <th></th>
          <th colspan="2">total</th>
          <th colspan="2">${c.site.domain}</th>
        </tr><tr>
          <th>${_("date")}</th>
          <th>${_("uniques")}</th>
          <th>${_("impressions")}</th>
          <th>${_("uniques")}</th>
          <th>${_("impressions")}</th>
        %else:
          <th>${_("date")}</th>
          <th>${_("uniques")}</th>
          <th>${_("impressions")}</th>
        %endif
      </tr>
      %for x, (date, data) in enumerate(reversed(thing.day_data)):
        <tr class="${'odd' if x % 2 else 'even'} ${'max' if data[0] == umax else 'min' if data[0] == umin else ''}">
          <td>${date.strftime("%Y-%m-%d")}</td>
          %for i in xrange(4 if c.site.domain else 2):
            <td>${num(data[i]) if data[i] else "-"}</td>
          %endfor
          ${bars(data[0], data[1])}
        </tr>
      %endfor
    </table>
  </div>

  %if c.default_sr:
    <% data = thing.monthly_summary() %>
    <table class="traffic-table">
      <tr>
        <th colspan="3">Monthly data</th>
      </tr>
      <tr>
        <th>${_("date")}</th>
        <th>${_("uniques")}</th>
        <th>${_("impressions")}</th>
      </tr>
      %for i, d in enumerate(reversed(data)):
        <tr class="${'odd' if i % 2 else 'even'}">
          %for cls, x in d:
            <td class="${cls}">${x}</td>
          %endfor
        </tr>
      %endfor
    </table>

    <table class="traffic-table">
      <tr>
        <th></th>
        <th colspan="2">total</th>
        <th colspan="2">${_("cnamed")}</th>
      </tr>
      <tr>
        <th>${_("date")}</th>
        <th>${_("uniques")}</th>
        <th>${_("impressions")}</th>
        <th>${_("uniques")}</th>
        <th>${_("impressions")}</th>
        <th>${_("cname")}</th>
      </tr>
      %for i, (sr, d) in enumerate(thing.reddits_summary()):
        <tr class="${'odd' if i % 2 else 'even'}">

          <td
            %if isinstance(sr, FakeSubreddit):
              style="font-style: left; text-align: left"
            %else:
              style="font-weight:bold; text-align: left"
            %endif
            >
            %if isinstance(sr, DomainSR):
              <a href="${sr.path}">[domain:${sr.name}]</a>
            %elif isinstance(sr, FakeSubreddit):
              <a href="${sr.path}">[meta:${sr.name}]</a>
            %else:
              <a href="${sr.path}about/traffic">${sr.name}</a>
            %endif
          </td>
          %for x in d:
            <td>${num(x) if x else "--"}</td>
          %endfor
          <td>
            %if not isinstance(sr, FakeSubreddit) and sr.domain:
              <a href="http://${sr.domain}/">${sr.domain}</a>
            %endif
          </td>
        </tr>
      %endfor
    </table>
  %endif
%endif

<%def name="bars(uni, imp)">
  <%
     wid_uni = int(50 * min(2, float(uni)/thing.uniques_mean))
     wid_imp = int(50 * min(2, float(imp)/thing.impressions_mean))
  %>
  <td>
    <div style="height:4px; width:${wid_imp}px; background-color:#336699; margin-bottom:1px">
    </div>
    <div style="height:4px; width:${wid_uni}px; background-color:#FF4500">
    </div>
  </td>
</%def>