mirror of
https://github.com/reddit-archive/reddit.git
synced 2026-01-28 08:17:58 -05:00
Methods to check promoted link inventory.
This commit is contained in:
committed by
Brian Simpson
parent
7a85c3c2bb
commit
f8dd9e224c
@@ -28,7 +28,14 @@ import re
|
||||
from pylons import g
|
||||
from sqlalchemy import func
|
||||
|
||||
from r2.models import traffic
|
||||
from r2.lib.memoize import memoize
|
||||
from r2.lib.utils import to_date, tup
|
||||
from r2.models import (
|
||||
PromoCampaign,
|
||||
PromotionWeights,
|
||||
NO_TRANSACTION,
|
||||
traffic,
|
||||
)
|
||||
from r2.models.promo_metrics import PromoMetrics
|
||||
from r2.models.subreddit import DefaultSR
|
||||
|
||||
@@ -79,3 +86,72 @@ def _min_daily_pageviews_by_sr(ndays=NDAYS_TO_QUERY, end_date=None):
|
||||
if m:
|
||||
retval[m.group(1)] = row[1]
|
||||
return retval
|
||||
|
||||
|
||||
def get_date_range(start, end):
    """Return the list of dates from start (inclusive) to end (exclusive).

    Inputs may be dates or datetimes; both are normalized via to_date.
    An empty list is returned when end is not after start.
    """
    start, end = to_date(start), to_date(end)
    num_days = (end - start).days
    return [start + timedelta(offset) for offset in xrange(num_days)]
|
||||
|
||||
|
||||
def get_sold_pageviews(srs, start, end):
    """Return the number of impressions sold per date for each subreddit.

    srs may be a single Subreddit or a list; when a single sr is passed the
    return value is {date: impressions}, otherwise {sr_name: {date: impressions}}.
    Dates span start (inclusive) to end (exclusive).
    """
    srs, is_single = tup(srs, ret_is_single=True)
    # The frontpage is stored under the empty string in PromotionWeights.
    sr_names = ['' if isinstance(sr, DefaultSR) else sr.name for sr in srs]
    dates = set(get_date_range(start, end))
    q = (PromotionWeights.query()
         .filter(PromotionWeights.sr_name.in_(sr_names))
         .filter(PromotionWeights.date.in_(dates)))
    campaign_ids = {pw.promo_idx for pw in q}
    campaigns = PromoCampaign._byID(campaign_ids, data=True, return_dict=False)

    ret = {sr.name: dict.fromkeys(dates, 0) for sr in srs}
    for camp in campaigns:
        if camp.trans_id == NO_TRANSACTION:
            # never charged, so nothing was actually sold
            continue

        if camp.impressions <= 0:
            # pre-CPM campaign
            continue

        sr_name = camp.sr_name or DefaultSR.name
        ndays = (camp.end_date - camp.start_date).days
        if ndays <= 0:
            # malformed campaign (zero-length or inverted date range);
            # skip rather than raise ZeroDivisionError below
            continue
        # NOTE: integer division intentionally floors the per-day count
        daily_impressions = camp.impressions / ndays
        camp_dates = set(get_date_range(camp.start_date, camp.end_date))
        for date in camp_dates.intersection(dates):
            ret[sr_name][date] += daily_impressions

    if is_single:
        return ret[srs[0].name]
    else:
        return ret
|
||||
|
||||
|
||||
def get_available_pageviews(srs, start, end, datestr=False):
    """Return remaining (unsold) pageviews per date for each subreddit.

    Availability is the predicted daily inventory minus impressions already
    sold, floored at zero. With datestr=True the date keys are formatted as
    'mm/dd/yyyy' strings. A single sr returns {date: available}; a list
    returns {sr_name: {date: available}}.
    """
    srs, is_single = tup(srs, ret_is_single=True)
    sr_names = [sr.name for sr in srs]
    daily_inventory = PromoMetrics.get(MIN_DAILY_CASS_KEY, sr_names=sr_names)
    sold_by_sr_by_date = get_sold_pageviews(srs, start, end)

    if datestr:
        datekey = lambda dt: dt.strftime('%m/%d/%Y')
    else:
        datekey = lambda dt: dt

    ret = {}
    for sr in srs:
        predicted = daily_inventory.get(sr.name, 0)
        available = {}
        for date, sold in sold_by_sr_by_date[sr.name].iteritems():
            available[datekey(date)] = max(0, predicted - sold)
        ret[sr.name] = available

    return ret[srs[0].name] if is_single else ret
|
||||
|
||||
|
||||
def get_oversold(sr, start, end, daily_request):
    """Return {datestr: available} for each day that cannot fill the request.

    A day is oversold when its remaining inventory is below daily_request;
    the returned value is what is still available on that day.
    """
    available_by_date = get_available_pageviews(sr, start, end, datestr=True)
    return {datestr: available
            for datestr, available in available_by_date.iteritems()
            if available < daily_request}
|
||||
|
||||
@@ -543,38 +543,13 @@ def get_scheduled(offset=0):
|
||||
error_campaigns.append((campaign._id, e))
|
||||
return adweights, error_campaigns
|
||||
|
||||
def fuzz_impressions(imps, multiple=500):
    """Return imps rounded down to the nearest multiple.

    Used to avoid exposing exact impression counts. Any non-positive
    input collapses to 0.

    NOTE: the diff residue contained two conflicting definitions (an older
    round-to-one-significant-digit version and this one); only the newer
    multiple-based rounding is kept.
    """
    if imps > 0:
        return int(multiple * math.floor(float(imps) / multiple))
    else:
        return 0
|
||||
|
||||
def get_scheduled_impressions(sr_name, start_date, end_date):
    # FIXME: mock function for development
    """Return an OrderedDict of fake per-day scheduled impression counts.

    Keys run from start_date (inclusive) to end_date (exclusive) in order.
    """
    start_date, end_date = to_date(start_date), to_date(end_date)
    scheduled = OrderedDict()
    for offset in range((end_date - start_date).days):
        # FIXME: fakedata
        scheduled[start_date + timedelta(offset)] = random.randint(0, 100)
    return scheduled
|
||||
|
||||
def get_available_impressions(sr_name, start_date, end_date, fuzzed=False):
    # FIXME: mock function for development
    """Return predicted impressions remaining per date for sr_name.

    Starts from traffic-based predictions and subtracts the (currently
    mocked) scheduled impressions. With fuzzed=True each value is rounded
    via fuzz_impressions so exact counts are not exposed.
    """
    start_date = to_date(start_date)
    end_date = to_date(end_date)
    available = inventory.get_predicted_by_date(sr_name, start_date, end_date)
    scheduled = get_scheduled_impressions(sr_name, start_date, end_date)
    for date in scheduled:
        # Mock: treats scheduled[date] as a percentage of inventory sold.
        # The commented line below is the intended real computation once
        # get_scheduled_impressions returns actual impression counts.
        available[date] = int(available[date] - (available[date] * scheduled[date] / 100.)) # DELETEME
        #available[date] = max(0, available[date] - scheduled[date]) # UNCOMMENTME
        if fuzzed:
            available[date] = fuzz_impressions(available[date])
    return available
|
||||
|
||||
def charge_pending(offset=1):
|
||||
for l, camp, weight in accepted_campaigns(offset=offset):
|
||||
user = Account._byID(l.author_id)
|
||||
|
||||
Reference in New Issue
Block a user