Expose spam and reported listings to moderators. Lock concurrent votes.

ketralnis
2009-07-28 11:59:47 -07:00
parent 1fdf8cdbb3
commit dadae89bb2
8 changed files with 184 additions and 58 deletions

View File

@@ -725,10 +725,15 @@ class ApiController(RedditController):
     def POST_vote(self, dir, thing, ip, vote_type):
         ip = request.ip
         user = c.user
 
+        if not thing:
+            return
+
         # TODO: temporary hack until we migrate the rest of the vote data
-        if thing and thing._date < datetime(2009, 4, 17, 0, 0, 0, 0, g.tz):
+        if thing._date < datetime(2009, 4, 17, 0, 0, 0, 0, g.tz):
             g.log.debug("POST_vote: ignoring old vote on %s" % thing._fullname)
-        elif thing:
+            return
+
+        with g.make_lock('vote_lock(%s,%s)' % (c.user._id36, thing._id36)):
             dir = (True if dir > 0
                    else False if dir < 0
                    else None)
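
The new lock closes a race on the vote's read-modify-write: two near-simultaneous requests could both read the user's existing vote and both apply a full karma delta. A runnable toy of the pattern, with an in-process per-key Lock standing in for g.make_lock's memcache-backed lock, and votes/karma dicts standing in for the real models (all names here are illustrative, not from the commit):

    from collections import defaultdict
    from threading import Lock

    _locks = defaultdict(Lock)          # per-key locks; MemcacheLock spans processes

    def make_lock(key):
        return _locks[key]

    votes = {}                          # (user, thing) -> -1/0/+1
    karma = defaultdict(int)            # thing -> score

    def cast_vote(user, thing, direction):
        # one lock per (user, thing): votes on other things don't contend
        with make_lock('vote_lock(%s,%s)' % (user, thing)):
            old = votes.get((user, thing), 0)
            karma[thing] += direction - old      # apply only the delta
            votes[(user, thing)] = direction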

View File

@@ -33,6 +33,7 @@ from r2.lib.utils import query_string, UrlParser, link_from_url, link_duplicates
 from r2.lib.template_helpers import get_domain
 from r2.lib.emailer import has_opted_out, Email
 from r2.lib.db.operators import desc
+from r2.lib.db import queries
 from r2.lib.strings import strings
 from r2.lib.solrsearch import RelatedSearchQuery, SubredditSearchQuery, LinkSearchQuery
 from r2.lib import jsontemplates
@@ -282,36 +283,17 @@ class FrontController(RedditController):
             stylesheet_contents = ''
             pane = SubredditStylesheet(site = c.site,
                                        stylesheet_contents = stylesheet_contents)
-        elif is_moderator and location == 'reports':
-            links = Link._query(Link.c.reported != 0,
-                                Link.c._spam == False)
-            comments = Comment._query(Comment.c.reported != 0,
-                                      Comment.c._spam == False)
-            query = thing.Merge((links, comments),
-                                Link.c.sr_id == c.site._id,
-                                sort = desc('_date'),
-                                data = True)
-            builder = QueryBuilder(query, num = num, after = after,
-                                   count = count, reverse = reverse,
-                                   wrap = ListingController.builder_wrapper)
+        elif location in ('reports', 'spam') and is_moderator:
+            query = (c.site.get_reported() if location == 'reports'
+                     else c.site.get_spam())
+            builder_cls = (QueryBuilder if isinstance(query, thing.Query)
+                           else IDBuilder)
+            builder = builder_cls(query,
+                                  num = num, after = after,
+                                  count = count, reverse = reverse,
+                                  wrap = ListingController.builder_wrapper)
             listing = LinkListing(builder)
             pane = listing.listing()
-        elif is_moderator and location == 'spam':
-            # links = Link._query(Link.c._spam == True)
-            # comments = Comment._query(Comment.c._spam == True)
-            # query = thing.Merge((links, comments),
-            #                     Link.c.sr_id == c.site._id,
-            #                     sort = desc('_date'),
-            #                     data = True)
-            #
-            # builder = QueryBuilder(query, num = num, after = after,
-            #                        count = count, reverse = reverse,
-            #                        wrap = ListingController.builder_wrapper)
-            # listing = LinkListing(builder)
-            # pane = listing.listing()
-            pane = InfoBar(message = "There doesn't seem to be anything here.")
         elif is_moderator and location == 'traffic':
             pane = RedditTraffic()
         else:
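
The rewritten branch turns on one type test: a precomputed listing arrives as an iterable of cached fullnames (handled by IDBuilder), while a source that still answers with a live thing.Query goes through QueryBuilder. The dispatch distilled into a runnable toy, where Query and build_listing are stand-ins, not the r2 classes:

    class Query(object):                 # stand-in for r2.lib.db.thing.Query
        def run(self):
            return ['t3_a', 't3_b']      # pretend these come from the db

    def build_listing(source):
        # live query -> execute it; cached result -> already a list of ids
        return source.run() if isinstance(source, Query) else list(source)

    print(build_listing(Query()))        # -> ['t3_a', 't3_b']
    print(build_listing(['t3_c']))       # -> ['t3_c']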

View File

@@ -95,14 +95,16 @@ class CachedResults(object):
        is sorted by date descending"""
         self.fetch()
         t = self.make_item_tuple(item)
 
-        changed = False
-        if t not in self.data:
-            self.data.insert(0, t)
-            changed = True
-
-        if changed:
+        if t not in self.data:
+            if self.data and t > self.data[0]:
+                self.data.insert(0, t)
+            else:
+                self.data.append(t)
+                self.data.sort()
             query_cache.set(self.iden, self.data[:precompute_limit])
 
     def delete(self, item):
         """Deletes an item from the cached data."""
         self.fetch()
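
Isolated, the new insert logic: the common case, an item newer than everything cached, is an O(1) prepend; anything else falls back to append-and-sort, and the value written back is truncated to the precompute limit. A sketch assuming the list is kept newest-first as the docstring says (ordered_insert and the explicit reverse=True are mine, not the source's):

    precompute_limit = 1000   # cap on what gets written back to the cache

    def ordered_insert(data, t):
        if t not in data:
            if data and t > data[0]:
                data.insert(0, t)           # newest item: O(1) prepend
            else:
                data.append(t)              # out-of-order arrival: re-sort
                data.sort(reverse=True)     # keep newest-first order
        return data[:precompute_limit]

    print(ordered_insert([3, 2, 1], 4))     # -> [4, 3, 2, 1]
    print(ordered_insert([3, 2, 1], 0))     # -> [3, 2, 1, 0]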
@@ -133,14 +135,14 @@ class CachedResults(object):
             yield x[0]
 
 def merge_cached_results(*results):
-    """Given two CachedResults, mergers their lists based on the sorts of
+    """Given two CachedResults, merges their lists based on the sorts of
        their queries."""
     if len(results) == 1:
         return list(results[0])
 
     #make sure the sorts match
     sort = results[0].query._sort
-    assert(all(r.query._sort == sort for r in results[1:]))
+    assert all(r.query._sort == sort for r in results[1:])
 
     def thing_cmp(t1, t2):
         for i, s in enumerate(sort):
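
merge_cached_results exists so that link and comment queues, each already sorted by the same key, can be combined without re-sorting everything; that is what the commented-out merge_results calls in the spam/reported getters below would eventually use. The k-way merge idea in runnable form, with tuples loosely shaped like make_item_tuple output (an assumption):

    import heapq

    # two queues pre-sorted by the same key (ascending timestamps here)
    links    = [(1237, 't3_c'), (1240, 't3_a')]
    comments = [(1238, 't1_x'), (1239, 't1_y')]

    merged = list(heapq.merge(links, comments))
    print(merged)   # one pass, no full re-sort: [(1237, ...), (1238, ...), ...]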
@@ -188,6 +190,42 @@ def get_links(sr, sort, time):
         q._filter(db_times[time])
     return make_results(q)
 
+def get_spam_links(sr):
+    q_l = Link._query(Link.c.sr_id == sr._id,
+                      Link.c._spam == True,
+                      sort = db_sort('new'))
+    return make_results(q_l)
+
+def get_spam_comments(sr):
+    q_c = Comment._query(Comment.c.sr_id == sr._id,
+                         Comment.c._spam == True,
+                         sort = db_sort('new'))
+    return make_results(q_c)
+
+def get_spam(sr):
+    return get_spam_links(sr)
+    #return merge_results(get_spam_links(sr),
+    #                     get_spam_comments(sr))
+
+def get_reported_links(sr):
+    q_l = Link._query(Link.c.reported != 0,
+                      Link.c.sr_id == sr._id,
+                      Link.c._spam == False,
+                      sort = db_sort('new'))
+    return make_results(q_l)
+
+def get_reported_comments(sr):
+    q_c = Comment._query(Comment.c.reported != 0,
+                         Comment.c.sr_id == sr._id,
+                         Comment.c._spam == False,
+                         sort = db_sort('new'))
+    return make_results(q_c)
+
+def get_reported(sr):
+    return get_reported_links(sr)
+    #return merge_results(get_reported_links(sr),
+    #                     get_reported_comments(sr))
+
 def get_domain_links(domain, sort, time):
     return DomainSearchQuery(domain, sort=search_sort[sort], timerange=time)
@@ -258,17 +296,22 @@ def add_queries(queries, insert_item = None, delete_item = None):
     """Adds multiple queries to the query queue. If insert_item or
        delete_item is specified, the query may not need to be recomputed at
        all."""
+    log = g.log.debug
+    make_lock = g.make_lock
     def _add_queries():
         for q in queries:
             if not isinstance(q, CachedResults):
                 continue
 
-            if insert_item and q.can_insert():
-                q.insert(insert_item)
-            elif delete_item and q.can_delete():
-                q.delete(delete_item)
-            else:
-                query_queue.add_query(q)
+            log('Adding precomputed query %s' % q)
+
+            with make_lock("add_query(%s)" % q.iden):
+                if insert_item and q.can_insert():
+                    q.insert(insert_item)
+                elif delete_item and q.can_delete():
+                    q.delete(delete_item)
+                else:
+                    query_queue.add_query(q)
     worker.do(_add_queries)
 
 #can be rewritten to be more efficient
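
The new per-query lock guards a read-modify-write: insert and delete fetch the cached list, mutate it, and set it back, so two workers updating the same CachedResults could silently drop one update. Compressed into a runnable toy, with a dict for query_cache and per-key Locks for make_lock (names are illustrative):

    from collections import defaultdict
    from threading import Lock

    _locks = defaultdict(Lock)
    cache = {}

    def unsafe_update(key, item):
        data = cache.get(key, [])        # read
        data.insert(0, item)             # modify
        cache[key] = data                # write: can clobber a concurrent update

    def safe_update(key, item):
        with _locks['add_query(%s)' % key]:   # serialize per cached query
            data = cache.get(key, [])
            data.insert(0, item)
            cache[key] = data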
@@ -304,6 +347,8 @@ def new_link(link):
     results.extend(all_queries(get_links, sr, ('top', 'controversial'), db_times.keys()))
     results.append(get_submitted(author, 'new', 'all'))
     #results.append(get_links(sr, 'toplinks', 'all'))
 
+    if link._spam:
+        results.append(get_spam_links(sr))
+
     if link._deleted:
         add_queries(results, delete_item = link)
@@ -316,6 +361,9 @@ def new_comment(comment, inbox_rel):
     if comment._deleted:
         add_queries(job, delete_item = comment)
     else:
+        #if comment._spam:
+        #    sr = Subreddit._byID(comment.sr_id)
+        #    job.append(get_spam_comments(sr))
         add_queries(job, insert_item = comment)
 
     if inbox_rel:
@@ -367,13 +415,67 @@ def new_savehide(rel):
     elif name == 'unhide':
         add_queries([get_hidden(user)], delete_item = rel)
 
+def ban(x, auto = False):
+    if isinstance(x,Link):
+        sr = Subreddit._byID(x.sr_id)
+        add_queries([get_spam_links(sr)], insert_item = x)
+    #elif isinstance(x,Comment):
+    #    sr = Subreddit._byID(x.sr_id)
+    #    add_queries([get_spam_comments(sr)])
+
+def unban(x):
+    if isinstance(x,Link):
+        sr = Subreddit._byID(x.sr_id)
+        add_queries([get_spam_links(sr)], delete_item = x)
+    #elif isinstance(x,Comment):
+    #    sr = Subreddit._byID(x.sr_id)
+    #    add_queries([get_spam_comments(sr)])
+
+def new_report(report):
+    reporter = report._thing1
+    reported = report._thing2
+
+    if isinstance(reported, Link):
+        sr = Subreddit._byID(reported.sr_id)
+        add_queries([get_reported_links(sr)], insert_item = reported)
+    #elif isinstance(reported, Comment):
+    #    sr = Subreddit._byID(reported.sr_id)
+    #    add_queries([get_reported_comments(sr)], insert_item = reported)
+
+def clear_report(report):
+    reporter = report._thing1
+    reported = report._thing2
+
+    if isinstance(reported, Link):
+        sr = Subreddit._byID(reported.sr_id)
+        add_queries([get_reported_links(sr)], delete_item = reported)
+    #elif isinstance(reported, Comment):
+    #    sr = Subreddit._byID(reported.sr_id)
+    #    add_queries([get_reported_comments(sr)], delete_item = reported)
+
+def add_all_ban_report_srs():
+    """Adds the initial spam/reported pages to the report queue"""
+    q = Subreddit._query(sort = asc('_date'))
+    for sr in fetch_things2(q):
+        add_queries([get_spam_links(sr),
+                     #get_spam_comments(sr),
+                     get_reported_links(sr),
+                     #get_reported_comments(sr),
+                     ])
+
 def add_all_srs():
     """Adds every listing query for every subreddit to the queue."""
     q = Subreddit._query(sort = asc('_date'))
     for sr in fetch_things2(q):
         add_queries(all_queries(get_links, sr, ('hot', 'new', 'old'), ['all']))
         add_queries(all_queries(get_links, sr, ('top', 'controversial'), db_times.keys()))
-        add_queries([get_links(sr, 'toplinks', 'all')])
+        add_queries([get_links(sr, 'toplinks', 'all'),
+                     get_spam_links(sr),
+                     #get_spam_comments(sr),
+                     get_reported_links(sr),
+                     #get_reported_comments(sr),
+                     ])
 
 def update_user(user):
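
add_all_ban_report_srs looks like a one-time backfill that precomputes the new spam/reported queues for existing subreddits. The commit shows no caller, so presumably it is invoked by hand from a console session with the app environment loaded (an assumption):

    from r2.lib.db import queries
    queries.add_all_ban_report_srs()   # prime spam/reported queues for every sr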

View File

@@ -23,8 +23,10 @@
 from __future__ import with_statement
 from time import sleep
 from datetime import datetime
+from threading import local
-from pylons import c
+
+# thread-local storage for detection of recursive locks
+locks = local()
 
 class TimeoutExpired(Exception): pass
@@ -34,6 +36,9 @@ class MemcacheLock(object):
        is True, we have the lock. If it's False, someone else has it."""
     def __init__(self, key, cache, time = 30, timeout = 30):
+        # get a thread-local set of locks that we own
+        self.locks = locks.locks = getattr(locks, 'locks', set())
+
         self.key = key
         self.cache = cache
         self.time = time
@@ -43,11 +48,8 @@
     def __enter__(self):
         start = datetime.now()
 
-        if not c.locks:
-            c.locks = {}
-
         #if this thread already has this lock, move on
-        if c.locks.get(self.key):
+        if self.key in self.locks:
             return
 
         #try and fetch the lock, looping until it's available
@@ -59,14 +61,14 @@
         #tell this thread we have this lock so we can avoid deadlocks
         #of requests for the same lock in the same thread
-        c.locks[self.key] = True
+        self.locks.add(self.key)
         self.have_lock = True
 
     def __exit__(self, type, value, tb):
         #only release the lock if we gained it in the first place
         if self.have_lock:
             self.cache.delete(self.key)
-            del c.locks[self.key]
+            self.locks.remove(self.key)
 
 def make_lock_factory(cache):
     def factory(key):
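
The net effect of this file's change: lock-ownership bookkeeping moves off the request-scoped pylons c, which does not exist in background threads like the add_queries worker, onto a threading.local, so re-entrant acquisition is still detected per OS thread. A runnable distillation, with a dict plus setdefault standing in for memcache's atomic add (KeyLock and the token trick are mine, not the source's):

    import threading
    import time

    _owned = threading.local()          # per-thread set of keys we hold

    class KeyLock(object):
        def __init__(self, key, store):
            self.owned = _owned.keys = getattr(_owned, 'keys', set())
            self.key = key
            self.store = store          # dict standing in for memcache
            self.have_lock = False

        def __enter__(self):
            if self.key in self.owned:  # re-entry: an enclosing with owns it
                return
            token = object()            # unique marker for this acquirer
            # setdefault is atomic under the GIL, like memcache add()
            while self.store.setdefault(self.key, token) is not token:
                time.sleep(0.01)        # someone else holds it; poll
            self.owned.add(self.key)
            self.have_lock = True

        def __exit__(self, type, value, tb):
            if self.have_lock:          # only release what we acquired
                del self.store[self.key]
                self.owned.remove(self.key)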

View File

@@ -398,9 +398,11 @@ class SubredditInfoBar(CachedTemplate):
         if self.type != 'public':
             buttons.append(NavButton(plurals.contributors, 'contributors'))
 
-        if self.is_moderator:
+        if self.is_moderator or self.is_admin:
             buttons.append(NamedButton('edit'))
             buttons.extend([NavButton(menu.banusers, 'banned'),
-                            NamedButton('spam'),
+                            NamedButton('reports'),
+                            NamedButton('spam')])
             buttons.append(NamedButton('traffic'))
 
         return [NavMenu(buttons, type = "flatlist", base_path = "/about/")]

View File

@@ -22,9 +22,8 @@
 from r2.lib.utils import tup
 
 class AdminTools(object):
 
-    def spam(self, thing, amount = 1, mark_as_spam = True, **kw):
-        things = tup(thing)
-        for t in things:
+    def spam(self, things, amount = 1, mark_as_spam = True, **kw):
+        for t in tup(things):
             if mark_as_spam:
                 t._spam = (amount > 0)
                 t._commit()

View File

@@ -55,6 +55,8 @@ class Report(MultiRelation('report',
     @classmethod
     def new(cls, user, thing):
+        from r2.lib.db import queries
+
         # check if this report exists already!
         rel = cls.rel(user, thing)
         oldreport = list(rel._query(rel.c._thing1_id == user._id,
@@ -66,7 +68,8 @@ class Report(MultiRelation('report',
         if oldreport: return oldreport[0]
 
         r = Report(user, thing, '0', amount = 0)
-        if not thing._loaded: thing._load()
+        if not thing._loaded:
+            thing._load()
 
         # mark item as reported
         thing._incr(cls._field)
@@ -76,7 +79,7 @@ class Report(MultiRelation('report',
             aid = thing.author_id
             author = Account._byID(aid)
             author._incr(cls._field)
 
         # mark user as having made a report
         user._incr('report_made')
@@ -84,6 +87,9 @@ class Report(MultiRelation('report',
         admintools.report(thing)
 
+        # update the reports queue if it exists
+        queries.new_report(r)
+
         # if the thing is already marked as spam, accept the report
         if thing._spam:
             cls.accept(r)
@@ -124,6 +130,8 @@ class Report(MultiRelation('report',
     def accept(cls, r, correct = True):
         ''' sets the various reporting fields, but does nothing to
         the corresponding spam fields (handled by unreport)'''
+        from r2.lib.db import queries
+
         amount = 1 if correct else -1
         oldamount = int(r._name)
@@ -139,6 +147,8 @@ class Report(MultiRelation('report',
         # update the amount
         cls.set_amount(r, amount)
+        queries.clear_report(r)
+
         # update the thing's number of reports only if we made no
         # decision prior to this
         if oldamount == 0:
@@ -386,6 +396,8 @@ class Report(MultiRelation('report',
 def unreport(things, correct=False, auto = False, banned_by = ''):
+    from r2.lib.db import queries
+
     things = tup(things)
 
     # load authors (to set the spammer flag)
@@ -394,11 +406,20 @@ def unreport(things, correct=False, auto = False, banned_by = ''):
     authors = Account._byID(tuple(aids), data=True) if aids else {}
 
+    with_srs = [ t for t in things if hasattr(t, 'sr_id') ]
+    if with_srs:
+        # populate local cache in batch, because
+        # queries.{ban,unban,new_report,clear_report} will do a
+        # Subreddit._byID(link.sr_id) on each link individually. Not
+        # required for Messages or Subreddits, which don't have sr_ids
+        Subreddit._byID(set(t.sr_id for t in with_srs))
+
     # load all reports (to set their amount to be +/-1)
     reports = Report.reported(things=things, amount = 0)
 
     # mark the reports as finalized:
-    for r in reports.values(): Report.accept(r, correct)
+    for r in reports.values():
+        Report.accept(r, correct)
 
     amount = 1 if correct else -1
@@ -416,6 +437,11 @@ def unreport(things, correct=False, auto = False, banned_by = ''):
         # tally the spamminess of the author
         spammer[t.author_id] = spammer.get(t.author_id,0) + amount
 
+        if correct:
+            queries.ban(t, auto = auto)
+        elif not correct and t._spam:
+            queries.unban(t)
+
     #will be empty if the items didn't have authors
     for s, v in spammer.iteritems():
         if authors[s].spammer + v >= 0:
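
The with_srs prefetch is the usual cure for an N+1 pattern: one batched _byID warms the local cache so the per-item lookups inside queries.ban/unban/new_report/clear_report stop issuing a query each. The trick in a runnable toy, where fetch_many/fetch_one and the dict cache stand in for Thing._byID and its cache layer:

    cache = {}
    db_calls = []                        # instrumentation for the example

    def fetch_many(ids):
        missing = [i for i in ids if i not in cache]
        if missing:
            db_calls.append(missing)     # one bulk query instead of N singles
            cache.update((i, {'id': i}) for i in missing)
        return [cache[i] for i in ids]

    def fetch_one(i):
        return fetch_many([i])[0]        # cheap if the cache is warm

    fetch_many([1, 2, 3])                # prefetch, like Subreddit._byID(set(...))
    fetch_one(2)                         # cache hit: no second db call
    print(db_calls)                      # -> [[1, 2, 3]]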

View File

@@ -246,6 +246,14 @@ class Subreddit(Thing, Printable):
         from r2.lib.db import queries
         return queries.get_links(self, sort, time)
 
+    def get_spam(self):
+        from r2.lib.db import queries
+        return queries.get_spam(self)
+
+    def get_reported(self):
+        from r2.lib.db import queries
+        return queries.get_reported(self)
+
     @classmethod
     def add_props(cls, user, wrapped):
         names = ('subscriber', 'moderator', 'contributor')
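
The function-level `from r2.lib.db import queries`, here and throughout this commit, is deliberate: queries imports the model classes at module load, so the models can only import queries lazily, at call time, to break the cycle. The shape of it, with hypothetical modules a and b:

    # --- a.py ---
    def f():
        from b import g     # resolved on first call, after b finished loading
        return g()

    # --- b.py ---
    import a                # safe: a.py does not import b at module scope
    def g():
        return 'ok'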