diff --git a/r2/r2/controllers/admin.py b/r2/r2/controllers/admin.py
index 694a621d8..13a83155f 100644
--- a/r2/r2/controllers/admin.py
+++ b/r2/r2/controllers/admin.py
@@ -29,7 +29,7 @@ from r2.models import *
from pylons.i18n import _
def admin_profile_query(vuser, location, db_sort):
- return None
+ return None
class AdminController(RedditController): pass
diff --git a/r2/r2/controllers/front.py b/r2/r2/controllers/front.py
index 1118f83e5..86a4f646a 100644
--- a/r2/r2/controllers/front.py
+++ b/r2/r2/controllers/front.py
@@ -245,6 +245,7 @@ class FrontController(RedditController):
query = c.site.get_spam()
elif location == 'trials':
query = c.site.get_trials()
+ num = 1000
elif location == 'modqueue':
query = c.site.get_modqueue()
else:
diff --git a/r2/r2/lib/amqp.py b/r2/r2/lib/amqp.py
index 4485faad1..0ad363b8b 100644
--- a/r2/r2/lib/amqp.py
+++ b/r2/r2/lib/amqp.py
@@ -274,6 +274,16 @@ def empty_queue(queue):
chan.queue_purge(queue)
+def black_hole(queue):
+ """continually empty out a queue as new items are created"""
+ chan = connection_manager.get_channel()
+
+ def _ignore(msg):
+ print 'Ignoring msg: %r' % msg.body
+
+ consume_items(queue, _ignore)
+
+
def _test_setup(test_q = 'test_q'):
from r2.lib.queues import RedditQueueMap
chan = connection_manager.get_channel()
diff --git a/r2/r2/lib/app_globals.py b/r2/r2/lib/app_globals.py
index e8b880cb1..ee0cfcd10 100644
--- a/r2/r2/lib/app_globals.py
+++ b/r2/r2/lib/app_globals.py
@@ -243,7 +243,7 @@ class Globals(object):
if self.debug:
self.log.setLevel(logging.DEBUG)
else:
- self.log.setLevel(logging.WARNING)
+ self.log.setLevel(logging.INFO)
# set log level for pycountry which is chatty
logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)
diff --git a/r2/r2/lib/db/thing.py b/r2/r2/lib/db/thing.py
index 657b8e25c..c09050ac7 100644
--- a/r2/r2/lib/db/thing.py
+++ b/r2/r2/lib/db/thing.py
@@ -184,7 +184,10 @@ class DataThing(object):
return l
def _cache_myself(self):
- cache.set(self._cache_key(), self)
+ ck = self._cache_key()
+ if self.__class__.__name__ in ("Link", "Comment", "Subreddit") and not self._t:
+ log_text("%s cache" % self.__class__.__name__, "About to cache %s for %r" % (self.__class__.__name__, ck), "warning")
+ cache.set(ck, self)
def _sync_latest(self):
"""Load myself from the cache to and re-apply the .dirties
diff --git a/r2/r2/lib/utils/utils.py b/r2/r2/lib/utils/utils.py
index 2106374e4..99cbc1ba7 100644
--- a/r2/r2/lib/utils/utils.py
+++ b/r2/r2/lib/utils/utils.py
@@ -492,6 +492,31 @@ def median(l):
i = len(s) / 2
return s[i]
+# localpart and domain should be canonicalized
+# When true, returns the reason
+def is_banned_email(localpart, domain):
+ from pylons import g
+
+ key = "email_banned-%s@%s" % (localpart, domain)
+ if g.hardcache.get(key):
+ return "address"
+
+ # For abc@foo.bar.com, if foo.bar.com or bar.com is on the
+ # no-email list, treat the address as unverified.
+ parts = domain.rstrip(".").split(".")
+ while len(parts) >= 2:
+ whole = ".".join(parts)
+
+ d = g.hardcache.get("domain-" + whole)
+
+ if d and d.get("no_email", None):
+ return "domain"
+
+ parts.pop(0)
+
+ return None
+
+
def query_string(dict):
pairs = []
for k,v in dict.iteritems():
@@ -900,7 +925,7 @@ def fetch_things2(query, chunk_size = 100, batch_fn = None, chunks = False):
items = list(query)
def fix_if_broken(thing, delete = True):
- from r2.models import Link, Comment
+ from r2.models import Link, Comment, Subreddit
# the minimum set of attributes that are required
attrs = dict((cls, cls._essentials)
diff --git a/r2/r2/models/account.py b/r2/r2/models/account.py
index 470fbd9b0..9fea735d1 100644
--- a/r2/r2/models/account.py
+++ b/r2/r2/models/account.py
@@ -23,7 +23,8 @@ from r2.lib.db.thing import Thing, Relation, NotFound
from r2.lib.db.operators import lower
from r2.lib.db.userrel import UserRel
from r2.lib.memoize import memoize
-from r2.lib.utils import modhash, valid_hash, randstr, timefromnow, UrlParser
+from r2.lib.utils import modhash, valid_hash, randstr, timefromnow
+from r2.lib.utils import UrlParser, is_banned_email
from r2.lib.cache import sgm
from pylons import g
@@ -345,11 +346,37 @@ class Account(Thing):
return baskets
+ def canonical_email(self):
+ localpart, domain = str(self.email.lower()).split("@")
+
+ # a.s.d.f+something@gmail.com --> asdf@gmail.com
+ localpart = localpart.replace(".", "")
+ plus = localpart.find("+")
+ if plus > 0:
+ localpart = localpart[:plus]
+
+ return (localpart, domain)
+
+ def cromulent(self):
+ """Return whether the user has validated their email address and
+ passes some rudimentary 'not evil' checks."""
+
+ if not self.email_verified:
+ return False
+
+ t = self.canonical_email()
+
+ if is_banned_email(*t):
+ return False
+
+ # Otherwise, congratulations; you're cromulent!
+ return True
+
def quota_limits(self, kind):
if kind != 'link':
raise NotImplementedError
- if self.email_verified and not self._spam:
+ if self.cromulent():
return dict(hour=3, day=10, week=50, month=150)
else:
return dict(hour=1, day=3, week=5, month=5)
diff --git a/r2/r2/templates/redditfooter.html b/r2/r2/templates/redditfooter.html
index 79838da14..f22b8056f 100644
--- a/r2/r2/templates/redditfooter.html
+++ b/r2/r2/templates/redditfooter.html
@@ -33,7 +33,13 @@
${toolbar}
%endfor
-
+ %if g.domain != "reddit.com":
+
+
+
+
+ %endif