Mirror of https://github.com/reddit-archive/reddit.git
* Improvements to the email verification system
* Addition of the 'powered by reddit' footer
@@ -29,7 +29,7 @@ from r2.models import *
 from pylons.i18n import _
 
 def admin_profile_query(vuser, location, db_sort):
-    return None
+    return None
 
 class AdminController(RedditController): pass
 
@@ -245,6 +245,7 @@ class FrontController(RedditController):
             query = c.site.get_spam()
         elif location == 'trials':
             query = c.site.get_trials()
+            num = 1000
         elif location == 'modqueue':
             query = c.site.get_modqueue()
         else:

@@ -274,6 +274,16 @@ def empty_queue(queue):
     chan.queue_purge(queue)
 
 
+def black_hole(queue):
+    """continually empty out a queue as new items are created"""
+    chan = connection_manager.get_channel()
+
+    def _ignore(msg):
+        print 'Ignoring msg: %r' % msg.body
+
+    consume_items(queue, _ignore)
+
+
 def _test_setup(test_q = 'test_q'):
     from r2.lib.queues import RedditQueueMap
     chan = connection_manager.get_channel()

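A possible way to exercise the new helper interactively (a sketch only: the r2.lib.amqp module path and the 'test_q' queue name are assumptions, and _test_setup above is expected to have declared the queue first):

    # Sketch: drain a scratch queue with the new black_hole helper.
    from r2.lib import amqp

    amqp._test_setup('test_q')   # assumed to declare the throwaway queue via RedditQueueMap
    amqp.black_hole('test_q')    # blocks forever, printing and discarding each message
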
@@ -243,7 +243,7 @@ class Globals(object):
         if self.debug:
             self.log.setLevel(logging.DEBUG)
         else:
-            self.log.setLevel(logging.WARNING)
+            self.log.setLevel(logging.INFO)
 
         # set log level for pycountry which is chatty
         logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)

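The practical effect, sketched with the stdlib logging module outside the Globals plumbing (illustration only; the logger name is an assumption): non-debug runs now emit INFO-level messages rather than only WARNING and above, while pycountry stays silenced either way.

    import logging

    logging.basicConfig()
    log = logging.getLogger('reddit')
    log.setLevel(logging.INFO)    # was logging.WARNING before this commit
    logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)

    log.info("visible under the new level")                        # previously dropped
    logging.getLogger('pycountry.db').warning("still suppressed")  # pycountry stays quiet
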
@@ -184,7 +184,10 @@ class DataThing(object):
         return l
 
     def _cache_myself(self):
-        cache.set(self._cache_key(), self)
+        ck = self._cache_key()
+        if self.__class__.__name__ in ("Link", "Comment", "Subreddit") and not self._t:
+            log_text("{} cache", "About to cache {} for %r" % ck, "warning")
+        cache.set(ck, self)
 
     def _sync_latest(self):
         """Load myself from the cache to and re-apply the .dirties

@@ -492,6 +492,31 @@ def median(l):
     i = len(s) / 2
     return s[i]
 
+# localpart and domain should be canonicalized
+# When true, returns the reason
+def is_banned_email(localpart, domain):
+    from pylons import g
+
+    key = "email_banned-%s@%s" % (localpart, domain)
+    if g.hardcache.get(key):
+        return "address"
+
+    # For abc@foo.bar.com, if foo.bar.com or bar.com is on the
+    # no-email list, treat the address as unverified.
+    parts = domain.rstrip(".").split(".")
+    while len(parts) >= 2:
+        whole = ".".join(parts)
+
+        d = g.hardcache.get("domain-" + whole)
+
+        if d and d.get("no_email", None):
+            return "domain"
+
+        parts.pop(0)
+
+    return None
+
+
 def query_string(dict):
     pairs = []
     for k,v in dict.iteritems():

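A standalone illustration of the parent-domain walk above (the dict stands in for g.hardcache; the key formats are the ones is_banned_email uses, the entries themselves are hypothetical):

    # Hypothetical ban entries, mirroring the hardcache key formats above.
    banned = {"domain-bar.com": {"no_email": True}}

    def banned_reason(localpart, domain):
        if banned.get("email_banned-%s@%s" % (localpart, domain)):
            return "address"
        parts = domain.rstrip(".").split(".")
        while len(parts) >= 2:                      # check foo.bar.com, then bar.com
            d = banned.get("domain-" + ".".join(parts))
            if d and d.get("no_email"):
                return "domain"
            parts.pop(0)
        return None

    print banned_reason("abc", "foo.bar.com")       # "domain", via the bar.com entry
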
@@ -900,7 +925,7 @@ def fetch_things2(query, chunk_size = 100, batch_fn = None, chunks = False):
     items = list(query)
 
 def fix_if_broken(thing, delete = True):
-    from r2.models import Link, Comment
+    from r2.models import Link, Comment, Subreddit
 
     # the minimum set of attributes that are required
     attrs = dict((cls, cls._essentials)

@@ -23,7 +23,8 @@ from r2.lib.db.thing import Thing, Relation, NotFound
 from r2.lib.db.operators import lower
 from r2.lib.db.userrel import UserRel
 from r2.lib.memoize import memoize
-from r2.lib.utils import modhash, valid_hash, randstr, timefromnow, UrlParser
+from r2.lib.utils import modhash, valid_hash, randstr, timefromnow
+from r2.lib.utils import UrlParser, is_banned_email
 from r2.lib.cache import sgm
 
 from pylons import g

@@ -345,11 +346,37 @@ class Account(Thing):
 
         return baskets
 
+    def canonical_email(self):
+        localpart, domain = str(self.email.lower()).split("@")
+
+        # a.s.d.f+something@gmail.com --> asdf@gmail.com
+        localpart = localpart.replace(".", "")
+        plus = localpart.find("+")
+        if plus > 0:
+            localpart = localpart[:plus]
+
+        return (localpart, domain)
+
+    def cromulent(self):
+        """Return whether the user has validated their email address and
+           passes some rudimentary 'not evil' checks."""
+
+        if not self.email_verified:
+            return False
+
+        t = self.canonical_email()
+
+        if is_banned_email(*t):
+            return False
+
+        # Otherwise, congratulations; you're cromulent!
+        return True
+
     def quota_limits(self, kind):
         if kind != 'link':
             raise NotImplementedError
 
-        if self.email_verified and not self._spam:
+        if self.cromulent():
             return dict(hour=3, day=10, week=50, month=150)
         else:
             return dict(hour=1, day=3, week=5, month=5)

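To see the canonicalization rule in isolation (a standalone sketch, not reddit code): dots in the localpart are stripped and any '+tag' suffix is dropped, so gmail-style aliases collapse to a single address before the ban check.

    def canonicalize(address):
        localpart, domain = address.lower().split("@")
        localpart = localpart.replace(".", "")   # a.s.d.f -> asdf
        plus = localpart.find("+")
        if plus > 0:                             # drop "+something"
            localpart = localpart[:plus]
        return (localpart, domain)

    print canonicalize("A.s.d.f+something@GMail.com")   # ('asdf', 'gmail.com')

quota_limits then keys off cromulent(), so only verified, non-banned accounts get the larger link-submission quotas.
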
@@ -33,7 +33,13 @@
       ${toolbar}
     </div>
 %endfor
-
+%if g.domain != "reddit.com":
+  <!-- http://code.reddit.com/LICENSE see Exhibit B -->
+  <a href="http://code.reddit.com" style="text-align:center;display:block">
+    <img src="http://code.reddit.com/reddit_logo.png"
+         style="width:140px"/>
+  </a>
+%endif
 <p class="bottommenu">
   ${text_with_links(
      _("Use of this site constitutes acceptance of our "