* sorting options for regular searches

* searching from the front page now searches all reddits
* 'related' now includes results from all reddits
ketralnis
2008-10-06 14:22:19 -07:00
parent b675790312
commit d31abf431c
5 changed files with 51 additions and 39 deletions

View File

@@ -27,7 +27,7 @@ from r2 import config
 from r2.models import *
 from r2.lib.pages import *
 from r2.lib.menus import *
-from r2.lib.utils import to36, sanitize_url, check_cheating, title_to_url, query_string, UrlParser
+from r2.lib.utils import to36, sanitize_url, check_cheating, title_to_url, query_string, UrlParser
 from r2.lib.template_helpers import get_domain
 from r2.lib.emailer import has_opted_out, Email
 from r2.lib.db.operators import desc
@@ -321,12 +321,10 @@ class FrontController(RedditController):
     @validate(query = nop('q'))
     def GET_search_reddits(self, query, reverse, after, count, num):
         """Search reddits by title and description."""
-        # note that 'downs' is a measure of activity on subreddits
-        q = SubredditSearchQuery(query, sort = 'downs desc',
-                                 timerange = 'all')
+        q = SubredditSearchQuery(query)

-        num, t, spane = self._search(q, num = num, reverse = reverse, after = after,
-                                     count = count)
+        num, t, spane = self._search(q, num = num, reverse = reverse,
+                                     after = after, count = count)

         res = SubredditsPage(content=spane,
                              prev_search = query,
@@ -339,8 +337,9 @@ class FrontController(RedditController):
     @base_listing
     @validate(query = nop('q'),
               time = VMenu('action', TimeMenu, remember = False),
+              sort = VMenu('sort', SearchSortMenu, remember = False),
               langs = nop('langs'))
-    def GET_search(self, query, num, time, reverse, after, count, langs):
+    def GET_search(self, query, num, time, reverse, after, count, langs, sort):
         """Search links page."""
         if query and '.' in query:
             url = sanitize_url(query, require_scheme = True)
@@ -352,25 +351,35 @@ class FrontController(RedditController):
         else:
             langs = c.content_langs

-        q = LinkSearchQuery(q = query, timerange = time, langs = langs)
+        subreddits = None
+        authors = None
+        if c.site == subreddit.Friends and c.user_is_loggedin and c.user.friends:
+            authors = c.user.friends
+        elif isinstance(c.site, MultiReddit):
+            subreddits = c.site.sr_ids
+        elif not isinstance(c.site, FakeSubreddit):
+            subreddits = [c.site._id]
+
+        q = LinkSearchQuery(q = query, timerange = time, langs = langs,
+                            subreddits = subreddits, authors = authors,
+                            sort = SearchSortMenu.operator(sort))

         num, t, spane = self._search(q, num = num, after = after, reverse = reverse,
                                      count = count)

         if not isinstance(c.site,FakeSubreddit):
+            my_reddits_link = "/search%s" % query_string({'q': query})
+            all_reddits_link = "%s/search%s" % (subreddit.All.path,
+                                                query_string({'q': query}))
             d = {'reddit_name': c.site.name,
                  'reddit_link': "http://%s/"%get_domain(cname = c.cname),
+                 'my_reddits_link': my_reddits_link,
+                 'all_reddits_link': all_reddits_link}
             infotext = strings.searching_a_reddit % d
         else:
             infotext = None

         res = SearchPage(_('search results'), query, t, num, content=spane,
-                         nav_menus = [TimeMenu(default = time)],
+                         nav_menus = [TimeMenu(default = time),
+                                      SearchSortMenu(default=sort)],
                          infotext = infotext).render()

         return res
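A note on the controller change above: the per-site scoping that used to live inside LinkSearchQuery is now computed inline, and there is deliberately no branch for the front page any more, which is what makes front-page search cover all reddits. A minimal sketch of that decision table, with stub classes standing in for r2.models and a hypothetical search_scope() helper that is not part of this commit:

# Hedged sketch only: the stubs and search_scope() are illustrative.
class FakeSubreddit(object): pass          # front page, /r/all, friends, ...
class Friends(FakeSubreddit): pass
class MultiReddit(FakeSubreddit):
    def __init__(self, sr_ids): self.sr_ids = sr_ids
class Subreddit(object):
    def __init__(self, _id): self._id = _id

def search_scope(site, logged_in, friends):
    """Return the (subreddits, authors) filters for a search from `site`."""
    if isinstance(site, Friends) and logged_in and friends:
        return None, friends               # only posts by your friends
    if isinstance(site, MultiReddit):
        return site.sr_ids, None           # only the multi's member reddits
    if not isinstance(site, FakeSubreddit):
        return [site._id], None            # only the current reddit
    return None, None                      # front page: no filter at all

# before this commit, LinkSearchQuery narrowed the front page (Default)
# to Subreddit.user_subreddits(c.user); that branch is gone on purpose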

View File

@@ -57,7 +57,7 @@ menu = MenuHandler(hot = _('hot'),
                    ups = _('ups'),
                    downs = _('downs'),
                    top = _('top'),
-                   rank = _('rank'),
+                   relevence = _('relevence'),
                    more = _('more'),
                    relevance = _('relevance'),
                    controversial = _('controversial'),
@@ -392,8 +392,17 @@ class CommentSortMenu(SortMenu):
 class SearchSortMenu(SortMenu):
     """Sort menu for search pages."""
-    default = 'rank'
-    options = ('hot', 'new', 'top', 'ups', 'downs', 'rank')
+    default = 'relevence'
+    mapping = dict(relevence = None,
+                   hot = 'hot desc',
+                   new = 'date desc',
+                   old = 'date asc',
+                   top = 'points desc')
+    options = mapping.keys()
+
+    @classmethod
+    def operator(cls, sort):
+        return cls.mapping.get(sort, cls.mapping[cls.default])

 class RecSortMenu(SortMenu):
     """Sort menu for recommendation page"""

View File

@@ -584,7 +584,7 @@ class SearchQuery(object):
         if reverse:
             sort = swap_strings(sort,'asc','desc')

-        g.log.debug("Searching q=%s" % q)
+        g.log.debug("Searching q = %s; params = %s" % (q,repr(solr_params)))

         with SolrConnection() as s:
             if after:
@@ -629,8 +629,8 @@ class SearchQuery(object):
         raise NotImplementedError

 class UserSearchQuery(SearchQuery):
-    "Base class for queries that use the dismax parser; requires self.mm"
-    def __init__(self, q, sort=None, fields=[], langs=None, **kw):
+    "Base class for queries that use the dismax parser"
+    def __init__(self, q, mm, sort=None, fields=[], langs=None, **kw):
         default_fields = ['contents^1.5','contents_ws^3'] + fields

         if sort is None:
@@ -644,8 +644,8 @@ class UserSearchQuery(SearchQuery):
             fields = set([("%s^2" % lang_to_fieldname(lang)) for lang in langs]
                          + default_fields)

-        # default minimum match
-        self.mm = '75%'
+        # minimum match. See http://lucene.apache.org/solr/api/org/apache/solr/util/doc-files/min-should-match.html
+        self.mm = mm

         SearchQuery.__init__(self, q, sort, fields = fields, **kw)
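The mm values threaded through here are Solr min-should-match specs, per the URL in the comment above: a condition 'n<p%' says that once the query has more than n optional clauses, only p% of them (rounded down) must match. A toy evaluator covering just the positive-percentage form these queries use, to make the numbers concrete; it is an illustration of the format, not reddit or Solr code:

def min_should_match(spec, num_clauses):
    """How many of `num_clauses` optional clauses must match under `spec`."""
    required = num_clauses                 # default: every clause must match
    for cond in spec.split():              # conditions are sorted by n
        n, pct = cond.split('<')
        if num_clauses > int(n):           # this condition now applies
            required = num_clauses * int(pct.rstrip('%')) // 100
    return required

# LinkSearchQuery's '4<75%': short queries need every term to match,
assert min_should_match('4<75%', 3) == 3
# longer ones only three quarters of them (rounded down):
assert min_should_match('4<75%', 8) == 6
# RelatedSearchQuery's '3<100% 5<60% 8<50%' relaxes in steps as the
# source title grows, so long titles can still find related links:
assert min_should_match('3<100% 5<60% 8<50%', 10) == 5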
@@ -657,31 +657,21 @@ class UserSearchQuery(SearchQuery):
                               mm = self.mm)

 class LinkSearchQuery(UserSearchQuery):
-    def __init__(self, q, **kw):
+    def __init__(self, q, mm = None, **kw):
         additional_fields = ['site^1','author^1', 'subreddit^1', 'url^1']

-        subreddits = None
-        authors = None
-        if c.site == subreddit.Default:
-            subreddits = Subreddit.user_subreddits(c.user)
-        elif c.site == subreddit.Friends and c.user.friends:
-            authors = c.user.friends
-        elif isinstance(c.site, MultiReddit):
-            subreddits = c.site.sr_ids
-        elif not isinstance(c.site,subreddit.FakeSubreddit):
-            subreddits = [c.site._id]
+        if mm is None:
+            mm = '4<75%'

-        UserSearchQuery.__init__(self, q, fields = additional_fields,
-                                 subreddits = subreddits, authors = authors,
+        UserSearchQuery.__init__(self, q, mm = mm, fields = additional_fields,
                                  types=[Link], **kw)

 class RelatedSearchQuery(LinkSearchQuery):
     def __init__(self, q, ignore = [], **kw):
         self.ignore = set(ignore) if ignore else set()

-        LinkSearchQuery.__init__(self, q, sort = 'score desc', **kw)
-        self.mm = '25%'
+        LinkSearchQuery.__init__(self, q, mm = '3<100% 5<60% 8<50%',
+                                 sort = 'score desc', **kw)

     def run(self, *k, **kw):
         search = LinkSearchQuery.run(self, *k, **kw)
@@ -690,13 +680,15 @@ class RelatedSearchQuery(LinkSearchQuery):
 class SubredditSearchQuery(UserSearchQuery):
     def __init__(self, q, **kw):
-        UserSearchQuery.__init__(self, q, types=[Subreddit], **kw)
+        # note that 'downs' is a measure of activity on subreddits
+        UserSearchQuery.__init__(self, q, mm = '75%', sort = 'downs desc',
+                                 types=[Subreddit], **kw)

 class DomainSearchQuery(SearchQuery):
     def __init__(self, domain, **kw):
         q = '+site:%s' % domain

-        SearchQuery.__init__(self, q=q, fields=['site'],types=[Link], **kw)
+        SearchQuery.__init__(self, q = q, fields=['site'],types=[Link], **kw)

     def solr_params(self, q, boost):
         q = q + ' ' + ' '.join(boost)

View File

@@ -92,8 +92,7 @@ string_dict = dict(
     sr_subscribe = _('click the checkbox next to a reddit to subscribe'),

     searching_a_reddit = _('you\'re searching within the [%(reddit_name)s](%(reddit_link)s) reddit. '+
-                           'you can search within [your subscribed reddits](%(my_reddits_link)s) ' +
-                           'or [all reddits](%(all_reddits_link)s)'),
+                           'you can also search within [all reddits](%(all_reddits_link)s)'),

 css_validator_messages = dict(
     broken_url = _('"%(brokenurl)s" is not a valid URL'),

View File

@@ -25,11 +25,13 @@ Contains utilities intended to be run from a command line
 from time import sleep
 import sys

 def bench_cache_lifetime(minutes):
+    "Attempts to find how long a given memcached key can be expected to live"
     from pylons import g
     from r2.lib.cache import Memcache

     # we'll create an independent connection to memcached for this
     # test
@@ -46,6 +48,7 @@ def bench_cache_lifetime(minutes):
         if mc.get('bench_cache_%d' % x, None) is not None:
             sleep(60)
         else:
+            # we found one missing
             return x-1
     else:
         # we're out of numbers, and we didn't find any missing
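Pieced together from the fragments above, the benchmark appears to seed numbered keys and then probe them a minute apart, reporting the first eviction. A self-contained reconstruction of that idea, using the python-memcached client directly instead of reddit's r2.lib.cache wrapper; the seeding loop and the return values are assumptions, not the repo's code:

import time
import memcache

def bench_cache_lifetime(minutes, server = '127.0.0.1:11211'):
    "Attempts to find how long a given memcached key can be expected to live"
    # an independent connection to memcached, just for this test
    mc = memcache.Client([server])

    # seed one numbered marker key for each minute we plan to wait
    for x in range(minutes):
        mc.set('bench_cache_%d' % x, 'alive')

    # probe the markers a minute apart; the first one evicted gives a
    # rough key lifetime in minutes
    for x in range(minutes):
        if mc.get('bench_cache_%d' % x) is not None:
            time.sleep(60)
        else:
            # we found one missing
            return x - 1
    else:
        # we're out of numbers, and we didn't find any missing
        return minutes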