import re

from szurubooru import errors
from szurubooru.search import criteria, tokens

def _create_criterion(original_value, value):
    """Build the appropriate criterion object for a token's value.

    A comma-separated value becomes an ArrayCriterion, a value containing
    ``..`` becomes a RangedCriterion, anything else a PlainCriterion.
    Raises SearchError when a ranged value has neither bound.
    """
    if ',' in value:
        return criteria.ArrayCriterion(original_value, value.split(','))
    # partition() splits on the first '..' only, mirroring split('..', 1).
    low, separator, high = value.partition('..')
    if separator:
        if not (low or high):
            raise errors.SearchError('Empty ranged value')
        return criteria.RangedCriterion(original_value, low, high)
    return criteria.PlainCriterion(original_value, value)
def _parse_anonymous(value, negated):
    """Wrap a bare (key-less) search term into an AnonymousToken."""
    return tokens.AnonymousToken(_create_criterion(value, value), negated)
def _parse_named(key, value, negated):
    """Turn a ``key:value`` pair into a NamedToken.

    A ``-min``/``-max`` suffix on the key is translated into an open-ended
    range on the value (``x-min:5`` behaves like ``x:5..``); the criterion
    still remembers the value exactly as the user typed it.
    """
    original_value = value
    for suffix, template in (('-min', '%s..'), ('-max', '..%s')):
        if key.endswith(suffix):
            key = key[:-len(suffix)]
            value = template % value
            break
    criterion = _create_criterion(original_value, value)
    return tokens.NamedToken(key, criterion, negated)
def _parse_special(value, negated):
    """Turn a ``special:value`` pair into a SpecialToken."""
    token = tokens.SpecialToken(value, negated)
    return token
def _parse_sort(value, negated):
    """Turn a ``sort:column[,direction]`` pair into a SortToken.

    Negation flips the direction (asc <-> desc, default <-> negated
    default). Raises SearchError on more than one comma or on an
    unrecognized direction.
    """
    comma_count = value.count(',')
    if comma_count == 0:
        order_str = None
    elif comma_count == 1:
        value, order_str = value.split(',')
    else:
        raise errors.SearchError('Too many commas in sort style token.')

    known_orders = {
        'asc': tokens.SortToken.SORT_ASC,
        'desc': tokens.SortToken.SORT_DESC,
        '': tokens.SortToken.SORT_DEFAULT,
        None: tokens.SortToken.SORT_DEFAULT,
    }
    if order_str not in known_orders:
        raise errors.SearchError(
            'Unknown search direction: %r.' % order_str)
    order = known_orders[order_str]

    if negated:
        # A negated sort token inverts whatever direction was requested.
        flipped = {
            tokens.SortToken.SORT_ASC:
                tokens.SortToken.SORT_DESC,
            tokens.SortToken.SORT_DESC:
                tokens.SortToken.SORT_ASC,
            tokens.SortToken.SORT_DEFAULT:
                tokens.SortToken.SORT_NEGATED_DEFAULT,
            tokens.SortToken.SORT_NEGATED_DEFAULT:
                tokens.SortToken.SORT_DEFAULT,
        }
        order = flipped[order]
    return tokens.SortToken(value, order)
class SearchQuery:
    """Structured result of parsing a search string.

    Holds the four kinds of tokens a query can contain; Parser.parse()
    fills the lists in.
    """

    def __init__(self):
        # Bare terms without a key, e.g. 'cat'.
        self.anonymous_tokens = []
        # 'key:value' terms.
        self.named_tokens = []
        # 'special:value' terms.
        self.special_tokens = []
        # 'sort:column[,direction]' terms.
        self.sort_tokens = []

    def __hash__(self):
        # Lists are unhashable; freeze each bucket into a tuple first.
        token_groups = (
            self.anonymous_tokens,
            self.named_tokens,
            self.special_tokens,
            self.sort_tokens)
        return hash(tuple(tuple(group) for group in token_groups))
class Parser:
    """Splits a raw query string into a SearchQuery of typed tokens."""

    def parse(self, query_text):
        """Parse ``query_text`` into a SearchQuery.

        The text is lower-cased and split on whitespace; each chunk may be
        prefixed with any number of ``-`` characters, each of which toggles
        negation. Chunks of the form ``key:value`` become named/special/sort
        tokens depending on the key; everything else becomes an anonymous
        token. ``query_text`` may be None or empty.
        """
        query = SearchQuery()
        for chunk in re.split(r'\s+', (query_text or '').lower()):
            if not chunk:
                continue
            negated = False
            # Each leading dash toggles negation ('--tag' == 'tag').
            # startswith() is safe on an empty string, unlike chunk[0],
            # which raised IndexError for a chunk made solely of dashes.
            while chunk.startswith('-'):
                chunk = chunk[1:]
                negated = not negated
            if not chunk:
                # The chunk was nothing but dashes (e.g. '-'); there is
                # no term left to tokenize, so skip it.
                continue
            match = re.match(r'([a-z_-]+):(.*)', chunk)
            if match:
                key, value = match.groups()
                if key == 'sort':
                    query.sort_tokens.append(
                        _parse_sort(value, negated))
                elif key == 'special':
                    query.special_tokens.append(
                        _parse_special(value, negated))
                else:
                    query.named_tokens.append(
                        _parse_named(key, value, negated))
            else:
                query.anonymous_tokens.append(
                    _parse_anonymous(chunk, negated))
        return query