import os
import datetime
import hashlib
import re
import tempfile
from contextlib import contextmanager

from szurubooru import errors


def snake_case_to_lower_camel_case(text):
    components = text.split('_')
    return components[0] + ''.join(
        word[0].upper() + word[1:] for word in components[1:])
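# Example: snake_case_to_lower_camel_case('last_login_time') -> 'lastLoginTime'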


def snake_case_to_lower_camel_case_keys(source):
    target = {}
    for key, value in source.items():
        target[snake_case_to_lower_camel_case(key)] = value
    return target


def get_serialization_options(ctx):
    return ctx.get_param_as_list('fields', required=False, default=None)


def serialize_entity(entity, field_factories, options):
    if not entity:
        return None
    if not options:
        options = field_factories.keys()
    ret = {}
    for key in options:
        try:
            factory = field_factories[key]
            ret[key] = factory()
        except KeyError:
            raise errors.ValidationError('Invalid key: %r. Valid keys: %r.' % (
                key, list(sorted(field_factories.keys()))))
    return ret
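# Illustrative usage (the 'user' entity and its factories are hypothetical):
#     serialize_entity(user, {'name': lambda: user.name}, ['name'])
#     returns {'name': user.name}; an unknown key in options raises
#     errors.ValidationError.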


@contextmanager
def create_temp_file(**kwargs):
    (handle, path) = tempfile.mkstemp(**kwargs)
    os.close(handle)
    try:
        with open(path, 'r+b') as handle:
            yield handle
    finally:
        os.remove(path)
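# Example (keyword arguments are forwarded to tempfile.mkstemp):
#     with create_temp_file(suffix='.dat') as handle:
#         handle.write(b'...')
# The temporary file is removed when the block exits.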


def unalias_dict(input_dict):
    output_dict = {}
    for key_list, value in input_dict.items():
        if isinstance(key_list, str):
            key_list = [key_list]
        for key in key_list:
            output_dict[key] = value
    return output_dict
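# Example: unalias_dict({('id', 'ids'): 1, 'name': 2})
#          -> {'id': 1, 'ids': 1, 'name': 2}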


def get_md5(source):
    if not isinstance(source, bytes):
        source = source.encode('utf-8')
    md5 = hashlib.md5()
    md5.update(source)
    return md5.hexdigest()
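# Example: get_md5('test') -> '098f6bcd4621d373cade4e832627b4f6'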


def flip(source):
    return {v: k for k, v in source.items()}
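# Example: flip({'asc': 1, 'desc': -1}) -> {1: 'asc', -1: 'desc'}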


def is_valid_email(email):
    ''' Return whether given email address is valid or empty. '''
    return not email or re.match(r'^[^@]*@[^@]*\.[^@]*$', email)
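# Examples: is_valid_email('user@example.com') is truthy,
#           is_valid_email('') is truthy (empty addresses are allowed),
#           is_valid_email('not-an-email') is falsy.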


class dotdict(dict):  # pylint: disable=invalid-name
    ''' dot.notation access to dictionary attributes. '''
    def __getattr__(self, attr):
        return self.get(attr)
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
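# Example:
#     d = dotdict({'name': 'value'})
#     d.name       # -> 'value'
#     d.other = 1  # same as d['other'] = 1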


def parse_time_range(value):
    ''' Return tuple containing min/max time for given text representation. '''
    one_day = datetime.timedelta(days=1)
    one_second = datetime.timedelta(seconds=1)

    value = value.lower()
    if not value:
        raise errors.ValidationError('Empty date format.')

    if value == 'today':
        now = datetime.datetime.utcnow()
        today = datetime.datetime(now.year, now.month, now.day, 0, 0, 0)
        return (today, today + one_day - one_second)

    if value == 'yesterday':
        now = datetime.datetime.utcnow()
        today = datetime.datetime(now.year, now.month, now.day, 0, 0, 0)
        return (today - one_day, today - one_second)

    match = re.match(r'^(\d{4})$', value)
    if match:
        year = int(match.group(1))
        return (
            datetime.datetime(year, 1, 1),
            datetime.datetime(year + 1, 1, 1) - one_second)

    match = re.match(r'^(\d{4})-(\d{1,2})$', value)
    if match:
        year = int(match.group(1))
        month = int(match.group(2))
        # roll over to January of the next year instead of an invalid month 13
        if month == 12:
            next_month = datetime.datetime(year + 1, 1, 1)
        else:
            next_month = datetime.datetime(year, month + 1, 1)
        return (datetime.datetime(year, month, 1), next_month - one_second)

    match = re.match(r'^(\d{4})-(\d{1,2})-(\d{1,2})$', value)
    if match:
        year = int(match.group(1))
        month = int(match.group(2))
        day = int(match.group(3))
        # adding a timedelta avoids an invalid day past the end of the month
        return (
            datetime.datetime(year, month, day),
            datetime.datetime(year, month, day) + one_day - one_second)

    raise errors.ValidationError('Invalid date format: %r.' % value)
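# Examples (times are naive UTC):
#     parse_time_range('2016')       -> (2016-01-01 00:00:00, 2016-12-31 23:59:59)
#     parse_time_range('2016-04')    -> (2016-04-01 00:00:00, 2016-04-30 23:59:59)
#     parse_time_range('2016-04-15') -> (2016-04-15 00:00:00, 2016-04-15 23:59:59)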


def icase_unique(source):
    target = []
    target_low = []
    for source_item in source:
        if source_item.lower() not in target_low:
            target.append(source_item)
            target_low.append(source_item.lower())
    return target
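# Example: icase_unique(['Tag', 'tag', 'TAG', 'other']) -> ['Tag', 'other']
# (keeps the first spelling of each case-insensitive duplicate)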


def value_exceeds_column_size(value, column):
    if not value:
        return False
    max_length = column.property.columns[0].type.length
    if max_length is None:
        return False
    return len(value) > max_length
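# Illustrative sketch, assuming an SQLAlchemy-mapped attribute such as a
# hypothetical User.name backed by sa.Column(sa.Unicode(50)):
#     value_exceeds_column_size('x' * 51, User.name)  # -> True
#     value_exceeds_column_size('x' * 50, User.name)  # -> False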