server/search: cache results till non-GET request
250 ms per page --> 30 ms per page (save for the first render). I'd say it's pretty good
This commit is contained in:
parent 349f32ccf2
commit 446f4d6611
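In short, the change below memoizes search results per (search config, query text, page, page size) in a small in-process LRU cache and empties that cache whenever any non-GET request comes in, so writes can never leave stale listings behind. A rough sketch of that contract using only the helpers this commit introduces (cache.has / cache.get / cache.put / cache.purge); run_search and the surrounding function names are illustrative placeholders, not part of the diff:

from szurubooru.func import cache

def cached_search(config, query_text, page, page_size, run_search):
    # run_search stands in for the expensive parse-and-query work in SearchExecutor.
    key = (id(config), query_text, page, page_size)
    if cache.has(key):
        return cache.get(key)  # cached page: the ~30 ms path
    result = run_search(query_text, page, page_size)  # cold page: the ~250 ms path
    cache.put(key, result)
    return result

def before_request(method):
    # Mirrors what CachePurger does in process_request: any non-GET wipes the cache.
    if method != 'GET':
        cache.purge()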
@@ -80,6 +80,7 @@ def create_app():
         request_type=api.Request,
         middleware=[
             middleware.RequireJson(),
+            middleware.CachePurger(),
             middleware.ContextAdapter(),
             middleware.DbSession(),
             middleware.Authenticator(),
@@ -0,0 +1,56 @@
+from datetime import datetime
+
+class LruCacheItem(object):
+    def __init__(self, key, value):
+        self.key = key
+        self.value = value
+        self.timestamp = datetime.now()
+
+class LruCache(object):
+    def __init__(self, length, delta=None):
+        self.length = length
+        self.delta = delta
+        self.hash = {}
+        self.item_list = []
+
+    def insert_item(self, item):
+        if item.key in self.hash:
+            item_index = next(i for i, v in enumerate(self.item_list) if v.key == item.key)
+            self.item_list[:] = self.item_list[:item_index] + self.item_list[item_index+1:]
+            self.item_list.insert(0, item)
+        else:
+            if len(self.item_list) > self.length:
+                self.remove_item(self.item_list[-1])
+            self.hash[item.key] = item
+            self.item_list.insert(0, item)
+
+    def remove_all(self):
+        self.hash = {}
+        self.item_list = []
+
+    def remove_item(self, item):
+        del self.hash[item.key]
+        del self.item_list[self.item_list.index(item)]
+
+    def validate_item(self):
+        def _outdated_items():
+            now = datetime.now()
+            for item in self.item_list:
+                time_delta = now - item.timestamp
+                if time_delta.seconds > self.delta:
+                    yield item
+        map(lambda x: self.remove_item(x), _outdated_items())
+
+_cache = LruCache(length=100)
+
+def purge():
+    _cache.remove_all()
+
+def has(key):
+    return key in _cache.hash
+
+def get(key):
+    return _cache.hash[key].value
+
+def put(key, value):
+    _cache.insert_item(LruCacheItem(key, value))
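One caveat about the new cache module, for context: on Python 3, map() is lazy, so the map(lambda x: self.remove_item(x), _outdated_items()) line in validate_item never actually evicts anything unless its result is consumed (and nothing in this commit calls validate_item). If expiry were needed, an eager variant might look roughly like the sketch below; it also collects the expired items into a list first, since removing from self.item_list while a generator is still iterating over it is fragile. This is an illustrative alternative, not part of the commit:

    def validate_item(self):
        # Eagerly evict entries older than self.delta seconds.
        if self.delta is None:
            return
        now = datetime.now()
        outdated = [item for item in self.item_list
                    if (now - item.timestamp).total_seconds() > self.delta]
        for item in outdated:
            self.remove_item(item)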
@@ -4,3 +4,4 @@ from szurubooru.middleware.authenticator import Authenticator
 from szurubooru.middleware.context_adapter import ContextAdapter
 from szurubooru.middleware.require_json import RequireJson
 from szurubooru.middleware.db_session import DbSession
+from szurubooru.middleware.cache_purger import CachePurger
@@ -0,0 +1,7 @@
+import falcon
+from szurubooru.func import cache
+
+class CachePurger(object):
+    def process_request(self, request, _response):
+        if request.method != 'GET':
+            cache.purge()
@@ -1,6 +1,7 @@
 import re
 import sqlalchemy
 from szurubooru import db, errors
+from szurubooru.func import cache
 from szurubooru.search import criteria

 class SearchExecutor(object):
@@ -13,6 +14,9 @@ class SearchExecutor(object):
         self.config = search_config

     def execute(self, query_text, page, page_size):
+        key = (id(self.config), query_text, page, page_size)
+        if cache.has(key):
+            return cache.get(key)
         '''
         Parse input and return tuple containing total record count and filtered
         entities.
@@ -33,7 +37,9 @@ class SearchExecutor(object):
             .with_only_columns([sqlalchemy.func.count()]) \
             .order_by(None)
         count = db.session.execute(count_statement).scalar()
-        return (count, entities)
+        ret = (count, entities)
+        cache.put(key, ret)
+        return ret

     def execute_and_serialize(self, ctx, serializer):
         query = ctx.get_param_as_string('query')
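A note on the cache key: the module-level _cache is shared by every SearchExecutor instance (posts, tags, users, ...), so id(self.config) is what keeps their result sets from colliding, while (query_text, page, page_size) pins down the concrete page. Roughly how that plays out, with post_executor and tag_executor as hypothetical executor instances:

# Cold call: parses the query, runs the SQL (~250 ms), then stores the result.
count, posts = post_executor.execute('tag:cat', page=1, page_size=50)

# Same executor, query and page: served straight from the in-process cache (~30 ms).
count, posts = post_executor.execute('tag:cat', page=1, page_size=50)

# A different executor with the same query text gets its own entry,
# because id(self.config) differs between the two search configs.
count, tags = tag_executor.execute('tag:cat', page=1, page_size=50)

# Any non-GET request triggers CachePurger -> cache.purge(), so the next
# search after a write recomputes against fresh data.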