From 6064f0326a9fb4b99731da15079f02a9eb1286fa Mon Sep 17 00:00:00 2001
From: Untone
Date: Tue, 12 Mar 2024 16:18:07 +0300
Subject: [PATCH] dogpiled-cache-authors

---
 resolvers/author.py     | 61 +++++++++++++++++++++++------------------
 services/memorycache.py | 31 ++++++---------------
 2 files changed, 43 insertions(+), 49 deletions(-)

diff --git a/resolvers/author.py b/resolvers/author.py
index 220a3222..4b9b01ba 100644
--- a/resolvers/author.py
+++ b/resolvers/author.py
@@ -14,6 +14,7 @@ from services.cache import set_author_cache, update_author_followers_cache
 from services.auth import login_required
 from services.db import local_session
 from services.encoders import CustomJSONEncoder
+from services.memorycache import authors_cache_region
 from services.rediscache import redis
 from services.schema import mutation, query
 from services.logger import root_logger as logger
@@ -109,37 +110,45 @@ async def get_author_id(_, _info, user: str):
 
 
 @query.field('load_authors_by')
 async def load_authors_by(_, _info, by, limit, offset):
-    logger.debug(f'loading authors by {by}')
-    q = select(Author)
-    if by.get('slug'):
-        q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
-    elif by.get('name'):
-        q = q.filter(Author.name.ilike(f"%{by['name']}%"))
-    elif by.get('topic'):
-        q = (
-            q.join(ShoutAuthor)
-            .join(ShoutTopic)
-            .join(Topic)
-            .where(Topic.slug == str(by['topic']))
-        )
+    cache_key = f"load_authors_by_{json.dumps(by)}_{limit}_{offset}"
 
-    if by.get('last_seen'):  # in unix time
-        before = int(time.time()) - by['last_seen']
-        q = q.filter(Author.last_seen > before)
-    elif by.get('created_at'):  # in unix time
-        before = int(time.time()) - by['created_at']
-        q = q.filter(Author.created_at > before)
+    @authors_cache_region.cache_on_arguments(cache_key)
+    def _load_authors_by(by, limit, offset):
+        logger.debug(f'loading authors by {by}')
+        q = select(Author)
+        if by.get('slug'):
+            q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
+        elif by.get('name'):
+            q = q.filter(Author.name.ilike(f"%{by['name']}%"))
+        elif by.get('topic'):
+            q = (
+                q.join(ShoutAuthor)
+                .join(ShoutTopic)
+                .join(Topic)
+                .where(Topic.slug == str(by['topic']))
+            )
 
-    order = by.get('order')
-    if order in ['likes', 'shouts', 'followers']:
-        q = q.order_by(desc(text(f'{order}_stat')))
+        if by.get('last_seen'):  # in unix time
+            before = int(time.time()) - by['last_seen']
+            q = q.filter(Author.last_seen > before)
+        elif by.get('created_at'):  # in unix time
+            before = int(time.time()) - by['created_at']
+            q = q.filter(Author.created_at > before)
 
-    # q = q.distinct()
-    q = q.limit(limit).offset(offset)
+        order = by.get('order')
+        if order in ['likes', 'shouts', 'followers']:
+            q = q.order_by(desc(text(f'{order}_stat')))
+
+        # q = q.distinct()
+        q = q.limit(limit).offset(offset)
+
+        authors = get_with_stat(q)
+
+        return authors
+
+    return _load_authors_by(by, limit, offset)
 
-    authors = get_with_stat(q)
-    return authors
 
 
 @query.field('get_author_follows')
diff --git a/services/memorycache.py b/services/memorycache.py
index 2b2e3c41..65aaa111 100644
--- a/services/memorycache.py
+++ b/services/memorycache.py
@@ -1,26 +1,11 @@
-from functools import wraps
-
 from dogpile.cache import make_region
 
-# Create a cache region with a 300-second TTL
-cache_region = make_region().configure('dogpile.cache.memory', expiration_time=300)
+from settings import REDIS_URL
 
-
-# Decorator for caching method results
-def cache_method(cache_key: str):
-    def decorator(f):
-        @wraps(f)
-        def decorated_function(*args, **kwargs):
-            # Generate the cache key
-            key = cache_key.format(*args, **kwargs)
-            # Look the value up in the cache
-            result = cache_region.get(key)
-            if result is None:
-                # On a miss, call the function and cache its result
-                result = f(*args, **kwargs)
-                cache_region.set(key, result)
-            return result
-
-        return decorated_function
-
-    return decorator
+# Create a Redis-backed cache region with a TTL
+authors_cache_region = make_region()
+authors_cache_region.configure(
+    'dogpile.cache.redis',
+    arguments={'url': f'{REDIS_URL}/1'},
+    expiration_time=3600,  # Cache expiration time in seconds
+)
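
For context on the pattern adopted above: dogpile.cache builds the Redis key
for a decorated function from the function's name, the namespace string passed
to cache_on_arguments(), and str() of the call arguments. Below is a minimal
self-contained sketch of the same behaviour, assuming a local Redis on
database 1; the demo_* names are illustrative only and do not appear in this
patch:

    from dogpile.cache import make_region

    region = make_region()
    region.configure(
        'dogpile.cache.redis',
        arguments={'url': 'redis://localhost:6379/1'},
        expiration_time=3600,  # seconds, matching the TTL in the patch
    )

    @region.cache_on_arguments('demo_authors')  # namespace, like cache_key above
    def demo_load(slug, limit, offset):
        # Body executes only on a cache miss; the return value is pickled
        # into Redis under a key built from the function name, the
        # 'demo_authors' namespace, and str() of the arguments.
        print(f'cache miss: querying {slug} {limit} {offset}')
        return [f'{slug}-{i}' for i in range(limit)][offset:]

    demo_load('discours', 3, 0)             # miss: runs the body, stores result
    demo_load('discours', 3, 0)             # hit: served from Redis, no print
    demo_load.invalidate('discours', 3, 0)  # evict just this cached entry

dogpile also attaches set(), refresh(), and invalidate() to the decorated
function, which gives per-entry eviction after an author update without
flushing the whole region.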