caching-wip1

Untone 2024-08-06 19:55:27 +03:00
parent 9f91490441
commit c276a0eeb0


@@ -1,8 +1,8 @@
import asyncio
import json
from typing import List
from sqlalchemy import select
from orm.author import Author
from orm.topic import Topic
from services.db import local_session
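# Note: the hunk shows only part of the import block. The `redis` client and
# `CustomJSONEncoder` used below are imported elsewhere in this module; a sketch
# of the assumed imports (the module paths are a guess, not shown in the diff):
# from services.rediscache import redis
# from services.encoders import CustomJSONEncoder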
@@ -16,80 +16,92 @@ DEFAULT_FOLLOWS = {
}
# Cache topic data
async def cache_topic(topic: dict):
    payload = json.dumps(topic, cls=CustomJSONEncoder)
    # Write the id and slug keys concurrently, so the topic can be looked up by either
    await asyncio.gather(
        redis.set(f"topic:id:{topic['id']}", payload),
        redis.set(f"topic:slug:{topic['slug']}", payload),
    )
# Cache author data
async def cache_author(author: dict):
    payload = json.dumps(author, cls=CustomJSONEncoder)
    # Cache the author under both the user key and the id key
    await asyncio.gather(
        redis.set(f"author:user:{author['user'].strip()}", str(author["id"])),
        redis.set(f"author:id:{author['id']}", payload),
    )
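# Note: the author:user key stores only the author id, so resolving an author by
# user id takes two Redis lookups. A hypothetical helper (not part of this commit)
# illustrating that read path:
async def get_cached_author_by_user(user_id: str):
    author_id = await redis.get(f"author:user:{user_id}")
    payload = await redis.get(f"author:id:{author_id}") if author_id else None
    return json.loads(payload) if payload else None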
# Cache follow relations
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
    key = f"author:follows-{entity_type}s:{follower_id}"
    follows_str = await redis.get(key)
    follows = json.loads(follows_str) if follows_str else []
    if is_insert:
        if entity_id not in follows:
            follows.append(entity_id)
    else:
        follows = [eid for eid in follows if eid != entity_id]
    await redis.set(key, json.dumps(follows, cls=CustomJSONEncoder))
    # update_follower_stat is a coroutine, so it must be awaited
    await update_follower_stat(follower_id, entity_type, len(follows))
# Update a follower's stats
async def update_follower_stat(follower_id, entity_type, count):
    follower_key = f"author:id:{follower_id}"
    follower_str = await redis.get(follower_key)
    follower = json.loads(follower_str) if follower_str else None
    if follower:
        # Update only this entity's counter instead of replacing the whole stat dict
        follower.setdefault("stat", {})[f"{entity_type}s"] = count
        await cache_author(follower)
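# Hypothetical example of how a follow/unfollow resolver might call cache_follows;
# this helper and its ids are illustrative, not part of the commit:
async def toggle_topic_follow(follower_id: int, topic_id: int, follow: bool):
    await cache_follows(follower_id, "topic", topic_id, is_insert=follow)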
# Get an author from the cache
async def get_cached_author(author_id: int):
    author_key = f"author:id:{author_id}"
    result = await redis.get(author_key)
    if result:
        return json.loads(result)
    # Load from the database when the author is not cached
    with local_session() as session:
        author = session.execute(select(Author).where(Author.id == author_id)).scalar_one_or_none()
        if author:
            await cache_author(author.dict())
            return author.dict()
    return None
# Get a topic by slug from the cache
async def get_cached_topic_by_slug(slug: str):
    topic_key = f"topic:slug:{slug}"
    result = await redis.get(topic_key)
    if result:
        return json.loads(result)
    # Load from the database when the topic is not cached
    with local_session() as session:
        topic = session.execute(select(Topic).where(Topic.slug == slug)).scalar_one_or_none()
        if topic:
            await cache_topic(topic.dict())
            return topic.dict()
    return None
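# Both getters above follow the same cache-aside pattern: check Redis, fall back
# to the database, write back. A hypothetical generic helper (an assumption, not
# part of this commit) that factors the pattern out:
async def get_cached(key: str, load, cache):
    cached = await redis.get(key)
    if cached:
        return json.loads(cached)
    entity = load()  # synchronous loader, e.g. a SQLAlchemy query
    if entity is not None:
        data = entity.dict()
        await cache(data)  # e.g. cache_author or cache_topic
        return data
    return None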
# Get a list of authors by id from the cache
async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
    # Fetch all authors from Redis concurrently
    keys = [f"author:id:{author_id}" for author_id in author_ids]
    results = await asyncio.gather(*(redis.get(key) for key in keys))
    authors = [json.loads(result) if result else None for result in results]
    # Load the missing authors from the database and cache them
    missing_indices = [index for index, author in enumerate(authors) if author is None]
    if missing_indices:
        missing_ids = [author_ids[index] for index in missing_indices]
        with local_session() as session:
            query = select(Author).where(Author.id.in_(missing_ids))
            missing_authors = session.execute(query).scalars().all()
            await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
            # Fill the gaps in place so the already-cached results are kept
            fetched = {author.id: author.dict() for author in missing_authors}
            for index in missing_indices:
                authors[index] = fetched.get(author_ids[index])
    return authors
# The remaining functions get the same optimizations
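# Hypothetical smoke test for the cached getters; the ids and slug are
# illustrative, and a running Redis and database are assumed
if __name__ == "__main__":
    async def main():
        print(await get_cached_author(1))
        print(await get_cached_topic_by_slug("digital"))
        print(await get_cached_authors_by_ids([1, 2, 3]))

    asyncio.run(main())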