"""
|
|
|
|
|
Caching system for the Discours platform
|
|
|
|
|
----------------------------------------
|
|
|
|
|
|
|
|
|
|
This module provides a comprehensive caching solution with these key components:
|
|
|
|
|
|
|
|
|
|
1. KEY NAMING CONVENTIONS:
|
|
|
|
|
- Entity-based keys: "entity:property:value" (e.g., "author:id:123")
|
|
|
|
|
- Collection keys: "entity:collection:params" (e.g., "authors:stats:limit=10:offset=0")
|
|
|
|
|
- Special case keys: Maintained for backwards compatibility (e.g., "topic_shouts_123")
|
|
|
|
|
|
|
|
|
|
2. CORE FUNCTIONS:
|
|
|
|
|
- cached_query(): High-level function for retrieving cached data or executing queries
|
|
|
|
|
|
|
|
|
|
3. ENTITY-SPECIFIC FUNCTIONS:
|
|
|
|
|
- cache_author(), cache_topic(): Cache entity data
|
|
|
|
|
- get_cached_author(), get_cached_topic(): Retrieve entity data from cache
|
|
|
|
|
- invalidate_cache_by_prefix(): Invalidate all keys with a specific prefix
|
|
|
|
|
|
|
|
|
|
4. CACHE INVALIDATION STRATEGY:
|
|
|
|
|
- Direct invalidation via invalidate_* functions for immediate changes
|
|
|
|
|
- Delayed invalidation via revalidation_manager for background processing
|
|
|
|
|
- Event-based triggers for automatic cache updates (see triggers.py)
|
|
|
|
|
|
|
|
|
|
To maintain consistency with the existing codebase, this module preserves
|
|
|
|
|
the original key naming patterns while providing a more structured approach
|
|
|
|
|
for new cache operations.
|
|
|
|
|
"""

import asyncio
import json
from typing import Any, List, Optional

import orjson
from sqlalchemy import and_, join, select

from auth.orm import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from services.redis import redis
from utils.encoders import CustomJSONEncoder
from utils.logger import root_logger as logger

DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "shouts": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}

CACHE_TTL = 300  # 5 minutes

# Key templates for common entity types
# These are used throughout the codebase and should be maintained for compatibility
CACHE_KEYS = {
    "TOPIC_ID": "topic:id:{}",
    "TOPIC_SLUG": "topic:slug:{}",
    "TOPIC_AUTHORS": "topic:authors:{}",
    "TOPIC_FOLLOWERS": "topic:followers:{}",
    "TOPIC_SHOUTS": "topic_shouts_{}",
    "AUTHOR_ID": "author:id:{}",
    "SHOUTS": "shouts:{}",
}
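
# Example of how these templates are used (illustrative comments only):
#   CACHE_KEYS["AUTHOR_ID"].format(123)       # -> "author:id:123"
#   CACHE_KEYS["TOPIC_SLUG"].format("tech")   # -> "topic:slug:tech" ("tech" is a sample slug)
#   CACHE_KEYS["TOPIC_SHOUTS"].format(5)      # -> "topic_shouts_5" (legacy format)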


# Cache topic data
async def cache_topic(topic: dict):
    payload = json.dumps(topic, cls=CustomJSONEncoder)
    await asyncio.gather(
        redis.execute("SET", f"topic:id:{topic['id']}", payload),
        redis.execute("SET", f"topic:slug:{topic['slug']}", payload),
    )


# Cache author data
async def cache_author(author: dict):
    payload = json.dumps(author, cls=CustomJSONEncoder)
    await asyncio.gather(
        redis.execute("SET", f"author:slug:{author['slug'].strip()}", str(author["id"])),
        redis.execute("SET", f"author:id:{author['id']}", payload),
    )


# Cache follows data
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
    key = f"author:follows-{entity_type}s:{follower_id}"
    follows_str = await redis.execute("GET", key)
    # entity_type is singular ("topic", "author", "shout"); DEFAULT_FOLLOWS is keyed by plural
    follows = orjson.loads(follows_str) if follows_str else DEFAULT_FOLLOWS[f"{entity_type}s"]
    if is_insert:
        if entity_id not in follows:
            follows.append(entity_id)
    else:
        follows = [eid for eid in follows if eid != entity_id]
    await redis.execute("SET", key, json.dumps(follows, cls=CustomJSONEncoder))
    await update_follower_stat(follower_id, entity_type, len(follows))
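
# Example (illustrative): record that author 1 follows topic 42; this updates
# the "author:follows-topics:1" list and refreshes the author's stat counter:
#   await cache_follows(follower_id=1, entity_type="topic", entity_id=42, is_insert=True)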


# Update follower statistics
async def update_follower_stat(follower_id, entity_type, count):
    follower_key = f"author:id:{follower_id}"
    follower_str = await redis.execute("GET", follower_key)
    follower = orjson.loads(follower_str) if follower_str else None
    if follower:
        follower["stat"] = {f"{entity_type}s": count}
        await cache_author(follower)


# Get author from cache
async def get_cached_author(author_id: int, get_with_stat):
    logger.debug(f"[get_cached_author] Starting lookup for author_id: {author_id}")

    author_key = f"author:id:{author_id}"
    logger.debug(f"[get_cached_author] Checking cache for key: {author_key}")

    result = await redis.execute("GET", author_key)
    if result:
        logger.debug(f"[get_cached_author] Cache hit, size: {len(result)} bytes")
        cached_data = orjson.loads(result)
        logger.debug(
            f"[get_cached_author] Cached data keys: {list(cached_data.keys()) if cached_data else 'None'}"
        )
        return cached_data

    logger.debug("[get_cached_author] Cache miss, loading from DB")

    # Load from database if not found in cache
    q = select(Author).where(Author.id == author_id)
    authors = get_with_stat(q)
    logger.debug(f"[get_cached_author] DB query returned {len(authors) if authors else 0} records")

    if authors:
        author = authors[0]
        logger.debug(f"[get_cached_author] Got author from DB: {type(author)}, id: {getattr(author, 'id', 'N/A')}")

        # Use a safe dict() conversion for Author
        author_dict = author.dict() if hasattr(author, "dict") else author.__dict__
        logger.debug(
            f"[get_cached_author] Serialized author data keys: {list(author_dict.keys()) if author_dict else 'None'}"
        )

        await cache_author(author_dict)
        logger.debug("[get_cached_author] Author cached")

        return author_dict

    logger.warning(f"[get_cached_author] Author with ID {author_id} not found in DB")
    return None


# Function to get cached topic
async def get_cached_topic(topic_id: int):
    """
    Fetch topic data from cache or database by id.

    Args:
        topic_id (int): The identifier for the topic.

    Returns:
        dict: Topic data or None if not found.
    """
    topic_key = f"topic:id:{topic_id}"
    cached_topic = await redis.execute("GET", topic_key)
    if cached_topic:
        return orjson.loads(cached_topic)

    # If not in cache, fetch from the database
    with local_session() as session:
        topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none()
        if topic:
            topic_dict = topic.dict()
            await redis.execute("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder))
            return topic_dict

    return None


# Get topic by slug from cache
async def get_cached_topic_by_slug(slug: str, get_with_stat):
    topic_key = f"topic:slug:{slug}"
    result = await redis.execute("GET", topic_key)
    if result:
        return orjson.loads(result)
    # Load from database if not found in cache
    topic_query = select(Topic).where(Topic.slug == slug)
    topics = get_with_stat(topic_query)
    if topics:
        topic_dict = topics[0].dict()
        await cache_topic(topic_dict)
        return topic_dict
    return None


# Get list of authors by ID from cache
async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
    # Fetch all author data concurrently
    keys = [f"author:id:{author_id}" for author_id in author_ids]
    results = await asyncio.gather(*(redis.execute("GET", key) for key in keys))
    authors = [orjson.loads(result) if result else None for result in results]
    # Load missing authors from database and cache
    missing_indices = [index for index, author in enumerate(authors) if author is None]
    if missing_indices:
        missing_ids = [author_ids[index] for index in missing_indices]
        with local_session() as session:
            query = select(Author).where(Author.id.in_(missing_ids))
            missing_authors = session.execute(query).scalars().unique().all()
            await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
            # Map by id: an IN(...) query does not guarantee rows in request order
            missing_by_id = {author.id: author.dict() for author in missing_authors}
            for index in missing_indices:
                if author_ids[index] in missing_by_id:
                    authors[index] = missing_by_id[author_ids[index]]
    return authors


async def get_cached_topic_followers(topic_id: int):
    """
    Gets topic followers by ID, using the Redis cache.

    Args:
        topic_id: Topic ID

    Returns:
        List[dict]: List of followers with their data
    """
    try:
        cache_key = CACHE_KEYS["TOPIC_FOLLOWERS"].format(topic_id)
        cached = await redis.execute("GET", cache_key)

        if cached:
            followers_ids = orjson.loads(cached)
            logger.debug(f"Found {len(followers_ids)} cached followers for topic #{topic_id}")
            return await get_cached_authors_by_ids(followers_ids)

        with local_session() as session:
            followers_ids = [
                f[0]
                for f in session.query(Author.id)
                .join(TopicFollower, TopicFollower.follower == Author.id)
                .filter(TopicFollower.topic == topic_id)
                .all()
            ]

        await redis.execute("SETEX", cache_key, CACHE_TTL, orjson.dumps(followers_ids))
        followers = await get_cached_authors_by_ids(followers_ids)
        logger.debug(f"Cached {len(followers)} followers for topic #{topic_id}")
        return followers

    except Exception as e:
        logger.error(f"Error getting followers for topic #{topic_id}: {e}")
        return []


# Get cached author followers
async def get_cached_author_followers(author_id: int):
    # Check cache for data
    cached = await redis.execute("GET", f"author:followers:{author_id}")
    if cached:
        followers_ids = orjson.loads(cached)
        followers = await get_cached_authors_by_ids(followers_ids)
        logger.debug(f"Cached followers for author #{author_id}: {len(followers)}")
        return followers

    # Query database if cache is empty
    with local_session() as session:
        followers_ids = [
            f[0]
            for f in session.query(Author.id)
            .join(AuthorFollower, AuthorFollower.follower == Author.id)
            .filter(AuthorFollower.author == author_id, Author.id != author_id)
            .all()
        ]
        await redis.execute("SET", f"author:followers:{author_id}", orjson.dumps(followers_ids))
        followers = await get_cached_authors_by_ids(followers_ids)
        return followers


# Get cached follower authors
async def get_cached_follower_authors(author_id: int):
    # Attempt to retrieve authors from cache
    cached = await redis.execute("GET", f"author:follows-authors:{author_id}")
    if cached:
        authors_ids = orjson.loads(cached)
    else:
        # Query authors from database
        with local_session() as session:
            authors_ids = [
                a[0]
                for a in session.execute(
                    select(Author.id)
                    .select_from(join(Author, AuthorFollower, Author.id == AuthorFollower.author))
                    .where(AuthorFollower.follower == author_id)
                ).all()
            ]
        await redis.execute("SET", f"author:follows-authors:{author_id}", orjson.dumps(authors_ids))

    authors = await get_cached_authors_by_ids(authors_ids)
    return authors


# Get cached follower topics
async def get_cached_follower_topics(author_id: int):
    # Attempt to retrieve topics from cache
    cached = await redis.execute("GET", f"author:follows-topics:{author_id}")
    if cached:
        topics_ids = orjson.loads(cached)
    else:
        # Load topics from database and cache them
        with local_session() as session:
            topics_ids = [
                t[0]
                for t in session.query(Topic.id)
                .join(TopicFollower, TopicFollower.topic == Topic.id)
                .where(TopicFollower.follower == author_id)
                .all()
            ]
        await redis.execute("SET", f"author:follows-topics:{author_id}", orjson.dumps(topics_ids))

    topics = []
    for topic_id in topics_ids:
        topic_str = await redis.execute("GET", f"topic:id:{topic_id}")
        if topic_str:
            topic = orjson.loads(topic_str)
            if topic and topic not in topics:
                topics.append(topic)

    logger.debug(f"Cached topics for author #{author_id}: {len(topics)}")
    return topics


# Get author by author_id from cache
async def get_cached_author_by_id(author_id: int, get_with_stat):
    """
    Retrieve author information by author_id, checking the cache first, then the database.

    Args:
        author_id (int): The author identifier for which to retrieve the author.

    Returns:
        dict: Dictionary with author data or None if not found.
    """
    # Attempt to find author data by author_id in Redis cache
    cached_author_data = await redis.execute("GET", f"author:id:{author_id}")
    if cached_author_data:
        # If data is found, return parsed JSON
        return orjson.loads(cached_author_data)

    # If data is not found in cache, query the database
    author_query = select(Author).where(Author.id == author_id)
    authors = get_with_stat(author_query)
    if authors:
        # Cache the retrieved author data
        author = authors[0]
        author_dict = author.dict()
        await redis.execute("SET", f"author:id:{author.id}", orjson.dumps(author_dict))
        return author_dict

    # Return None if author is not found
    return None


# Get cached topic authors
async def get_cached_topic_authors(topic_id: int):
    """
    Retrieve a list of authors for a given topic, using cache or database.

    Args:
        topic_id (int): The identifier of the topic for which to retrieve authors.

    Returns:
        List[dict]: A list of dictionaries containing author data.
    """
    # Attempt to get a list of author IDs from cache
    rkey = f"topic:authors:{topic_id}"
    cached_authors_ids = await redis.execute("GET", rkey)
    if cached_authors_ids:
        authors_ids = orjson.loads(cached_authors_ids)
    else:
        # If cache is empty, get data from the database
        with local_session() as session:
            query = (
                select(ShoutAuthor.author)
                .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
                .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
                .where(
                    and_(
                        ShoutTopic.topic == topic_id,
                        Shout.published_at.is_not(None),
                        Shout.deleted_at.is_(None),
                    )
                )
            )
            authors_ids = [author_id for (author_id,) in session.execute(query).all()]
            # Cache the retrieved author IDs
            await redis.execute("SET", rkey, orjson.dumps(authors_ids))

    # Retrieve full author details from cached IDs
    if authors_ids:
        authors = await get_cached_authors_by_ids(authors_ids)
        logger.debug(f"Topic#{topic_id} authors fetched and cached: {len(authors)} authors found.")
        return authors

    return []


async def invalidate_shouts_cache(cache_keys: List[str]):
    """
    Invalidates cached shout selections for the given keys.
    """
    for cache_key in cache_keys:
        try:
            # Delete the main cache key
            await redis.execute("DEL", cache_key)
            logger.debug(f"Invalidated cache key: {cache_key}")

            # Mark the key as invalidated, with a TTL
            await redis.execute("SETEX", f"{cache_key}:invalidated", CACHE_TTL, "1")

            # If this is a topic cache key, invalidate related keys as well
            if cache_key.startswith("topic_"):
                # Keys look like "topic_123" or "topic_shouts_123"; the id is the last segment
                topic_id = cache_key.split("_")[-1]
                related_keys = [
                    f"topic:id:{topic_id}",
                    f"topic:authors:{topic_id}",
                    f"topic:followers:{topic_id}",
                    f"topic:stats:{topic_id}",
                ]
                for related_key in related_keys:
                    await redis.execute("DEL", related_key)
                    logger.debug(f"Invalidated related key: {related_key}")

        except Exception as e:
            logger.error(f"Error invalidating cache key {cache_key}: {e}")


async def cache_topic_shouts(topic_id: int, shouts: List[dict]):
    """Cache the list of shouts for a topic."""
    key = f"topic_shouts_{topic_id}"
    payload = json.dumps(shouts, cls=CustomJSONEncoder)
    await redis.execute("SETEX", key, CACHE_TTL, payload)


async def get_cached_topic_shouts(topic_id: int) -> Optional[List[dict]]:
    """Get the cached list of shouts for a topic."""
    key = f"topic_shouts_{topic_id}"
    cached = await redis.execute("GET", key)
    if cached:
        return orjson.loads(cached)
    return None


async def cache_related_entities(shout: Shout):
    """
    Caches all entities related to a shout (authors and topics).
    """
    tasks = []
    for author in shout.authors:
        tasks.append(cache_by_id(Author, author.id, cache_author))
    for topic in shout.topics:
        tasks.append(cache_by_id(Topic, topic.id, cache_topic))
    await asyncio.gather(*tasks)


async def invalidate_shout_related_cache(shout: Shout, author_id: int):
    """
    Invalidates all cache related to a shout and its relations.

    Args:
        shout: The shout object
        author_id: Author ID
    """
    cache_keys = {
        "feed",  # main feed
        f"author_{author_id}",  # the author's publications
        "random_top",  # random top shouts
        "unrated",  # unrated shouts
        "recent",  # recent shouts
        "coauthored",  # co-authored shouts
    }

    # Add author keys
    cache_keys.update(f"author_{a.id}" for a in shout.authors)
    cache_keys.update(f"authored_{a.id}" for a in shout.authors)

    # Add topic keys
    cache_keys.update(f"topic_{t.id}" for t in shout.topics)
    cache_keys.update(f"topic_shouts_{t.id}" for t in shout.topics)

    await invalidate_shouts_cache(list(cache_keys))


# Function removed - direct Redis calls used throughout the module instead


async def get_cached_entity(entity_type: str, entity_id: int, get_method, cache_method):
    """
    Generic function for retrieving a cached entity.

    Args:
        entity_type: 'author' or 'topic'
        entity_id: entity ID
        get_method: method that fetches the entity from the DB
        cache_method: method that caches the entity
    """
    key = f"{entity_type}:id:{entity_id}"
    cached = await redis.execute("GET", key)
    if cached:
        return orjson.loads(cached)

    entity = await get_method(entity_id)
    if entity:
        await cache_method(entity)
        return entity
    return None
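
# Example (illustrative): read a topic through the generic helper, using this
# module's own fetch/cache functions as the two callbacks:
#   topic = await get_cached_entity("topic", 42, get_cached_topic, cache_topic)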


async def cache_by_id(entity, entity_id: int, cache_method):
    """
    Caches an entity by ID using the given cache method.

    Args:
        entity: entity class (Author/Topic)
        entity_id: entity ID
        cache_method: caching function
    """
    from resolvers.stat import get_with_stat

    caching_query = select(entity).filter(entity.id == entity_id)
    result = get_with_stat(caching_query)
    if not result or not result[0]:
        logger.warning(f"{entity.__name__} with id {entity_id} not found")
        return
    instance = result[0]
    entity_dict = instance.dict()
    await cache_method(entity_dict)
    return entity_dict
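
# Example (illustrative): refresh the cached copy of author 123, stats included:
#   author_dict = await cache_by_id(Author, 123, cache_author)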
|
2025-03-22 08:47:19 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Универсальная функция для сохранения данных в кеш
|
|
|
|
|
async def cache_data(key: str, data: Any, ttl: Optional[int] = None) -> None:
|
|
|
|
|
"""
|
|
|
|
|
Сохраняет данные в кеш по указанному ключу.
|
|
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
key: Ключ кеша
|
|
|
|
|
data: Данные для сохранения
|
|
|
|
|
ttl: Время жизни кеша в секундах (None - бессрочно)
|
|
|
|
|
"""
|
|
|
|
|
try:
|
|
|
|
|
payload = json.dumps(data, cls=CustomJSONEncoder)
|
|
|
|
|
if ttl:
|
|
|
|
|
await redis.execute("SETEX", key, ttl, payload)
|
|
|
|
|
else:
|
|
|
|
|
await redis.execute("SET", key, payload)
|
|
|
|
|
logger.debug(f"Данные сохранены в кеш по ключу {key}")
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.error(f"Ошибка при сохранении данных в кеш: {e}")


# Generic function for retrieving data from the cache
async def get_cached_data(key: str) -> Optional[Any]:
    """
    Retrieves data from the cache by the given key.

    Args:
        key: Cache key

    Returns:
        Any: Data from the cache, or None if there is none
    """
    try:
        cached_data = await redis.execute("GET", key)
        if cached_data:
            loaded = orjson.loads(cached_data)
            logger.debug(f"Data retrieved from cache for key {key}: {len(loaded)}")
            return loaded
        return None
    except Exception as e:
        logger.error(f"Error retrieving data from cache: {e}")
        return None
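
# Example (illustrative): cache_data/get_cached_data form a simple write/read pair
# ("stats" is a placeholder for any JSON-serializable value):
#   await cache_data("authors:stats:limit=10:offset=0", stats, ttl=CACHE_TTL)
#   stats = await get_cached_data("authors:stats:limit=10:offset=0")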


# Generic function for invalidating cache keys by prefix
async def invalidate_cache_by_prefix(prefix: str) -> None:
    """
    Invalidates all cache keys with the given prefix.

    Args:
        prefix: Prefix of the cache keys to invalidate
    """
    try:
        keys = await redis.execute("KEYS", f"{prefix}:*")
        if keys:
            await redis.execute("DEL", *keys)
            logger.debug(f"Deleted {len(keys)} cache keys with prefix {prefix}")
    except Exception as e:
        logger.error(f"Error invalidating cache: {e}")


# Generic function for fetching and caching data
async def cached_query(
    cache_key: str,
    query_func: callable,
    ttl: Optional[int] = None,
    force_refresh: bool = False,
    use_key_format: bool = True,
    **query_params,
) -> Any:
    """
    Gets data from cache or executes query and saves result to cache.
    Supports existing key formats for compatibility.

    Args:
        cache_key: Cache key or key template from CACHE_KEYS
        query_func: Function to execute the query
        ttl: Cache TTL in seconds (None - indefinite)
        force_refresh: Force cache refresh
        use_key_format: Whether to check if cache_key matches a key template in CACHE_KEYS
        **query_params: Parameters to pass to the query function

    Returns:
        Any: Data from cache or query result
    """
    # Check if cache_key matches a pattern in CACHE_KEYS
    actual_key = cache_key
    if use_key_format and "{}" in cache_key:
        # Look for a template match in CACHE_KEYS
        for key_format in CACHE_KEYS.values():
            if cache_key == key_format:
                # We have a match, now look for the id or value to format with
                for param_name, param_value in query_params.items():
                    if param_name in ["id", "slug", "user", "topic_id", "author_id"]:
                        actual_key = cache_key.format(param_value)
                        break

    # If not forcing refresh, try to get data from cache
    if not force_refresh:
        cached_result = await get_cached_data(actual_key)
        if cached_result is not None:
            return cached_result

    # If data not in cache or refresh required, execute query
    try:
        result = await query_func(**query_params)
        if result is not None:
            # Save result to cache
            await cache_data(actual_key, result, ttl)
        return result
    except Exception as e:
        logger.error(f"Error executing query for caching: {e}")
        # In case of error, return data from cache if not forcing refresh
        if not force_refresh:
            return await get_cached_data(actual_key)
        raise
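
# Example usage (a sketch; `fetch_topic_shouts` stands in for any async query function):
#   shouts = await cached_query(
#       CACHE_KEYS["TOPIC_SHOUTS"],  # the "topic_shouts_{}" template is formatted with topic_id
#       fetch_topic_shouts,
#       ttl=CACHE_TTL,
#       topic_id=123,
#   )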