core/cache/precache.py
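"""Precache hot data into Redis at startup: authors, topics, and their
follower/follows relations. Flushes the cache database while preserving a
whitelist of key patterns (sessions, OAuth tokens, etc.)."""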

import asyncio

from sqlalchemy import and_, join, select

from auth.orm import Author, AuthorFollower
from cache.cache import cache_author, cache_topic
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from services.db import local_session
from services.redis import redis
from utils.encoders import fast_json_dumps
from utils.logger import root_logger as logger


# Precache an author's followers
async def precache_authors_followers(author_id, session) -> None:
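    """Collect the ids of an author's followers and store them in Redis as a JSON list."""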
    authors_followers: set[int] = set()
    followers_query = select(AuthorFollower.follower).where(AuthorFollower.author == author_id)
    result = session.execute(followers_query)
    authors_followers.update(row[0] for row in result if row[0])

    followers_payload = fast_json_dumps(list(authors_followers))
    await redis.execute("SET", f"author:followers:{author_id}", followers_payload)


# Precache an author's follows (topics, authors, shouts)
async def precache_authors_follows(author_id, session) -> None:
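    """Store the topic, author, and shout ids an author follows in Redis as JSON lists."""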
    follows_topics_query = select(TopicFollower.topic).where(TopicFollower.follower == author_id)
    follows_authors_query = select(AuthorFollower.author).where(AuthorFollower.follower == author_id)
    follows_shouts_query = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == author_id)

    follows_topics = {row[0] for row in session.execute(follows_topics_query) if row[0]}
    follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]}
    follows_shouts = {row[0] for row in session.execute(follows_shouts_query) if row[0]}

    topics_payload = fast_json_dumps(list(follows_topics))
    authors_payload = fast_json_dumps(list(follows_authors))
    shouts_payload = fast_json_dumps(list(follows_shouts))

    await asyncio.gather(
        redis.execute("SET", f"author:follows-topics:{author_id}", topics_payload),
        redis.execute("SET", f"author:follows-authors:{author_id}", authors_payload),
        redis.execute("SET", f"author:follows-shouts:{author_id}", shouts_payload),
    )


# Precache a topic's authors
async def precache_topics_authors(topic_id: int, session) -> None:
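    """Collect the ids of authors with published shouts in a topic and store them in Redis."""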
    topic_authors_query = (
        select(ShoutAuthor.author)
        .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
        .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
        .filter(
            and_(
                ShoutTopic.topic == topic_id,
                Shout.published_at.is_not(None),
                Shout.deleted_at.is_(None),
            )
        )
    )
    topic_authors = {row[0] for row in session.execute(topic_authors_query) if row[0]}

    authors_payload = fast_json_dumps(list(topic_authors))
    await redis.execute("SET", f"topic:authors:{topic_id}", authors_payload)


# Precache a topic's followers
async def precache_topics_followers(topic_id: int, session) -> None:
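    """Collect the ids of a topic's followers and store them in Redis as a JSON list."""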
    followers_query = select(TopicFollower.follower).where(TopicFollower.topic == topic_id)
    topic_followers = {row[0] for row in session.execute(followers_query) if row[0]}

    followers_payload = fast_json_dumps(list(topic_followers))
    await redis.execute("SET", f"topic:followers:{topic_id}", followers_payload)


async def precache_data() -> None:
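    """Flush the Redis cache (preserving whitelisted keys) and precache topics and authors."""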
    logger.info("precaching...")
    logger.debug("Entering precache_data")
    try:
        # Key patterns that must survive the FLUSHDB below
        preserve_patterns = [
            "migrated_views_*",  # view-migration data
            "session:*",  # user sessions
            "env_vars:*",  # environment variables
            "oauth_*",  # OAuth tokens
        ]

        # Collect every important key before the database is wiped.
        # KEYS is O(N) and blocks Redis, which is acceptable here because
        # precaching runs once at startup.
        all_keys_to_preserve = []
        preserved_data = {}
        for pattern in preserve_patterns:
            keys = await redis.execute("KEYS", pattern)
            if keys:
                all_keys_to_preserve.extend(keys)
                logger.info(f"Found {len(keys)} keys matching pattern '{pattern}'")

        if all_keys_to_preserve:
            logger.info(f"Preserving {len(all_keys_to_preserve)} important keys before FLUSHDB")
            for key in all_keys_to_preserve:
                try:
                    # Detect the key's type and dump its value accordingly
                    key_type = await redis.execute("TYPE", key)
                    if key_type == "hash":
                        preserved_data[key] = await redis.execute("HGETALL", key)
                    elif key_type == "string":
                        preserved_data[key] = await redis.execute("GET", key)
                    elif key_type == "set":
                        preserved_data[key] = await redis.execute("SMEMBERS", key)
                    elif key_type == "list":
                        preserved_data[key] = await redis.execute("LRANGE", key, 0, -1)
                    elif key_type == "zset":
                        preserved_data[key] = await redis.execute("ZRANGE", key, 0, -1, "WITHSCORES")
                except Exception as e:
                    logger.error(f"Failed to preserve key {key}: {e}")
                    continue

        await redis.execute("FLUSHDB")
        logger.debug("Redis database flushed")
        logger.info("redis: FLUSHDB")

        # Restore every preserved key after the flush
        if preserved_data:
            logger.info(f"Restoring {len(preserved_data)} preserved keys")
            for key, data in preserved_data.items():
                try:
                    if isinstance(data, dict) and data:
                        # Hash: flatten {field: value} into HSET field/value pairs
                        flattened = []
                        for field, val in data.items():
                            flattened.extend([field, val])
                        if flattened:
                            await redis.execute("HSET", key, *flattened)
                    elif isinstance(data, str) and data:
                        # String
                        await redis.execute("SET", key, data)
                    elif isinstance(data, list) and data:
                        # List or ZSet
                        if any(isinstance(item, (list, tuple)) and len(item) == 2 for item in data):
                            # ZSet: each preserved entry is a (member, score) pair
                            for item in data:
                                if isinstance(item, (list, tuple)) and len(item) == 2:
                                    await redis.execute("ZADD", key, item[1], item[0])
                        else:
                            # Regular list: RPUSH preserves the original LRANGE order
                            # (LPUSH would restore it reversed)
                            await redis.execute("RPUSH", key, *data)
                    elif isinstance(data, set) and data:
                        # Set
                        await redis.execute("SADD", key, *data)
                except Exception as e:
                    logger.error(f"Failed to restore key {key}: {e}")
                    continue

        logger.info("Beginning topic precache phase")
        with local_session() as session:
            # topics
            q = select(Topic).where(Topic.community == 1)
            topics = get_with_stat(q)
            logger.info(f"Found {len(topics)} topics to precache")
            for topic in topics:
                topic_dict = topic.dict() if hasattr(topic, "dict") else topic
                logger.debug(f"Precaching topic id={topic_dict.get('id')}")
                await cache_topic(topic_dict)
                logger.debug(f"Cached topic id={topic_dict.get('id')}")
                await asyncio.gather(
                    precache_topics_followers(topic_dict["id"], session),
                    precache_topics_authors(topic_dict["id"], session),
                )
                logger.debug(f"Finished precaching followers and authors for topic id={topic_dict.get('id')}")
            logger.info(f"{len(topics)} topics and their followings precached")

            # authors
            authors = get_with_stat(select(Author))
            # logger.info(f"{len(authors)} authors found in database")
            for author in authors:
                if isinstance(author, Author):
                    profile = author.dict()
                    author_id = profile.get("id")
                    # user_id = profile.get("user", "").strip()
                    if author_id:  # and user_id:
                        await cache_author(profile)
                        await asyncio.gather(
                            precache_authors_followers(author_id, session),
                            precache_authors_follows(author_id, session),
                        )
                        logger.debug(f"Finished precaching followers and follows for author id={author_id}")
                else:
                    logger.error(f"Failed to cache author {author}")
            logger.info(f"{len(authors)} authors and their followings precached")
    except Exception as exc:
        import traceback

        traceback.print_exc()
        logger.error(f"Error in precache_data: {exc}")