circular-fix
Checks: Deploy on push / deploy (push) failing after 17s
2025-08-17 16:33:54 +03:00
parent bc8447a444
commit e78e12eeee
65 changed files with 3304 additions and 1051 deletions

cache/cache.py (vendored) · 58 lines changed

@@ -5,22 +5,22 @@ Caching system for the Discours platform
This module provides a comprehensive caching solution with these key components:
1. KEY NAMING CONVENTIONS:
-   - Entity-based keys: "entity:property:value" (e.g., "author:id:123")
-   - Collection keys: "entity:collection:params" (e.g., "authors:stats:limit=10:offset=0")
-   - Special case keys: Maintained for backwards compatibility (e.g., "topic_shouts_123")
+   - Entity-based keys: "entity:property:value" (e.g., "author:id:123")
+   - Collection keys: "entity:collection:params" (e.g., "authors:stats:limit=10:offset=0")
+   - Special case keys: Maintained for backwards compatibility (e.g., "topic_shouts_123")
2. CORE FUNCTIONS:
-   - cached_query(): High-level function for retrieving cached data or executing queries
+   - cached_query(): High-level function for retrieving cached data or executing queries
3. ENTITY-SPECIFIC FUNCTIONS:
-   - cache_author(), cache_topic(): Cache entity data
-   - get_cached_author(), get_cached_topic(): Retrieve entity data from cache
-   - invalidate_cache_by_prefix(): Invalidate all keys with a specific prefix
+   - cache_author(), cache_topic(): Cache entity data
+   - get_cached_author(), get_cached_topic(): Retrieve entity data from cache
+   - invalidate_cache_by_prefix(): Invalidate all keys with a specific prefix
4. CACHE INVALIDATION STRATEGY:
-   - Direct invalidation via invalidate_* functions for immediate changes
-   - Delayed invalidation via revalidation_manager for background processing
-   - Event-based triggers for automatic cache updates (see triggers.py)
+   - Direct invalidation via invalidate_* functions for immediate changes
+   - Delayed invalidation via revalidation_manager for background processing
+   - Event-based triggers for automatic cache updates (see triggers.py)
To maintain consistency with the existing codebase, this module preserves
the original key naming patterns while providing a more structured approach
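The docstring spells out the module's public API. As a rough usage sketch, assuming the `cached_query` signature shown later in this diff and that extra keyword arguments are forwarded to the query function (the fetch function, key, and TTL here are hypothetical):

from cache.cache import cached_query

async def fetch_authors(limit: int = 10, offset: int = 0) -> list[dict]:
    # Hypothetical query function; the real one would hit the database.
    return [{"id": 123, "slug": "example-author"}]

async def get_authors_page() -> list[dict]:
    # Collection key per the documented "entity:collection:params" convention.
    return await cached_query(
        "authors:stats:limit=10:offset=0",
        fetch_authors,
        ttl=300,  # hypothetical TTL in seconds
        limit=10,
        offset=0,
    )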
@@ -29,7 +29,7 @@ for new cache operations.
import asyncio
import json
- from typing import Any, Callable, Dict, List, Optional, Type, Union
+ from typing import Any, Callable, Dict, List, Type
import orjson
from sqlalchemy import and_, join, select
@@ -135,10 +135,6 @@ async def get_cached_author(author_id: int, get_with_stat=None) -> dict | None:
logger.debug("[get_cached_author] Данные не найдены в кэше, загрузка из БД")
# Load from database if not found in cache
if get_with_stat is None:
from resolvers.stat import get_with_stat
q = select(Author).where(Author.id == author_id)
authors = get_with_stat(q)
logger.debug(f"[get_cached_author] Результат запроса из БД: {len(authors) if authors else 0} записей")
@@ -197,7 +193,7 @@ async def get_cached_topic_by_slug(slug: str, get_with_stat=None) -> dict | None
return orjson.loads(result)
# Load from database if not found in cache
if get_with_stat is None:
-     from resolvers.stat import get_with_stat
+     pass  # get_with_stat is already imported at the top level
topic_query = select(Topic).where(Topic.slug == slug)
topics = get_with_stat(topic_query)
@@ -218,11 +214,11 @@ async def get_cached_authors_by_ids(author_ids: list[int]) -> list[dict]:
missing_indices = [index for index, author in enumerate(authors) if author is None]
if missing_indices:
missing_ids = [author_ids[index] for index in missing_indices]
- query = select(Author).where(Author.id.in_(missing_ids))
with local_session() as session:
+     query = select(Author).where(Author.id.in_(missing_ids))
missing_authors = session.execute(query).scalars().unique().all()
await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
- for index, author in zip(missing_indices, missing_authors):
+ for index, author in zip(missing_indices, missing_authors, strict=False):
authors[index] = author.dict()
# Filter out None values so the return type is correct
return [author for author in authors if author is not None]
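The added `strict=False` pins down the long-standing truncating behaviour of `zip` explicitly (linters such as Ruff flag bare `zip` calls on Python 3.10+). A small illustration of what the flag controls:

missing_indices = [0, 2, 5]
missing_authors = ["anna", "boris"]  # one author was not found in the DB

# strict=False (the historical default): stops silently at the shorter input
pairs = list(zip(missing_indices, missing_authors, strict=False))
print(pairs)  # [(0, 'anna'), (2, 'boris')]

# strict=True would raise instead of hiding the length mismatch:
# ValueError: zip() argument 2 is shorter than argument 1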
@@ -358,10 +354,6 @@ async def get_cached_author_by_id(author_id: int, get_with_stat=None):
# If data is found, return parsed JSON
return orjson.loads(cached_author_data)
# If data is not found in cache, query the database
- if get_with_stat is None:
-     from resolvers.stat import get_with_stat
author_query = select(Author).where(Author.id == author_id)
authors = get_with_stat(author_query)
if authors:
@@ -540,7 +532,7 @@ async def cache_by_id(entity, entity_id: int, cache_method, get_with_stat=None):
"""
if get_with_stat is None:
-     from resolvers.stat import get_with_stat
+     pass  # get_with_stat is already imported at the top level
caching_query = select(entity).where(entity.id == entity_id)
result = get_with_stat(caching_query)
@@ -554,7 +546,7 @@ async def cache_by_id(entity, entity_id: int, cache_method, get_with_stat=None):
# Universal function for saving data to the cache
- async def cache_data(key: str, data: Any, ttl: Optional[int] = None) -> None:
+ async def cache_data(key: str, data: Any, ttl: int | None = None) -> None:
"""
Saves data to the cache under the given key.
@@ -575,7 +567,7 @@ async def cache_data(key: str, data: Any, ttl: Optional[int] = None) -> None:
# Universal function for retrieving data from the cache
- async def get_cached_data(key: str) -> Optional[Any]:
+ async def get_cached_data(key: str) -> Any | None:
"""
Retrieves data from the cache by the given key.
@@ -618,7 +610,7 @@ async def invalidate_cache_by_prefix(prefix: str) -> None:
async def cached_query(
cache_key: str,
query_func: Callable,
-     ttl: Optional[int] = None,
+     ttl: int | None = None,
force_refresh: bool = False,
use_key_format: bool = True,
**query_params,
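All of these `Optional[...]`/`Union[...]` rewrites are the PEP 604 spelling of the same types, which is why `Optional` and `Union` were dropped from the `typing` import earlier in this diff; the `|` syntax requires Python 3.10+ (or, for annotations only, `from __future__ import annotations` on older versions). The two forms are interchangeable:

from typing import Optional, Union

# Identical types, two spellings:
ttl_old: Optional[int] = None          # pre-PEP 604
ttl_new: int | None = None             # PEP 604, Python 3.10+

topic_id_old: Union[int, str] = 1      # pre-PEP 604
topic_id_new: int | str = 1            # PEP 604

assert Optional[int] == (int | None)   # the union objects compare equal on 3.10+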
@@ -714,7 +706,7 @@ async def cache_follows_by_follower(author_id: int, follows: List[Dict[str, Any]
logger.error(f"Failed to cache follows: {e}")
- async def get_topic_from_cache(topic_id: Union[int, str]) -> Optional[Dict[str, Any]]:
+ async def get_topic_from_cache(topic_id: int | str) -> Dict[str, Any] | None:
"""Gets a topic from the cache"""
try:
topic_key = f"topic:{topic_id}"
@@ -730,7 +722,7 @@ async def get_topic_from_cache(topic_id: Union[int, str]) -> Optional[Dict[str,
return None
- async def get_author_from_cache(author_id: Union[int, str]) -> Optional[Dict[str, Any]]:
+ async def get_author_from_cache(author_id: int | str) -> Dict[str, Any] | None:
"""Gets an author from the cache"""
try:
author_key = f"author:{author_id}"
@@ -759,7 +751,7 @@ async def cache_topic_with_content(topic_dict: Dict[str, Any]) -> None:
logger.error(f"Failed to cache topic content: {e}")
- async def get_cached_topic_content(topic_id: Union[int, str]) -> Optional[Dict[str, Any]]:
+ async def get_cached_topic_content(topic_id: int | str) -> Dict[str, Any] | None:
"""Gets cached topic content"""
try:
topic_key = f"topic_content:{topic_id}"
@@ -786,7 +778,7 @@ async def save_shouts_to_cache(shouts: List[Dict[str, Any]], cache_key: str = "r
logger.error(f"Failed to save shouts to cache: {e}")
- async def get_shouts_from_cache(cache_key: str = "recent_shouts") -> Optional[List[Dict[str, Any]]]:
+ async def get_shouts_from_cache(cache_key: str = "recent_shouts") -> List[Dict[str, Any]] | None:
"""Gets articles from the cache"""
try:
cached_data = await redis.get(cache_key)
@@ -813,7 +805,7 @@ async def cache_search_results(query: str, data: List[Dict[str, Any]], ttl: int
logger.error(f"Failed to cache search results: {e}")
- async def get_cached_search_results(query: str) -> Optional[List[Dict[str, Any]]]:
+ async def get_cached_search_results(query: str) -> List[Dict[str, Any]] | None:
"""Gets cached search results"""
try:
search_key = f"search:{query.lower().replace(' ', '_')}"
@@ -829,7 +821,7 @@ async def get_cached_search_results(query: str) -> Optional[List[Dict[str, Any]]
return None
- async def invalidate_topic_cache(topic_id: Union[int, str]) -> None:
+ async def invalidate_topic_cache(topic_id: int | str) -> None:
"""Invalidates the topic cache"""
try:
topic_key = f"topic:{topic_id}"
@@ -841,7 +833,7 @@ async def invalidate_topic_cache(topic_id: Union[int, str]) -> None:
logger.error(f"Failed to invalidate topic cache: {e}")
- async def invalidate_author_cache(author_id: Union[int, str]) -> None:
+ async def invalidate_author_cache(author_id: int | str) -> None:
"""Invalidates the author cache"""
try:
author_key = f"author:{author_id}"

cache/precache.py (vendored) · 7 lines changed

@@ -3,11 +3,12 @@ import traceback
from sqlalchemy import and_, join, select
- from auth.orm import Author, AuthorFollower
+ # Import of Author and AuthorFollower is deferred to avoid circular imports
from cache.cache import cache_author, cache_topic
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
+ from auth.orm import Author, AuthorFollower
from services.db import local_session
from services.redis import redis
from utils.encoders import fast_json_dumps
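Moving the `auth.orm` import below `resolvers.stat` is an ordering fix: by the time this module pulls in the ORM models, the modules on the other side of the cycle have already finished initialising. A minimal sketch of why top-level import order matters in a cycle (module names hypothetical):

# a.py -- hypothetical module in a two-way cycle
X = 1
import b  # deferred below the definition of X, as in this hunk

# b.py -- imports a back while a is still mid-initialisation
import a
print(a.X)  # OK: X was bound before a.py imported b

# Had a.py run `import b` before `X = 1`, the same line would raise:
# AttributeError: partially initialized module 'a' has no attribute 'X'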
@@ -135,10 +136,10 @@ async def precache_data() -> None:
await redis.execute("SET", key, data)
elif isinstance(data, list) and data:
# List или ZSet
if any(isinstance(item, (list, tuple)) and len(item) == 2 for item in data):
if any(isinstance(item, list | tuple) and len(item) == 2 for item in data):
# ZSet with scores
for item in data:
if isinstance(item, (list, tuple)) and len(item) == 2:
if isinstance(item, list | tuple) and len(item) == 2:
await redis.execute("ZADD", key, item[1], item[0])
else:
# Regular list
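`isinstance(item, list | tuple)` is the PEP 604 equivalent of passing a tuple of types and behaves identically; union objects are only accepted by `isinstance` on Python 3.10+, so this change assumes that interpreter floor. For example:

item = ("topic_shouts_123", 42.0)

# Equivalent membership checks; the union form needs Python 3.10+:
assert isinstance(item, (list, tuple))
assert isinstance(item, list | tuple)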

cache/revalidator.py (vendored) · 18 lines changed

@@ -1,6 +1,14 @@
import asyncio
import contextlib
+ from cache.cache import (
+     cache_author,
+     cache_topic,
+     get_cached_author,
+     get_cached_topic,
+     invalidate_cache_by_prefix,
+ )
+ from resolvers.stat import get_with_stat
from services.redis import redis
from utils.logger import root_logger as logger
@@ -47,16 +55,6 @@ class CacheRevalidationManager:
async def process_revalidation(self) -> None:
"""Обновление кэша для всех сущностей, требующих ревалидации."""
# Поздние импорты для избежания циклических зависимостей
from cache.cache import (
cache_author,
cache_topic,
get_cached_author,
get_cached_topic,
invalidate_cache_by_prefix,
)
from resolvers.stat import get_with_stat
# Check the Redis connection
if not self._redis._client:
return  # Exit the method if the connection could not be established
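With the imports promoted to module level above, the late imports here become dead weight. Roughly, the trade-off this commit navigates: function-level imports tolerate a cycle by delaying resolution to call time, while module-level imports expose the dependency up front and fail fast if a cycle remains. A sketch of the two styles with a stand-in module:

# Late (function-level) import: resolved on every call, hides the
# dependency from readers and linters, but survives import cycles.
def process_late():
    from json import dumps  # stand-in for `from cache.cache import cache_author`
    return dumps({"revalidated": True})

# Module-level import (the style this commit switches to): visible up
# front; any residual circular import surfaces immediately at startup.
from json import dumps

def process_top():
    return dumps({"revalidated": True})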

cache/triggers.py (vendored) · 3 lines changed

@@ -1,11 +1,12 @@
from sqlalchemy import event
- from auth.orm import Author, AuthorFollower
+ # Import of Author and AuthorFollower is deferred to avoid circular imports
from cache.revalidator import revalidation_manager
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from services.db import local_session
+ from auth.orm import Author, AuthorFollower
from utils.logger import root_logger as logger