topic.stat.authors-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m20s

This commit is contained in:
Untone 2025-04-10 18:39:31 +03:00
parent abbc074474
commit 7d50638b3a
2 changed files with 14 additions and 2 deletions

2
cache/cache.py vendored
View File

@@ -545,7 +545,7 @@ async def get_cached_data(key: str) -> Optional[Any]:
try:
cached_data = await redis.execute("GET", key)
if cached_data:
logger.debug(f"Данные получены из кеша по ключу {key}")
logger.debug(f"Данные получены из кеша по ключу {key}: {len(cached_data)} записей")
return orjson.loads(cached_data)
return None
except Exception as e:

View File

@@ -6,7 +6,7 @@ from cache.cache import (
get_cached_topic_authors,
get_cached_topic_by_slug,
get_cached_topic_followers,
invalidate_cache_by_prefix,
invalidate_cache_by_prefix
)
from orm.author import Author
from orm.topic import Topic
@@ -126,6 +126,17 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
GROUP BY topic
"""
followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}
# Запрос на получение статистики авторов для выбранных тем
authors_stats_query = f"""
SELECT st.topic, COUNT(DISTINCT sa.author) as authors_count
FROM shout_topic st
JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
JOIN shout_author sa ON sa.shout = s.id
WHERE st.topic IN ({",".join(map(str, topic_ids))})
GROUP BY st.topic
"""
authors_stats = {row[0]: row[1] for row in session.execute(text(authors_stats_query))}
# Формируем результат с добавлением статистики
result = []
@@ -134,6 +145,7 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
topic_dict["stat"] = {
"shouts": shouts_stats.get(topic.id, 0),
"followers": followers_stats.get(topic.id, 0),
"authors": authors_stats.get(topic.id, 0)
}
result.append(topic_dict)