From 7d50638b3acd01af9ce06607c0f4ef061edb4c23 Mon Sep 17 00:00:00 2001
From: Untone
Date: Thu, 10 Apr 2025 18:39:31 +0300
Subject: [PATCH 1/4] topic.stat.authors-fix

---
 cache/cache.py     |  2 +-
 resolvers/topic.py | 14 +++++++++++++-
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/cache/cache.py b/cache/cache.py
index 5b8ea5d6..57f82a23 100644
--- a/cache/cache.py
+++ b/cache/cache.py
@@ -545,7 +545,7 @@ async def get_cached_data(key: str) -> Optional[Any]:
     try:
         cached_data = await redis.execute("GET", key)
         if cached_data:
-            logger.debug(f"Данные получены из кеша по ключу {key}")
+            logger.debug(f"Данные получены из кеша по ключу {key}: {len(cached_data)} записей")
             return orjson.loads(cached_data)
         return None
     except Exception as e:
diff --git a/resolvers/topic.py b/resolvers/topic.py
index c0f8836e..13f0d575 100644
--- a/resolvers/topic.py
+++ b/resolvers/topic.py
@@ -6,7 +6,7 @@ from cache.cache import (
     get_cached_topic_authors,
     get_cached_topic_by_slug,
     get_cached_topic_followers,
-    invalidate_cache_by_prefix,
+    invalidate_cache_by_prefix
 )
 from orm.author import Author
 from orm.topic import Topic
@@ -126,6 +126,17 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
             GROUP BY topic
         """
         followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}
+
+        # Запрос на получение статистики авторов для выбранных тем
+        authors_stats_query = f"""
+            SELECT st.topic, COUNT(DISTINCT sa.author) as authors_count
+            FROM shout_topic st
+            JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
+            JOIN shout_author sa ON sa.shout = s.id
+            WHERE st.topic IN ({",".join(map(str, topic_ids))})
+            GROUP BY st.topic
+        """
+        authors_stats = {row[0]: row[1] for row in session.execute(text(authors_stats_query))}
 
         # Формируем результат с добавлением статистики
         result = []
@@ -134,6 +145,7 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
             topic_dict["stat"] = {
                 "shouts": shouts_stats.get(topic.id, 0),
                 "followers": followers_stats.get(topic.id, 0),
+                "authors": authors_stats.get(topic.id, 0)
             }
             result.append(topic_dict)
 

From f20000f1f606e8b6d6f43c045c93821e47f33147 Mon Sep 17 00:00:00 2001
From: Untone
Date: Thu, 10 Apr 2025 18:46:09 +0300
Subject: [PATCH 2/4] topic.stat.authors-fix2

---
 cache/cache.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/cache/cache.py b/cache/cache.py
index 57f82a23..de140be6 100644
--- a/cache/cache.py
+++ b/cache/cache.py
@@ -545,8 +545,9 @@ async def get_cached_data(key: str) -> Optional[Any]:
     try:
         cached_data = await redis.execute("GET", key)
         if cached_data:
-            logger.debug(f"Данные получены из кеша по ключу {key}: {len(cached_data)} записей")
-            return orjson.loads(cached_data)
+            loaded = orjson.loads(cached_data)
+            logger.debug(f"Данные получены из кеша по ключу {key}: {len(loaded)}")
+            return loaded
         return None
     except Exception as e:
         logger.error(f"Ошибка при получении данных из кеша: {e}")

From fdedb75a2cfab390f868aa328623f3a746732fed Mon Sep 17 00:00:00 2001
From: Untone
Date: Thu, 10 Apr 2025 19:14:27 +0300
Subject: [PATCH 3/4] topics-comments-stat

---
 resolvers/topic.py | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/resolvers/topic.py b/resolvers/topic.py
index 13f0d575..da46734d 100644
--- a/resolvers/topic.py
+++ b/resolvers/topic.py
@@ -138,6 +138,18 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
         """
        authors_stats = {row[0]: row[1] for row in session.execute(text(authors_stats_query))}
+        # Запрос на получение статистики комментариев для выбранных тем
+        comments_stats_query = f"""
+            SELECT st.topic, COUNT(DISTINCT r.id) as comments_count
+            FROM shout_topic st
+            JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
+            JOIN reaction r ON r.shout = s.id
+            WHERE st.topic IN ({",".join(map(str, topic_ids))})
+            GROUP BY st.topic
+        """
+        comments_stats = {row[0]: row[1] for row in session.execute(text(comments_stats_query))}
+
+
         # Формируем результат с добавлением статистики
         result = []
@@ -145,7 +157,8 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
             topic_dict["stat"] = {
                 "shouts": shouts_stats.get(topic.id, 0),
                 "followers": followers_stats.get(topic.id, 0),
-                "authors": authors_stats.get(topic.id, 0)
+                "authors": authors_stats.get(topic.id, 0),
+                "comments": comments_stats.get(topic.id, 0)
             }
             result.append(topic_dict)
 

From 1c61e889d63063b83e3b8a2e5ed44bf524964bc3 Mon Sep 17 00:00:00 2001
From: Untone
Date: Thu, 10 Apr 2025 22:51:07 +0300
Subject: [PATCH 4/4] update-draft-fix

---
 CHANGELOG.md       |  7 +++++++
 resolvers/draft.py | 14 ++++++++++++++
 2 files changed, 21 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d25f8dcf..2c12cf79 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,10 @@
+#### [0.4.18] - 2025-04-10
+- Fixed unique constraint violation for empty slug values:
+  - Modified `update_draft` resolver to handle empty slug values
+  - Modified `create_draft` resolver to prevent empty slug values
+  - Added validation to prevent inserting or updating drafts with empty slug
+  - Fixed database error "duplicate key value violates unique constraint draft_slug_key"
+
 #### [0.4.17] - 2025-03-26
 - Fixed `'Reaction' object is not subscriptable` error in hierarchical comments:
   - Modified `get_reactions_with_stat()` to convert Reaction objects to dictionaries
diff --git a/resolvers/draft.py b/resolvers/draft.py
index 0e04ffd8..2971ec4c 100644
--- a/resolvers/draft.py
+++ b/resolvers/draft.py
@@ -104,6 +104,11 @@ async def create_draft(_, info, draft_input):
     if "title" not in draft_input or not draft_input["title"]:
         draft_input["title"] = ""  # Пустая строка вместо NULL
+
+    # Проверяем slug - он должен быть или не пустым, или не передаваться вообще
+    if "slug" in draft_input and (draft_input["slug"] is None or draft_input["slug"] == ""):
+        # При создании черновика удаляем пустой slug из входных данных
+        del draft_input["slug"]
 
     try:
         with local_session() as session:
@@ -142,6 +147,15 @@ async def update_draft(_, info, draft_id: int, draft_input):
     if not user_id or not author_id:
         return {"error": "Author ID are required"}
 
+    # Проверяем slug - он должен быть или не пустым, или не передаваться вообще
+    if "slug" in draft_input and (draft_input["slug"] is None or draft_input["slug"] == ""):
+        # Если slug пустой, либо удаляем его из входных данных, либо генерируем временный уникальный
+        # Вариант 1: просто удаляем ключ из входных данных, чтобы оставить старое значение
+        del draft_input["slug"]
+        # Вариант 2 (если нужно обновить): генерируем временный уникальный slug
+        # import uuid
+        # draft_input["slug"] = f"draft-{uuid.uuid4().hex[:8]}"
+
     with local_session() as session:
         draft = session.query(Draft).filter(Draft.id == draft_id).first()
         if not draft:
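
Note: the sketch below is a minimal, self-contained illustration of the empty-slug guard that PATCH 4 adds to create_draft/update_draft: when "slug" is present but None or empty, the key is dropped so the unique draft_slug_key constraint is never violated by an empty value. The helper name strip_empty_slug and the standalone asserts are hypothetical and are not part of the repository.

# Hypothetical helper mirroring the slug guard from PATCH 4; illustrative only.
def strip_empty_slug(draft_input: dict) -> dict:
    """Return draft_input without a None/empty "slug" key."""
    if "slug" in draft_input and (draft_input["slug"] is None or draft_input["slug"] == ""):
        # Dropping the key keeps the existing slug on update and avoids inserting ""
        return {k: v for k, v in draft_input.items() if k != "slug"}
    return draft_input


if __name__ == "__main__":
    assert "slug" not in strip_empty_slug({"title": "Draft", "slug": ""})
    assert "slug" not in strip_empty_slug({"title": "Draft", "slug": None})
    assert strip_empty_slug({"title": "Draft", "slug": "my-draft"})["slug"] == "my-draft"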