From 856f4ffc8547d8ab89c45c0a9255fc8fa2bc07d1 Mon Sep 17 00:00:00 2001 From: Untone Date: Sun, 9 Mar 2025 21:01:52 +0300 Subject: [PATCH 01/27] i --- utils/logger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/logger.py b/utils/logger.py index b49263d4..ef5bce9d 100644 --- a/utils/logger.py +++ b/utils/logger.py @@ -13,7 +13,7 @@ def filter(record: logging.LogRecord): record.emoji = ( "πŸ”" if record.levelno == logging.DEBUG - else "πŸ–ŠοΈ" + else "β„ΉοΈŽ" if record.levelno == logging.INFO else "🚧" if record.levelno == logging.WARNING From 354bda0efa0d6af3ca6f74ccbfadc198145ff6c0 Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 13 Mar 2025 22:21:43 +0300 Subject: [PATCH 02/27] drafts-fix --- resolvers/draft.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/resolvers/draft.py b/resolvers/draft.py index 4f7e0ead..50c8b758 100644 --- a/resolvers/draft.py +++ b/resolvers/draft.py @@ -1,3 +1,4 @@ +from operator import or_ import time from sqlalchemy.sql import and_ @@ -55,7 +56,9 @@ async def load_drafts(_, info): return {"error": "User ID and author ID are required"} with local_session() as session: - drafts = session.query(Draft).filter(Draft.authors.any(Author.id == author_id)).all() + drafts = session.query(Draft).filter(or_( + Draft.authors.any(Author.id == author_id), + Draft.created_by == author_id)).all() return {"drafts": drafts} From ae48a18536e8789139dfda3d183116f0893d511b Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 11:01:39 +0300 Subject: [PATCH 03/27] comment-delete-handling-patch --- CHANGELOG.md | 3 +++ requirements.dev.txt | 6 ++++++ requirements.txt | 11 +---------- resolvers/draft.py | 30 +++++++++++++++++++----------- resolvers/reaction.py | 32 +++++++++++++++++++------------- 5 files changed, 48 insertions(+), 34 deletions(-) create mode 100644 requirements.dev.txt diff --git a/CHANGELOG.md b/CHANGELOG.md index 99a89d78..2db60b2c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,6 @@ +#### [0.4.12] - 2025-02-12 +- `delete_reaction` detects comments and uses `deleted_at` update + #### [0.4.11] - 2025-02-12 - `create_draft` resolver requires draft_id fixed - `create_draft` resolver defaults body and title fields to empty string diff --git a/requirements.dev.txt b/requirements.dev.txt new file mode 100644 index 00000000..fe95b9fb --- /dev/null +++ b/requirements.dev.txt @@ -0,0 +1,6 @@ +fakeredis +pytest +pytest-asyncio +pytest-cov +mypy +ruff diff --git a/requirements.txt b/requirements.txt index 56b09175..daa6dfb9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,11 +4,8 @@ authlib passlib google-analytics-data -dogpile-cache -opensearch-py colorlog psycopg2-binary -dogpile-cache httpx redis[hiredis] sentry-sdk[starlette,sqlalchemy] @@ -17,10 +14,4 @@ gql ariadne granian -pydantic -fakeredis -pytest -pytest-asyncio -pytest-cov -mypy -ruff +pydantic \ No newline at end of file diff --git a/resolvers/draft.py b/resolvers/draft.py index 50c8b758..4424ff3e 100644 --- a/resolvers/draft.py +++ b/resolvers/draft.py @@ -1,5 +1,5 @@ -from operator import or_ import time +from operator import or_ from sqlalchemy.sql import and_ @@ -56,9 +56,11 @@ async def load_drafts(_, info): return {"error": "User ID and author ID are required"} with local_session() as session: - drafts = session.query(Draft).filter(or_( - Draft.authors.any(Author.id == author_id), - Draft.created_by == author_id)).all() + drafts = ( + session.query(Draft) + .filter(or_(Draft.authors.any(Author.id == author_id), Draft.created_by == 
author_id)) + .all() + ) return {"drafts": drafts} @@ -99,7 +101,7 @@ async def create_draft(_, info, draft_input): # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ ΠΎΠ±ΡΠ·Π°Ρ‚Π΅Π»ΡŒΠ½Ρ‹Π΅ поля if "body" not in draft_input or not draft_input["body"]: draft_input["body"] = "" # ΠŸΡƒΡΡ‚Π°Ρ строка вмСсто NULL - + if "title" not in draft_input or not draft_input["title"]: draft_input["title"] = "" # ΠŸΡƒΡΡ‚Π°Ρ строка вмСсто NULL @@ -123,23 +125,29 @@ async def create_draft(_, info, draft_input): @mutation.field("update_draft") @login_required -async def update_draft(_, info, draft_input): +async def update_draft(_, info, draft_id: int, draft_input): + """ΠžΠ±Π½ΠΎΠ²Π»ΡΠ΅Ρ‚ Ρ‡Π΅Ρ€Π½ΠΎΠ²ΠΈΠΊ ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΈ. + + Args: + draft_id: ID Ρ‡Π΅Ρ€Π½ΠΎΠ²ΠΈΠΊΠ° для обновлСния + draft_input: Π”Π°Π½Π½Ρ‹Π΅ для обновлСния Ρ‡Π΅Ρ€Π½ΠΎΠ²ΠΈΠΊΠ° + + Returns: + dict: ΠžΠ±Π½ΠΎΠ²Π»Π΅Π½Π½Ρ‹ΠΉ Ρ‡Π΅Ρ€Π½ΠΎΠ²ΠΈΠΊ ΠΈΠ»ΠΈ сообщСниС ΠΎΠ± ошибкС + """ user_id = info.context.get("user_id") author_dict = info.context.get("author", {}) author_id = author_dict.get("id") - draft_id = draft_input.get("id") - if not draft_id: - return {"error": "Draft ID is required"} + if not user_id or not author_id: return {"error": "Author ID are required"} with local_session() as session: draft = session.query(Draft).filter(Draft.id == draft_id).first() - del draft_input["id"] - Draft.update(draft, {**draft_input}) if not draft: return {"error": "Draft not found"} + Draft.update(draft, draft_input) draft.updated_at = int(time.time()) session.commit() return {"draft": draft} diff --git a/resolvers/reaction.py b/resolvers/reaction.py index 3f448b96..f4334065 100644 --- a/resolvers/reaction.py +++ b/resolvers/reaction.py @@ -133,21 +133,18 @@ def check_to_feature(session, approver_id, reaction) -> bool: return False -def check_to_unfeature(session, rejecter_id, reaction) -> bool: +def check_to_unfeature(session, reaction) -> bool: """ Unfeature a shout if 20% of reactions are negative. :param session: Database session. - :param rejecter_id: Rejecter author ID. :param reaction: Reaction object. :return: True if shout should be unfeatured, else False. 
""" if not reaction.reply_to and is_negative(reaction.kind): total_reactions = ( session.query(Reaction) - .filter( - Reaction.shout == reaction.shout, Reaction.kind.in_(RATING_REACTIONS), Reaction.deleted_at.is_(None) - ) + .filter(Reaction.shout == reaction.shout, Reaction.reply_to.is_(None), Reaction.kind.in_(RATING_REACTIONS)) .count() ) @@ -217,7 +214,7 @@ async def _create_reaction(session, shout_id: int, is_author: bool, author_id: i # Handle rating if r.kind in RATING_REACTIONS: - if check_to_unfeature(session, author_id, r): + if check_to_unfeature(session, r): set_unfeatured(session, shout_id) elif check_to_feature(session, author_id, r): await set_featured(session, shout_id) @@ -354,7 +351,7 @@ async def update_reaction(_, info, reaction): result = session.execute(reaction_query).unique().first() if result: - r, author, shout, commented_stat, rating_stat = result + r, author, _shout, commented_stat, rating_stat = result if not r or not author: return {"error": "Invalid reaction ID or unauthorized"} @@ -406,15 +403,24 @@ async def delete_reaction(_, info, reaction_id: int): if r.created_by != author_id and "editor" not in roles: return {"error": "Access denied"} - logger.debug(f"{user_id} user removing his #{reaction_id} reaction") - reaction_dict = r.dict() - session.delete(r) - session.commit() - - # Update author stat if r.kind == ReactionKind.COMMENT.value: + r.deleted_at = int(time.time()) update_author_stat(author.id) + session.add(r) + session.commit() + elif r.kind == ReactionKind.PROPOSE.value: + r.deleted_at = int(time.time()) + session.add(r) + session.commit() + # TODO: add more reaction types here + else: + logger.debug(f"{user_id} user removing his #{reaction_id} reaction") + session.delete(r) + session.commit() + if check_to_unfeature(session, r): + set_unfeatured(session, r.shout) + reaction_dict = r.dict() await notify_reaction(reaction_dict, "delete") return {"error": None, "reaction": reaction_dict} From 094e7e6fe25d1dd124efa5205382555adbecec48 Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 11:19:29 +0300 Subject: [PATCH 04/27] granian-fix --- server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server.py b/server.py index 30009c89..9487e570 100644 --- a/server.py +++ b/server.py @@ -3,7 +3,7 @@ from pathlib import Path from granian.constants import Interfaces from granian.log import LogLevels -from granian.server import Granian +from granian import Granian from settings import PORT from utils.logger import root_logger as logger From 3a1924279feb05a0c7970cddd5d0a19101d7984f Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 11:23:37 +0300 Subject: [PATCH 05/27] redeploy --- server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server.py b/server.py index 9487e570..29fbe16d 100644 --- a/server.py +++ b/server.py @@ -1,9 +1,9 @@ import sys from pathlib import Path +from granian import Granian from granian.constants import Interfaces from granian.log import LogLevels -from granian import Granian from settings import PORT from utils.logger import root_logger as logger From 450c73c060439fbd0ab60b58fcdcaa30f50cc5e3 Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 11:30:36 +0300 Subject: [PATCH 06/27] nothreads --- server.py | 1 - 1 file changed, 1 deletion(-) diff --git a/server.py b/server.py index 29fbe16d..848c61d6 100644 --- a/server.py +++ b/server.py @@ -16,7 +16,6 @@ if __name__ == "__main__": address="0.0.0.0", port=PORT, interface=Interfaces.ASGI, - threads=4, websockets=False, 
log_level=LogLevels.debug, backlog=2048, From a1781b3800b8c5d2922416b1fbaa9f684fa7ef4d Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 11:36:12 +0300 Subject: [PATCH 07/27] depfix --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index daa6dfb9..f6da22f5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,6 +3,7 @@ bcrypt authlib passlib +opensearch-py google-analytics-data colorlog psycopg2-binary From 247fc98760c2cebba20fe7b5047582946ff5c935 Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 11:55:21 +0300 Subject: [PATCH 08/27] cachedep-fix+orjson+fmt --- CHANGELOG.md | 4 +- cache/cache.py | 54 ++++++------- cache/memorycache.py | 176 +++++++++++++++++++++++++++++++++++++++--- cache/precache.py | 14 ++-- docs/features.md | 10 +++ requirements.txt | 3 +- resolvers/editor.py | 4 +- resolvers/notifier.py | 10 +-- resolvers/reader.py | 15 ++-- services/db.py | 6 +- services/notify.py | 8 +- services/search.py | 6 +- services/viewed.py | 5 +- utils/encoders.py | 5 +- 14 files changed, 245 insertions(+), 75 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2db60b2c..57c736da 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,7 @@ -#### [0.4.12] - 2025-02-12 +#### [0.4.12] - 2025-03-19 - `delete_reaction` detects comments and uses `deleted_at` update +- `check_to_unfeature` etc. update +- dogpile dep in `services/memorycache.py` optimized #### [0.4.11] - 2025-02-12 - `create_draft` resolver requires draft_id fixed diff --git a/cache/cache.py b/cache/cache.py index afd16990..75f7ec68 100644 --- a/cache/cache.py +++ b/cache/cache.py @@ -1,7 +1,7 @@ import asyncio -import json from typing import List +import orjson from sqlalchemy import and_, join, select from orm.author import Author, AuthorFollower @@ -35,7 +35,7 @@ CACHE_KEYS = { # Cache topic data async def cache_topic(topic: dict): - payload = json.dumps(topic, cls=CustomJSONEncoder) + payload = orjson.dumps(topic, cls=CustomJSONEncoder) await asyncio.gather( redis_operation("SET", f"topic:id:{topic['id']}", payload), redis_operation("SET", f"topic:slug:{topic['slug']}", payload), @@ -44,7 +44,7 @@ async def cache_topic(topic: dict): # Cache author data async def cache_author(author: dict): - payload = json.dumps(author, cls=CustomJSONEncoder) + payload = orjson.dumps(author, cls=CustomJSONEncoder) await asyncio.gather( redis_operation("SET", f"author:user:{author['user'].strip()}", str(author["id"])), redis_operation("SET", f"author:id:{author['id']}", payload), @@ -55,13 +55,13 @@ async def cache_author(author: dict): async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True): key = f"author:follows-{entity_type}s:{follower_id}" follows_str = await redis_operation("GET", key) - follows = json.loads(follows_str) if follows_str else DEFAULT_FOLLOWS[entity_type] + follows = orjson.loads(follows_str) if follows_str else DEFAULT_FOLLOWS[entity_type] if is_insert: if entity_id not in follows: follows.append(entity_id) else: follows = [eid for eid in follows if eid != entity_id] - await redis_operation("SET", key, json.dumps(follows, cls=CustomJSONEncoder)) + await redis_operation("SET", key, orjson.dumps(follows, cls=CustomJSONEncoder)) await update_follower_stat(follower_id, entity_type, len(follows)) @@ -69,7 +69,7 @@ async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_i async def update_follower_stat(follower_id, entity_type, count): follower_key = f"author:id:{follower_id}" 
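The json to orjson swap in this commit is not a drop-in change: orjson.dumps returns bytes rather than str and does not accept the cls= keyword that the stdlib encoder API uses, which is why later commits in this series revert several call sites back to json.dumps with CustomJSONEncoder. A minimal illustration of the API difference, assuming only that orjson is installed:

    import json
    import orjson

    data = {"id": 1}

    json.dumps(data)                 # '{"id": 1}' - str; supports cls=CustomJSONEncoder
    orjson.dumps(data)               # b'{"id": 1}' - bytes; no cls= keyword at all
    orjson.dumps(data, default=str)  # orjson's fallback hook is default=, not an encoder class

orjson.loads accepts both bytes and str, so the read paths in these hunks keep working either way; it is the dumps call sites that need care.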
follower_str = await redis_operation("GET", follower_key) - follower = json.loads(follower_str) if follower_str else None + follower = orjson.loads(follower_str) if follower_str else None if follower: follower["stat"] = {f"{entity_type}s": count} await cache_author(follower) @@ -80,7 +80,7 @@ async def get_cached_author(author_id: int, get_with_stat): author_key = f"author:id:{author_id}" result = await redis_operation("GET", author_key) if result: - return json.loads(result) + return orjson.loads(result) # Load from database if not found in cache q = select(Author).where(Author.id == author_id) authors = get_with_stat(q) @@ -105,14 +105,14 @@ async def get_cached_topic(topic_id: int): topic_key = f"topic:id:{topic_id}" cached_topic = await redis_operation("GET", topic_key) if cached_topic: - return json.loads(cached_topic) + return orjson.loads(cached_topic) # If not in cache, fetch from the database with local_session() as session: topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none() if topic: topic_dict = topic.dict() - await redis_operation("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder)) + await redis_operation("SET", topic_key, orjson.dumps(topic_dict, cls=CustomJSONEncoder)) return topic_dict return None @@ -123,7 +123,7 @@ async def get_cached_topic_by_slug(slug: str, get_with_stat): topic_key = f"topic:slug:{slug}" result = await redis_operation("GET", topic_key) if result: - return json.loads(result) + return orjson.loads(result) # Load from database if not found in cache topic_query = select(Topic).where(Topic.slug == slug) topics = get_with_stat(topic_query) @@ -139,7 +139,7 @@ async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]: # Fetch all author data concurrently keys = [f"author:id:{author_id}" for author_id in author_ids] results = await asyncio.gather(*(redis_operation("GET", key) for key in keys)) - authors = [json.loads(result) if result else None for result in results] + authors = [orjson.loads(result) if result else None for result in results] # Load missing authors from database and cache missing_indices = [index for index, author in enumerate(authors) if author is None] if missing_indices: @@ -168,7 +168,7 @@ async def get_cached_topic_followers(topic_id: int): cached = await redis_operation("GET", cache_key) if cached: - followers_ids = json.loads(cached) + followers_ids = orjson.loads(cached) logger.debug(f"Found {len(followers_ids)} cached followers for topic #{topic_id}") return await get_cached_authors_by_ids(followers_ids) @@ -181,7 +181,7 @@ async def get_cached_topic_followers(topic_id: int): .all() ] - await redis_operation("SETEX", cache_key, value=json.dumps(followers_ids), ttl=CACHE_TTL) + await redis_operation("SETEX", cache_key, value=orjson.dumps(followers_ids), ttl=CACHE_TTL) followers = await get_cached_authors_by_ids(followers_ids) logger.debug(f"Cached {len(followers)} followers for topic #{topic_id}") return followers @@ -196,7 +196,7 @@ async def get_cached_author_followers(author_id: int): # Check cache for data cached = await redis_operation("GET", f"author:followers:{author_id}") if cached: - followers_ids = json.loads(cached) + followers_ids = orjson.loads(cached) followers = await get_cached_authors_by_ids(followers_ids) logger.debug(f"Cached followers for author #{author_id}: {len(followers)}") return followers @@ -210,7 +210,7 @@ async def get_cached_author_followers(author_id: int): .filter(AuthorFollower.author == author_id, Author.id != author_id) .all() ] - await 
redis_operation("SET", f"author:followers:{author_id}", json.dumps(followers_ids)) + await redis_operation("SET", f"author:followers:{author_id}", orjson.dumps(followers_ids)) followers = await get_cached_authors_by_ids(followers_ids) return followers @@ -220,7 +220,7 @@ async def get_cached_follower_authors(author_id: int): # Attempt to retrieve authors from cache cached = await redis_operation("GET", f"author:follows-authors:{author_id}") if cached: - authors_ids = json.loads(cached) + authors_ids = orjson.loads(cached) else: # Query authors from database with local_session() as session: @@ -232,7 +232,7 @@ async def get_cached_follower_authors(author_id: int): .where(AuthorFollower.follower == author_id) ).all() ] - await redis_operation("SET", f"author:follows-authors:{author_id}", json.dumps(authors_ids)) + await redis_operation("SET", f"author:follows-authors:{author_id}", orjson.dumps(authors_ids)) authors = await get_cached_authors_by_ids(authors_ids) return authors @@ -243,7 +243,7 @@ async def get_cached_follower_topics(author_id: int): # Attempt to retrieve topics from cache cached = await redis_operation("GET", f"author:follows-topics:{author_id}") if cached: - topics_ids = json.loads(cached) + topics_ids = orjson.loads(cached) else: # Load topics from database and cache them with local_session() as session: @@ -254,13 +254,13 @@ async def get_cached_follower_topics(author_id: int): .where(TopicFollower.follower == author_id) .all() ] - await redis_operation("SET", f"author:follows-topics:{author_id}", json.dumps(topics_ids)) + await redis_operation("SET", f"author:follows-topics:{author_id}", orjson.dumps(topics_ids)) topics = [] for topic_id in topics_ids: topic_str = await redis_operation("GET", f"topic:id:{topic_id}") if topic_str: - topic = json.loads(topic_str) + topic = orjson.loads(topic_str) if topic and topic not in topics: topics.append(topic) @@ -285,7 +285,7 @@ async def get_cached_author_by_user_id(user_id: str, get_with_stat): # If ID is found, get full author data by ID author_data = await redis_operation("GET", f"author:id:{author_id}") if author_data: - return json.loads(author_data) + return orjson.loads(author_data) # If data is not found in cache, query the database author_query = select(Author).where(Author.user == user_id) @@ -296,7 +296,7 @@ async def get_cached_author_by_user_id(user_id: str, get_with_stat): author_dict = author.dict() await asyncio.gather( redis_operation("SET", f"author:user:{user_id.strip()}", str(author.id)), - redis_operation("SET", f"author:id:{author.id}", json.dumps(author_dict)), + redis_operation("SET", f"author:id:{author.id}", orjson.dumps(author_dict)), ) return author_dict @@ -319,7 +319,7 @@ async def get_cached_topic_authors(topic_id: int): rkey = f"topic:authors:{topic_id}" cached_authors_ids = await redis_operation("GET", rkey) if cached_authors_ids: - authors_ids = json.loads(cached_authors_ids) + authors_ids = orjson.loads(cached_authors_ids) else: # If cache is empty, get data from the database with local_session() as session: @@ -331,7 +331,7 @@ async def get_cached_topic_authors(topic_id: int): ) authors_ids = [author_id for (author_id,) in session.execute(query).all()] # Cache the retrieved author IDs - await redis_operation("SET", rkey, json.dumps(authors_ids)) + await redis_operation("SET", rkey, orjson.dumps(authors_ids)) # Retrieve full author details from cached IDs if authors_ids: @@ -378,7 +378,7 @@ async def invalidate_shouts_cache(cache_keys: List[str]): async def cache_topic_shouts(topic_id: int, 
shouts: List[dict]): """ΠšΡΡˆΠΈΡ€ΡƒΠ΅Ρ‚ список ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ для Ρ‚Π΅ΠΌΡ‹""" key = f"topic_shouts_{topic_id}" - payload = json.dumps(shouts, cls=CustomJSONEncoder) + payload = orjson.dumps(shouts, cls=CustomJSONEncoder) await redis_operation("SETEX", key, value=payload, ttl=CACHE_TTL) @@ -387,7 +387,7 @@ async def get_cached_topic_shouts(topic_id: int) -> List[dict]: key = f"topic_shouts_{topic_id}" cached = await redis_operation("GET", key) if cached: - return json.loads(cached) + return orjson.loads(cached) return None @@ -467,7 +467,7 @@ async def get_cached_entity(entity_type: str, entity_id: int, get_method, cache_ key = f"{entity_type}:id:{entity_id}" cached = await redis_operation("GET", key) if cached: - return json.loads(cached) + return orjson.loads(cached) entity = await get_method(entity_id) if entity: diff --git a/cache/memorycache.py b/cache/memorycache.py index 003c863d..7cfc94eb 100644 --- a/cache/memorycache.py +++ b/cache/memorycache.py @@ -1,11 +1,169 @@ -from dogpile.cache import make_region +""" +ΠœΠΎΠ΄ΡƒΠ»ΡŒ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Π΄Π°Π½Π½Ρ‹Ρ… с использованиСм Redis. +ΠŸΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΠ΅Ρ‚ API, совмСстимый с dogpile.cache для ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠΈ ΠΎΠ±Ρ€Π°Ρ‚Π½ΠΎΠΉ совмСстимости. +""" -from settings import REDIS_URL +import functools +import hashlib +import inspect +import logging +import pickle +from typing import Callable, Optional -# Π‘ΠΎΠ·Π΄Π°Π½ΠΈΠ΅ Ρ€Π΅Π³ΠΈΠΎΠ½Π° кэша с TTL -cache_region = make_region() -cache_region.configure( - "dogpile.cache.redis", - arguments={"url": f"{REDIS_URL}/1"}, - expiration_time=3600, # Cache expiration time in seconds -) +import orjson + +from services.redis import redis +from utils.encoders import CustomJSONEncoder + +logger = logging.getLogger(__name__) + +DEFAULT_TTL = 300 # врСмя ΠΆΠΈΠ·Π½ΠΈ кСша Π² сСкундах (5 ΠΌΠΈΠ½ΡƒΡ‚) + + +class RedisCache: + """ + Класс, ΠΏΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΡŽΡ‰ΠΈΠΉ API, совмСстимый с dogpile.cache, Π½ΠΎ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΡŽΡ‰ΠΈΠΉ Redis. + + ΠŸΡ€ΠΈΠΌΠ΅Ρ€Ρ‹: + >>> cache_region = RedisCache() + >>> @cache_region.cache_on_arguments("my_key") + ... def my_func(arg1, arg2): + ... return arg1 + arg2 + """ + + def __init__(self, ttl: int = DEFAULT_TTL): + """ + Π˜Π½ΠΈΡ†ΠΈΠ°Π»ΠΈΠ·Π°Ρ†ΠΈΡ ΠΎΠ±ΡŠΠ΅ΠΊΡ‚Π° кСша. + + Args: + ttl: ВрСмя ΠΆΠΈΠ·Π½ΠΈ кСша Π² сСкундах + """ + self.ttl = ttl + + def cache_on_arguments(self, cache_key: Optional[str] = None) -> Callable: + """ + Π”Π΅ΠΊΠΎΡ€Π°Ρ‚ΠΎΡ€ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ΠΎΠ² Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΉ с использованиСм Redis. + + Args: + cache_key: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹ΠΉ Π±Π°Π·ΠΎΠ²Ρ‹ΠΉ ΠΊΠ»ΡŽΡ‡ кСша. Если Π½Π΅ ΡƒΠΊΠ°Π·Π°Π½, гСнСрируСтся ΠΈΠ· сигнатуры Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ. + + Returns: + Π”Π΅ΠΊΠΎΡ€Π°Ρ‚ΠΎΡ€ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ + + ΠŸΡ€ΠΈΠΌΠ΅Ρ€Ρ‹: + >>> @cache_region.cache_on_arguments("users") + ... def get_users(): + ... 
return db.query(User).all() + """ + + def decorator(func: Callable) -> Callable: + @functools.wraps(func) + async def wrapper(*args, **kwargs): + # ГСнСрация ΠΊΠ»ΡŽΡ‡Π° кСша + key = self._generate_cache_key(func, cache_key, *args, **kwargs) + + # ΠŸΠΎΠΏΡ‹Ρ‚ΠΊΠ° ΠΏΠΎΠ»ΡƒΡ‡ΠΈΡ‚ΡŒ Π΄Π°Π½Π½Ρ‹Π΅ ΠΈΠ· кСша + cached_data = await redis.get(key) + if cached_data: + try: + return orjson.loads(cached_data) + except Exception: + # Если Π½Π΅ ΡƒΠ΄Π°Π»ΠΎΡΡŒ Π΄Π΅ΡΠ΅Ρ€ΠΈΠ°Π»ΠΈΠ·ΠΎΠ²Π°Ρ‚ΡŒ ΠΊΠ°ΠΊ JSON, ΠΏΠΎΠΏΡ€ΠΎΠ±ΡƒΠ΅ΠΌ ΠΊΠ°ΠΊ pickle + return pickle.loads(cached_data.encode()) + + # Π’Ρ‹Π·ΠΎΠ² ΠΎΡ€ΠΈΠ³ΠΈΠ½Π°Π»ΡŒΠ½ΠΎΠΉ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ, Ссли Π΄Π°Π½Π½Ρ‹Ρ… Π² кСшС Π½Π΅Ρ‚ + result = func(*args, **kwargs) + + # Π‘ΠΎΡ…Ρ€Π°Π½Π΅Π½ΠΈΠ΅ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π° Π² кСш + try: + # ΠŸΡ‹Ρ‚Π°Π΅ΠΌΡΡ ΡΠ΅Ρ€ΠΈΠ°Π»ΠΈΠ·ΠΎΠ²Π°Ρ‚ΡŒ ΠΊΠ°ΠΊ JSON + serialized = orjson.dumps(result, cls=CustomJSONEncoder) + except (TypeError, ValueError): + # Если Π½Π΅ ΡƒΠ΄Π°Π»ΠΎΡΡŒ, ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ pickle + serialized = pickle.dumps(result).decode() + + await redis.set(key, serialized, ex=self.ttl) + return result + + @functools.wraps(func) + def sync_wrapper(*args, **kwargs): + # Для Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΉ, ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Π΅ Π½Π΅ ΡΠ²Π»ΡΡŽΡ‚ΡΡ ΠΊΠΎΡ€ΡƒΡ‚ΠΈΠ½Π°ΠΌΠΈ + # ГСнСрация ΠΊΠ»ΡŽΡ‡Π° кСша + key = self._generate_cache_key(func, cache_key, *args, **kwargs) + + # Бинхронная вСрсия Π½Π΅ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ await, поэтому Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ всСгда вычисляСтся + result = func(*args, **kwargs) + + # Асинхронно записываСм Π² кэш (Π±ΡƒΠ΄Π΅Ρ‚ Π²Ρ‹ΠΏΠΎΠ»Π½Π΅Π½ΠΎ ΠΏΠΎΠ·ΠΆΠ΅) + try: + import asyncio + + serialized = orjson.dumps(result, cls=CustomJSONEncoder) + asyncio.create_task(redis.set(key, serialized, ex=self.ttl)) + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠΈ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π°: {e}") + + return result + + # Π’ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌ асинхронный ΠΈΠ»ΠΈ синхронный Π²Ρ€Π°ΠΏΠΏΠ΅Ρ€ Π² зависимости ΠΎΡ‚ Ρ‚ΠΈΠΏΠ° Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ + if inspect.iscoroutinefunction(func): + return wrapper + else: + return sync_wrapper + + return decorator + + def _generate_cache_key(self, func: Callable, base_key: Optional[str], *args, **kwargs) -> str: + """ + Π“Π΅Π½Π΅Ρ€ΠΈΡ€ΡƒΠ΅Ρ‚ ΠΊΠ»ΡŽΡ‡ кСша Π½Π° основС Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ ΠΈ Π΅Ρ‘ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚ΠΎΠ². + + Args: + func: ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌΠ°Ρ функция + base_key: Π‘Π°Π·ΠΎΠ²Ρ‹ΠΉ ΠΊΠ»ΡŽΡ‡ кСша + *args: ΠŸΠΎΠ·ΠΈΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ + **kwargs: Π˜ΠΌΠ΅Π½ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ + + Returns: + Π‘Ρ‚Ρ€ΠΎΠΊΠΎΠ²Ρ‹ΠΉ ΠΊΠ»ΡŽΡ‡ для кСша + """ + if base_key: + key_prefix = f"cache:{base_key}" + else: + key_prefix = f"cache:{func.__module__}.{func.__name__}" + + # Π‘ΠΎΠ·Π΄Π°Π΅ΠΌ Ρ…Π΅Ρˆ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚ΠΎΠ² + arg_hash = hashlib.md5() + + # ДобавляСм ΠΏΠΎΠ·ΠΈΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ + for arg in args: + try: + arg_hash.update(str(arg).encode()) + except Exception: + arg_hash.update(str(id(arg)).encode()) + + # ДобавляСм ΠΈΠΌΠ΅Π½ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ (сортируСм для дСтСрминированности) + for k in sorted(kwargs.keys()): + try: + arg_hash.update(f"{k}:{kwargs[k]}".encode()) + except Exception: + arg_hash.update(f"{k}:{id(kwargs[k])}".encode()) + + return f"{key_prefix}:{arg_hash.hexdigest()}" + + def invalidate(self, func: Callable, *args, **kwargs) -> None: + """ + Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅Ρ‚ (удаляСт) кСш для ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ с ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½Ρ‹ΠΌΠΈ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Π°ΠΌΠΈ. 
+ + Args: + func: ΠšΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½Π½Π°Ρ функция + *args: ΠŸΠΎΠ·ΠΈΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ + **kwargs: Π˜ΠΌΠ΅Π½ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ + """ + key = self._generate_cache_key(func, None, *args, **kwargs) + import asyncio + + asyncio.create_task(redis.execute("DEL", key)) + + +# ЭкзСмпляр класса RedisCache для использования Π² ΠΊΠΎΠ΄Π΅ +cache_region = RedisCache() diff --git a/cache/precache.py b/cache/precache.py index 94b39960..b0faec5f 100644 --- a/cache/precache.py +++ b/cache/precache.py @@ -1,6 +1,6 @@ import asyncio -import json +import orjson from sqlalchemy import and_, join, select from cache.cache import cache_author, cache_topic @@ -21,7 +21,7 @@ async def precache_authors_followers(author_id, session): result = session.execute(followers_query) authors_followers.update(row[0] for row in result if row[0]) - followers_payload = json.dumps(list(authors_followers), cls=CustomJSONEncoder) + followers_payload = orjson.dumps(list(authors_followers), cls=CustomJSONEncoder) await redis.execute("SET", f"author:followers:{author_id}", followers_payload) @@ -35,9 +35,9 @@ async def precache_authors_follows(author_id, session): follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]} follows_shouts = {row[0] for row in session.execute(follows_shouts_query) if row[0]} - topics_payload = json.dumps(list(follows_topics), cls=CustomJSONEncoder) - authors_payload = json.dumps(list(follows_authors), cls=CustomJSONEncoder) - shouts_payload = json.dumps(list(follows_shouts), cls=CustomJSONEncoder) + topics_payload = orjson.dumps(list(follows_topics), cls=CustomJSONEncoder) + authors_payload = orjson.dumps(list(follows_authors), cls=CustomJSONEncoder) + shouts_payload = orjson.dumps(list(follows_shouts), cls=CustomJSONEncoder) await asyncio.gather( redis.execute("SET", f"author:follows-topics:{author_id}", topics_payload), @@ -62,7 +62,7 @@ async def precache_topics_authors(topic_id: int, session): ) topic_authors = {row[0] for row in session.execute(topic_authors_query) if row[0]} - authors_payload = json.dumps(list(topic_authors), cls=CustomJSONEncoder) + authors_payload = orjson.dumps(list(topic_authors), cls=CustomJSONEncoder) await redis.execute("SET", f"topic:authors:{topic_id}", authors_payload) @@ -71,7 +71,7 @@ async def precache_topics_followers(topic_id: int, session): followers_query = select(TopicFollower.follower).where(TopicFollower.topic == topic_id) topic_followers = {row[0] for row in session.execute(followers_query) if row[0]} - followers_payload = json.dumps(list(topic_followers), cls=CustomJSONEncoder) + followers_payload = orjson.dumps(list(topic_followers), cls=CustomJSONEncoder) await redis.execute("SET", f"topic:followers:{topic_id}", followers_payload) diff --git a/docs/features.md b/docs/features.md index 1970dbdb..b6d1bdc5 100644 --- a/docs/features.md +++ b/docs/features.md @@ -14,6 +14,16 @@ - АвтоматичСскоС ΠΎΠΏΡ€Π΅Π΄Π΅Π»Π΅Π½ΠΈΠ΅ сСрвСра Π°Π²Ρ‚ΠΎΡ€ΠΈΠ·Π°Ρ†ΠΈΠΈ - ΠšΠΎΡ€Ρ€Π΅ΠΊΡ‚Π½Π°Ρ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ° CORS для всСх ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΈΠ²Π°Π΅ΠΌΡ‹Ρ… Π΄ΠΎΠΌΠ΅Π½ΠΎΠ² +## БистСма ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ + +- Redis ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ Π² качСствС основного ΠΌΠ΅Ρ…Π°Π½ΠΈΠ·ΠΌΠ° ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ +- ΠŸΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠ° ΠΊΠ°ΠΊ синхронных, Ρ‚Π°ΠΊ ΠΈ асинхронных Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΉ Π² Π΄Π΅ΠΊΠΎΡ€Π°Ρ‚ΠΎΡ€Π΅ cache_on_arguments +- АвтоматичСская сСриализация/дСсСриализация Π΄Π°Π½Π½Ρ‹Ρ… Π² JSON с использованиСм CustomJSONEncoder +- РСзСрвная сСриализация Ρ‡Π΅Ρ€Π΅Π· pickle для слоТных ΠΎΠ±ΡŠΠ΅ΠΊΡ‚ΠΎΠ² +- 
ГСнСрация ΡƒΠ½ΠΈΠΊΠ°Π»ΡŒΠ½Ρ‹Ρ… ΠΊΠ»ΡŽΡ‡Π΅ΠΉ кСша Π½Π° основС сигнатуры Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ ΠΈ ΠΏΠ΅Ρ€Π΅Π΄Π°Π½Π½Ρ‹Ρ… Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚ΠΎΠ² +- НастраиваСмоС врСмя ΠΆΠΈΠ·Π½ΠΈ кСша (TTL) +- Π’ΠΎΠ·ΠΌΠΎΠΆΠ½ΠΎΡΡ‚ΡŒ Ρ€ΡƒΡ‡Π½ΠΎΠΉ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша для ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½Ρ‹Ρ… Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΉ ΠΈ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚ΠΎΠ² + ## Webhooks - АвтоматичСская рСгистрация Π²Π΅Π±Ρ…ΡƒΠΊΠ° для события user.login diff --git a/requirements.txt b/requirements.txt index f6da22f5..d6ce74b9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,6 @@ bcrypt authlib passlib - opensearch-py google-analytics-data colorlog @@ -14,5 +13,5 @@ starlette gql ariadne granian - +orjson pydantic \ No newline at end of file diff --git a/resolvers/editor.py b/resolvers/editor.py index 89c6e4b2..1efc40cc 100644 --- a/resolvers/editor.py +++ b/resolvers/editor.py @@ -1,6 +1,6 @@ -import json import time +import orjson from sqlalchemy import and_, desc, select from sqlalchemy.orm import joinedload from sqlalchemy.sql.functions import coalesce @@ -106,7 +106,7 @@ async def get_my_shout(_, info, shout_id: int): if hasattr(shout, "media") and shout.media: if isinstance(shout.media, str): try: - shout.media = json.loads(shout.media) + shout.media = orjson.loads(shout.media) except Exception as e: logger.error(f"Error parsing shout media: {e}") shout.media = [] diff --git a/resolvers/notifier.py b/resolvers/notifier.py index 5cd2fbcb..569f0f7a 100644 --- a/resolvers/notifier.py +++ b/resolvers/notifier.py @@ -1,7 +1,7 @@ -import json import time from typing import List, Tuple +import orjson from sqlalchemy import and_, select from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import aliased @@ -115,7 +115,7 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o if (groups_amount + offset) >= limit: break - payload = json.loads(str(notification.payload)) + payload = orjson.loads(str(notification.payload)) if str(notification.entity) == NotificationEntity.SHOUT.value: shout = payload @@ -177,7 +177,7 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o elif str(notification.entity) == "follower": thread_id = "followers" - follower = json.loads(payload) + follower = orjson.loads(payload) group = groups_by_thread.get(thread_id) if group: if str(notification.action) == "follow": @@ -293,11 +293,11 @@ async def notifications_seen_thread(_, info, thread: str, after: int): ) exclude = set() for nr in removed_reaction_notifications: - reaction = json.loads(str(nr.payload)) + reaction = orjson.loads(str(nr.payload)) reaction_id = reaction.get("id") exclude.add(reaction_id) for n in new_reaction_notifications: - reaction = json.loads(str(n.payload)) + reaction = orjson.loads(str(n.payload)) reaction_id = reaction.get("id") if ( reaction_id not in exclude diff --git a/resolvers/reader.py b/resolvers/reader.py index 0be80af4..003a50cd 100644 --- a/resolvers/reader.py +++ b/resolvers/reader.py @@ -1,5 +1,4 @@ -import json - +import orjson from graphql import GraphQLResolveInfo from sqlalchemy import and_, nulls_last, text from sqlalchemy.orm import aliased @@ -222,7 +221,7 @@ def get_shouts_with_links(info, q, limit=20, offset=0): if has_field(info, "stat"): stat = {} if isinstance(row.stat, str): - stat = json.loads(row.stat) + stat = orjson.loads(row.stat) elif isinstance(row.stat, dict): stat = row.stat viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0 @@ -231,7 +230,7 @@ def get_shouts_with_links(info, q, limit=20, offset=0): # ΠžΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ° main_topic ΠΈ topics 
topics = None if has_field(info, "topics") and hasattr(row, "topics"): - topics = json.loads(row.topics) if isinstance(row.topics, str) else row.topics + topics = orjson.loads(row.topics) if isinstance(row.topics, str) else row.topics # logger.debug(f"Shout#{shout_id} topics: {topics}") shout_dict["topics"] = topics @@ -240,7 +239,7 @@ def get_shouts_with_links(info, q, limit=20, offset=0): if hasattr(row, "main_topic"): # logger.debug(f"Raw main_topic for shout#{shout_id}: {row.main_topic}") main_topic = ( - json.loads(row.main_topic) if isinstance(row.main_topic, str) else row.main_topic + orjson.loads(row.main_topic) if isinstance(row.main_topic, str) else row.main_topic ) # logger.debug(f"Parsed main_topic for shout#{shout_id}: {main_topic}") @@ -260,7 +259,7 @@ def get_shouts_with_links(info, q, limit=20, offset=0): if has_field(info, "authors") and hasattr(row, "authors"): shout_dict["authors"] = ( - json.loads(row.authors) if isinstance(row.authors, str) else row.authors + orjson.loads(row.authors) if isinstance(row.authors, str) else row.authors ) if has_field(info, "media") and shout.media: @@ -268,8 +267,8 @@ def get_shouts_with_links(info, q, limit=20, offset=0): media_data = shout.media if isinstance(media_data, str): try: - media_data = json.loads(media_data) - except json.JSONDecodeError: + media_data = orjson.loads(media_data) + except orjson.JSONDecodeError: media_data = [] shout_dict["media"] = [media_data] if isinstance(media_data, dict) else media_data diff --git a/services/db.py b/services/db.py index bd3072e4..d598e7f2 100644 --- a/services/db.py +++ b/services/db.py @@ -1,10 +1,10 @@ -import json import math import time import traceback import warnings from typing import Any, Callable, Dict, TypeVar +import orjson import sqlalchemy from sqlalchemy import ( JSON, @@ -84,8 +84,8 @@ class Base(declarative_base()): # Check if the value is JSON and decode it if necessary if isinstance(value, (str, bytes)) and isinstance(self.__table__.columns[column_name].type, JSON): try: - data[column_name] = json.loads(value) - except (TypeError, json.JSONDecodeError) as e: + data[column_name] = orjson.loads(value) + except (TypeError, orjson.JSONDecodeError) as e: logger.error(f"Error decoding JSON for column '{column_name}': {e}") data[column_name] = value else: diff --git a/services/notify.py b/services/notify.py index 626afa7b..911bd6ec 100644 --- a/services/notify.py +++ b/services/notify.py @@ -1,4 +1,4 @@ -import json +import orjson from orm.notification import Notification from services.db import local_session @@ -18,7 +18,7 @@ async def notify_reaction(reaction, action: str = "create"): data = {"payload": reaction, "action": action} try: save_notification(action, channel_name, data.get("payload")) - await redis.publish(channel_name, json.dumps(data)) + await redis.publish(channel_name, orjson.dumps(data)) except Exception as e: logger.error(f"Failed to publish to channel {channel_name}: {e}") @@ -28,7 +28,7 @@ async def notify_shout(shout, action: str = "update"): data = {"payload": shout, "action": action} try: save_notification(action, channel_name, data.get("payload")) - await redis.publish(channel_name, json.dumps(data)) + await redis.publish(channel_name, orjson.dumps(data)) except Exception as e: logger.error(f"Failed to publish to channel {channel_name}: {e}") @@ -43,7 +43,7 @@ async def notify_follower(follower: dict, author_id: int, action: str = "follow" save_notification(action, channel_name, data.get("payload")) # Convert data to JSON string - json_data = 
json.dumps(data) + json_data = orjson.dumps(data) # Ensure the data is not empty before publishing if json_data: diff --git a/services/search.py b/services/search.py index 9c9b13e9..e92c387b 100644 --- a/services/search.py +++ b/services/search.py @@ -1,8 +1,8 @@ import asyncio -import json import logging import os +import orjson from opensearchpy import OpenSearch from services.redis import redis @@ -142,7 +142,7 @@ class SearchService: # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΠΊΠ° ΠΈ ΠΎΠ±Π½ΠΎΠ²Π»Π΅Π½ΠΈΠ΅ структуры индСкса, Ссли Π½Π΅ΠΎΠ±Ρ…ΠΎΠ΄ΠΈΠΌΠΎ result = self.client.indices.get_mapping(index=self.index_name) if isinstance(result, str): - result = json.loads(result) + result = orjson.loads(result) if isinstance(result, dict): mapping = result.get(self.index_name, {}).get("mappings") logger.info(f"НайдСна структура индСксации: {mapping['properties'].keys()}") @@ -210,7 +210,7 @@ class SearchService: "SETEX", redis_key, REDIS_TTL, - json.dumps(results, cls=CustomJSONEncoder), + orjson.dumps(results, cls=CustomJSONEncoder), ) return results return [] diff --git a/services/viewed.py b/services/viewed.py index f1942de0..f54927b2 100644 --- a/services/viewed.py +++ b/services/viewed.py @@ -1,10 +1,11 @@ import asyncio -import json import os import time from datetime import datetime, timedelta, timezone from typing import Dict +import orjson + # ga from google.analytics.data_v1beta import BetaAnalyticsDataClient from google.analytics.data_v1beta.types import ( @@ -84,7 +85,7 @@ class ViewedStorage: logger.warn(f" * {viewfile_path} is too old: {self.start_date}") with open(viewfile_path, "r") as file: - precounted_views = json.load(file) + precounted_views = orjson.load(file) self.precounted_by_slug.update(precounted_views) logger.info(f" * {len(precounted_views)} shouts with views was loaded.") diff --git a/utils/encoders.py b/utils/encoders.py index fe4c97d4..0a6d234c 100644 --- a/utils/encoders.py +++ b/utils/encoders.py @@ -1,8 +1,9 @@ -import json from decimal import Decimal +import orjson -class CustomJSONEncoder(json.JSONEncoder): + +class CustomJSONEncoder(orjson.JSONEncoder): def default(self, obj): if isinstance(obj, Decimal): return str(obj) From edece36eccfd9fd65ac727f28028265368c2889c Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 11:59:43 +0300 Subject: [PATCH 09/27] jsonenc-fix --- utils/encoders.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/utils/encoders.py b/utils/encoders.py index 0a6d234c..1901504e 100644 --- a/utils/encoders.py +++ b/utils/encoders.py @@ -1,9 +1,9 @@ from decimal import Decimal -import orjson +from json import JSONEncoder -class CustomJSONEncoder(orjson.JSONEncoder): +class CustomJSONEncoder(JSONEncoder): def default(self, obj): if isinstance(obj, Decimal): return str(obj) From a4a3c35f4dac897b63220ed8ca43d5098c52d14d Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 12:04:47 +0300 Subject: [PATCH 10/27] lesscode --- Dockerfile | 2 +- server.py | 32 -------------------------------- utils/encoders.py | 1 - 3 files changed, 1 insertion(+), 34 deletions(-) delete mode 100644 server.py diff --git a/Dockerfile b/Dockerfile index facd6c06..6f5aa5d9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,4 +13,4 @@ RUN pip install -r requirements.txt COPY . . 
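One detail worth flagging about the CMD line introduced below: Docker's exec-form CMD (the JSON-array syntax) does not invoke a shell, so "$PORT" is passed to granian literally instead of being expanded, which is what the later portfix commit addresses by hard-coding 8000. Shell expansion would require the shell form, for example:

    # exec form: no shell, "$PORT" reaches granian as a literal string
    CMD ["python", "-m", "granian", "main:app", "--port", "$PORT"]
    # shell form: /bin/sh -c expands $PORT when the container starts
    CMD python -m granian main:app --interface asgi --host 0.0.0.0 --port $PORT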
-CMD ["python", "server.py"] \ No newline at end of file +CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "$PORT"] \ No newline at end of file diff --git a/server.py b/server.py deleted file mode 100644 index 848c61d6..00000000 --- a/server.py +++ /dev/null @@ -1,32 +0,0 @@ -import sys -from pathlib import Path - -from granian import Granian -from granian.constants import Interfaces -from granian.log import LogLevels - -from settings import PORT -from utils.logger import root_logger as logger - -if __name__ == "__main__": - logger.info("started") - try: - granian_instance = Granian( - "main:app", - address="0.0.0.0", - port=PORT, - interface=Interfaces.ASGI, - websockets=False, - log_level=LogLevels.debug, - backlog=2048, - ) - - if "dev" in sys.argv: - logger.info("dev mode, building ssl context") - granian_instance.build_ssl_context(cert=Path("localhost.pem"), key=Path("localhost-key.pem"), password=None) - granian_instance.serve() - except Exception as error: - logger.error(error, exc_info=True) - raise - finally: - logger.info("stopped") diff --git a/utils/encoders.py b/utils/encoders.py index 1901504e..13964b07 100644 --- a/utils/encoders.py +++ b/utils/encoders.py @@ -1,5 +1,4 @@ from decimal import Decimal - from json import JSONEncoder From 8a95aa1209bace04862292e0a1f8c3ae8e230a5e Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 12:05:58 +0300 Subject: [PATCH 11/27] jsonload-fix --- services/viewed.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/viewed.py b/services/viewed.py index f54927b2..a388ea0b 100644 --- a/services/viewed.py +++ b/services/viewed.py @@ -85,7 +85,7 @@ class ViewedStorage: logger.warn(f" * {viewfile_path} is too old: {self.start_date}") with open(viewfile_path, "r") as file: - precounted_views = orjson.load(file) + precounted_views = orjson.loads(file.read()) self.precounted_by_slug.update(precounted_views) logger.info(f" * {len(precounted_views)} shouts with views was loaded.") From 0aff77eda6b85cb033342a44a5b19fcab727588c Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 12:13:14 +0300 Subject: [PATCH 12/27] portfix --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 6f5aa5d9..2c4874b1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,4 +13,4 @@ RUN pip install -r requirements.txt COPY . . -CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "$PORT"] \ No newline at end of file +CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file From ca01181f379feeeec8825820188d64b62151c776 Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 12:24:30 +0300 Subject: [PATCH 13/27] jsonfix --- Dockerfile | 5 ++++- cache/cache.py | 12 +++++++----- cache/memorycache.py | 5 +++-- cache/precache.py | 13 +++++++------ services/search.py | 3 ++- settings.py | 16 +++++++++++----- 6 files changed, 34 insertions(+), 20 deletions(-) diff --git a/Dockerfile b/Dockerfile index 2c4874b1..ad15f9b4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,4 +13,7 @@ RUN pip install -r requirements.txt COPY . . 
-CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file +ENV PORT=8000 +EXPOSE $PORT + +CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "$PORT"] \ No newline at end of file diff --git a/cache/cache.py b/cache/cache.py index 75f7ec68..6057300a 100644 --- a/cache/cache.py +++ b/cache/cache.py @@ -1,6 +1,8 @@ import asyncio +import json from typing import List +import jsonschema import orjson from sqlalchemy import and_, join, select @@ -35,7 +37,7 @@ CACHE_KEYS = { # Cache topic data async def cache_topic(topic: dict): - payload = orjson.dumps(topic, cls=CustomJSONEncoder) + payload = json.dumps(topic, cls=CustomJSONEncoder) await asyncio.gather( redis_operation("SET", f"topic:id:{topic['id']}", payload), redis_operation("SET", f"topic:slug:{topic['slug']}", payload), @@ -44,7 +46,7 @@ async def cache_topic(topic: dict): # Cache author data async def cache_author(author: dict): - payload = orjson.dumps(author, cls=CustomJSONEncoder) + payload = json.dumps(author, cls=CustomJSONEncoder) await asyncio.gather( redis_operation("SET", f"author:user:{author['user'].strip()}", str(author["id"])), redis_operation("SET", f"author:id:{author['id']}", payload), @@ -61,7 +63,7 @@ async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_i follows.append(entity_id) else: follows = [eid for eid in follows if eid != entity_id] - await redis_operation("SET", key, orjson.dumps(follows, cls=CustomJSONEncoder)) + await redis_operation("SET", key, json.dumps(follows, cls=CustomJSONEncoder)) await update_follower_stat(follower_id, entity_type, len(follows)) @@ -112,7 +114,7 @@ async def get_cached_topic(topic_id: int): topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none() if topic: topic_dict = topic.dict() - await redis_operation("SET", topic_key, orjson.dumps(topic_dict, cls=CustomJSONEncoder)) + await redis_operation("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder)) return topic_dict return None @@ -378,7 +380,7 @@ async def invalidate_shouts_cache(cache_keys: List[str]): async def cache_topic_shouts(topic_id: int, shouts: List[dict]): """ΠšΡΡˆΠΈΡ€ΡƒΠ΅Ρ‚ список ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ для Ρ‚Π΅ΠΌΡ‹""" key = f"topic_shouts_{topic_id}" - payload = orjson.dumps(shouts, cls=CustomJSONEncoder) + payload = json.dumps(shouts, cls=CustomJSONEncoder) await redis_operation("SETEX", key, value=payload, ttl=CACHE_TTL) diff --git a/cache/memorycache.py b/cache/memorycache.py index 7cfc94eb..d2b666d9 100644 --- a/cache/memorycache.py +++ b/cache/memorycache.py @@ -6,6 +6,7 @@ import functools import hashlib import inspect +import json import logging import pickle from typing import Callable, Optional @@ -77,7 +78,7 @@ class RedisCache: # Π‘ΠΎΡ…Ρ€Π°Π½Π΅Π½ΠΈΠ΅ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π° Π² кСш try: # ΠŸΡ‹Ρ‚Π°Π΅ΠΌΡΡ ΡΠ΅Ρ€ΠΈΠ°Π»ΠΈΠ·ΠΎΠ²Π°Ρ‚ΡŒ ΠΊΠ°ΠΊ JSON - serialized = orjson.dumps(result, cls=CustomJSONEncoder) + serialized = json.dumps(result, cls=CustomJSONEncoder) except (TypeError, ValueError): # Если Π½Π΅ ΡƒΠ΄Π°Π»ΠΎΡΡŒ, ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ pickle serialized = pickle.dumps(result).decode() @@ -98,7 +99,7 @@ class RedisCache: try: import asyncio - serialized = orjson.dumps(result, cls=CustomJSONEncoder) + serialized = json.dumps(result, cls=CustomJSONEncoder) asyncio.create_task(redis.set(key, serialized, ex=self.ttl)) except Exception as e: logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠΈ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π°: {e}") diff --git 
a/cache/precache.py b/cache/precache.py index b0faec5f..5df91f2d 100644 --- a/cache/precache.py +++ b/cache/precache.py @@ -1,4 +1,5 @@ import asyncio +import json import orjson from sqlalchemy import and_, join, select @@ -21,7 +22,7 @@ async def precache_authors_followers(author_id, session): result = session.execute(followers_query) authors_followers.update(row[0] for row in result if row[0]) - followers_payload = orjson.dumps(list(authors_followers), cls=CustomJSONEncoder) + followers_payload = json.dumps(list(authors_followers), cls=CustomJSONEncoder) await redis.execute("SET", f"author:followers:{author_id}", followers_payload) @@ -35,9 +36,9 @@ async def precache_authors_follows(author_id, session): follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]} follows_shouts = {row[0] for row in session.execute(follows_shouts_query) if row[0]} - topics_payload = orjson.dumps(list(follows_topics), cls=CustomJSONEncoder) - authors_payload = orjson.dumps(list(follows_authors), cls=CustomJSONEncoder) - shouts_payload = orjson.dumps(list(follows_shouts), cls=CustomJSONEncoder) + topics_payload = json.dumps(list(follows_topics), cls=CustomJSONEncoder) + authors_payload = json.dumps(list(follows_authors), cls=CustomJSONEncoder) + shouts_payload = json.dumps(list(follows_shouts), cls=CustomJSONEncoder) await asyncio.gather( redis.execute("SET", f"author:follows-topics:{author_id}", topics_payload), @@ -62,7 +63,7 @@ async def precache_topics_authors(topic_id: int, session): ) topic_authors = {row[0] for row in session.execute(topic_authors_query) if row[0]} - authors_payload = orjson.dumps(list(topic_authors), cls=CustomJSONEncoder) + authors_payload = json.dumps(list(topic_authors), cls=CustomJSONEncoder) await redis.execute("SET", f"topic:authors:{topic_id}", authors_payload) @@ -71,7 +72,7 @@ async def precache_topics_followers(topic_id: int, session): followers_query = select(TopicFollower.follower).where(TopicFollower.topic == topic_id) topic_followers = {row[0] for row in session.execute(followers_query) if row[0]} - followers_payload = orjson.dumps(list(topic_followers), cls=CustomJSONEncoder) + followers_payload = json.dumps(list(topic_followers), cls=CustomJSONEncoder) await redis.execute("SET", f"topic:followers:{topic_id}", followers_payload) diff --git a/services/search.py b/services/search.py index e92c387b..e9257436 100644 --- a/services/search.py +++ b/services/search.py @@ -1,4 +1,5 @@ import asyncio +import json import logging import os @@ -210,7 +211,7 @@ class SearchService: "SETEX", redis_key, REDIS_TTL, - orjson.dumps(results, cls=CustomJSONEncoder), + json.dumps(results, cls=CustomJSONEncoder), ) return results return [] diff --git a/settings.py b/settings.py index 5567e60e..6453b9e3 100644 --- a/settings.py +++ b/settings.py @@ -1,18 +1,24 @@ import sys from os import environ -PORT = 8000 +MODE = "development" if "dev" in sys.argv else "production" +DEV_SERVER_PID_FILE_NAME = "dev-server.pid" + +PORT = environ.get("PORT") or 8000 + +# storages DB_URL = ( environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://") or environ.get("DB_URL", "").replace("postgres://", "postgresql://") or "sqlite:///discoursio.db" ) REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1" -AUTH_URL = environ.get("AUTH_URL") or "" -GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN") -DEV_SERVER_PID_FILE_NAME = "dev-server.pid" -MODE = "development" if "dev" in sys.argv else "production" +# debug +GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN") + +# 
authorizer.dev +AUTH_URL = environ.get("AUTH_URL") or "https://auth.discours.io/graphql" ADMIN_SECRET = environ.get("AUTH_SECRET") or "nothing" WEBHOOK_SECRET = environ.get("WEBHOOK_SECRET") or "nothing-else" From e1a1b4dc7df3af1e0727fa6490bd365ce81b3f8a Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 12:25:18 +0300 Subject: [PATCH 14/27] fx --- cache/cache.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cache/cache.py b/cache/cache.py index 6057300a..caeecfa5 100644 --- a/cache/cache.py +++ b/cache/cache.py @@ -2,7 +2,6 @@ import asyncio import json from typing import List -import jsonschema import orjson from sqlalchemy import and_, join, select From 47e12b44520a43d2141ef97d4cb5579834de998c Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 12:33:27 +0300 Subject: [PATCH 15/27] fx2 --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index ad15f9b4..3ba42006 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,6 +14,6 @@ RUN pip install -r requirements.txt COPY . . ENV PORT=8000 -EXPOSE $PORT +EXPOSE 8000 -CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "$PORT"] \ No newline at end of file +CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file From dbbfd42e0898739be936e541cf21ce5308a6d5e9 Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 12:35:55 +0300 Subject: [PATCH 16/27] redeploy --- Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 3ba42006..b481d544 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,6 @@ RUN pip install -r requirements.txt COPY . . -ENV PORT=8000 EXPOSE 8000 CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file From b63c387806f8250f5dfd69572136bc784c67593a Mon Sep 17 00:00:00 2001 From: Untone Date: Thu, 20 Mar 2025 12:52:44 +0300 Subject: [PATCH 17/27] jsonfix3 --- CHANGELOG.md | 5 +++++ cache/memorycache.py | 13 ++++++++++++- utils/encoders.py | 19 +++++++++++++++++++ 3 files changed, 36 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 57c736da..32793427 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +#### [0.4.13] - 2025-03-20 +- Fixed Topic objects serialization error in cache/memorycache.py +- Improved CustomJSONEncoder to support SQLAlchemy models with dict() method +- Enhanced error handling in cache_on_arguments decorator + #### [0.4.12] - 2025-03-19 - `delete_reaction` detects comments and uses `deleted_at` update - `check_to_unfeature` etc. 
update diff --git a/cache/memorycache.py b/cache/memorycache.py index d2b666d9..80035ea6 100644 --- a/cache/memorycache.py +++ b/cache/memorycache.py @@ -99,10 +99,21 @@ class RedisCache: try: import asyncio - serialized = json.dumps(result, cls=CustomJSONEncoder) + # ΠŸΠΎΠΏΡ‹Ρ‚ΠΊΠ° ΡΠ΅Ρ€ΠΈΠ°Π»ΠΈΠ·ΠΎΠ²Π°Ρ‚ΡŒ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ Π² JSON + try: + serialized = json.dumps(result, cls=CustomJSONEncoder) + except (TypeError, ValueError) as e: + logger.debug(f"JSON сСриализация Π½Π΅ ΡƒΠ΄Π°Π»Π°ΡΡŒ, ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ pickle: {e}") + # Если Π½Π΅ ΡƒΠ΄Π°Π»ΠΎΡΡŒ ΡΠ΅Ρ€ΠΈΠ°Π»ΠΈΠ·ΠΎΠ²Π°Ρ‚ΡŒ ΠΊΠ°ΠΊ JSON, ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ pickle + serialized = pickle.dumps(result).decode() + asyncio.create_task(redis.set(key, serialized, ex=self.ttl)) except Exception as e: logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠΈ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π°: {e}") + # Для ΠΎΡ‚Π»Π°Π΄ΠΊΠΈ добавляСм ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΡŽ ΠΎ Ρ‚ΠΈΠΏΠ΅ ΠΎΠ±ΡŠΠ΅ΠΊΡ‚Π° + logger.debug(f"Π’ΠΈΠΏ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π°: {type(result)}") + if hasattr(result, "__class__"): + logger.debug(f"Класс Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π°: {result.__class__.__name__}") return result diff --git a/utils/encoders.py b/utils/encoders.py index 13964b07..e93cc763 100644 --- a/utils/encoders.py +++ b/utils/encoders.py @@ -3,7 +3,26 @@ from json import JSONEncoder class CustomJSONEncoder(JSONEncoder): + """ + Π Π°ΡΡˆΠΈΡ€Π΅Π½Π½Ρ‹ΠΉ JSON энкодСр с ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠΎΠΉ сСриализации ΠΎΠ±ΡŠΠ΅ΠΊΡ‚ΠΎΠ² SQLAlchemy. + + ΠŸΡ€ΠΈΠΌΠ΅Ρ€Ρ‹: + >>> import json + >>> from decimal import Decimal + >>> from orm.topic import Topic + >>> json.dumps(Decimal("10.50"), cls=CustomJSONEncoder) + '"10.50"' + >>> topic = Topic(id=1, slug="test") + >>> json.dumps(topic, cls=CustomJSONEncoder) + '{"id": 1, "slug": "test", ...}' + """ + def default(self, obj): if isinstance(obj, Decimal): return str(obj) + + # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ, Π΅ΡΡ‚ΡŒ Π»ΠΈ Ρƒ ΠΎΠ±ΡŠΠ΅ΠΊΡ‚Π° ΠΌΠ΅Ρ‚ΠΎΠ΄ dict() (ΠΊΠ°ΠΊ Ρƒ ΠΌΠΎΠ΄Π΅Π»Π΅ΠΉ SQLAlchemy) + if hasattr(obj, "dict") and callable(obj.dict): + return obj.dict() + return super().default(obj) From 31c32143d0ccc8b375cd4321e94941aaf081f02f Mon Sep 17 00:00:00 2001 From: Untone Date: Fri, 21 Mar 2025 12:34:10 +0300 Subject: [PATCH 18/27] reaction-to-feature-fix --- CHANGELOG.md | 6 ++++ resolvers/reaction.py | 76 ++++++++++++++++++++++++++++++++++--------- 2 files changed, 66 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 32793427..33096bdf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ - Fixed Topic objects serialization error in cache/memorycache.py - Improved CustomJSONEncoder to support SQLAlchemy models with dict() method - Enhanced error handling in cache_on_arguments decorator +- Modified `load_reactions_by` to include deleted reactions when `include_deleted=true` for proper comment tree building +- Fixed featured/unfeatured logic in reaction processing: + - Dislike reactions now properly take precedence over likes + - Featured status now requires more than 4 likes from users with featured articles + - Removed unnecessary filters for deleted reactions since rating reactions are physically deleted + - Author's featured status now based on having non-deleted articles with featured_at #### [0.4.12] - 2025-03-19 - `delete_reaction` detects comments and uses `deleted_at` update diff --git a/resolvers/reaction.py b/resolvers/reaction.py index f4334065..89c4f9ac 100644 --- a/resolvers/reaction.py +++ b/resolvers/reaction.py @@ -97,20 +97,23 @@ def get_reactions_with_stat(q, limit, offset): def is_featured_author(session, author_id) -> bool: """ - Check if 
an author has at least one featured article. + Check if an author has at least one non-deleted featured article. :param session: Database session. :param author_id: Author ID. :return: True if the author has a featured article, else False. """ return session.query( - session.query(Shout).where(Shout.authors.any(id=author_id)).filter(Shout.featured_at.is_not(None)).exists() + session.query(Shout) + .where(Shout.authors.any(id=author_id)) + .filter(Shout.featured_at.is_not(None), Shout.deleted_at.is_(None)) + .exists() ).scalar() def check_to_feature(session, approver_id, reaction) -> bool: """ - Make a shout featured if it receives more than 4 votes. + Make a shout featured if it receives more than 4 votes from authors. :param session: Database session. :param approver_id: Approver author ID. @@ -118,18 +121,37 @@ def check_to_feature(session, approver_id, reaction) -> bool: :return: True if shout should be featured, else False. """ if not reaction.reply_to and is_positive(reaction.kind): - approvers = {approver_id} - # Count the number of approvers + # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ, Π½Π΅ содСрТит Π»ΠΈ пост Π±ΠΎΠ»Π΅Π΅ 20% Π΄ΠΈΠ·Π»Π°ΠΉΠΊΠΎΠ² + # Если Π΄Π°, Ρ‚ΠΎ Π½Π΅ Π΄ΠΎΠ»ΠΆΠ΅Π½ Π±Ρ‹Ρ‚ΡŒ featured нСзависимо ΠΎΡ‚ количСства Π»Π°ΠΉΠΊΠΎΠ² + if check_to_unfeature(session, reaction): + return False + + # Π‘ΠΎΠ±ΠΈΡ€Π°Π΅ΠΌ всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ², ΠΏΠΎΡΡ‚Π°Π²ΠΈΠ²ΡˆΠΈΡ… Π»Π°ΠΉΠΊ + author_approvers = set() reacted_readers = ( session.query(Reaction.created_by) - .filter(Reaction.shout == reaction.shout, is_positive(Reaction.kind), Reaction.deleted_at.is_(None)) + .filter( + Reaction.shout == reaction.shout, + is_positive(Reaction.kind), + # Π Π΅ΠΉΡ‚ΠΈΠ½Π³ΠΈ (LIKE, DISLIKE) физичСски ΡƒΠ΄Π°Π»ΡΡŽΡ‚ΡΡ, поэтому Ρ„ΠΈΠ»ΡŒΡ‚Ρ€ deleted_at Π½Π΅ Π½ΡƒΠΆΠ΅Π½ + ) .distinct() + .all() ) - for reader_id in reacted_readers: + # ДобавляСм Ρ‚Π΅ΠΊΡƒΡ‰Π΅Π³ΠΎ ΠΎΠ΄ΠΎΠ±Ρ€ΡΡŽΡ‰Π΅Π³ΠΎ + approver = session.query(Author).filter(Author.id == approver_id).first() + if approver and is_featured_author(session, approver_id): + author_approvers.add(approver_id) + + # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ, Π΅ΡΡ‚ΡŒ Π»ΠΈ Ρƒ Ρ€Π΅Π°Π³ΠΈΡ€ΠΎΠ²Π°Π²ΡˆΠΈΡ… Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² featured ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΈ + for (reader_id,) in reacted_readers: if is_featured_author(session, reader_id): - approvers.add(reader_id) - return len(approvers) > 4 + author_approvers.add(reader_id) + + # ΠŸΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡ становится featured ΠΏΡ€ΠΈ Π½Π°Π»ΠΈΡ‡ΠΈΠΈ Π±ΠΎΠ»Π΅Π΅ 4 Π»Π°ΠΉΠΊΠΎΠ² ΠΎΡ‚ Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² + logger.debug(f"ΠŸΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡ {reaction.shout} ΠΈΠΌΠ΅Π΅Ρ‚ {len(author_approvers)} Π»Π°ΠΉΠΊΠΎΠ² ΠΎΡ‚ Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²") + return len(author_approvers) > 4 return False @@ -141,20 +163,36 @@ def check_to_unfeature(session, reaction) -> bool: :param reaction: Reaction object. :return: True if shout should be unfeatured, else False. 
""" - if not reaction.reply_to and is_negative(reaction.kind): + if not reaction.reply_to: + # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ ΡΠΎΠΎΡ‚Π½ΠΎΡˆΠ΅Π½ΠΈΠ΅ Π΄ΠΈΠ·Π»Π°ΠΉΠΊΠΎΠ², Π΄Π°ΠΆΠ΅ Ссли тСкущая рСакция Π½Π΅ Π΄ΠΈΠ·Π»Π°ΠΉΠΊ total_reactions = ( session.query(Reaction) - .filter(Reaction.shout == reaction.shout, Reaction.reply_to.is_(None), Reaction.kind.in_(RATING_REACTIONS)) + .filter( + Reaction.shout == reaction.shout, + Reaction.reply_to.is_(None), + Reaction.kind.in_(RATING_REACTIONS), + # Π Π΅ΠΉΡ‚ΠΈΠ½Π³ΠΈ физичСски ΡƒΠ΄Π°Π»ΡΡŽΡ‚ΡΡ ΠΏΡ€ΠΈ ΡƒΠ΄Π°Π»Π΅Π½ΠΈΠΈ, поэтому Ρ„ΠΈΠ»ΡŒΡ‚Ρ€ deleted_at Π½Π΅ Π½ΡƒΠΆΠ΅Π½ + ) .count() ) negative_reactions = ( session.query(Reaction) - .filter(Reaction.shout == reaction.shout, is_negative(Reaction.kind), Reaction.deleted_at.is_(None)) + .filter( + Reaction.shout == reaction.shout, + is_negative(Reaction.kind), + Reaction.reply_to.is_(None), + # Π Π΅ΠΉΡ‚ΠΈΠ½Π³ΠΈ физичСски ΡƒΠ΄Π°Π»ΡΡŽΡ‚ΡΡ ΠΏΡ€ΠΈ ΡƒΠ΄Π°Π»Π΅Π½ΠΈΠΈ, поэтому Ρ„ΠΈΠ»ΡŒΡ‚Ρ€ deleted_at Π½Π΅ Π½ΡƒΠΆΠ΅Π½ + ) .count() ) - return total_reactions > 0 and (negative_reactions / total_reactions) >= 0.2 + # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ, ΡΠΎΡΡ‚Π°Π²Π»ΡΡŽΡ‚ Π»ΠΈ ΠΎΡ‚Ρ€ΠΈΡ†Π°Ρ‚Π΅Π»ΡŒΠ½Ρ‹Π΅ Ρ€Π΅Π°ΠΊΡ†ΠΈΠΈ 20% ΠΈΠ»ΠΈ Π±ΠΎΠ»Π΅Π΅ ΠΎΡ‚ всСх Ρ€Π΅Π°ΠΊΡ†ΠΈΠΉ + negative_ratio = negative_reactions / total_reactions if total_reactions > 0 else 0 + logger.debug( + f"ΠŸΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡ {reaction.shout}: {negative_reactions}/{total_reactions} ΠΎΡ‚Ρ€ΠΈΡ†Π°Ρ‚Π΅Π»ΡŒΠ½Ρ‹Ρ… Ρ€Π΅Π°ΠΊΡ†ΠΈΠΉ ({negative_ratio:.2%})" + ) + return total_reactions > 0 and negative_ratio >= 0.2 return False @@ -193,8 +231,8 @@ async def _create_reaction(session, shout_id: int, is_author: bool, author_id: i Create a new reaction and perform related actions such as updating counters and notification. :param session: Database session. - :param info: GraphQL context info. - :param shout: Shout object. + :param shout_id: Shout ID. + :param is_author: Flag indicating if the user is the author of the shout. :param author_id: Author ID. :param reaction: Dictionary with reaction data. :return: Dictionary with created reaction data. 
@@ -214,10 +252,14 @@ async def _create_reaction(session, shout_id: int, is_author: bool, author_id: i # Handle rating if r.kind in RATING_REACTIONS: + # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ сначала условиС для unfeature (Π΄ΠΈΠ·Π»Π°ΠΉΠΊΠΈ ΠΈΠΌΠ΅ΡŽΡ‚ ΠΏΡ€ΠΈΠΎΡ€ΠΈΡ‚Π΅Ρ‚) if check_to_unfeature(session, r): set_unfeatured(session, shout_id) + logger.info(f"ΠŸΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡ {shout_id} потСряла статус featured ΠΈΠ·-Π·Π° высокого ΠΏΡ€ΠΎΡ†Π΅Π½Ρ‚Π° Π΄ΠΈΠ·Π»Π°ΠΉΠΊΠΎΠ²") + # Волько Ссли Π½Π΅ Π±Ρ‹Π»ΠΎ unfeature, провСряСм условиС для feature elif check_to_feature(session, author_id, r): await set_featured(session, shout_id) + logger.info(f"ΠŸΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡ {shout_id} ΠΏΠΎΠ»ΡƒΡ‡ΠΈΠ»Π° статус featured благодаря Π»Π°ΠΉΠΊΠ°ΠΌ ΠΎΡ‚ Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²") # Notify creation await notify_reaction(rdict, "create") @@ -491,7 +533,9 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0): # Add statistics and apply filters q = add_reaction_stat_columns(q) q = apply_reaction_filters(by, q) - q = q.where(Reaction.deleted_at.is_(None)) + + # Include reactions with deleted_at for building comment trees + # q = q.where(Reaction.deleted_at.is_(None)) # Group and sort q = q.group_by(Reaction.id, Author.id, Shout.id) From 86ddb50cb8f9397918365696b7e8adafb1c346dc Mon Sep 17 00:00:00 2001 From: Untone Date: Sat, 22 Mar 2025 09:31:53 +0300 Subject: [PATCH 19/27] topics caching upgrade --- orm/shout.py | 53 ++++++++++- orm/topic.py | 42 ++++++++- resolvers/topic.py | 223 +++++++++++++++++++++++++++++++++++++++++---- services/db.py | 78 ++++++++++++++++ 4 files changed, 377 insertions(+), 19 deletions(-) diff --git a/orm/shout.py b/orm/shout.py index db352441..37734aca 100644 --- a/orm/shout.py +++ b/orm/shout.py @@ -1,6 +1,6 @@ import time -from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String +from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String from sqlalchemy.orm import relationship from orm.author import Author @@ -10,6 +10,15 @@ from services.db import Base class ShoutTopic(Base): + """ + Бвязь ΠΌΠ΅ΠΆΠ΄Ρƒ ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠ΅ΠΉ ΠΈ Ρ‚Π΅ΠΌΠΎΠΉ. + + Attributes: + shout (int): ID ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΈ + topic (int): ID Ρ‚Π΅ΠΌΡ‹ + main (bool): ΠŸΡ€ΠΈΠ·Π½Π°ΠΊ основной Ρ‚Π΅ΠΌΡ‹ + """ + __tablename__ = "shout_topic" id = None # type: ignore @@ -17,6 +26,12 @@ class ShoutTopic(Base): topic = Column(ForeignKey("topic.id"), primary_key=True, index=True) main = Column(Boolean, nullable=True) + # ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅ΠΌ Π΄ΠΎΠΏΠΎΠ»Π½ΠΈΡ‚Π΅Π»ΡŒΠ½Ρ‹Π΅ индСксы + __table_args__ = ( + # ΠžΠΏΡ‚ΠΈΠΌΠΈΠ·ΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹ΠΉ составной индСкс для запросов, ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Π΅ ΠΈΡ‰ΡƒΡ‚ ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΈ ΠΏΠΎ Ρ‚Π΅ΠΌΠ΅ + Index("idx_shout_topic_topic_shout", "topic", "shout"), + ) + class ShoutReactionsFollower(Base): __tablename__ = "shout_reactions_followers" @@ -30,6 +45,15 @@ class ShoutReactionsFollower(Base): class ShoutAuthor(Base): + """ + Бвязь ΠΌΠ΅ΠΆΠ΄Ρƒ ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠ΅ΠΉ ΠΈ Π°Π²Ρ‚ΠΎΡ€ΠΎΠΌ. 
+ + Attributes: + shout (int): ID ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΈ + author (int): ID Π°Π²Ρ‚ΠΎΡ€Π° + caption (str): Подпись Π°Π²Ρ‚ΠΎΡ€Π° + """ + __tablename__ = "shout_author" id = None # type: ignore @@ -37,8 +61,18 @@ class ShoutAuthor(Base): author = Column(ForeignKey("author.id"), primary_key=True, index=True) caption = Column(String, nullable=True, default="") + # ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅ΠΌ Π΄ΠΎΠΏΠΎΠ»Π½ΠΈΡ‚Π΅Π»ΡŒΠ½Ρ‹Π΅ индСксы + __table_args__ = ( + # ΠžΠΏΡ‚ΠΈΠΌΠΈΠ·ΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹ΠΉ индСкс для запросов, ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Π΅ ΠΈΡ‰ΡƒΡ‚ ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΈ ΠΏΠΎ Π°Π²Ρ‚ΠΎΡ€Ρƒ + Index("idx_shout_author_author_shout", "author", "shout"), + ) + class Shout(Base): + """ + ΠŸΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡ Π² систСмС. + """ + __tablename__ = "shout" created_at: int = Column(Integer, nullable=False, default=lambda: int(time.time())) @@ -74,3 +108,20 @@ class Shout(Base): seo: str | None = Column(String, nullable=True) # JSON draft: int | None = Column(ForeignKey("draft.id"), nullable=True) + + # ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅ΠΌ индСксы + __table_args__ = ( + # ИндСкс для быстрого поиска Π½Π΅ΡƒΠ΄Π°Π»Π΅Π½Π½Ρ‹Ρ… ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ + Index("idx_shout_deleted_at", "deleted_at", postgresql_where=deleted_at.is_(None)), + # ИндСкс для быстрой Ρ„ΠΈΠ»ΡŒΡ‚Ρ€Π°Ρ†ΠΈΠΈ ΠΏΠΎ community + Index("idx_shout_community", "community"), + # ИндСкс для быстрого поиска ΠΏΠΎ slug + Index("idx_shout_slug", "slug"), + # Боставной индСкс для Ρ„ΠΈΠ»ΡŒΡ‚Ρ€Π°Ρ†ΠΈΠΈ ΠΎΠΏΡƒΠ±Π»ΠΈΠΊΠΎΠ²Π°Π½Π½Ρ‹Ρ… Π½Π΅ΡƒΠ΄Π°Π»Π΅Π½Π½Ρ‹Ρ… ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ + Index( + "idx_shout_published_deleted", + "published_at", + "deleted_at", + postgresql_where=published_at.is_not(None) & deleted_at.is_(None), + ), + ) diff --git a/orm/topic.py b/orm/topic.py index 61231fb3..4be1897d 100644 --- a/orm/topic.py +++ b/orm/topic.py @@ -1,11 +1,21 @@ import time -from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String +from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String from services.db import Base class TopicFollower(Base): + """ + Бвязь ΠΌΠ΅ΠΆΠ΄Ρƒ Ρ‚ΠΎΠΏΠΈΠΊΠΎΠΌ ΠΈ Π΅Π³ΠΎ подписчиком. + + Attributes: + follower (int): ID подписчика + topic (int): ID Ρ‚ΠΎΠΏΠΈΠΊΠ° + created_at (int): ВрСмя создания связи + auto (bool): АвтоматичСская подписка + """ + __tablename__ = "topic_followers" id = None # type: ignore @@ -14,8 +24,29 @@ class TopicFollower(Base): created_at = Column(Integer, nullable=False, default=int(time.time())) auto = Column(Boolean, nullable=False, default=False) + # ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅ΠΌ индСксы + __table_args__ = ( + # ИндСкс для быстрого поиска всСх подписчиков Ρ‚ΠΎΠΏΠΈΠΊΠ° + Index("idx_topic_followers_topic", "topic"), + # ИндСкс для быстрого поиска всСх Ρ‚ΠΎΠΏΠΈΠΊΠΎΠ², Π½Π° ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Π΅ подписан Π°Π²Ρ‚ΠΎΡ€ + Index("idx_topic_followers_follower", "follower"), + ) + class Topic(Base): + """ + МодСль Ρ‚ΠΎΠΏΠΈΠΊΠ° (Ρ‚Π΅ΠΌΡ‹) ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ. 
+ + Attributes: + slug (str): Π£Π½ΠΈΠΊΠ°Π»ΡŒΠ½Ρ‹ΠΉ строковый ΠΈΠ΄Π΅Π½Ρ‚ΠΈΡ„ΠΈΠΊΠ°Ρ‚ΠΎΡ€ Ρ‚Π΅ΠΌΡ‹ + title (str): НазваниС Ρ‚Π΅ΠΌΡ‹ + body (str): ОписаниС Ρ‚Π΅ΠΌΡ‹ + pic (str): URL изобраТСния Ρ‚Π΅ΠΌΡ‹ + community (int): ID сообщСства + oid (str): Π‘Ρ‚Π°Ρ€Ρ‹ΠΉ ID + parent_ids (list): IDs Ρ€ΠΎΠ΄ΠΈΡ‚Π΅Π»ΡŒΡΠΊΠΈΡ… Ρ‚Π΅ΠΌ + """ + __tablename__ = "topic" slug = Column(String, unique=True) @@ -24,5 +55,12 @@ class Topic(Base): pic = Column(String, nullable=True, comment="Picture") community = Column(ForeignKey("community.id"), default=1) oid = Column(String, nullable=True, comment="Old ID") - parent_ids = Column(JSON, nullable=True, comment="Parent Topic IDs") + + # ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅ΠΌ индСксы + __table_args__ = ( + # ИндСкс для быстрого поиска ΠΏΠΎ slug + Index("idx_topic_slug", "slug"), + # ИндСкс для быстрого поиска ΠΏΠΎ сообщСству + Index("idx_topic_community", "community"), + ) diff --git a/resolvers/topic.py b/resolvers/topic.py index d7460c36..9dfc245b 100644 --- a/resolvers/topic.py +++ b/resolvers/topic.py @@ -1,44 +1,222 @@ -from sqlalchemy import select +import time + +from sqlalchemy import func, select, text from cache.cache import ( + cache_topic, get_cached_topic_authors, get_cached_topic_by_slug, get_cached_topic_followers, + redis_operation, ) from cache.memorycache import cache_region from orm.author import Author -from orm.topic import Topic +from orm.shout import Shout, ShoutTopic +from orm.topic import Topic, TopicFollower from resolvers.stat import get_with_stat from services.auth import login_required from services.db import local_session +from services.redis import redis from services.schema import mutation, query from utils.logger import root_logger as logger +# Π’ΡΠΏΠΎΠΌΠΎΠ³Π°Ρ‚Π΅Π»ΡŒΠ½Π°Ρ функция для получСния всСх Ρ‚Π΅ΠΌ Π±Π΅Π· статистики +async def get_all_topics(): + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ всС Ρ‚Π΅ΠΌΡ‹ Π±Π΅Π· статистики. + Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ для случаСв, ΠΊΠΎΠ³Π΄Π° Π½ΡƒΠΆΠ΅Π½ ΠΏΠΎΠ»Π½Ρ‹ΠΉ список Ρ‚Π΅ΠΌ Π±Π΅Π· Π΄ΠΎΠΏΠΎΠ»Π½ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠΉ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΠΈ. 
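+    Results are cached in Redis under the key "topics:all:basic" without TTL;
+    the cache is refreshed only when topics change (see invalidate_topics_cache).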
+ + Returns: + list: Бписок всСх Ρ‚Π΅ΠΌ Π±Π΅Π· статистики + """ + # ΠŸΡ‹Ρ‚Π°Π΅ΠΌΡΡ ΠΏΠΎΠ»ΡƒΡ‡ΠΈΡ‚ΡŒ Π΄Π°Π½Π½Ρ‹Π΅ ΠΈΠ· кСша + cached_topics = await redis_operation("GET", "topics:all:basic") + + if cached_topics: + logger.debug("Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π±Π°Π·ΠΎΠ²Ρ‹Π΅ Π΄Π°Π½Π½Ρ‹Π΅ ΠΎ Ρ‚Π΅ΠΌΠ°Ρ… ΠΈΠ· Redis") + try: + import json + + return json.loads(cached_topics) + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ дСсСриализации Ρ‚Π΅ΠΌ ΠΈΠ· Redis: {e}") + + # Если Π² кСшС Π½Π΅Ρ‚ Π΄Π°Π½Π½Ρ‹Ρ…, выполняСм запрос Π² Π‘Π” + logger.debug("ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ список всСх Ρ‚Π΅ΠΌ ΠΈΠ· Π‘Π” ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚") + + with local_session() as session: + # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Π±Π°Π·ΠΎΠ²ΠΎΠΉ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΠΈ ΠΎ Ρ‚Π΅ΠΌΠ°Ρ… + topics_query = select(Topic) + topics = session.execute(topics_query).scalars().all() + + # ΠŸΡ€Π΅ΠΎΠ±Ρ€Π°Π·ΡƒΠ΅ΠΌ Ρ‚Π΅ΠΌΡ‹ Π² словари + result = [topic.dict() for topic in topics] + + # ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ Π² Redis Π±Π΅Π· TTL (Π±ΡƒΠ΄Π΅Ρ‚ ΠΎΠ±Π½ΠΎΠ²Π»ΡΡ‚ΡŒΡΡ Ρ‚ΠΎΠ»ΡŒΠΊΠΎ ΠΏΡ€ΠΈ измСнСниях) + try: + import json + + await redis_operation("SET", "topics:all:basic", json.dumps(result)) + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠΈ Ρ‚Π΅ΠΌ Π² Redis: {e}") + + return result + + +# Π’ΡΠΏΠΎΠΌΠΎΠ³Π°Ρ‚Π΅Π»ΡŒΠ½Π°Ρ функция для получСния Ρ‚Π΅ΠΌ со статистикой с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ +async def get_topics_with_stats(limit=100, offset=0, community_id=None): + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ Ρ‚Π΅ΠΌΡ‹ со статистикой с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ. + + Args: + limit: МаксимальноС количСство Π²ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌΡ‹Ρ… Ρ‚Π΅ΠΌ + offset: Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ + community_id: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹ΠΉ ID сообщСства для Ρ„ΠΈΠ»ΡŒΡ‚Ρ€Π°Ρ†ΠΈΠΈ + + Returns: + list: Бписок Ρ‚Π΅ΠΌ с ΠΈΡ… статистикой + """ + # Π€ΠΎΡ€ΠΌΠΈΡ€ΡƒΠ΅ΠΌ ΠΊΠ»ΡŽΡ‡ кСша с ΡƒΡ‡Π΅Ρ‚ΠΎΠΌ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ΠΎΠ² + cache_key = f"topics:stats:limit={limit}:offset={offset}" + if community_id: + cache_key += f":community={community_id}" + + # ΠŸΡ‹Ρ‚Π°Π΅ΠΌΡΡ ΠΏΠΎΠ»ΡƒΡ‡ΠΈΡ‚ΡŒ Π΄Π°Π½Π½Ρ‹Π΅ ΠΈΠ· кСша + cached_topics = await redis_operation("GET", cache_key) + + if cached_topics: + logger.debug(f"Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π΄Π°Π½Π½Ρ‹Π΅ ΠΎ Ρ‚Π΅ΠΌΠ°Ρ… ΠΈΠ· Redis: {cache_key}") + try: + import json + + return json.loads(cached_topics) + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ дСсСриализации Ρ‚Π΅ΠΌ ΠΈΠ· Redis: {e}") + + # Если Π² кСшС Π½Π΅Ρ‚ Π΄Π°Π½Π½Ρ‹Ρ…, выполняСм ΠΎΠΏΡ‚ΠΈΠΌΠΈΠ·ΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹ΠΉ запрос + logger.debug(f"ВыполняСм запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ со статистикой: limit={limit}, offset={offset}") + + with local_session() as session: + # Π‘Π°Π·ΠΎΠ²Ρ‹ΠΉ запрос для получСния Ρ‚Π΅ΠΌ + base_query = select(Topic) + + # ДобавляСм Ρ„ΠΈΠ»ΡŒΡ‚Ρ€ ΠΏΠΎ сообщСству, Ссли ΡƒΠΊΠ°Π·Π°Π½ + if community_id: + base_query = base_query.where(Topic.community == community_id) + + # ΠŸΡ€ΠΈΠΌΠ΅Π½ΡΠ΅ΠΌ Π»ΠΈΠΌΠΈΡ‚ ΠΈ смСщСниС + base_query = base_query.limit(limit).offset(offset) + + # ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ Ρ‚Π΅ΠΌΡ‹ + topics = session.execute(base_query).scalars().all() + topic_ids = [topic.id for topic in topics] + + if not topic_ids: + return [] + + # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ статистики ΠΏΠΎ публикациям для Π²Ρ‹Π±Ρ€Π°Π½Π½Ρ‹Ρ… Ρ‚Π΅ΠΌ + shouts_stats_query = f""" + SELECT st.topic, COUNT(DISTINCT s.id) as shouts_count + FROM shout_topic st + JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL + WHERE st.topic IN ({",".join(map(str, topic_ids))}) + GROUP BY st.topic + """ + shouts_stats = {row[0]: row[1] for row in 
session.execute(text(shouts_stats_query))} + + # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ статистики ΠΏΠΎ подписчикам для Π²Ρ‹Π±Ρ€Π°Π½Π½Ρ‹Ρ… Ρ‚Π΅ΠΌ + followers_stats_query = f""" + SELECT topic, COUNT(DISTINCT follower) as followers_count + FROM topic_followers + WHERE topic IN ({",".join(map(str, topic_ids))}) + GROUP BY topic + """ + followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))} + + # Π€ΠΎΡ€ΠΌΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ с Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ΠΌ статистики + result = [] + for topic in topics: + topic_dict = topic.dict() + topic_dict["stat"] = { + "shouts": shouts_stats.get(topic.id, 0), + "followers": followers_stats.get(topic.id, 0), + } + result.append(topic_dict) + + # ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ ΠΊΠ°ΠΆΠ΄ΡƒΡŽ Ρ‚Π΅ΠΌΡƒ ΠΎΡ‚Π΄Π΅Π»ΡŒΠ½ΠΎ для использования Π² Π΄Ρ€ΡƒΠ³ΠΈΡ… функциях + await cache_topic(topic_dict) + + # ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ ΠΏΠΎΠ»Π½Ρ‹ΠΉ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ Π² Redis Π±Π΅Π· TTL (Π±ΡƒΠ΄Π΅Ρ‚ ΠΎΠ±Π½ΠΎΠ²Π»ΡΡ‚ΡŒΡΡ Ρ‚ΠΎΠ»ΡŒΠΊΠΎ ΠΏΡ€ΠΈ измСнСниях) + try: + import json + + await redis_operation("SET", cache_key, json.dumps(result)) + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠΈ Ρ‚Π΅ΠΌ Π² Redis: {e}") + + return result + + +# Ѐункция для ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша Ρ‚Π΅ΠΌ +async def invalidate_topics_cache(): + """ + Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅Ρ‚ всС кСши Ρ‚Π΅ΠΌ ΠΏΡ€ΠΈ ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΈ Π΄Π°Π½Π½Ρ‹Ρ…. + """ + logger.debug("Π˜Π½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡ кСша Ρ‚Π΅ΠΌ") + + # ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ всС ΠΊΠ»ΡŽΡ‡ΠΈ, Π½Π°Ρ‡ΠΈΠ½Π°ΡŽΡ‰ΠΈΠ΅ΡΡ с "topics:" + topic_keys = await redis.execute("KEYS", "topics:*") + + if topic_keys: + # УдаляСм всС Π½Π°ΠΉΠ΄Π΅Π½Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ + await redis.execute("DEL", *topic_keys) + logger.debug(f"Π£Π΄Π°Π»Π΅Π½ΠΎ {len(topic_keys)} ΠΊΠ»ΡŽΡ‡Π΅ΠΉ кСша Ρ‚Π΅ΠΌ") + + # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ всСх Ρ‚Π΅ΠΌ @query.field("get_topics_all") -def get_topics_all(_, _info): - cache_key = "get_topics_all" # ΠšΠ»ΡŽΡ‡ для кСша +async def get_topics_all(_, _info): + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ список всСх Ρ‚Π΅ΠΌ Π±Π΅Π· статистики. - @cache_region.cache_on_arguments(cache_key) - def _get_topics_all(): - topics_query = select(Topic) - return get_with_stat(topics_query) # ΠŸΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ с ΡƒΡ‡Π΅Ρ‚ΠΎΠΌ статистики + Returns: + list: Бписок всСх Ρ‚Π΅ΠΌ + """ + return await get_all_topics() - return _get_topics_all() + +# Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ ΠΈ статистикой +@query.field("get_topics_paginated") +async def get_topics_paginated(_, _info, limit=100, offset=0): + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ список Ρ‚Π΅ΠΌ с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ ΠΈ статистикой. + + Args: + limit: МаксимальноС количСство Π²ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌΡ‹Ρ… Ρ‚Π΅ΠΌ + offset: Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ + + Returns: + list: Бписок Ρ‚Π΅ΠΌ с ΠΈΡ… статистикой + """ + return await get_topics_with_stats(limit, offset) # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ ΠΏΠΎ сообщСству @query.field("get_topics_by_community") -def get_topics_by_community(_, _info, community_id: int): - cache_key = f"get_topics_by_community_{community_id}" # ΠšΠ»ΡŽΡ‡ для кСша +async def get_topics_by_community(_, _info, community_id: int, limit=100, offset=0): + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ список Ρ‚Π΅ΠΌ, ΠΏΡ€ΠΈΠ½Π°Π΄Π»Π΅ΠΆΠ°Ρ‰ΠΈΡ… ΡƒΠΊΠ°Π·Π°Π½Π½ΠΎΠΌΡƒ сообщСству с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ ΠΈ статистикой. 
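+    Delegates to get_topics_with_stats() with the community filter applied.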
- @cache_region.cache_on_arguments(cache_key) - def _get_topics_by_community(): - topics_by_community_query = select(Topic).where(Topic.community == community_id) - return get_with_stat(topics_by_community_query) + Args: + community_id: ID сообщСства + limit: МаксимальноС количСство Π²ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌΡ‹Ρ… Ρ‚Π΅ΠΌ + offset: Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ - return _get_topics_by_community() + Returns: + list: Бписок Ρ‚Π΅ΠΌ с ΠΈΡ… статистикой + """ + return await get_topics_with_stats(limit, offset, community_id) # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ ΠΏΠΎ Π°Π²Ρ‚ΠΎΡ€Ρƒ @@ -74,6 +252,9 @@ async def create_topic(_, _info, topic_input): session.add(new_topic) session.commit() + # Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅ΠΌ кСш всСх Ρ‚Π΅ΠΌ + await invalidate_topics_cache() + return {"topic": new_topic} @@ -91,6 +272,11 @@ async def update_topic(_, _info, topic_input): session.add(topic) session.commit() + # Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅ΠΌ кСш всСх Ρ‚Π΅ΠΌ ΠΈ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Ρ‚Π΅ΠΌΡ‹ + await invalidate_topics_cache() + await redis.execute("DEL", f"topic:slug:{slug}") + await redis.execute("DEL", f"topic:id:{topic.id}") + return {"topic": topic} @@ -111,6 +297,11 @@ async def delete_topic(_, info, slug: str): session.delete(t) session.commit() + # Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅ΠΌ кСш всСх Ρ‚Π΅ΠΌ ΠΈ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Ρ‚Π΅ΠΌΡ‹ + await invalidate_topics_cache() + await redis.execute("DEL", f"topic:slug:{slug}") + await redis.execute("DEL", f"topic:id:{t.id}") + return {} return {"error": "access denied"} diff --git a/services/db.py b/services/db.py index d598e7f2..b81873ba 100644 --- a/services/db.py +++ b/services/db.py @@ -10,12 +10,14 @@ from sqlalchemy import ( JSON, Column, Engine, + Index, Integer, create_engine, event, exc, func, inspect, + text ) from sqlalchemy.orm import Session, configure_mappers, declarative_base from sqlalchemy.sql.schema import Table @@ -56,6 +58,82 @@ def create_table_if_not_exists(engine, table): logger.info(f"Table '{table.__tablename__}' ok.") +def sync_indexes(): + """ + Π‘ΠΈΠ½Ρ…Ρ€ΠΎΠ½ΠΈΠ·ΠΈΡ€ΡƒΠ΅Ρ‚ индСксы Π² Π‘Π” с индСксами, ΠΎΠΏΡ€Π΅Π΄Π΅Π»Π΅Π½Π½Ρ‹ΠΌΠΈ Π² модСлях SQLAlchemy. + Π‘ΠΎΠ·Π΄Π°Π΅Ρ‚ Π½Π΅Π΄ΠΎΡΡ‚Π°ΡŽΡ‰ΠΈΠ΅ индСксы, Ссли ΠΎΠ½ΠΈ ΠΎΠΏΡ€Π΅Π΄Π΅Π»Π΅Π½Ρ‹ Π² модСлях, Π½ΠΎ ΠΎΡ‚ΡΡƒΡ‚ΡΡ‚Π²ΡƒΡŽΡ‚ Π² Π‘Π”. + + Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ pg_catalog для PostgreSQL для получСния списка ΡΡƒΡ‰Π΅ΡΡ‚Π²ΡƒΡŽΡ‰ΠΈΡ… индСксов. 
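+
+    Example (illustrative): a model declares Index("idx_topic_slug", "slug")
+    in its __table_args__; if the database has no index with that name,
+    sync_indexes() creates it and then runs ANALYZE on the table.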
+ """ + if not DB_URL.startswith("postgres"): + logger.warning("Ѐункция sync_indexes поддСрТиваСтся Ρ‚ΠΎΠ»ΡŒΠΊΠΎ для PostgreSQL.") + return + + logger.info("НачинаСм ΡΠΈΠ½Ρ…Ρ€ΠΎΠ½ΠΈΠ·Π°Ρ†ΠΈΡŽ индСксов Π² Π±Π°Π·Π΅ Π΄Π°Π½Π½Ρ‹Ρ…...") + + # ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ всС ΡΡƒΡ‰Π΅ΡΡ‚Π²ΡƒΡŽΡ‰ΠΈΠ΅ индСксы Π² Π‘Π” + with local_session() as session: + existing_indexes_query = text(""" + SELECT + t.relname AS table_name, + i.relname AS index_name + FROM + pg_catalog.pg_class i + JOIN + pg_catalog.pg_index ix ON ix.indexrelid = i.oid + JOIN + pg_catalog.pg_class t ON t.oid = ix.indrelid + JOIN + pg_catalog.pg_namespace n ON n.oid = i.relnamespace + WHERE + i.relkind = 'i' + AND n.nspname = 'public' + AND t.relkind = 'r' + ORDER BY + t.relname, i.relname; + """) + + existing_indexes = {row[1].lower() for row in session.execute(existing_indexes_query)} + logger.debug(f"НайдСно {len(existing_indexes)} ΡΡƒΡ‰Π΅ΡΡ‚Π²ΡƒΡŽΡ‰ΠΈΡ… индСксов Π² Π‘Π”") + + # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ ΠΊΠ°ΠΆΠ΄ΡƒΡŽ модСль ΠΈ Π΅Ρ‘ индСксы + for _model_name, model_class in REGISTRY.items(): + if hasattr(model_class, "__table__") and hasattr(model_class, "__table_args__"): + table_args = model_class.__table_args__ + + # Если table_args - это ΠΊΠΎΡ€Ρ‚Π΅ΠΆ, ΠΈΡ‰Π΅ΠΌ Π² Π½Ρ‘ΠΌ ΠΎΠ±ΡŠΠ΅ΠΊΡ‚Ρ‹ Index + if isinstance(table_args, tuple): + for arg in table_args: + if isinstance(arg, Index): + index_name = arg.name.lower() + + # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ, сущСствуСт Π»ΠΈ индСкс Π² Π‘Π” + if index_name not in existing_indexes: + logger.info( + f"Π‘ΠΎΠ·Π΄Π°Π΅ΠΌ ΠΎΡ‚ΡΡƒΡ‚ΡΡ‚Π²ΡƒΡŽΡ‰ΠΈΠΉ индСкс {index_name} для Ρ‚Π°Π±Π»ΠΈΡ†Ρ‹ {model_class.__tablename__}" + ) + + # Π‘ΠΎΠ·Π΄Π°Π΅ΠΌ индСкс Ссли ΠΎΠ½ отсутствуСт + try: + arg.create(engine) + logger.info(f"ИндСкс {index_name} ΡƒΡΠΏΠ΅ΡˆΠ½ΠΎ создан") + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ создании индСкса {index_name}: {e}") + else: + logger.debug(f"ИндСкс {index_name} ΡƒΠΆΠ΅ сущСствуСт") + + # АнализируСм Ρ‚Π°Π±Π»ΠΈΡ†Ρ‹ для ΠΎΠΏΡ‚ΠΈΠΌΠΈΠ·Π°Ρ†ΠΈΠΈ запросов + for model_name, model_class in REGISTRY.items(): + if hasattr(model_class, "__tablename__"): + try: + session.execute(text(f"ANALYZE {model_class.__tablename__}")) + logger.debug(f"Π’Π°Π±Π»ΠΈΡ†Π° {model_class.__tablename__} ΠΏΡ€ΠΎΠ°Π½Π°Π»ΠΈΠ·ΠΈΡ€ΠΎΠ²Π°Π½Π°") + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ Π°Π½Π°Π»ΠΈΠ·Π΅ Ρ‚Π°Π±Π»ΠΈΡ†Ρ‹ {model_class.__tablename__}: {e}") + + logger.info("Бинхронизация индСксов Π·Π°Π²Π΅Ρ€ΡˆΠ΅Π½Π°.") + + # noinspection PyUnusedLocal def local_session(src=""): return Session(bind=engine, expire_on_commit=False) From 615f1fe4682c0ca0cb9d456a7910b4b29f0bb15b Mon Sep 17 00:00:00 2001 From: Untone Date: Sat, 22 Mar 2025 11:47:19 +0300 Subject: [PATCH 20/27] topics+authors-reimplemented-cache --- CHANGELOG.md | 74 ++++++--- cache/cache.py | 240 ++++++++++++++++++++------- cache/memorycache.py | 181 -------------------- cache/precache.py | 15 +- cache/revalidator.py | 122 ++++++++++++-- docs/caching.md | 279 +++++++++++++++++++++++++++++++ docs/features.md | 10 +- orm/author.py | 84 +++++++++- resolvers/author.py | 381 ++++++++++++++++++++++++++++++------------- resolvers/topic.py | 260 +++++++++++++++-------------- services/db.py | 2 +- 11 files changed, 1127 insertions(+), 521 deletions(-) delete mode 100644 cache/memorycache.py create mode 100644 docs/caching.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 33096bdf..987e2303 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,57 @@ +#### [0.4.15] - 2025-03-22 +- Upgraded caching system described `docs/caching.md` +- Module `cache/memorycache.py` removed +- 
Enhanced caching system with backward compatibility: + - Unified cache key generation with support for existing naming patterns + - Improved Redis operation function with better error handling + - Updated precache module to use consistent Redis interface + - Integrated revalidator with the invalidation system for better performance + - Added comprehensive documentation for the caching system + - Enhanced cached_query to support template-based cache keys + - Standardized error handling across all cache operations +- Optimized cache invalidation system: + - Added targeted invalidation for individual entities (authors, topics) + - Improved revalidation manager with individual object processing + - Implemented batched processing for high-volume invalidations + - Reduced Redis operations by using precise key invalidation instead of prefix-based wipes + - Added special handling for slug changes in topics +- Unified caching system for all models: + - Implemented abstract functions `cache_data`, `get_cached_data` and `invalidate_cache_by_prefix` + - Added `cached_query` function for unified approach to query caching + - Updated resolvers `author.py` and `topic.py` to use the new caching API + - Improved logging for cache operations to simplify debugging + - Optimized Redis memory usage through key format unification +- Improved caching and sorting in Topic and Author modules: + - Added support for dictionary sorting parameters in `by` for both modules + - Optimized cache key generation for stable behavior with various parameters + - Enhanced sorting logic with direction support and arbitrary fields + - Added `by` parameter support in the API for getting topics by community +- Performance optimizations for author-related queries: + - Added SQLAlchemy-managed indexes to `Author`, `AuthorFollower`, `AuthorRating` and `AuthorBookmark` models + - Implemented persistent Redis caching for author queries without TTL (invalidated only on changes) + - Optimized author retrieval with separate endpoints: + - `get_authors_all` - returns all non-deleted authors without statistics + - `get_authors_paginated` - returns authors with statistics and pagination support + - `load_authors_by` - optimized to use caching and efficient sorting + - Improved SQL queries with optimized JOIN conditions and efficient filtering + - Added pre-aggregation of statistics (shouts count, followers count) in single efficient queries + - Implemented robust cache invalidation on author updates + - Created necessary indexes for author lookups by user ID, slug, and timestamps + +#### [0.4.14] - 2025-03-21 +- Significant performance improvements for topic queries: + - Added database indexes to optimize JOIN operations + - Implemented persistent Redis caching for topic queries (no TTL, invalidated only on changes) + - Optimized topic retrieval with separate endpoints for different use cases: + - `get_topics_all` - returns all topics without statistics for lightweight listing + - `get_topics_paginated` - returns topics with statistics and pagination support + - `get_topics_by_community` - adds pagination and optimized filtering by community + - Added SQLAlchemy-managed indexes directly in ORM models for automatic schema maintenance + - Created `sync_indexes()` function for automatic index synchronization during app startup + - Reduced database load by pre-aggregating statistics in optimized SQL queries + - Added robust cache invalidation on topic create/update/delete operations + - Improved query optimization with proper JOIN conditions and 
specific partial indexes + #### [0.4.13] - 2025-03-20 - Fixed Topic objects serialization error in cache/memorycache.py - Improved CustomJSONEncoder to support SQLAlchemy models with dict() method @@ -244,22 +298,4 @@ #### [0.2.7] -- `loadFollowedReactions` now with `login_required` -- notifier service api draft -- added `shout` visibility kind in schema -- community isolated from author in orm - - -#### [0.2.6] -- redis connection pool -- auth context fixes -- communities orm, resolvers, schema - - -#### [0.2.5] -- restructured -- all users have their profiles as authors in core -- `gittask`, `inbox` and `auth` logics removed -- `settings` moved to base and now smaller -- new outside auth schema -- removed `gittask`, `auth`, `inbox`, `migration` +- `loadFollowedReactions` now with ` \ No newline at end of file diff --git a/cache/cache.py b/cache/cache.py index caeecfa5..5b8ea5d6 100644 --- a/cache/cache.py +++ b/cache/cache.py @@ -1,6 +1,35 @@ +""" +Caching system for the Discours platform +---------------------------------------- + +This module provides a comprehensive caching solution with these key components: + +1. KEY NAMING CONVENTIONS: + - Entity-based keys: "entity:property:value" (e.g., "author:id:123") + - Collection keys: "entity:collection:params" (e.g., "authors:stats:limit=10:offset=0") + - Special case keys: Maintained for backwards compatibility (e.g., "topic_shouts_123") + +2. CORE FUNCTIONS: + - cached_query(): High-level function for retrieving cached data or executing queries + +3. ENTITY-SPECIFIC FUNCTIONS: + - cache_author(), cache_topic(): Cache entity data + - get_cached_author(), get_cached_topic(): Retrieve entity data from cache + - invalidate_cache_by_prefix(): Invalidate all keys with a specific prefix + +4. CACHE INVALIDATION STRATEGY: + - Direct invalidation via invalidate_* functions for immediate changes + - Delayed invalidation via revalidation_manager for background processing + - Event-based triggers for automatic cache updates (see triggers.py) + +To maintain consistency with the existing codebase, this module preserves +the original key naming patterns while providing a more structured approach +for new cache operations. 
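+
+Example (illustrative sketch; `load_topics` is a hypothetical query function,
+not part of this module):
+
+    async def load_topics(limit: int):
+        ...  # fetch rows from the database and return serializable data
+
+    async def resolver():
+        # cached_query returns cached data when present, otherwise runs
+        # load_topics(limit=10) and stores the result for 300 seconds
+        return await cached_query("topics:stats:limit=10", load_topics, ttl=300, limit=10)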
+""" + import asyncio import json -from typing import List +from typing import Any, Dict, List, Optional, Union import orjson from sqlalchemy import and_, join, select @@ -20,8 +49,10 @@ DEFAULT_FOLLOWS = { "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}], } -CACHE_TTL = 300 # 5 ΠΌΠΈΠ½ΡƒΡ‚ +CACHE_TTL = 300 # 5 minutes +# Key templates for common entity types +# These are used throughout the codebase and should be maintained for compatibility CACHE_KEYS = { "TOPIC_ID": "topic:id:{}", "TOPIC_SLUG": "topic:slug:{}", @@ -38,8 +69,8 @@ CACHE_KEYS = { async def cache_topic(topic: dict): payload = json.dumps(topic, cls=CustomJSONEncoder) await asyncio.gather( - redis_operation("SET", f"topic:id:{topic['id']}", payload), - redis_operation("SET", f"topic:slug:{topic['slug']}", payload), + redis.execute("SET", f"topic:id:{topic['id']}", payload), + redis.execute("SET", f"topic:slug:{topic['slug']}", payload), ) @@ -47,29 +78,29 @@ async def cache_topic(topic: dict): async def cache_author(author: dict): payload = json.dumps(author, cls=CustomJSONEncoder) await asyncio.gather( - redis_operation("SET", f"author:user:{author['user'].strip()}", str(author["id"])), - redis_operation("SET", f"author:id:{author['id']}", payload), + redis.execute("SET", f"author:user:{author['user'].strip()}", str(author["id"])), + redis.execute("SET", f"author:id:{author['id']}", payload), ) # Cache follows data async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True): key = f"author:follows-{entity_type}s:{follower_id}" - follows_str = await redis_operation("GET", key) + follows_str = await redis.execute("GET", key) follows = orjson.loads(follows_str) if follows_str else DEFAULT_FOLLOWS[entity_type] if is_insert: if entity_id not in follows: follows.append(entity_id) else: follows = [eid for eid in follows if eid != entity_id] - await redis_operation("SET", key, json.dumps(follows, cls=CustomJSONEncoder)) + await redis.execute("SET", key, json.dumps(follows, cls=CustomJSONEncoder)) await update_follower_stat(follower_id, entity_type, len(follows)) # Update follower statistics async def update_follower_stat(follower_id, entity_type, count): follower_key = f"author:id:{follower_id}" - follower_str = await redis_operation("GET", follower_key) + follower_str = await redis.execute("GET", follower_key) follower = orjson.loads(follower_str) if follower_str else None if follower: follower["stat"] = {f"{entity_type}s": count} @@ -79,7 +110,7 @@ async def update_follower_stat(follower_id, entity_type, count): # Get author from cache async def get_cached_author(author_id: int, get_with_stat): author_key = f"author:id:{author_id}" - result = await redis_operation("GET", author_key) + result = await redis.execute("GET", author_key) if result: return orjson.loads(result) # Load from database if not found in cache @@ -104,7 +135,7 @@ async def get_cached_topic(topic_id: int): dict: Topic data or None if not found. 
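
    The cache key used is "topic:id:{topic_id}"; entries are stored without
    a TTL and are invalidated when the topic changes.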
""" topic_key = f"topic:id:{topic_id}" - cached_topic = await redis_operation("GET", topic_key) + cached_topic = await redis.execute("GET", topic_key) if cached_topic: return orjson.loads(cached_topic) @@ -113,7 +144,7 @@ async def get_cached_topic(topic_id: int): topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none() if topic: topic_dict = topic.dict() - await redis_operation("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder)) + await redis.execute("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder)) return topic_dict return None @@ -122,7 +153,7 @@ async def get_cached_topic(topic_id: int): # Get topic by slug from cache async def get_cached_topic_by_slug(slug: str, get_with_stat): topic_key = f"topic:slug:{slug}" - result = await redis_operation("GET", topic_key) + result = await redis.execute("GET", topic_key) if result: return orjson.loads(result) # Load from database if not found in cache @@ -139,7 +170,7 @@ async def get_cached_topic_by_slug(slug: str, get_with_stat): async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]: # Fetch all author data concurrently keys = [f"author:id:{author_id}" for author_id in author_ids] - results = await asyncio.gather(*(redis_operation("GET", key) for key in keys)) + results = await asyncio.gather(*(redis.execute("GET", key) for key in keys)) authors = [orjson.loads(result) if result else None for result in results] # Load missing authors from database and cache missing_indices = [index for index, author in enumerate(authors) if author is None] @@ -166,7 +197,7 @@ async def get_cached_topic_followers(topic_id: int): """ try: cache_key = CACHE_KEYS["TOPIC_FOLLOWERS"].format(topic_id) - cached = await redis_operation("GET", cache_key) + cached = await redis.execute("GET", cache_key) if cached: followers_ids = orjson.loads(cached) @@ -182,7 +213,7 @@ async def get_cached_topic_followers(topic_id: int): .all() ] - await redis_operation("SETEX", cache_key, value=orjson.dumps(followers_ids), ttl=CACHE_TTL) + await redis.execute("SETEX", cache_key, CACHE_TTL, orjson.dumps(followers_ids)) followers = await get_cached_authors_by_ids(followers_ids) logger.debug(f"Cached {len(followers)} followers for topic #{topic_id}") return followers @@ -195,7 +226,7 @@ async def get_cached_topic_followers(topic_id: int): # Get cached author followers async def get_cached_author_followers(author_id: int): # Check cache for data - cached = await redis_operation("GET", f"author:followers:{author_id}") + cached = await redis.execute("GET", f"author:followers:{author_id}") if cached: followers_ids = orjson.loads(cached) followers = await get_cached_authors_by_ids(followers_ids) @@ -211,7 +242,7 @@ async def get_cached_author_followers(author_id: int): .filter(AuthorFollower.author == author_id, Author.id != author_id) .all() ] - await redis_operation("SET", f"author:followers:{author_id}", orjson.dumps(followers_ids)) + await redis.execute("SET", f"author:followers:{author_id}", orjson.dumps(followers_ids)) followers = await get_cached_authors_by_ids(followers_ids) return followers @@ -219,7 +250,7 @@ async def get_cached_author_followers(author_id: int): # Get cached follower authors async def get_cached_follower_authors(author_id: int): # Attempt to retrieve authors from cache - cached = await redis_operation("GET", f"author:follows-authors:{author_id}") + cached = await redis.execute("GET", f"author:follows-authors:{author_id}") if cached: authors_ids = orjson.loads(cached) else: @@ -233,7 +264,7 
@@ async def get_cached_follower_authors(author_id: int): .where(AuthorFollower.follower == author_id) ).all() ] - await redis_operation("SET", f"author:follows-authors:{author_id}", orjson.dumps(authors_ids)) + await redis.execute("SET", f"author:follows-authors:{author_id}", orjson.dumps(authors_ids)) authors = await get_cached_authors_by_ids(authors_ids) return authors @@ -242,7 +273,7 @@ async def get_cached_follower_authors(author_id: int): # Get cached follower topics async def get_cached_follower_topics(author_id: int): # Attempt to retrieve topics from cache - cached = await redis_operation("GET", f"author:follows-topics:{author_id}") + cached = await redis.execute("GET", f"author:follows-topics:{author_id}") if cached: topics_ids = orjson.loads(cached) else: @@ -255,11 +286,11 @@ async def get_cached_follower_topics(author_id: int): .where(TopicFollower.follower == author_id) .all() ] - await redis_operation("SET", f"author:follows-topics:{author_id}", orjson.dumps(topics_ids)) + await redis.execute("SET", f"author:follows-topics:{author_id}", orjson.dumps(topics_ids)) topics = [] for topic_id in topics_ids: - topic_str = await redis_operation("GET", f"topic:id:{topic_id}") + topic_str = await redis.execute("GET", f"topic:id:{topic_id}") if topic_str: topic = orjson.loads(topic_str) if topic and topic not in topics: @@ -281,10 +312,10 @@ async def get_cached_author_by_user_id(user_id: str, get_with_stat): dict: Dictionary with author data or None if not found. """ # Attempt to find author ID by user_id in Redis cache - author_id = await redis_operation("GET", f"author:user:{user_id.strip()}") + author_id = await redis.execute("GET", f"author:user:{user_id.strip()}") if author_id: # If ID is found, get full author data by ID - author_data = await redis_operation("GET", f"author:id:{author_id}") + author_data = await redis.execute("GET", f"author:id:{author_id}") if author_data: return orjson.loads(author_data) @@ -296,8 +327,8 @@ async def get_cached_author_by_user_id(user_id: str, get_with_stat): author = authors[0] author_dict = author.dict() await asyncio.gather( - redis_operation("SET", f"author:user:{user_id.strip()}", str(author.id)), - redis_operation("SET", f"author:id:{author.id}", orjson.dumps(author_dict)), + redis.execute("SET", f"author:user:{user_id.strip()}", str(author.id)), + redis.execute("SET", f"author:id:{author.id}", orjson.dumps(author_dict)), ) return author_dict @@ -318,7 +349,7 @@ async def get_cached_topic_authors(topic_id: int): """ # Attempt to get a list of author IDs from cache rkey = f"topic:authors:{topic_id}" - cached_authors_ids = await redis_operation("GET", rkey) + cached_authors_ids = await redis.execute("GET", rkey) if cached_authors_ids: authors_ids = orjson.loads(cached_authors_ids) else: @@ -332,7 +363,7 @@ async def get_cached_topic_authors(topic_id: int): ) authors_ids = [author_id for (author_id,) in session.execute(query).all()] # Cache the retrieved author IDs - await redis_operation("SET", rkey, orjson.dumps(authors_ids)) + await redis.execute("SET", rkey, orjson.dumps(authors_ids)) # Retrieve full author details from cached IDs if authors_ids: @@ -353,11 +384,11 @@ async def invalidate_shouts_cache(cache_keys: List[str]): cache_key = f"shouts:{key}" # УдаляСм основной кэш - await redis_operation("DEL", cache_key) + await redis.execute("DEL", cache_key) logger.debug(f"Invalidated cache key: {cache_key}") # ДобавляСм ΠΊΠ»ΡŽΡ‡ Π² список ΠΈΠ½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹Ρ… с TTL - await redis_operation("SETEX", f"{cache_key}:invalidated", 
value="1", ttl=CACHE_TTL) + await redis.execute("SETEX", f"{cache_key}:invalidated", CACHE_TTL, "1") # Если это кэш Ρ‚Π΅ΠΌΡ‹, ΠΈΠ½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅ΠΌ Ρ‚Π°ΠΊΠΆΠ΅ связанныС ΠΊΠ»ΡŽΡ‡ΠΈ if key.startswith("topic_"): @@ -369,7 +400,7 @@ async def invalidate_shouts_cache(cache_keys: List[str]): f"topic:stats:{topic_id}", ] for related_key in related_keys: - await redis_operation("DEL", related_key) + await redis.execute("DEL", related_key) logger.debug(f"Invalidated related key: {related_key}") except Exception as e: @@ -380,13 +411,13 @@ async def cache_topic_shouts(topic_id: int, shouts: List[dict]): """ΠšΡΡˆΠΈΡ€ΡƒΠ΅Ρ‚ список ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ для Ρ‚Π΅ΠΌΡ‹""" key = f"topic_shouts_{topic_id}" payload = json.dumps(shouts, cls=CustomJSONEncoder) - await redis_operation("SETEX", key, value=payload, ttl=CACHE_TTL) + await redis.execute("SETEX", key, CACHE_TTL, payload) async def get_cached_topic_shouts(topic_id: int) -> List[dict]: """ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ ΠΊΡΡˆΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹ΠΉ список ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ для Ρ‚Π΅ΠΌΡ‹""" key = f"topic_shouts_{topic_id}" - cached = await redis_operation("GET", key) + cached = await redis.execute("GET", key) if cached: return orjson.loads(cached) return None @@ -432,27 +463,7 @@ async def invalidate_shout_related_cache(shout: Shout, author_id: int): await invalidate_shouts_cache(list(cache_keys)) -async def redis_operation(operation: str, key: str, value=None, ttl=None): - """ - Унифицированная функция для Ρ€Π°Π±ΠΎΡ‚Ρ‹ с Redis - - Args: - operation: 'GET', 'SET', 'DEL', 'SETEX' - key: ΠΊΠ»ΡŽΡ‡ - value: Π·Π½Π°Ρ‡Π΅Π½ΠΈΠ΅ (для SET/SETEX) - ttl: врСмя ΠΆΠΈΠ·Π½ΠΈ Π² сСкундах (для SETEX) - """ - try: - if operation == "GET": - return await redis.execute("GET", key) - elif operation == "SET": - await redis.execute("SET", key, value) - elif operation == "SETEX": - await redis.execute("SETEX", key, ttl or CACHE_TTL, value) - elif operation == "DEL": - await redis.execute("DEL", key) - except Exception as e: - logger.error(f"Redis {operation} error for key {key}: {e}") +# Function removed - direct Redis calls used throughout the module instead async def get_cached_entity(entity_type: str, entity_id: int, get_method, cache_method): @@ -466,7 +477,7 @@ async def get_cached_entity(entity_type: str, entity_id: int, get_method, cache_ cache_method: ΠΌΠ΅Ρ‚ΠΎΠ΄ ΠΊΡΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ """ key = f"{entity_type}:id:{entity_id}" - cached = await redis_operation("GET", key) + cached = await redis.execute("GET", key) if cached: return orjson.loads(cached) @@ -497,3 +508,120 @@ async def cache_by_id(entity, entity_id: int, cache_method): d = x.dict() await cache_method(d) return d + + +# Π£Π½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½Π°Ρ функция для сохранСния Π΄Π°Π½Π½Ρ‹Ρ… Π² кСш +async def cache_data(key: str, data: Any, ttl: Optional[int] = None) -> None: + """ + БохраняСт Π΄Π°Π½Π½Ρ‹Π΅ Π² кСш ΠΏΠΎ ΡƒΠΊΠ°Π·Π°Π½Π½ΠΎΠΌΡƒ ΠΊΠ»ΡŽΡ‡Ρƒ. + + Args: + key: ΠšΠ»ΡŽΡ‡ кСша + data: Π”Π°Π½Π½Ρ‹Π΅ для сохранСния + ttl: ВрСмя ΠΆΠΈΠ·Π½ΠΈ кСша Π² сСкундах (None - бСссрочно) + """ + try: + payload = json.dumps(data, cls=CustomJSONEncoder) + if ttl: + await redis.execute("SETEX", key, ttl, payload) + else: + await redis.execute("SET", key, payload) + logger.debug(f"Π”Π°Π½Π½Ρ‹Π΅ сохранСны Π² кСш ΠΏΠΎ ΠΊΠ»ΡŽΡ‡Ρƒ {key}") + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ сохранСнии Π΄Π°Π½Π½Ρ‹Ρ… Π² кСш: {e}") + + +# Π£Π½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½Π°Ρ функция для получСния Π΄Π°Π½Π½Ρ‹Ρ… ΠΈΠ· кСша +async def get_cached_data(key: str) -> Optional[Any]: + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ Π΄Π°Π½Π½Ρ‹Π΅ ΠΈΠ· кСша ΠΏΠΎ ΡƒΠΊΠ°Π·Π°Π½Π½ΠΎΠΌΡƒ ΠΊΠ»ΡŽΡ‡Ρƒ. 
+ + Args: + key: ΠšΠ»ΡŽΡ‡ кСша + + Returns: + Any: Π”Π°Π½Π½Ρ‹Π΅ ΠΈΠ· кСша ΠΈΠ»ΠΈ None, Ссли Π΄Π°Π½Π½Ρ‹Ρ… Π½Π΅Ρ‚ + """ + try: + cached_data = await redis.execute("GET", key) + if cached_data: + logger.debug(f"Π”Π°Π½Π½Ρ‹Π΅ ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½Ρ‹ ΠΈΠ· кСша ΠΏΠΎ ΠΊΠ»ΡŽΡ‡Ρƒ {key}") + return orjson.loads(cached_data) + return None + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠΈ Π΄Π°Π½Π½Ρ‹Ρ… ΠΈΠ· кСша: {e}") + return None + + +# Π£Π½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½Π°Ρ функция для ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша ΠΏΠΎ прСфиксу +async def invalidate_cache_by_prefix(prefix: str) -> None: + """ + Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅Ρ‚ всС ΠΊΠ»ΡŽΡ‡ΠΈ кСша с ΡƒΠΊΠ°Π·Π°Π½Π½Ρ‹ΠΌ прСфиксом. + + Args: + prefix: ΠŸΡ€Π΅Ρ„ΠΈΠΊΡ ΠΊΠ»ΡŽΡ‡Π΅ΠΉ кСша для ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ + """ + try: + keys = await redis.execute("KEYS", f"{prefix}:*") + if keys: + await redis.execute("DEL", *keys) + logger.debug(f"Π£Π΄Π°Π»Π΅Π½ΠΎ {len(keys)} ΠΊΠ»ΡŽΡ‡Π΅ΠΉ кСша с прСфиксом {prefix}") + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша: {e}") + + +# Π£Π½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½Π°Ρ функция для получСния ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Π΄Π°Π½Π½Ρ‹Ρ… +async def cached_query( + cache_key: str, + query_func: callable, + ttl: Optional[int] = None, + force_refresh: bool = False, + use_key_format: bool = True, + **query_params, +) -> Any: + """ + Gets data from cache or executes query and saves result to cache. + Supports existing key formats for compatibility. + + Args: + cache_key: Cache key or key template from CACHE_KEYS + query_func: Function to execute the query + ttl: Cache TTL in seconds (None - indefinite) + force_refresh: Force cache refresh + use_key_format: Whether to check if cache_key matches a key template in CACHE_KEYS + **query_params: Parameters to pass to the query function + + Returns: + Any: Data from cache or query result + """ + # Check if cache_key matches a pattern in CACHE_KEYS + actual_key = cache_key + if use_key_format and "{}" in cache_key: + # Look for a template match in CACHE_KEYS + for key_name, key_format in CACHE_KEYS.items(): + if cache_key == key_format: + # We have a match, now look for the id or value to format with + for param_name, param_value in query_params.items(): + if param_name in ["id", "slug", "user", "topic_id", "author_id"]: + actual_key = cache_key.format(param_value) + break + + # If not forcing refresh, try to get data from cache + if not force_refresh: + cached_result = await get_cached_data(actual_key) + if cached_result is not None: + return cached_result + + # If data not in cache or refresh required, execute query + try: + result = await query_func(**query_params) + if result is not None: + # Save result to cache + await cache_data(actual_key, result, ttl) + return result + except Exception as e: + logger.error(f"Error executing query for caching: {e}") + # In case of error, return data from cache if not forcing refresh + if not force_refresh: + return await get_cached_data(actual_key) + raise diff --git a/cache/memorycache.py b/cache/memorycache.py deleted file mode 100644 index 80035ea6..00000000 --- a/cache/memorycache.py +++ /dev/null @@ -1,181 +0,0 @@ -""" -ΠœΠΎΠ΄ΡƒΠ»ΡŒ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Π΄Π°Π½Π½Ρ‹Ρ… с использованиСм Redis. -ΠŸΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΠ΅Ρ‚ API, совмСстимый с dogpile.cache для ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠΈ ΠΎΠ±Ρ€Π°Ρ‚Π½ΠΎΠΉ совмСстимости. 
-""" - -import functools -import hashlib -import inspect -import json -import logging -import pickle -from typing import Callable, Optional - -import orjson - -from services.redis import redis -from utils.encoders import CustomJSONEncoder - -logger = logging.getLogger(__name__) - -DEFAULT_TTL = 300 # врСмя ΠΆΠΈΠ·Π½ΠΈ кСша Π² сСкундах (5 ΠΌΠΈΠ½ΡƒΡ‚) - - -class RedisCache: - """ - Класс, ΠΏΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΡŽΡ‰ΠΈΠΉ API, совмСстимый с dogpile.cache, Π½ΠΎ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΡŽΡ‰ΠΈΠΉ Redis. - - ΠŸΡ€ΠΈΠΌΠ΅Ρ€Ρ‹: - >>> cache_region = RedisCache() - >>> @cache_region.cache_on_arguments("my_key") - ... def my_func(arg1, arg2): - ... return arg1 + arg2 - """ - - def __init__(self, ttl: int = DEFAULT_TTL): - """ - Π˜Π½ΠΈΡ†ΠΈΠ°Π»ΠΈΠ·Π°Ρ†ΠΈΡ ΠΎΠ±ΡŠΠ΅ΠΊΡ‚Π° кСша. - - Args: - ttl: ВрСмя ΠΆΠΈΠ·Π½ΠΈ кСша Π² сСкундах - """ - self.ttl = ttl - - def cache_on_arguments(self, cache_key: Optional[str] = None) -> Callable: - """ - Π”Π΅ΠΊΠΎΡ€Π°Ρ‚ΠΎΡ€ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ΠΎΠ² Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΉ с использованиСм Redis. - - Args: - cache_key: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹ΠΉ Π±Π°Π·ΠΎΠ²Ρ‹ΠΉ ΠΊΠ»ΡŽΡ‡ кСша. Если Π½Π΅ ΡƒΠΊΠ°Π·Π°Π½, гСнСрируСтся ΠΈΠ· сигнатуры Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ. - - Returns: - Π”Π΅ΠΊΠΎΡ€Π°Ρ‚ΠΎΡ€ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ - - ΠŸΡ€ΠΈΠΌΠ΅Ρ€Ρ‹: - >>> @cache_region.cache_on_arguments("users") - ... def get_users(): - ... return db.query(User).all() - """ - - def decorator(func: Callable) -> Callable: - @functools.wraps(func) - async def wrapper(*args, **kwargs): - # ГСнСрация ΠΊΠ»ΡŽΡ‡Π° кСша - key = self._generate_cache_key(func, cache_key, *args, **kwargs) - - # ΠŸΠΎΠΏΡ‹Ρ‚ΠΊΠ° ΠΏΠΎΠ»ΡƒΡ‡ΠΈΡ‚ΡŒ Π΄Π°Π½Π½Ρ‹Π΅ ΠΈΠ· кСша - cached_data = await redis.get(key) - if cached_data: - try: - return orjson.loads(cached_data) - except Exception: - # Если Π½Π΅ ΡƒΠ΄Π°Π»ΠΎΡΡŒ Π΄Π΅ΡΠ΅Ρ€ΠΈΠ°Π»ΠΈΠ·ΠΎΠ²Π°Ρ‚ΡŒ ΠΊΠ°ΠΊ JSON, ΠΏΠΎΠΏΡ€ΠΎΠ±ΡƒΠ΅ΠΌ ΠΊΠ°ΠΊ pickle - return pickle.loads(cached_data.encode()) - - # Π’Ρ‹Π·ΠΎΠ² ΠΎΡ€ΠΈΠ³ΠΈΠ½Π°Π»ΡŒΠ½ΠΎΠΉ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ, Ссли Π΄Π°Π½Π½Ρ‹Ρ… Π² кСшС Π½Π΅Ρ‚ - result = func(*args, **kwargs) - - # Π‘ΠΎΡ…Ρ€Π°Π½Π΅Π½ΠΈΠ΅ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π° Π² кСш - try: - # ΠŸΡ‹Ρ‚Π°Π΅ΠΌΡΡ ΡΠ΅Ρ€ΠΈΠ°Π»ΠΈΠ·ΠΎΠ²Π°Ρ‚ΡŒ ΠΊΠ°ΠΊ JSON - serialized = json.dumps(result, cls=CustomJSONEncoder) - except (TypeError, ValueError): - # Если Π½Π΅ ΡƒΠ΄Π°Π»ΠΎΡΡŒ, ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ pickle - serialized = pickle.dumps(result).decode() - - await redis.set(key, serialized, ex=self.ttl) - return result - - @functools.wraps(func) - def sync_wrapper(*args, **kwargs): - # Для Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΉ, ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Π΅ Π½Π΅ ΡΠ²Π»ΡΡŽΡ‚ΡΡ ΠΊΠΎΡ€ΡƒΡ‚ΠΈΠ½Π°ΠΌΠΈ - # ГСнСрация ΠΊΠ»ΡŽΡ‡Π° кСша - key = self._generate_cache_key(func, cache_key, *args, **kwargs) - - # Бинхронная вСрсия Π½Π΅ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ await, поэтому Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ всСгда вычисляСтся - result = func(*args, **kwargs) - - # Асинхронно записываСм Π² кэш (Π±ΡƒΠ΄Π΅Ρ‚ Π²Ρ‹ΠΏΠΎΠ»Π½Π΅Π½ΠΎ ΠΏΠΎΠ·ΠΆΠ΅) - try: - import asyncio - - # ΠŸΠΎΠΏΡ‹Ρ‚ΠΊΠ° ΡΠ΅Ρ€ΠΈΠ°Π»ΠΈΠ·ΠΎΠ²Π°Ρ‚ΡŒ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ Π² JSON - try: - serialized = json.dumps(result, cls=CustomJSONEncoder) - except (TypeError, ValueError) as e: - logger.debug(f"JSON сСриализация Π½Π΅ ΡƒΠ΄Π°Π»Π°ΡΡŒ, ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ pickle: {e}") - # Если Π½Π΅ ΡƒΠ΄Π°Π»ΠΎΡΡŒ ΡΠ΅Ρ€ΠΈΠ°Π»ΠΈΠ·ΠΎΠ²Π°Ρ‚ΡŒ ΠΊΠ°ΠΊ JSON, ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ pickle - serialized = pickle.dumps(result).decode() - - asyncio.create_task(redis.set(key, serialized, ex=self.ttl)) - except Exception as e: - logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠΈ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π°: {e}") - # Для ΠΎΡ‚Π»Π°Π΄ΠΊΠΈ добавляСм ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΡŽ ΠΎ Ρ‚ΠΈΠΏΠ΅ 
ΠΎΠ±ΡŠΠ΅ΠΊΡ‚Π° - logger.debug(f"Π’ΠΈΠΏ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π°: {type(result)}") - if hasattr(result, "__class__"): - logger.debug(f"Класс Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π°: {result.__class__.__name__}") - - return result - - # Π’ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌ асинхронный ΠΈΠ»ΠΈ синхронный Π²Ρ€Π°ΠΏΠΏΠ΅Ρ€ Π² зависимости ΠΎΡ‚ Ρ‚ΠΈΠΏΠ° Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ - if inspect.iscoroutinefunction(func): - return wrapper - else: - return sync_wrapper - - return decorator - - def _generate_cache_key(self, func: Callable, base_key: Optional[str], *args, **kwargs) -> str: - """ - Π“Π΅Π½Π΅Ρ€ΠΈΡ€ΡƒΠ΅Ρ‚ ΠΊΠ»ΡŽΡ‡ кСша Π½Π° основС Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ ΠΈ Π΅Ρ‘ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚ΠΎΠ². - - Args: - func: ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌΠ°Ρ функция - base_key: Π‘Π°Π·ΠΎΠ²Ρ‹ΠΉ ΠΊΠ»ΡŽΡ‡ кСша - *args: ΠŸΠΎΠ·ΠΈΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ - **kwargs: Π˜ΠΌΠ΅Π½ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ - - Returns: - Π‘Ρ‚Ρ€ΠΎΠΊΠΎΠ²Ρ‹ΠΉ ΠΊΠ»ΡŽΡ‡ для кСша - """ - if base_key: - key_prefix = f"cache:{base_key}" - else: - key_prefix = f"cache:{func.__module__}.{func.__name__}" - - # Π‘ΠΎΠ·Π΄Π°Π΅ΠΌ Ρ…Π΅Ρˆ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚ΠΎΠ² - arg_hash = hashlib.md5() - - # ДобавляСм ΠΏΠΎΠ·ΠΈΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ - for arg in args: - try: - arg_hash.update(str(arg).encode()) - except Exception: - arg_hash.update(str(id(arg)).encode()) - - # ДобавляСм ΠΈΠΌΠ΅Π½ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ (сортируСм для дСтСрминированности) - for k in sorted(kwargs.keys()): - try: - arg_hash.update(f"{k}:{kwargs[k]}".encode()) - except Exception: - arg_hash.update(f"{k}:{id(kwargs[k])}".encode()) - - return f"{key_prefix}:{arg_hash.hexdigest()}" - - def invalidate(self, func: Callable, *args, **kwargs) -> None: - """ - Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅Ρ‚ (удаляСт) кСш для ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ с ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½Ρ‹ΠΌΠΈ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Π°ΠΌΠΈ. 
-
-        Args:
-            func: ΠšΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½Π½Π°Ρ функция
-            *args: ΠŸΠΎΠ·ΠΈΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ
-            **kwargs: Π˜ΠΌΠ΅Π½ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚Ρ‹ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ
-        """
-        key = self._generate_cache_key(func, None, *args, **kwargs)
-        import asyncio
-
-        asyncio.create_task(redis.execute("DEL", key))
-
-
-# ЭкзСмпляр класса RedisCache для использования Π² ΠΊΠΎΠ΄Π΅
-cache_region = RedisCache()
diff --git a/cache/precache.py b/cache/precache.py
index 5df91f2d..23844024 100644
--- a/cache/precache.py
+++ b/cache/precache.py
@@ -1,7 +1,6 @@
 import asyncio
 import json
 
-import orjson
 from sqlalchemy import and_, join, select
 
 from cache.cache import cache_author, cache_topic
@@ -87,11 +86,15 @@ async def precache_data():
             # ΠŸΡ€Π΅ΠΎΠ±Ρ€Π°Π·ΡƒΠ΅ΠΌ ΡΠ»ΠΎΠ²Π°Ρ€ΡŒ Π² список Π°Ρ€Π³ΡƒΠΌΠ΅Π½Ρ‚ΠΎΠ² для HSET
             if value:
-                flattened = []
-                for field, val in value.items():
-                    flattened.extend([field, val])
-
-                await redis.execute("HSET", key, *flattened)
+                # Если Π·Π½Π°Ρ‡Π΅Π½ΠΈΠ΅ - ΡΠ»ΠΎΠ²Π°Ρ€ΡŒ, ΠΏΡ€Π΅ΠΎΠ±Ρ€Π°Π·ΡƒΠ΅ΠΌ Π΅Π³ΠΎ Π² плоский список для HSET
+                if isinstance(value, dict):
+                    flattened = []
+                    for field, val in value.items():
+                        flattened.extend([field, val])
+                    await redis.execute("HSET", key, *flattened)
+                else:
+                    # ΠŸΡ€Π΅Π΄ΠΏΠΎΠ»Π°Π³Π°Π΅ΠΌ, Ρ‡Ρ‚ΠΎ Π·Π½Π°Ρ‡Π΅Π½ΠΈΠ΅ ΡƒΠΆΠ΅ содСрТит плоский список "ΠΏΠΎΠ»Π΅-Π·Π½Π°Ρ‡Π΅Π½ΠΈΠ΅"
+                    await redis.execute("HSET", key, *value)
                 logger.info(f"redis hash '{key}' was restored")
 
     with local_session() as session:
diff --git a/cache/revalidator.py b/cache/revalidator.py
index 125b9f5f..7be5041c 100644
--- a/cache/revalidator.py
+++ b/cache/revalidator.py
@@ -1,17 +1,27 @@
 import asyncio
 
-from cache.cache import cache_author, cache_topic, get_cached_author, get_cached_topic
+from cache.cache import (
+    cache_author,
+    cache_topic,
+    get_cached_author,
+    get_cached_topic,
+    invalidate_cache_by_prefix,
+)
 from resolvers.stat import get_with_stat
+from services.redis import redis
 from utils.logger import root_logger as logger
 
+CACHE_REVALIDATION_INTERVAL = 300  # 5 ΠΌΠΈΠ½ΡƒΡ‚
+
 
 class CacheRevalidationManager:
-    def __init__(self, interval=60):
+    def __init__(self, interval=CACHE_REVALIDATION_INTERVAL):
         """Π˜Π½ΠΈΡ†ΠΈΠ°Π»ΠΈΠ·Π°Ρ†ΠΈΡ ΠΌΠ΅Π½Π΅Π΄ΠΆΠ΅Ρ€Π° с Π·Π°Π΄Π°Π½Π½Ρ‹ΠΌ ΠΈΠ½Ρ‚Π΅Ρ€Π²Π°Π»ΠΎΠΌ ΠΏΡ€ΠΎΠ²Π΅Ρ€ΠΊΠΈ (Π² сСкундах)."""
         self.interval = interval
         self.items_to_revalidate = {"authors": set(), "topics": set(), "shouts": set(), "reactions": set()}
         self.lock = asyncio.Lock()
         self.running = True
+        self.MAX_BATCH_SIZE = 10  # МаксимальноС количСство элСмСнтов для ΠΏΠΎΡˆΡ‚ΡƒΡ‡Π½ΠΎΠΉ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ
 
     async def start(self):
         """Запуск Ρ„ΠΎΠ½ΠΎΠ²ΠΎΠ³ΠΎ Π²ΠΎΡ€ΠΊΠ΅Ρ€Π° для Ρ€Π΅Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кэша."""
@@ -32,22 +42,86 @@ class CacheRevalidationManager:
         """ОбновлСниС кэша для всСх сущностСй, Ρ‚Ρ€Π΅Π±ΡƒΡŽΡ‰ΠΈΡ… Ρ€Π΅Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ."""
         async with self.lock:
             # РСвалидация кэша Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²
-            for author_id in self.items_to_revalidate["authors"]:
-                author = await get_cached_author(author_id, get_with_stat)
-                if author:
-                    await cache_author(author)
-            self.items_to_revalidate["authors"].clear()
+            if self.items_to_revalidate["authors"]:
+                logger.debug(f"Revalidating {len(self.items_to_revalidate['authors'])} authors")
+                for author_id in self.items_to_revalidate["authors"]:
+                    if author_id == "all":
+                        await invalidate_cache_by_prefix("authors")
+                        break
+                    author = await get_cached_author(author_id, get_with_stat)
+                    if author:
+                        await cache_author(author)
+                self.items_to_revalidate["authors"].clear()
 
             # РСвалидация кэша Ρ‚Π΅ΠΌ
-            for topic_id in self.items_to_revalidate["topics"]:
-                topic = await get_cached_topic(topic_id)
-                if topic:
-                    await cache_topic(topic)
-            self.items_to_revalidate["topics"].clear()
+            if self.items_to_revalidate["topics"]:
+                logger.debug(f"Revalidating {len(self.items_to_revalidate['topics'])} topics")
+                for topic_id in self.items_to_revalidate["topics"]:
+                    if topic_id == "all":
+                        await invalidate_cache_by_prefix("topics")
+                        break
+                    topic = await get_cached_topic(topic_id)
+                    if topic:
+                        await cache_topic(topic)
+                self.items_to_revalidate["topics"].clear()
+
+            # РСвалидация ΡˆΠ°ΡƒΡ‚ΠΎΠ² (ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ) - точСчная инвалидация
+            if self.items_to_revalidate["shouts"]:
+                shouts_count = len(self.items_to_revalidate["shouts"])
+                logger.debug(f"Revalidating {shouts_count} shouts")
+
+                # ΠŸΡ€ΠΎΠ²Π΅Ρ€ΡΠ΅ΠΌ Π½Π°Π»ΠΈΡ‡ΠΈΠ΅ ΡΠΏΠ΅Ρ†ΠΈΠ°Π»ΡŒΠ½ΠΎΠ³ΠΎ Ρ„Π»Π°Π³Π° 'all'
+                if "all" in self.items_to_revalidate["shouts"]:
+                    await invalidate_cache_by_prefix("shouts")
+                else:
+                    # Если элСмСнтов ΠΌΠ½ΠΎΠ³ΠΎ, сначала удаляСм ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ,
+                    # Π·Π°Ρ‚Ρ€Π°Π³ΠΈΠ²Π°ΡŽΡ‰ΠΈΠ΅ ΠΌΠ½ΠΎΠ³ΠΎ сущностСй сразу
+                    if shouts_count > self.MAX_BATCH_SIZE:
+                        collection_keys = await redis.execute("KEYS", "shouts:*")
+                        if collection_keys:
+                            await redis.execute("DEL", *collection_keys)
+                            logger.debug(f"Π£Π΄Π°Π»Π΅Π½ΠΎ {len(collection_keys)} ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΎΠ½Π½Ρ‹Ρ… ΠΊΠ»ΡŽΡ‡Π΅ΠΉ ΡˆΠ°ΡƒΡ‚ΠΎΠ²")
+
+                    # ВочСчная инвалидация кСша ΠΊΠ°ΠΆΠ΄ΠΎΠ³ΠΎ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠ³ΠΎ ΡˆΠ°ΡƒΡ‚Π°
+                    for shout_id in self.items_to_revalidate["shouts"]:
+                        key = f"shout:id:{shout_id}"
+                        await redis.execute("DEL", key)
+                        logger.debug(f"Π£Π΄Π°Π»Π΅Π½ ΠΊΠ»ΡŽΡ‡ кСша {key}")
+
+                self.items_to_revalidate["shouts"].clear()
+
+            # Аналогично для Ρ€Π΅Π°ΠΊΡ†ΠΈΠΉ - точСчная инвалидация
+            if self.items_to_revalidate["reactions"]:
+                reactions_count = len(self.items_to_revalidate["reactions"])
+                logger.debug(f"Revalidating {reactions_count} reactions")
+
+                if "all" in self.items_to_revalidate["reactions"]:
+                    await invalidate_cache_by_prefix("reactions")
+                else:
+                    # Если элСмСнтов ΠΌΠ½ΠΎΠ³ΠΎ, сначала удаляСм ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ Ρ€Π΅Π°ΠΊΡ†ΠΈΠΉ
+                    if reactions_count > self.MAX_BATCH_SIZE:
+                        collection_keys = await redis.execute("KEYS", "reactions:*")
+                        if collection_keys:
+                            await redis.execute("DEL", *collection_keys)
+                            logger.debug(f"Π£Π΄Π°Π»Π΅Π½ΠΎ {len(collection_keys)} ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΎΠ½Π½Ρ‹Ρ… ΠΊΠ»ΡŽΡ‡Π΅ΠΉ Ρ€Π΅Π°ΠΊΡ†ΠΈΠΉ")
+
+                    # ВочСчная инвалидация кСша ΠΊΠ°ΠΆΠ΄ΠΎΠΉ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Ρ€Π΅Π°ΠΊΡ†ΠΈΠΈ
+                    for reaction_id in self.items_to_revalidate["reactions"]:
+                        key = f"reaction:id:{reaction_id}"
+                        await redis.execute("DEL", key)
+                        logger.debug(f"Π£Π΄Π°Π»Π΅Π½ ΠΊΠ»ΡŽΡ‡ кСша {key}")
+
+                self.items_to_revalidate["reactions"].clear()
 
     def mark_for_revalidation(self, entity_id, entity_type):
         """ΠžΡ‚ΠΌΠ΅Ρ‚ΠΈΡ‚ΡŒ ΡΡƒΡ‰Π½ΠΎΡΡ‚ΡŒ для Ρ€Π΅Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ."""
-        self.items_to_revalidate[entity_type].add(entity_id)
+        if entity_id and entity_type:
+            self.items_to_revalidate[entity_type].add(entity_id)
+
+    def invalidate_all(self, entity_type):
+        """ΠŸΠΎΠΌΠ΅Ρ‚ΠΈΡ‚ΡŒ для ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ всС элСмСнты ΡƒΠΊΠ°Π·Π°Π½Π½ΠΎΠ³ΠΎ Ρ‚ΠΈΠΏΠ°."""
+        logger.debug(f"Marking all {entity_type} for invalidation")
+        # ΠžΡΠΎΠ±Ρ‹ΠΉ Ρ„Π»Π°Π³ для ΠΏΠΎΠ»Π½ΠΎΠΉ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ
+        self.items_to_revalidate[entity_type].add("all")
 
     async def stop(self):
         """ΠžΡΡ‚Π°Π½ΠΎΠ²ΠΊΠ° Ρ„ΠΎΠ½ΠΎΠ²ΠΎΠ³ΠΎ Π²ΠΎΡ€ΠΊΠ΅Ρ€Π°."""
@@ -60,4 +134,4 @@ class CacheRevalidationManager:
         pass
 
 
-revalidation_manager = CacheRevalidationManager(interval=300)  # РСвалидация ΠΊΠ°ΠΆΠ΄Ρ‹Π΅ 5 ΠΌΠΈΠ½ΡƒΡ‚
+revalidation_manager = CacheRevalidationManager()
diff --git a/docs/caching.md b/docs/caching.md
new file mode 100644
index 00000000..7c025be2
--- /dev/null
+++ b/docs/caching.md
@@ -0,0 +1,279 @@
+# БистСма ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Discours
+
+## ΠžΠ±Ρ‰Π΅Π΅ описаниС
+
+БистСма ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Discours - это комплСксноС Ρ€Π΅ΡˆΠ΅Π½ΠΈΠ΅ для ΠΏΠΎΠ²Ρ‹ΡˆΠ΅Π½ΠΈΡ ΠΏΡ€ΠΎΠΈΠ·Π²ΠΎΠ΄ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΡΡ‚ΠΈ ΠΏΠ»Π°Ρ‚Ρ„ΠΎΡ€ΠΌΡ‹. Она ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ Redis для хранСния часто Π·Π°ΠΏΡ€Π°ΡˆΠΈΠ²Π°Π΅ΠΌΡ‹Ρ… Π΄Π°Π½Π½Ρ‹Ρ… ΠΈ ΡƒΠΌΠ΅Π½ΡŒΡˆΠ΅Π½ΠΈΡ Π½Π°Π³Ρ€ΡƒΠ·ΠΊΠΈ Π½Π° ΠΎΡΠ½ΠΎΠ²Π½ΡƒΡŽ Π±Π°Π·Ρƒ Π΄Π°Π½Π½Ρ‹Ρ….
+
+ΠšΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ Ρ€Π΅Π°Π»ΠΈΠ·ΠΎΠ²Π°Π½ΠΎ ΠΊΠ°ΠΊ многоуровнСвая систСма, состоящая ΠΈΠ· Π½Π΅ΡΠΊΠΎΠ»ΡŒΠΊΠΈΡ… ΠΌΠΎΠ΄ΡƒΠ»Π΅ΠΉ:
+
+- `cache.py` - основной ΠΌΠΎΠ΄ΡƒΠ»ΡŒ с функциями ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ
+- `revalidator.py` - асинхронный ΠΌΠ΅Π½Π΅Π΄ΠΆΠ΅Ρ€ Ρ€Π΅Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша
+- `triggers.py` - Ρ‚Ρ€ΠΈΠ³Π³Π΅Ρ€Ρ‹ событий SQLAlchemy для автоматичСской Ρ€Π΅Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ
+- `precache.py` - ΠΏΡ€Π΅Π΄Π²Π°Ρ€ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠ΅ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ Π΄Π°Π½Π½Ρ‹Ρ… ΠΏΡ€ΠΈ стартС прилоТСния
+
+## ΠšΠ»ΡŽΡ‡Π΅Π²Ρ‹Π΅ ΠΊΠΎΠΌΠΏΠΎΠ½Π΅Π½Ρ‚Ρ‹
+
+### 1. Π€ΠΎΡ€ΠΌΠ°Ρ‚Ρ‹ ΠΊΠ»ΡŽΡ‡Π΅ΠΉ кСша
+
+БистСма ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΈΠ²Π°Π΅Ρ‚ нСсколько Ρ„ΠΎΡ€ΠΌΠ°Ρ‚ΠΎΠ² ΠΊΠ»ΡŽΡ‡Π΅ΠΉ для обСспСчСния совмСстимости ΠΈ удобства использования:
+
+- **ΠšΠ»ΡŽΡ‡ΠΈ сущностСй**: `entity:property:value` (Π½Π°ΠΏΡ€ΠΈΠΌΠ΅Ρ€, `author:id:123`)
+- **ΠšΠ»ΡŽΡ‡ΠΈ ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΉ**: `entity:collection:params` (Π½Π°ΠΏΡ€ΠΈΠΌΠ΅Ρ€, `authors:stats:limit=10:offset=0`)
+- **Π‘ΠΏΠ΅Ρ†ΠΈΠ°Π»ΡŒΠ½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ**: для ΠΎΠ±Ρ€Π°Ρ‚Π½ΠΎΠΉ совмСстимости (Π½Π°ΠΏΡ€ΠΈΠΌΠ΅Ρ€, `topic_shouts_123`)
+
+ВсС стандартныС Ρ„ΠΎΡ€ΠΌΠ°Ρ‚Ρ‹ ΠΊΠ»ΡŽΡ‡Π΅ΠΉ хранятся Π² словарС `CACHE_KEYS`:
+
+```python
+CACHE_KEYS = {
+    "TOPIC_ID": "topic:id:{}",
+    "TOPIC_SLUG": "topic:slug:{}",
+    "AUTHOR_ID": "author:id:{}",
+    # ΠΈ Π΄Ρ€ΡƒΠ³ΠΈΠ΅...
+}
+```
+
+### 2. ΠžΡΠ½ΠΎΠ²Π½Ρ‹Π΅ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ
+
+#### Π‘Ρ‚Ρ€ΡƒΠΊΡ‚ΡƒΡ€Π° ΠΊΠ»ΡŽΡ‡Π΅ΠΉ
+
+ВмСсто Π³Π΅Π½Π΅Ρ€Π°Ρ†ΠΈΠΈ ΠΊΠ»ΡŽΡ‡Π΅ΠΉ Ρ‡Π΅Ρ€Π΅Π· Π²ΡΠΏΠΎΠΌΠΎΠ³Π°Ρ‚Π΅Π»ΡŒΠ½Ρ‹Π΅ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ, систСма слСдуСт строгим конвСнциям формирования ΠΊΠ»ΡŽΡ‡Π΅ΠΉ:
+
+1. **ΠšΠ»ΡŽΡ‡ΠΈ для ΠΎΡ‚Π΄Π΅Π»ΡŒΠ½Ρ‹Ρ… сущностСй** строятся ΠΏΠΎ ΡˆΠ°Π±Π»ΠΎΠ½Ρƒ:
+   ```
+   entity:property:value
+   ```
+   НапримСр:
+   - `topic:id:123` - Ρ‚Π΅ΠΌΠ° с ID 123
+   - `author:slug:john-doe` - Π°Π²Ρ‚ΠΎΡ€ со слагом "john-doe"
+   - `shout:id:456` - публикация с ID 456
+
+2. **ΠšΠ»ΡŽΡ‡ΠΈ для ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΉ** строятся ΠΏΠΎ ΡˆΠ°Π±Π»ΠΎΠ½Ρƒ:
+   ```
+   entity:collection[:filter1=value1:filter2=value2:...]
+ ``` + НапримСр: + - `topics:all:basic` - Π±Π°Π·ΠΎΠ²Ρ‹ΠΉ список всСх Ρ‚Π΅ΠΌ + - `authors:stats:limit=10:offset=0:sort=name` - отсортированный список Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ + - `shouts:feed:limit=20:community=1` - Π»Π΅Π½Ρ‚Π° ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ с Ρ„ΠΈΠ»ΡŒΡ‚Ρ€ΠΎΠΌ ΠΏΠΎ сообщСству + +3. **Π‘ΠΏΠ΅Ρ†ΠΈΠ°Π»ΡŒΠ½Ρ‹Π΅ Ρ„ΠΎΡ€ΠΌΠ°Ρ‚Ρ‹ ΠΊΠ»ΡŽΡ‡Π΅ΠΉ** для ΠΎΠ±Ρ€Π°Ρ‚Π½ΠΎΠΉ совмСстимости: + ``` + entity_action_id + ``` + НапримСр: + - `topic_shouts_123` - ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΈ для Ρ‚Π΅ΠΌΡ‹ с ID 123 + +Π’ΠΎ всСх модулях систСмы Ρ€Π°Π·Ρ€Π°Π±ΠΎΡ‚Ρ‡ΠΈΠΊΠΈ Π΄ΠΎΠ»ΠΆΠ½Ρ‹ явно Ρ„ΠΎΡ€ΠΌΠΈΡ€ΠΎΠ²Π°Ρ‚ΡŒ ΠΊΠ»ΡŽΡ‡ΠΈ Π² соотвСтствии с этими конвСнциями, Ρ‡Ρ‚ΠΎ обСспСчиваСт Π΅Π΄ΠΈΠ½ΠΎΠΎΠ±Ρ€Π°Π·ΠΈΠ΅ ΠΈ ΠΏΡ€Π΅Π΄ΡΠΊΠ°Π·ΡƒΠ΅ΠΌΠΎΡΡ‚ΡŒ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ. + +#### Π Π°Π±ΠΎΡ‚Π° с Π΄Π°Π½Π½Ρ‹ΠΌΠΈ Π² кСшС + +```python +async def cache_data(key, data, ttl=None) +async def get_cached_data(key) +``` + +Π­Ρ‚ΠΈ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ ΠΏΡ€Π΅Π΄ΠΎΡΡ‚Π°Π²Π»ΡΡŽΡ‚ ΡƒΠ½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½Ρ‹ΠΉ интСрфСйс для сохранСния ΠΈ получСния Π΄Π°Π½Π½Ρ‹Ρ… ΠΈΠ· кСша. Они Π½Π°ΠΏΡ€ΡΠΌΡƒΡŽ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΡŽΡ‚ Redis Ρ‡Π΅Ρ€Π΅Π· Π²Ρ‹Π·ΠΎΠ²Ρ‹ `redis.execute()`. + +#### ВысокоуровнСвоС ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ запросов + +```python +async def cached_query(cache_key, query_func, ttl=None, force_refresh=False, **query_params) +``` + +Ѐункция `cached_query` ΠΎΠ±ΡŠΠ΅Π΄ΠΈΠ½ΡΠ΅Ρ‚ ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Π΄Π°Π½Π½Ρ‹Ρ… ΠΈΠ· кСша ΠΈ Π²Ρ‹ΠΏΠΎΠ»Π½Π΅Π½ΠΈΠ΅ запроса Π² случаС отсутствия Π΄Π°Π½Π½Ρ‹Ρ… Π² кСшС. Π­Ρ‚ΠΎ основная функция, ΠΊΠΎΡ‚ΠΎΡ€ΡƒΡŽ слСдуСт ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΠΎΠ²Π°Ρ‚ΡŒ Π² Ρ€Π΅Π·ΠΎΠ»Π²Π΅Ρ€Π°Ρ… для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ΠΎΠ² запросов. + +### 3. ΠšΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ сущностСй + +Для основных Ρ‚ΠΈΠΏΠΎΠ² сущностСй Ρ€Π΅Π°Π»ΠΈΠ·ΠΎΠ²Π°Π½Ρ‹ ΡΠΏΠ΅Ρ†ΠΈΠ°Π»ΡŒΠ½Ρ‹Π΅ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ: + +```python +async def cache_topic(topic: dict) +async def cache_author(author: dict) +async def get_cached_topic(topic_id: int) +async def get_cached_author(author_id: int, get_with_stat) +``` + +Π­Ρ‚ΠΈ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ ΡƒΠΏΡ€ΠΎΡ‰Π°ΡŽΡ‚ Ρ€Π°Π±ΠΎΡ‚Ρƒ с часто ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌΡ‹ΠΌΠΈ Ρ‚ΠΈΠΏΠ°ΠΌΠΈ Π΄Π°Π½Π½Ρ‹Ρ… ΠΈ ΠΎΠ±Π΅ΡΠΏΠ΅Ρ‡ΠΈΠ²Π°ΡŽΡ‚ Π΅Π΄ΠΈΠ½ΠΎΠΎΠ±Ρ€Π°Π·Π½Ρ‹ΠΉ ΠΏΠΎΠ΄Ρ…ΠΎΠ΄ ΠΊ ΠΈΡ… ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡŽ. + +### 4. Π Π°Π±ΠΎΡ‚Π° со связями + +Для Ρ€Π°Π±ΠΎΡ‚Ρ‹ со связями ΠΌΠ΅ΠΆΠ΄Ρƒ сущностями ΠΏΡ€Π΅Π΄Π½Π°Π·Π½Π°Ρ‡Π΅Π½Ρ‹ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ: + +```python +async def cache_follows(follower_id, entity_type, entity_id, is_insert=True) +async def get_cached_topic_followers(topic_id) +async def get_cached_author_followers(author_id) +async def get_cached_follower_topics(author_id) +``` + +Они ΠΏΠΎΠ·Π²ΠΎΠ»ΡΡŽΡ‚ эффСктивно ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Ρ‚ΡŒ ΠΈ ΠΏΠΎΠ»ΡƒΡ‡Π°Ρ‚ΡŒ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΡŽ ΠΎ подписках, связях ΠΌΠ΅ΠΆΠ΄Ρƒ Π°Π²Ρ‚ΠΎΡ€Π°ΠΌΠΈ, Ρ‚Π΅ΠΌΠ°ΠΌΠΈ ΠΈ публикациями. + +## БистСма ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша + +### 1. ΠŸΡ€ΡΠΌΠ°Ρ инвалидация + +БистСма ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΈΠ²Π°Π΅Ρ‚ Π΄Π²Π° Ρ‚ΠΈΠΏΠ° ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша: + +#### 1.1. Π˜Π½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡ ΠΏΠΎ прСфиксу + +```python +async def invalidate_cache_by_prefix(prefix) +``` + +ΠŸΠΎΠ·Π²ΠΎΠ»ΡΠ΅Ρ‚ ΠΈΠ½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΠΎΠ²Π°Ρ‚ΡŒ всС ΠΊΠ»ΡŽΡ‡ΠΈ кСша, Π½Π°Ρ‡ΠΈΠ½Π°ΡŽΡ‰ΠΈΠ΅ΡΡ с ΡƒΠΊΠ°Π·Π°Π½Π½ΠΎΠ³ΠΎ прСфикса. Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ Π² Ρ€Π΅Π·ΠΎΠ»Π²Π΅Ρ€Π°Ρ… для ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ Π³Ρ€ΡƒΠΏΠΏΡ‹ кСшСй ΠΏΡ€ΠΈ массовых измСнСниях. + +#### 1.2. 
ВочСчная инвалидация + +```python +async def invalidate_authors_cache(author_id=None) +async def invalidate_topics_cache(topic_id=None) +``` + +Π­Ρ‚ΠΈ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ ΠΏΠΎΠ·Π²ΠΎΠ»ΡΡŽΡ‚ ΠΈΠ½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΠΎΠ²Π°Ρ‚ΡŒ кСш Ρ‚ΠΎΠ»ΡŒΠΊΠΎ для ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ сущности, Ρ‡Ρ‚ΠΎ сниТаСт Π½Π°Π³Ρ€ΡƒΠ·ΠΊΡƒ Π½Π° Redis ΠΈ ΠΏΡ€Π΅Π΄ΠΎΡ‚Π²Ρ€Π°Ρ‰Π°Π΅Ρ‚ Π½Π΅Π½ΡƒΠΆΠ½ΡƒΡŽ ΠΏΠΎΡ‚Π΅Ρ€ΡŽ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹Ρ… Π΄Π°Π½Π½Ρ‹Ρ…. Если ID сущности Π½Π΅ ΡƒΠΊΠ°Π·Π°Π½, ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ инвалидация ΠΏΠΎ прСфиксу. + +ΠŸΡ€ΠΈΠΌΠ΅Ρ€Ρ‹ использования Ρ‚ΠΎΡ‡Π΅Ρ‡Π½ΠΎΠΉ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ: + +```python +# Π˜Π½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡ кСша Ρ‚ΠΎΠ»ΡŒΠΊΠΎ для Π°Π²Ρ‚ΠΎΡ€Π° с ID 123 +await invalidate_authors_cache(123) + +# Π˜Π½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡ кСша Ρ‚ΠΎΠ»ΡŒΠΊΠΎ для Ρ‚Π΅ΠΌΡ‹ с ID 456 +await invalidate_topics_cache(456) +``` + +### 2. ΠžΡ‚Π»ΠΎΠΆΠ΅Π½Π½Π°Ρ инвалидация + +ΠœΠΎΠ΄ΡƒΠ»ΡŒ `revalidator.py` Ρ€Π΅Π°Π»ΠΈΠ·ΡƒΠ΅Ρ‚ систСму ΠΎΡ‚Π»ΠΎΠΆΠ΅Π½Π½ΠΎΠΉ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша Ρ‡Π΅Ρ€Π΅Π· класс `CacheRevalidationManager`: + +```python +class CacheRevalidationManager: + # ... + async def process_revalidation(self): + # ... + def mark_for_revalidation(self, entity_id, entity_type): + # ... +``` + +ΠœΠ΅Π½Π΅Π΄ΠΆΠ΅Ρ€ Ρ€Π΅Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ Ρ€Π°Π±ΠΎΡ‚Π°Π΅Ρ‚ ΠΊΠ°ΠΊ асинхронный Ρ„ΠΎΠ½ΠΎΠ²Ρ‹ΠΉ процСсс, ΠΊΠΎΡ‚ΠΎΡ€Ρ‹ΠΉ пСриодичСски (ΠΏΠΎ ΡƒΠΌΠΎΠ»Ρ‡Π°Π½ΠΈΡŽ ΠΊΠ°ΠΆΠ΄Ρ‹Π΅ 5 ΠΌΠΈΠ½ΡƒΡ‚) провСряСт Π½Π°Π»ΠΈΡ‡ΠΈΠ΅ сущностСй для Ρ€Π΅Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ. + +ΠžΡΠΎΠ±Π΅Π½Π½ΠΎΡΡ‚ΠΈ Ρ€Π΅Π°Π»ΠΈΠ·Π°Ρ†ΠΈΠΈ: +- Для Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² ΠΈ Ρ‚Π΅ΠΌ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ ΠΏΠΎΡˆΡ‚ΡƒΡ‡Π½Π°Ρ рСвалидация ΠΊΠ°ΠΆΠ΄ΠΎΠΉ записи +- Для ΡˆΠ°ΡƒΡ‚ΠΎΠ² ΠΈ Ρ€Π΅Π°ΠΊΡ†ΠΈΠΉ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ батчСвая ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ°, с ΠΏΠΎΡ€ΠΎΠ³ΠΎΠΌ Π² 10 элСмСнтов +- ΠŸΡ€ΠΈ достиТСнии ΠΏΠΎΡ€ΠΎΠ³Π° систСма ΠΏΠ΅Ρ€Π΅ΠΊΠ»ΡŽΡ‡Π°Π΅Ρ‚ΡΡ Π½Π° ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡŽ ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΉ вмСсто ΠΏΠΎΡˆΡ‚ΡƒΡ‡Π½ΠΎΠΉ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΈ +- Π‘ΠΏΠ΅Ρ†ΠΈΠ°Π»ΡŒΠ½Ρ‹ΠΉ Ρ„Π»Π°Π³ `all` позволяСт Π·Π°ΠΏΡƒΡΡ‚ΠΈΡ‚ΡŒ ΠΏΠΎΠ»Π½ΡƒΡŽ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡŽ всСх записСй Ρ‚ΠΈΠΏΠ° + +### 3. АвтоматичСская инвалидация Ρ‡Π΅Ρ€Π΅Π· Ρ‚Ρ€ΠΈΠ³Π³Π΅Ρ€Ρ‹ + +ΠœΠΎΠ΄ΡƒΠ»ΡŒ `triggers.py` рСгистрируСт ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚Ρ‡ΠΈΠΊΠΈ событий SQLAlchemy, ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Π΅ автоматичСски ΠΎΡ‚ΠΌΠ΅Ρ‡Π°ΡŽΡ‚ сущности для Ρ€Π΅Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ ΠΏΡ€ΠΈ ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΈ Π΄Π°Π½Π½Ρ‹Ρ… Π² Π±Π°Π·Π΅: + +```python +def events_register(): + event.listen(Author, "after_update", mark_for_revalidation) + event.listen(Topic, "after_update", mark_for_revalidation) + # ΠΈ Π΄Ρ€ΡƒΠ³ΠΈΠ΅... +``` + +Π’Ρ€ΠΈΠ³Π³Π΅Ρ€Ρ‹ ΠΈΠΌΠ΅ΡŽΡ‚ ΡΠ»Π΅Π΄ΡƒΡŽΡ‰ΠΈΠ΅ особСнности: +- Π Π΅Π°Π³ΠΈΡ€ΡƒΡŽΡ‚ Π½Π° события вставки, обновлСния ΠΈ удалСния +- ΠžΡ‚ΠΌΠ΅Ρ‡Π°ΡŽΡ‚ Π·Π°Ρ‚Ρ€ΠΎΠ½ΡƒΡ‚Ρ‹Π΅ сущности для ΠΎΡ‚Π»ΠΎΠΆΠ΅Π½Π½ΠΎΠΉ Ρ€Π΅Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ +- Π£Ρ‡ΠΈΡ‚Ρ‹Π²Π°ΡŽΡ‚ связи ΠΌΠ΅ΠΆΠ΄Ρƒ сущностями (Π½Π°ΠΏΡ€ΠΈΠΌΠ΅Ρ€, ΠΏΡ€ΠΈ ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΈ Ρ‚Π΅ΠΌΡ‹ ΠΎΠ±Π½ΠΎΠ²Π»ΡΡŽΡ‚ΡΡ связанныС ΡˆΠ°ΡƒΡ‚Ρ‹) + +## ΠŸΡ€Π΅Π΄Π²Π°Ρ€ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠ΅ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ + +ΠœΠΎΠ΄ΡƒΠ»ΡŒ `precache.py` Ρ€Π΅Π°Π»ΠΈΠ·ΡƒΠ΅Ρ‚ ΠΏΡ€Π΅Π΄Π²Π°Ρ€ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠ΅ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ часто ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌΡ‹Ρ… Π΄Π°Π½Π½Ρ‹Ρ… ΠΏΡ€ΠΈ стартС прилоТСния: + +```python +async def precache_data(): + # ... +``` + +Π­Ρ‚Π° функция выполняСтся ΠΏΡ€ΠΈ запускС прилоТСния ΠΈ заполняСт кСш Π΄Π°Π½Π½Ρ‹ΠΌΠΈ, ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Π΅ Π±ΡƒΠ΄ΡƒΡ‚ часто Π·Π°ΠΏΡ€Π°ΡˆΠΈΠ²Π°Ρ‚ΡŒΡΡ ΠΏΠΎΠ»ΡŒΠ·ΠΎΠ²Π°Ρ‚Π΅Π»ΡΠΌΠΈ. 
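+
+НиТС ΠΏΡ€ΠΈΠ²Π΅Π΄Π΅Π½ ΡΡ…Π΅ΠΌΠ°Ρ‚ΠΈΡ‡Π½Ρ‹ΠΉ Π½Π°Π±Ρ€ΠΎΡΠΎΠΊ Ρ‚Π°ΠΊΠΎΠ³ΠΎ ΠΏΡ€ΠΎΠ³Ρ€Π΅Π²Π° кСша. Имя Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ `precache_basic_entities` ΠΈ ΠΏΠΎΡ€ΡΠ΄ΠΎΠΊ ΠΎΠ±Ρ…ΠΎΠ΄Π° сущностСй здСсь условныС; Ρ€Π΅Π°Π»ΡŒΠ½Π°Ρ рСализация `precache_data` Π² `precache.py` ΠΌΠΎΠΆΠ΅Ρ‚ ΠΎΡ‚Π»ΠΈΡ‡Π°Ρ‚ΡŒΡΡ Π² дСталях:
+
+```python
+from sqlalchemy import select
+
+from cache.cache import cache_author, cache_topic
+from orm.author import Author
+from orm.topic import Topic
+from services.db import local_session
+
+
+async def precache_basic_entities():
+    """Условный ΠΏΡ€ΠΈΠΌΠ΅Ρ€: ΠΏΡ€ΠΎΠ³Ρ€Π΅Π²Π°Π΅ΠΌ кСш Ρ‚Π΅ΠΌ ΠΈ Π½Π΅ΡƒΠ΄Π°Π»Π΅Π½Π½Ρ‹Ρ… Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² ΠΏΡ€ΠΈ стартС."""
+    with local_session() as session:
+        # ΠšΠ°ΠΆΠ΄Π°Ρ Ρ‚Π΅ΠΌΠ° кСшируСтся ΠΎΡ‚Π΄Π΅Π»ΡŒΠ½ΠΎ (ΠΊΠ»ΡŽΡ‡ΠΈ Π²ΠΈΠ΄Π° topic:id:... ΠΈ topic:slug:...)
+        for topic in session.execute(select(Topic)).scalars().all():
+            await cache_topic(topic.dict())
+        # Авторов Π±Π΅Ρ€Π΅ΠΌ Ρ‚ΠΎΠ»ΡŒΠΊΠΎ Π½Π΅ΡƒΠ΄Π°Π»Π΅Π½Π½Ρ‹Ρ…, ΠΊΠ°ΠΊ ΠΈ Π² ΠΎΡΡ‚Π°Π»ΡŒΠ½ΠΎΠΌ ΠΊΠΎΠ΄Π΅
+        for author in session.execute(select(Author).where(Author.deleted_at.is_(None))).scalars().all():
+            await cache_author(author.dict())
+```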
+ +## ΠŸΡ€ΠΈΠΌΠ΅Ρ€Ρ‹ использования + +### ΠŸΡ€ΠΎΡΡ‚ΠΎΠ΅ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚Π° запроса + +```python +async def get_topics_with_stats(limit=10, offset=0, by="title"): + # Π€ΠΎΡ€ΠΌΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ ΠΊΠ»ΡŽΡ‡Π° кСша ΠΏΠΎ ΠΊΠΎΠ½Π²Π΅Π½Ρ†ΠΈΠΈ + cache_key = f"topics:stats:limit={limit}:offset={offset}:sort={by}" + + cached_data = await get_cached_data(cache_key) + if cached_data: + return cached_data + + # Π’Ρ‹ΠΏΠΎΠ»Π½Π΅Π½ΠΈΠ΅ запроса ΠΊ Π±Π°Π·Π΅ Π΄Π°Π½Π½Ρ‹Ρ… + result = ... # Π»ΠΎΠ³ΠΈΠΊΠ° получСния Π΄Π°Π½Π½Ρ‹Ρ… + + await cache_data(cache_key, result, ttl=300) + return result +``` + +### ИспользованиС ΠΎΠ±ΠΎΠ±Ρ‰Π΅Π½Π½ΠΎΠΉ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΠΈ cached_query + +```python +async def get_topics_with_stats(limit=10, offset=0, by="title"): + async def fetch_data(limit, offset, by): + # Π›ΠΎΠ³ΠΈΠΊΠ° получСния Π΄Π°Π½Π½Ρ‹Ρ… + return result + + # Π€ΠΎΡ€ΠΌΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ ΠΊΠ»ΡŽΡ‡Π° кСша ΠΏΠΎ ΠΊΠΎΠ½Π²Π΅Π½Ρ†ΠΈΠΈ + cache_key = f"topics:stats:limit={limit}:offset={offset}:sort={by}" + + return await cached_query( + cache_key, + fetch_data, + ttl=300, + limit=limit, + offset=offset, + by=by + ) +``` + +### ВочСчная инвалидация кСша ΠΏΡ€ΠΈ ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΈ Π΄Π°Π½Π½Ρ‹Ρ… + +```python +async def update_topic(topic_id, new_data): + # ОбновлСниС Π΄Π°Π½Π½Ρ‹Ρ… Π² Π±Π°Π·Π΅ + # ... + + # ВочСчная инвалидация кСша Ρ‚ΠΎΠ»ΡŒΠΊΠΎ для ΠΈΠ·ΠΌΠ΅Π½Π΅Π½Π½ΠΎΠΉ Ρ‚Π΅ΠΌΡ‹ + await invalidate_topics_cache(topic_id) + + return updated_topic +``` + +## ΠžΡ‚Π»Π°Π΄ΠΊΠ° ΠΈ ΠΌΠΎΠ½ΠΈΡ‚ΠΎΡ€ΠΈΠ½Π³ + +БистСма ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ Π»ΠΎΠ³Π³Π΅Ρ€ для отслСТивания ΠΎΠΏΠ΅Ρ€Π°Ρ†ΠΈΠΉ: + +```python +logger.debug(f"Π”Π°Π½Π½Ρ‹Π΅ ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½Ρ‹ ΠΈΠ· кСша ΠΏΠΎ ΠΊΠ»ΡŽΡ‡Ρƒ {key}") +logger.debug(f"Π£Π΄Π°Π»Π΅Π½ΠΎ {len(keys)} ΠΊΠ»ΡŽΡ‡Π΅ΠΉ кСша с прСфиксом {prefix}") +logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша: {e}") +``` + +Π­Ρ‚ΠΎ позволяСт ΠΎΡ‚ΡΠ»Π΅ΠΆΠΈΠ²Π°Ρ‚ΡŒ Ρ€Π°Π±ΠΎΡ‚Ρƒ кСша ΠΈ Π²Ρ‹ΡΠ²Π»ΡΡ‚ΡŒ Π²ΠΎΠ·ΠΌΠΎΠΆΠ½Ρ‹Π΅ ΠΏΡ€ΠΎΠ±Π»Π΅ΠΌΡ‹ Π½Π° Ρ€Π°Π½Π½ΠΈΡ… стадиях. + +## Π Π΅ΠΊΠΎΠΌΠ΅Π½Π΄Π°Ρ†ΠΈΠΈ ΠΏΠΎ использованию + +1. **Π‘Π»Π΅Π΄ΡƒΠΉΡ‚Π΅ конвСнциям формирования ΠΊΠ»ΡŽΡ‡Π΅ΠΉ** - это критичСски Π²Π°ΠΆΠ½ΠΎ для консистСнтности ΠΈ прСдсказуСмости кСша. +2. **НС создавайтС собствСнныС Ρ„ΠΎΡ€ΠΌΠ°Ρ‚Ρ‹ ΠΊΠ»ΡŽΡ‡Π΅ΠΉ** - ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠΉΡ‚Π΅ ΡΡƒΡ‰Π΅ΡΡ‚Π²ΡƒΡŽΡ‰ΠΈΠ΅ ΡˆΠ°Π±Π»ΠΎΠ½Ρ‹ для обСспСчСния Сдинообразия. +3. **НС Π·Π°Π±Ρ‹Π²Π°ΠΉΡ‚Π΅ ΠΎΠ± ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ** - всСгда ΠΈΠ½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠΉΡ‚Π΅ кСш ΠΏΡ€ΠΈ ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΈ Π΄Π°Π½Π½Ρ‹Ρ…. +4. **Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠΉΡ‚Π΅ Ρ‚ΠΎΡ‡Π΅Ρ‡Π½ΡƒΡŽ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡŽ** - вмСсто ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ ΠΏΠΎ прСфиксу для сниТСния Π½Π°Π³Ρ€ΡƒΠ·ΠΊΠΈ Π½Π° Redis. +5. **УстанавливайтС Ρ€Π°Π·ΡƒΠΌΠ½Ρ‹Π΅ TTL** - ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠΉΡ‚Π΅ Ρ€Π°Π·Π½Ρ‹Π΅ значСния TTL Π² зависимости ΠΎΡ‚ частоты измСнСния Π΄Π°Π½Π½Ρ‹Ρ…. +6. **НС ΠΊΠ΅ΡˆΠΈΡ€ΡƒΠΉΡ‚Π΅ большиС ΠΎΠ±ΡŠΠ΅ΠΌΡ‹ Π΄Π°Π½Π½Ρ‹Ρ…** - ΠΊΠ΅ΡˆΠΈΡ€ΡƒΠΉΡ‚Π΅ Ρ‚ΠΎΠ»ΡŒΠΊΠΎ Ρ‚ΠΎ, Ρ‡Ρ‚ΠΎ Π΄Π΅ΠΉΡΡ‚Π²ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎ Π½Π΅ΠΎΠ±Ρ…ΠΎΠ΄ΠΈΠΌΠΎ для ΠΏΠΎΠ²Ρ‹ΡˆΠ΅Π½ΠΈΡ ΠΏΡ€ΠΎΠΈΠ·Π²ΠΎΠ΄ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΡΡ‚ΠΈ. + +## ВСхничСскиС Π΄Π΅Ρ‚Π°Π»ΠΈ Ρ€Π΅Π°Π»ΠΈΠ·Π°Ρ†ΠΈΠΈ + +- **БСриализация Π΄Π°Π½Π½Ρ‹Ρ…**: ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ `orjson` для эффСктивной сСриализации ΠΈ дСсСриализации Π΄Π°Π½Π½Ρ‹Ρ…. +- **Π€ΠΎΡ€ΠΌΠ°Ρ‚ΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠ΅ Π΄Π°Ρ‚Ρ‹ ΠΈ Π²Ρ€Π΅ΠΌΠ΅Π½ΠΈ**: для ΠΊΠΎΡ€Ρ€Π΅ΠΊΡ‚Π½ΠΎΠΉ Ρ€Π°Π±ΠΎΡ‚Ρ‹ с Π΄Π°Ρ‚Π°ΠΌΠΈ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ `CustomJSONEncoder`. +- **ΠΡΠΈΠ½Ρ…Ρ€ΠΎΠ½Π½ΠΎΡΡ‚ΡŒ**: всС ΠΎΠΏΠ΅Ρ€Π°Ρ†ΠΈΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ Π²Ρ‹ΠΏΠΎΠ»Π½ΡΡŽΡ‚ΡΡ асинхронно для минимального влияния Π½Π° ΠΏΡ€ΠΎΠΈΠ·Π²ΠΎΠ΄ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΡΡ‚ΡŒ API. 
+- **ΠŸΡ€ΡΠΌΠΎΠ΅ взаимодСйствиС с Redis**: всС ΠΎΠΏΠ΅Ρ€Π°Ρ†ΠΈΠΈ Π²Ρ‹ΠΏΠΎΠ»Π½ΡΡŽΡ‚ΡΡ Ρ‡Π΅Ρ€Π΅Π· прямыС Π²Ρ‹Π·ΠΎΠ²Ρ‹ `redis.execute()` с ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠΎΠΉ ошибок. +- **БатчСвая ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ°**: для массовых ΠΎΠΏΠ΅Ρ€Π°Ρ†ΠΈΠΉ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ ΠΏΠΎΡ€ΠΎΠ³ΠΎΠ²ΠΎΠ΅ Π·Π½Π°Ρ‡Π΅Π½ΠΈΠ΅, послС ΠΊΠΎΡ‚ΠΎΡ€ΠΎΠ³ΠΎ ΠΏΡ€ΠΈΠΌΠ΅Π½ΡΡŽΡ‚ΡΡ ΠΎΠΏΡ‚ΠΈΠΌΠΈΠ·ΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹Π΅ стратСгии. + +## Π˜Π·Π²Π΅ΡΡ‚Π½Ρ‹Π΅ ограничСния + +1. **Π‘ΠΎΠ³Π»Π°ΡΠΎΠ²Π°Π½Π½ΠΎΡΡ‚ΡŒ Π΄Π°Π½Π½Ρ‹Ρ…** - систСма Π½Π΅ Π³Π°Ρ€Π°Π½Ρ‚ΠΈΡ€ΡƒΠ΅Ρ‚ Π°Π±ΡΠΎΠ»ΡŽΡ‚Π½ΡƒΡŽ ΡΠΎΠ³Π»Π°ΡΠΎΠ²Π°Π½Π½ΠΎΡΡ‚ΡŒ Π΄Π°Π½Π½Ρ‹Ρ… Π² кСшС ΠΈ Π±Π°Π·Π΅ Π΄Π°Π½Π½Ρ‹Ρ…. +2. **ΠŸΠ°ΠΌΡΡ‚ΡŒ** - Π½Π΅ΠΎΠ±Ρ…ΠΎΠ΄ΠΈΠΌΠΎ ΡΠ»Π΅Π΄ΠΈΡ‚ΡŒ Π·Π° объСмом Π΄Π°Π½Π½Ρ‹Ρ… Π² кСшС, Ρ‡Ρ‚ΠΎΠ±Ρ‹ ΠΈΠ·Π±Π΅ΠΆΠ°Ρ‚ΡŒ ΠΏΡ€ΠΎΠ±Π»Π΅ΠΌ с ΠΏΠ°ΠΌΡΡ‚ΡŒΡŽ Redis. +3. **ΠŸΡ€ΠΎΠΈΠ·Π²ΠΎΠ΄ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΡΡ‚ΡŒ Redis** - ΠΏΡ€ΠΈ большом количСствС ΠΎΠΏΠ΅Ρ€Π°Ρ†ΠΈΠΉ с кСшСм ΠΌΠΎΠΆΠ΅Ρ‚ ΡΡ‚Π°Ρ‚ΡŒ ΡƒΠ·ΠΊΠΈΠΌ мСстом. diff --git a/docs/features.md b/docs/features.md index b6d1bdc5..e0ed3526 100644 --- a/docs/features.md +++ b/docs/features.md @@ -6,11 +6,7 @@ ## ΠœΡƒΠ»ΡŒΡ‚ΠΈΠ΄ΠΎΠΌΠ΅Π½Π½Π°Ρ авторизация -- ΠŸΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠ° Π°Π²Ρ‚ΠΎΡ€ΠΈΠ·Π°Ρ†ΠΈΠΈ для Ρ€Π°Π·Π½Ρ‹Ρ… Π΄ΠΎΠΌΠ΅Π½ΠΎΠ²: - - *.dscrs.site (Π²ΠΊΠ»ΡŽΡ‡Π°Ρ testing.dscrs.site) - - localhost[:port] - - testingdiscoursio-git-*-discoursio.vercel.app - - *.discours.io +- ΠŸΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠ° Π°Π²Ρ‚ΠΎΡ€ΠΈΠ·Π°Ρ†ΠΈΠΈ для Ρ€Π°Π·Π½Ρ‹Ρ… Π΄ΠΎΠΌΠ΅Π½ΠΎΠ² - АвтоматичСскоС ΠΎΠΏΡ€Π΅Π΄Π΅Π»Π΅Π½ΠΈΠ΅ сСрвСра Π°Π²Ρ‚ΠΎΡ€ΠΈΠ·Π°Ρ†ΠΈΠΈ - ΠšΠΎΡ€Ρ€Π΅ΠΊΡ‚Π½Π°Ρ ΠΎΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ° CORS для всСх ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΈΠ²Π°Π΅ΠΌΡ‹Ρ… Π΄ΠΎΠΌΠ΅Π½ΠΎΠ² @@ -35,10 +31,6 @@ ## CORS Configuration -- ΠŸΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΈΠ²Π°ΡŽΡ‚ΡΡ Π΄ΠΎΠΌΠ΅Π½Ρ‹: - - *.dscrs.site (Π²ΠΊΠ»ΡŽΡ‡Π°Ρ testing.dscrs.site, core.dscrs.site) - - *.discours.io (Π²ΠΊΠ»ΡŽΡ‡Π°Ρ testing.discours.io) - - localhost (Π²ΠΊΠ»ΡŽΡ‡Π°Ρ ΠΏΠΎΡ€Ρ‚Ρ‹) - ΠŸΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΈΠ²Π°Π΅ΠΌΡ‹Π΅ ΠΌΠ΅Ρ‚ΠΎΠ΄Ρ‹: GET, POST, OPTIONS - НастроСна ΠΏΠΎΠ΄Π΄Π΅Ρ€ΠΆΠΊΠ° credentials - Π Π°Π·Ρ€Π΅ΡˆΠ΅Π½Π½Ρ‹Π΅ Π·Π°Π³ΠΎΠ»ΠΎΠ²ΠΊΠΈ: Authorization, Content-Type, X-Requested-With, DNT, Cache-Control diff --git a/orm/author.py b/orm/author.py index c83b9d4f..4be2c630 100644 --- a/orm/author.py +++ b/orm/author.py @@ -1,6 +1,6 @@ import time -from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String +from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String from services.db import Base @@ -8,6 +8,15 @@ from services.db import Base class AuthorRating(Base): + """ + Π Π΅ΠΉΡ‚ΠΈΠ½Π³ Π°Π²Ρ‚ΠΎΡ€Π° ΠΎΡ‚ Π΄Ρ€ΡƒΠ³ΠΎΠ³ΠΎ Π°Π²Ρ‚ΠΎΡ€Π°. + + Attributes: + rater (int): ID ΠΎΡ†Π΅Π½ΠΈΠ²Π°ΡŽΡ‰Π΅Π³ΠΎ Π°Π²Ρ‚ΠΎΡ€Π° + author (int): ID ΠΎΡ†Π΅Π½ΠΈΠ²Π°Π΅ΠΌΠΎΠ³ΠΎ Π°Π²Ρ‚ΠΎΡ€Π° + plus (bool): ΠŸΠΎΠ»ΠΎΠΆΠΈΡ‚Π΅Π»ΡŒΠ½Π°Ρ/ΠΎΡ‚Ρ€ΠΈΡ†Π°Ρ‚Π΅Π»ΡŒΠ½Π°Ρ ΠΎΡ†Π΅Π½ΠΊΠ° + """ + __tablename__ = "author_rating" id = None # type: ignore @@ -15,8 +24,26 @@ class AuthorRating(Base): author = Column(ForeignKey("author.id"), primary_key=True) plus = Column(Boolean) + # ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅ΠΌ индСксы + __table_args__ = ( + # ИндСкс для быстрого поиска всСх ΠΎΡ†Π΅Π½ΠΎΠΊ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠ³ΠΎ Π°Π²Ρ‚ΠΎΡ€Π° + Index("idx_author_rating_author", "author"), + # ИндСкс для быстрого поиска всСх ΠΎΡ†Π΅Π½ΠΎΠΊ, оставлСнных ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½Ρ‹ΠΌ Π°Π²Ρ‚ΠΎΡ€ΠΎΠΌ + Index("idx_author_rating_rater", "rater"), + ) + class AuthorFollower(Base): + """ + Подписка ΠΎΠ΄Π½ΠΎΠ³ΠΎ Π°Π²Ρ‚ΠΎΡ€Π° Π½Π° Π΄Ρ€ΡƒΠ³ΠΎΠ³ΠΎ. 
+ + Attributes: + follower (int): ID подписчика + author (int): ID Π°Π²Ρ‚ΠΎΡ€Π°, Π½Π° ΠΊΠΎΡ‚ΠΎΡ€ΠΎΠ³ΠΎ ΠΏΠΎΠ΄ΠΏΠΈΡΡ‹Π²Π°ΡŽΡ‚ΡΡ + created_at (int): ВрСмя создания подписки + auto (bool): ΠŸΡ€ΠΈΠ·Π½Π°ΠΊ автоматичСской подписки + """ + __tablename__ = "author_follower" id = None # type: ignore @@ -25,16 +52,57 @@ class AuthorFollower(Base): created_at = Column(Integer, nullable=False, default=lambda: int(time.time())) auto = Column(Boolean, nullable=False, default=False) + # ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅ΠΌ индСксы + __table_args__ = ( + # ИндСкс для быстрого поиска всСх подписчиков Π°Π²Ρ‚ΠΎΡ€Π° + Index("idx_author_follower_author", "author"), + # ИндСкс для быстрого поиска всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ², Π½Π° ΠΊΠΎΡ‚ΠΎΡ€Ρ‹Ρ… подписан ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½Ρ‹ΠΉ Π°Π²Ρ‚ΠΎΡ€ + Index("idx_author_follower_follower", "follower"), + ) + class AuthorBookmark(Base): + """ + Π—Π°ΠΊΠ»Π°Π΄ΠΊΠ° Π°Π²Ρ‚ΠΎΡ€Π° Π½Π° ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡŽ. + + Attributes: + author (int): ID Π°Π²Ρ‚ΠΎΡ€Π° + shout (int): ID ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΈ + """ + __tablename__ = "author_bookmark" id = None # type: ignore author = Column(ForeignKey("author.id"), primary_key=True) shout = Column(ForeignKey("shout.id"), primary_key=True) + # ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅ΠΌ индСксы + __table_args__ = ( + # ИндСкс для быстрого поиска всСх Π·Π°ΠΊΠ»Π°Π΄ΠΎΠΊ Π°Π²Ρ‚ΠΎΡ€Π° + Index("idx_author_bookmark_author", "author"), + # ИндСкс для быстрого поиска всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ², Π΄ΠΎΠ±Π°Π²ΠΈΠ²ΡˆΠΈΡ… ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΡŽ Π² Π·Π°ΠΊΠ»Π°Π΄ΠΊΠΈ + Index("idx_author_bookmark_shout", "shout"), + ) + class Author(Base): + """ + МодСль Π°Π²Ρ‚ΠΎΡ€Π° Π² систСмС. + + Attributes: + user (str): Π˜Π΄Π΅Π½Ρ‚ΠΈΡ„ΠΈΠΊΠ°Ρ‚ΠΎΡ€ ΠΏΠΎΠ»ΡŒΠ·ΠΎΠ²Π°Ρ‚Π΅Π»Ρ Π² систСмС Π°Π²Ρ‚ΠΎΡ€ΠΈΠ·Π°Ρ†ΠΈΠΈ + name (str): ΠžΡ‚ΠΎΠ±Ρ€Π°ΠΆΠ°Π΅ΠΌΠΎΠ΅ имя + slug (str): Π£Π½ΠΈΠΊΠ°Π»ΡŒΠ½Ρ‹ΠΉ строковый ΠΈΠ΄Π΅Π½Ρ‚ΠΈΡ„ΠΈΠΊΠ°Ρ‚ΠΎΡ€ + bio (str): ΠšΡ€Π°Ρ‚ΠΊΠ°Ρ биография/статус + about (str): ПолноС описаниС + pic (str): URL изобраТСния профиля + links (dict): Бсылки Π½Π° ΡΠΎΡ†ΠΈΠ°Π»ΡŒΠ½Ρ‹Π΅ сСти ΠΈ сайты + created_at (int): ВрСмя создания профиля + last_seen (int): ВрСмя послСднСго посСщСния + updated_at (int): ВрСмя послСднСго обновлСния + deleted_at (int): ВрСмя удалСния (Ссли ΠΏΡ€ΠΎΡ„ΠΈΠ»ΡŒ ΡƒΠ΄Π°Π»Π΅Π½) + """ + __tablename__ = "author" user = Column(String) # unbounded link with authorizer's User type @@ -53,3 +121,17 @@ class Author(Base): # search_vector = Column( # TSVectorType("name", "slug", "bio", "about", regconfig="pg_catalog.russian") # ) + + # ΠžΠΏΡ€Π΅Π΄Π΅Π»ΡΠ΅ΠΌ индСксы + __table_args__ = ( + # ИндСкс для быстрого поиска ΠΏΠΎ slug + Index("idx_author_slug", "slug"), + # ИндСкс для быстрого поиска ΠΏΠΎ ΠΈΠ΄Π΅Π½Ρ‚ΠΈΡ„ΠΈΠΊΠ°Ρ‚ΠΎΡ€Ρƒ ΠΏΠΎΠ»ΡŒΠ·ΠΎΠ²Π°Ρ‚Π΅Π»Ρ + Index("idx_author_user", "user"), + # ИндСкс для Ρ„ΠΈΠ»ΡŒΡ‚Ρ€Π°Ρ†ΠΈΠΈ Π½Π΅ΡƒΠ΄Π°Π»Π΅Π½Π½Ρ‹Ρ… Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² + Index("idx_author_deleted_at", "deleted_at", postgresql_where=deleted_at.is_(None)), + # ИндСкс для сортировки ΠΏΠΎ Π²Ρ€Π΅ΠΌΠ΅Π½ΠΈ создания (для Π½ΠΎΠ²Ρ‹Ρ… Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²) + Index("idx_author_created_at", "created_at"), + # ИндСкс для сортировки ΠΏΠΎ Π²Ρ€Π΅ΠΌΠ΅Π½ΠΈ послСднСго посСщСния + Index("idx_author_last_seen", "last_seen"), + ) diff --git a/resolvers/author.py b/resolvers/author.py index 604f2bc6..e4cfd794 100644 --- a/resolvers/author.py +++ b/resolvers/author.py @@ -1,25 +1,196 @@ import asyncio import time +from typing import Optional -from sqlalchemy import desc, select, text +from sqlalchemy import select, text from cache.cache import ( cache_author, + cached_query, get_cached_author, get_cached_author_by_user_id, get_cached_author_followers, 
get_cached_follower_authors, get_cached_follower_topics, + invalidate_cache_by_prefix, ) from orm.author import Author -from orm.shout import ShoutAuthor, ShoutTopic -from orm.topic import Topic from resolvers.stat import get_with_stat from services.auth import login_required from services.db import local_session +from services.redis import redis from services.schema import mutation, query from utils.logger import root_logger as logger +DEFAULT_COMMUNITIES = [1] + + +# Π’ΡΠΏΠΎΠΌΠΎΠ³Π°Ρ‚Π΅Π»ΡŒΠ½Π°Ρ функция для получСния всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² Π±Π΅Π· статистики +async def get_all_authors(): + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² Π±Π΅Π· статистики. + Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅Ρ‚ΡΡ для случаСв, ΠΊΠΎΠ³Π΄Π° Π½ΡƒΠΆΠ΅Π½ ΠΏΠΎΠ»Π½Ρ‹ΠΉ список Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² Π±Π΅Π· Π΄ΠΎΠΏΠΎΠ»Π½ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠΉ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΠΈ. + + Returns: + list: Бписок всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² Π±Π΅Π· статистики + """ + cache_key = "authors:all:basic" + + # Ѐункция для получСния всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² ΠΈΠ· Π‘Π” + async def fetch_all_authors(): + logger.debug("ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ список всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² ΠΈΠ· Π‘Π” ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚") + + with local_session() as session: + # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Π±Π°Π·ΠΎΠ²ΠΎΠΉ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΠΈ ΠΎΠ± Π°Π²Ρ‚ΠΎΡ€Π°Ρ… + authors_query = select(Author).where(Author.deleted_at.is_(None)) + authors = session.execute(authors_query).scalars().all() + + # ΠŸΡ€Π΅ΠΎΠ±Ρ€Π°Π·ΡƒΠ΅ΠΌ Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² Π² словари + return [author.dict() for author in authors] + + # Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ ΡƒΠ½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½ΡƒΡŽ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΡŽ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ запросов + return await cached_query(cache_key, fetch_all_authors) + + +# Π’ΡΠΏΠΎΠΌΠΎΠ³Π°Ρ‚Π΅Π»ΡŒΠ½Π°Ρ функция для получСния Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² со статистикой с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ +async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None): + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² со статистикой с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ. 
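+    Π Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ кСшируСтся Ρ‡Π΅Ρ€Π΅Π· cached_query; ΠΊΠ»ΡŽΡ‡ кСша Π²ΠΊΠ»ΡŽΡ‡Π°Π΅Ρ‚ limit, offset ΠΈ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ сортировки by.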
+
+    Args:
+        limit: МаксимальноС количСство Π²ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌΡ‹Ρ… Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²
+        offset: Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ
+        by: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹ΠΉ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ сортировки (new/active)
+
+    Returns:
+        list: Бписок Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² с ΠΈΡ… статистикой
+    """
+    # Π€ΠΎΡ€ΠΌΠΈΡ€ΡƒΠ΅ΠΌ ΠΊΠ»ΡŽΡ‡ кСша ΠΏΠΎ ΠΎΠ±Ρ‰Π΅ΠΉ ΠΊΠΎΠ½Π²Π΅Π½Ρ†ΠΈΠΈ; Π²ΠΊΠ»ΡŽΡ‡Π°Π΅ΠΌ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ сортировки,
+    # Ρ‡Ρ‚ΠΎΠ±Ρ‹ Π·Π°ΠΏΡ€ΠΎΡΡ‹ с Ρ€Π°Π·Π½ΠΎΠΉ сортировкой Π½Π΅ Π΄Π΅Π»ΠΈΠ»ΠΈ ΠΎΠ΄ΠΈΠ½ ΠΈ Ρ‚ΠΎΡ‚ ΠΆΠ΅ кСш
+    cache_key = f"authors:stats:limit={limit}:offset={offset}:sort={by}"
+
+    # Ѐункция для получСния Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² ΠΈΠ· Π‘Π”
+    async def fetch_authors_with_stats():
+        # desc нуТСн Π²ΠΎ всСх Π²Π΅Ρ‚ΠΊΠ°Ρ… сортировки Π½ΠΈΠΆΠ΅, Π° ΠΌΠΎΠ΄ΡƒΠ»ΡŒΠ½Ρ‹ΠΉ ΠΈΠΌΠΏΠΎΡ€Ρ‚ desc Π±Ρ‹Π» ΡƒΠ±Ρ€Π°Π½,
+        # поэтому ΠΈΠΌΠΏΠΎΡ€Ρ‚ΠΈΡ€ΡƒΠ΅ΠΌ Π΅Π³ΠΎ Π»ΠΎΠΊΠ°Π»ΡŒΠ½ΠΎ Π·Π΄Π΅ΡΡŒ, Π° Π½Π΅ Ρ‚ΠΎΠ»ΡŒΠΊΠΎ Π² Π²Π΅Ρ‚ΠΊΠ΅ со словарСм by
+        from sqlalchemy import desc
+
+        logger.debug(f"ВыполняСм запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² со статистикой: limit={limit}, offset={offset}, by={by}")
+
+        with local_session() as session:
+            # Π‘Π°Π·ΠΎΠ²Ρ‹ΠΉ запрос для получСния Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²
+            base_query = select(Author).where(Author.deleted_at.is_(None))
+
+            # ΠŸΡ€ΠΈΠΌΠ΅Π½ΡΠ΅ΠΌ сортировку
+            if by:
+                if isinstance(by, dict):
+                    # ΠžΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ° словаря ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ΠΎΠ² сортировки
+                    for field, direction in by.items():
+                        column = getattr(Author, field, None)
+                        if column:
+                            if direction.lower() == "desc":
+                                base_query = base_query.order_by(desc(column))
+                            else:
+                                base_query = base_query.order_by(column)
+                elif by == "new":
+                    base_query = base_query.order_by(desc(Author.created_at))
+                elif by == "active":
+                    base_query = base_query.order_by(desc(Author.last_seen))
+                else:
+                    # По ΡƒΠΌΠΎΠ»Ρ‡Π°Π½ΠΈΡŽ сортируСм ΠΏΠΎ Π²Ρ€Π΅ΠΌΠ΅Π½ΠΈ создания
+                    base_query = base_query.order_by(desc(Author.created_at))
+            else:
+                base_query = base_query.order_by(desc(Author.created_at))
+
+            # ΠŸΡ€ΠΈΠΌΠ΅Π½ΡΠ΅ΠΌ Π»ΠΈΠΌΠΈΡ‚ ΠΈ смСщСниС
+            base_query = base_query.limit(limit).offset(offset)
+
+            # ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²
+            authors = session.execute(base_query).scalars().all()
+            author_ids = [author.id for author in authors]
+
+            if not author_ids:
+                return []
+
+            # ΠžΠΏΡ‚ΠΈΠΌΠΈΠ·ΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹ΠΉ запрос для получСния статистики ΠΏΠΎ публикациям для Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²
+            shouts_stats_query = f"""
+                SELECT sa.author, COUNT(DISTINCT s.id) as shouts_count
+                FROM shout_author sa
+                JOIN shout s ON sa.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
+                WHERE sa.author IN ({",".join(map(str, author_ids))})
+                GROUP BY sa.author
+            """
+            shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))}
+
+            # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ статистики ΠΏΠΎ подписчикам для Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²
+            followers_stats_query = f"""
+                SELECT author, COUNT(DISTINCT follower) as followers_count
+                FROM author_follower
+                WHERE author IN ({",".join(map(str, author_ids))})
+                GROUP BY author
+            """
+            followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}
+
+            # Π€ΠΎΡ€ΠΌΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ с Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ΠΌ статистики
+            result = []
+            for author in authors:
+                author_dict = author.dict()
+                author_dict["stat"] = {
+                    "shouts": shouts_stats.get(author.id, 0),
+                    "followers": followers_stats.get(author.id, 0),
+                }
+                result.append(author_dict)
+
+                # ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ ΠΊΠ°ΠΆΠ΄ΠΎΠ³ΠΎ Π°Π²Ρ‚ΠΎΡ€Π° ΠΎΡ‚Π΄Π΅Π»ΡŒΠ½ΠΎ для использования Π² Π΄Ρ€ΡƒΠ³ΠΈΡ… функциях
+                await cache_author(author_dict)
+
+            return result
+
+    # Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ ΡƒΠ½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½ΡƒΡŽ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΡŽ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ запросов
+    return await cached_query(cache_key, fetch_authors_with_stats)
+
+
+# Ѐункция для ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²
+async def invalidate_authors_cache(author_id=None):
+    """
+    Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅Ρ‚ кСши Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² ΠΏΡ€ΠΈ ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΈ Π΄Π°Π½Π½Ρ‹Ρ….
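+    ΠŸΡ€ΠΈ ΡƒΠΊΠ°Π·Π°Π½Π½ΠΎΠΌ author_id ΡƒΠ΄Π°Π»ΡΡŽΡ‚ΡΡ Ρ‚ΠΎΡ‡Π΅Ρ‡Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ этого Π°Π²Ρ‚ΠΎΡ€Π° ΠΈ ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ authors:stats:*, ΠΈΠ½Π°Ρ‡Π΅ выполняСтся инвалидация ΠΏΠΎ прСфиксу "authors".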
+ + Args: + author_id: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹ΠΉ ID Π°Π²Ρ‚ΠΎΡ€Π° для Ρ‚ΠΎΡ‡Π΅Ρ‡Π½ΠΎΠΉ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ. + Если Π½Π΅ ΡƒΠΊΠ°Π·Π°Π½, ΠΈΠ½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΡŽΡ‚ΡΡ всС кСши Π°Π²Ρ‚ΠΎΡ€ΠΎΠ². + """ + if author_id: + # ВочСчная инвалидация ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠ³ΠΎ Π°Π²Ρ‚ΠΎΡ€Π° + logger.debug(f"Π˜Π½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡ кСша для Π°Π²Ρ‚ΠΎΡ€Π° #{author_id}") + specific_keys = [ + f"author:id:{author_id}", + f"author:followers:{author_id}", + f"author:follows-authors:{author_id}", + f"author:follows-topics:{author_id}", + f"author:follows-shouts:{author_id}", + ] + + # ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ user_id Π°Π²Ρ‚ΠΎΡ€Π°, Ссли Π΅ΡΡ‚ΡŒ + with local_session() as session: + author = session.query(Author).filter(Author.id == author_id).first() + if author and author.user: + specific_keys.append(f"author:user:{author.user.strip()}") + + # УдаляСм ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ + for key in specific_keys: + try: + await redis.execute("DEL", key) + logger.debug(f"Π£Π΄Π°Π»Π΅Π½ ΠΊΠ»ΡŽΡ‡ кСша {key}") + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ ΡƒΠ΄Π°Π»Π΅Π½ΠΈΠΈ ΠΊΠ»ΡŽΡ‡Π° {key}: {e}") + + # Π’Π°ΠΊΠΆΠ΅ ΠΈΡ‰Π΅ΠΌ ΠΈ удаляСм ΠΊΠ»ΡŽΡ‡ΠΈ ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΉ, содСрТащих Π΄Π°Π½Π½Ρ‹Π΅ ΠΎΠ± этом Π°Π²Ρ‚ΠΎΡ€Π΅ + collection_keys = await redis.execute("KEYS", "authors:stats:*") + if collection_keys: + await redis.execute("DEL", *collection_keys) + logger.debug(f"Π£Π΄Π°Π»Π΅Π½ΠΎ {len(collection_keys)} ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΎΠ½Π½Ρ‹Ρ… ΠΊΠ»ΡŽΡ‡Π΅ΠΉ Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²") + else: + # ΠžΠ±Ρ‰Π°Ρ инвалидация всСх кСшСй Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² + logger.debug("Полная инвалидация кСша Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²") + await invalidate_cache_by_prefix("authors") + @mutation.field("update_author") @login_required @@ -51,10 +222,30 @@ async def update_author(_, info, profile): @query.field("get_authors_all") -def get_authors_all(_, _info): - with local_session() as session: - authors = session.query(Author).all() - return authors +async def get_authors_all(_, _info): + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ список всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² Π±Π΅Π· статистики. + + Returns: + list: Бписок всСх Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² + """ + return await get_all_authors() + + +@query.field("get_authors_paginated") +async def get_authors_paginated(_, _info, limit=50, offset=0, by=None): + """ + ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ список Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ ΠΈ статистикой. + + Args: + limit: МаксимальноС количСство Π²ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌΡ‹Ρ… Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² + offset: Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ + by: ΠŸΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ сортировки (new/active) + + Returns: + list: Бписок Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² с ΠΈΡ… статистикой + """ + return await get_authors_with_stats(limit, offset, by) @query.field("get_author") @@ -105,145 +296,105 @@ async def get_author_id(_, _info, user: str): asyncio.create_task(cache_author(author_dict)) return author_with_stat except Exception as exc: - import traceback - - traceback.print_exc() - logger.error(exc) + logger.error(f"Error getting author: {exc}") + return None @query.field("load_authors_by") async def load_authors_by(_, _info, by, limit, offset): - logger.debug(f"loading authors by {by}") - authors_query = select(Author) + """ + Π—Π°Π³Ρ€ΡƒΠΆΠ°Π΅Ρ‚ Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² ΠΏΠΎ Π·Π°Π΄Π°Π½Π½ΠΎΠΌΡƒ ΠΊΡ€ΠΈΡ‚Π΅Ρ€ΠΈΡŽ с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ. 
-    if by.get("slug"):
-        authors_query = authors_query.filter(Author.slug.ilike(f"%{by['slug']}%"))
-    elif by.get("name"):
-        authors_query = authors_query.filter(Author.name.ilike(f"%{by['name']}%"))
-    elif by.get("topic"):
-        authors_query = (
-            authors_query.join(ShoutAuthor)  # ΠŸΠ΅Ρ€Π²ΠΎΠ΅ соСдинСниС ShoutAuthor
-            .join(ShoutTopic, ShoutAuthor.shout == ShoutTopic.shout)
-            .join(Topic, ShoutTopic.topic == Topic.id)
-            .filter(Topic.slug == str(by["topic"]))
-        )
+    Args:
+        by: ΠšΡ€ΠΈΡ‚Π΅Ρ€ΠΈΠΉ сортировки Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² (new/active)
+        limit: МаксимальноС количСство Π²ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌΡ‹Ρ… Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²
+        offset: Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ
 
-    if by.get("last_seen"):  # Π² unix time
-        before = int(time.time()) - by["last_seen"]
-        authors_query = authors_query.filter(Author.last_seen > before)
-    elif by.get("created_at"):  # Π² unix time
-        before = int(time.time()) - by["created_at"]
-        authors_query = authors_query.filter(Author.created_at > before)
-
-    authors_query = authors_query.limit(limit).offset(offset)
+    Returns:
+        list: Бписок Π°Π²Ρ‚ΠΎΡ€ΠΎΠ² с ΡƒΡ‡Π΅Ρ‚ΠΎΠΌ критСрия
 
-    with local_session() as session:
-        authors_nostat = session.execute(authors_query).all()
-        authors = []
-        for a in authors_nostat:
-            if isinstance(a, Author):
-                author_dict = await get_cached_author(a.id, get_with_stat)
-                if author_dict and isinstance(author_dict.get("shouts"), int):
-                    authors.append(author_dict)
-
-        # order
-        order = by.get("order")
-        if order in ["shouts", "followers"]:
-            authors_query = authors_query.order_by(desc(text(f"{order}_stat")))
-
-        # group by
-        authors = get_with_stat(authors_query)
-        return authors or []
+    """
+    # Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ ΠΎΠΏΡ‚ΠΈΠΌΠΈΠ·ΠΈΡ€ΠΎΠ²Π°Π½Π½ΡƒΡŽ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΡŽ для получСния Π°Π²Ρ‚ΠΎΡ€ΠΎΠ²
+    return await get_authors_with_stats(limit, offset, by)
 
 
 def get_author_id_from(slug="", user=None, author_id=None):
-    if not slug and not user and not author_id:
-        raise ValueError("One of slug, user, or author_id must be provided")
-
-    author_query = select(Author.id)
-    if user:
-        author_query = author_query.filter(Author.user == user)
-    elif slug:
-        author_query = author_query.filter(Author.slug == slug)
-    elif author_id:
-        author_query = author_query.filter(Author.id == author_id)
-
-    with local_session() as session:
-        author_id_result = session.execute(author_query).first()
-        author_id = author_id_result[0] if author_id_result else None
-
-    if not author_id:
-        raise ValueError("Author not found")
-
+    try:
+        if author_id:
+            return author_id
+        with local_session() as session:
+            author = None
+            if slug:
+                author = session.query(Author).filter(Author.slug == slug).first()
+                if author:
+                    author_id = author.id
+                    return author_id
+            if user:
+                author = session.query(Author).filter(Author.user == user).first()
+                if author:
+                    author_id = author.id
+    except Exception as exc:
+        logger.error(exc)
     return author_id
 
 
 @query.field("get_author_follows")
 async def get_author_follows(_, _info, slug="", user=None, author_id=0):
-    try:
-        author_id = get_author_id_from(slug, user, author_id)
+    logger.debug(f"getting follows for @{slug}")
+    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
+    if not author_id:
+        return {}
 
-        if bool(author_id):
-            logger.debug(f"getting {author_id} follows authors")
-            authors = await get_cached_follower_authors(author_id)
-            topics = await get_cached_follower_topics(author_id)
-            return {
-                "topics": topics,
-                "authors": authors,
-                "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
-            }
-    except
Exception: - import traceback + followed_authors = await get_cached_follower_authors(author_id) + followed_topics = await get_cached_follower_topics(author_id) - traceback.print_exc() - return {"error": "Author not found"} + # TODO: Get followed communities too + return { + "authors": followed_authors, + "topics": followed_topics, + "communities": DEFAULT_COMMUNITIES, + "shouts": [], + } @query.field("get_author_follows_topics") async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None): - try: - follower_id = get_author_id_from(slug, user, author_id) - topics = await get_cached_follower_topics(follower_id) - return topics - except Exception: - import traceback - - traceback.print_exc() + logger.debug(f"getting followed topics for @{slug}") + author_id = get_author_id_from(slug=slug, user=user, author_id=author_id) + if not author_id: + return [] + followed_topics = await get_cached_follower_topics(author_id) + return followed_topics @query.field("get_author_follows_authors") async def get_author_follows_authors(_, _info, slug="", user=None, author_id=None): - try: - follower_id = get_author_id_from(slug, user, author_id) - return await get_cached_follower_authors(follower_id) - except Exception: - import traceback - - traceback.print_exc() + logger.debug(f"getting followed authors for @{slug}") + author_id = get_author_id_from(slug=slug, user=user, author_id=author_id) + if not author_id: + return [] + followed_authors = await get_cached_follower_authors(author_id) + return followed_authors def create_author(user_id: str, slug: str, name: str = ""): + author = Author() + author.user = user_id # Бвязь с user_id ΠΈΠ· систСмы Π°Π²Ρ‚ΠΎΡ€ΠΈΠ·Π°Ρ†ΠΈΠΈ + author.slug = slug # Π˜Π΄Π΅Π½Ρ‚ΠΈΡ„ΠΈΠΊΠ°Ρ‚ΠΎΡ€ ΠΈΠ· систСмы Π°Π²Ρ‚ΠΎΡ€ΠΈΠ·Π°Ρ†ΠΈΠΈ + author.created_at = author.updated_at = int(time.time()) + author.name = name or slug # Ссли Π½Π΅ ΡƒΠΊΠ°Π·Π°Π½ΠΎ + with local_session() as session: - try: - author = None - if user_id: - author = session.query(Author).filter(Author.user == user_id).first() - elif slug: - author = session.query(Author).filter(Author.slug == slug).first() - if not author: - new_author = Author(user=user_id, slug=slug, name=name) - session.add(new_author) - session.commit() - logger.info(f"author created by webhook {new_author.dict()}") - except Exception as exc: - logger.debug(exc) + session.add(author) + session.commit() + return author @query.field("get_author_followers") async def get_author_followers(_, _info, slug: str = "", user: str = "", author_id: int = 0): - logger.debug(f"getting followers for @{slug}") + logger.debug(f"getting followers for author @{slug} or ID:{author_id}") author_id = get_author_id_from(slug=slug, user=user, author_id=author_id) - followers = [] - if author_id: - followers = await get_cached_author_followers(author_id) + if not author_id: + return [] + followers = await get_cached_author_followers(author_id) return followers diff --git a/resolvers/topic.py b/resolvers/topic.py index 9dfc245b..855d8a82 100644 --- a/resolvers/topic.py +++ b/resolvers/topic.py @@ -1,18 +1,15 @@ -import time - -from sqlalchemy import func, select, text +from sqlalchemy import desc, select, text from cache.cache import ( cache_topic, + cached_query, get_cached_topic_authors, get_cached_topic_by_slug, get_cached_topic_followers, - redis_operation, + invalidate_cache_by_prefix, ) -from cache.memorycache import cache_region from orm.author import Author -from orm.shout import Shout, ShoutTopic -from orm.topic import Topic, TopicFollower +from 
orm.topic import Topic from resolvers.stat import get_with_stat from services.auth import login_required from services.db import local_session @@ -30,42 +27,26 @@ async def get_all_topics(): Returns: list: Бписок всСх Ρ‚Π΅ΠΌ Π±Π΅Π· статистики """ - # ΠŸΡ‹Ρ‚Π°Π΅ΠΌΡΡ ΠΏΠΎΠ»ΡƒΡ‡ΠΈΡ‚ΡŒ Π΄Π°Π½Π½Ρ‹Π΅ ΠΈΠ· кСша - cached_topics = await redis_operation("GET", "topics:all:basic") + cache_key = "topics:all:basic" - if cached_topics: - logger.debug("Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π±Π°Π·ΠΎΠ²Ρ‹Π΅ Π΄Π°Π½Π½Ρ‹Π΅ ΠΎ Ρ‚Π΅ΠΌΠ°Ρ… ΠΈΠ· Redis") - try: - import json + # Ѐункция для получСния всСх Ρ‚Π΅ΠΌ ΠΈΠ· Π‘Π” + async def fetch_all_topics(): + logger.debug("ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ список всСх Ρ‚Π΅ΠΌ ΠΈΠ· Π‘Π” ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚") - return json.loads(cached_topics) - except Exception as e: - logger.error(f"Ошибка ΠΏΡ€ΠΈ дСсСриализации Ρ‚Π΅ΠΌ ΠΈΠ· Redis: {e}") + with local_session() as session: + # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Π±Π°Π·ΠΎΠ²ΠΎΠΉ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΠΈ ΠΎ Ρ‚Π΅ΠΌΠ°Ρ… + topics_query = select(Topic) + topics = session.execute(topics_query).scalars().all() - # Если Π² кСшС Π½Π΅Ρ‚ Π΄Π°Π½Π½Ρ‹Ρ…, выполняСм запрос Π² Π‘Π” - logger.debug("ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ список всСх Ρ‚Π΅ΠΌ ΠΈΠ· Π‘Π” ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚") + # ΠŸΡ€Π΅ΠΎΠ±Ρ€Π°Π·ΡƒΠ΅ΠΌ Ρ‚Π΅ΠΌΡ‹ Π² словари + return [topic.dict() for topic in topics] - with local_session() as session: - # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Π±Π°Π·ΠΎΠ²ΠΎΠΉ ΠΈΠ½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΠΈ ΠΎ Ρ‚Π΅ΠΌΠ°Ρ… - topics_query = select(Topic) - topics = session.execute(topics_query).scalars().all() - - # ΠŸΡ€Π΅ΠΎΠ±Ρ€Π°Π·ΡƒΠ΅ΠΌ Ρ‚Π΅ΠΌΡ‹ Π² словари - result = [topic.dict() for topic in topics] - - # ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ Π² Redis Π±Π΅Π· TTL (Π±ΡƒΠ΄Π΅Ρ‚ ΠΎΠ±Π½ΠΎΠ²Π»ΡΡ‚ΡŒΡΡ Ρ‚ΠΎΠ»ΡŒΠΊΠΎ ΠΏΡ€ΠΈ измСнСниях) - try: - import json - - await redis_operation("SET", "topics:all:basic", json.dumps(result)) - except Exception as e: - logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠΈ Ρ‚Π΅ΠΌ Π² Redis: {e}") - - return result + # Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ ΡƒΠ½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½ΡƒΡŽ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΡŽ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ запросов + return await cached_query(cache_key, fetch_all_topics) # Π’ΡΠΏΠΎΠΌΠΎΠ³Π°Ρ‚Π΅Π»ΡŒΠ½Π°Ρ функция для получСния Ρ‚Π΅ΠΌ со статистикой с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ -async def get_topics_with_stats(limit=100, offset=0, community_id=None): +async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None): """ ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ Ρ‚Π΅ΠΌΡ‹ со статистикой с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ. 
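+    Π Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ кСшируСтся Ρ‡Π΅Ρ€Π΅Π· cached_query; ΠΊΠ»ΡŽΡ‡ кСша Π²ΠΊΠ»ΡŽΡ‡Π°Π΅Ρ‚ limit, offset, community_id ΠΈ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ сортировки by.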
@@ -73,105 +54,141 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None):
         limit: МаксимальноС количСство Π²ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌΡ‹Ρ… Ρ‚Π΅ΠΌ
         offset: Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ
         community_id: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹ΠΉ ID сообщСства для Ρ„ΠΈΠ»ΡŒΡ‚Ρ€Π°Ρ†ΠΈΠΈ
+        by: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹ΠΉ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ сортировки
 
     Returns:
         list: Бписок Ρ‚Π΅ΠΌ с ΠΈΡ… статистикой
     """
-    # Π€ΠΎΡ€ΠΌΠΈΡ€ΡƒΠ΅ΠΌ ΠΊΠ»ΡŽΡ‡ кСша с ΡƒΡ‡Π΅Ρ‚ΠΎΠΌ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ΠΎΠ²
-    cache_key = f"topics:stats:limit={limit}:offset={offset}"
-    if community_id:
-        cache_key += f":community={community_id}"
+    # Π€ΠΎΡ€ΠΌΠΈΡ€ΡƒΠ΅ΠΌ ΠΊΠ»ΡŽΡ‡ кСша ΠΏΠΎ ΠΎΠ±Ρ‰Π΅ΠΉ ΠΊΠΎΠ½Π²Π΅Π½Ρ†ΠΈΠΈ; Π²ΠΊΠ»ΡŽΡ‡Π°Π΅ΠΌ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ сортировки,
+    # Ρ‡Ρ‚ΠΎΠ±Ρ‹ Π·Π°ΠΏΡ€ΠΎΡΡ‹ с Ρ€Π°Π·Π½ΠΎΠΉ сортировкой Π½Π΅ Π΄Π΅Π»ΠΈΠ»ΠΈ ΠΎΠ΄ΠΈΠ½ ΠΈ Ρ‚ΠΎΡ‚ ΠΆΠ΅ кСш
+    cache_key = f"topics:stats:limit={limit}:offset={offset}:community_id={community_id}:sort={by}"
 
-    # ΠŸΡ‹Ρ‚Π°Π΅ΠΌΡΡ ΠΏΠΎΠ»ΡƒΡ‡ΠΈΡ‚ΡŒ Π΄Π°Π½Π½Ρ‹Π΅ ΠΈΠ· кСша
-    cached_topics = await redis_operation("GET", cache_key)
+    # Ѐункция для получСния Ρ‚Π΅ΠΌ ΠΈΠ· Π‘Π”
+    async def fetch_topics_with_stats():
+        logger.debug(f"ВыполняСм запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ со статистикой: limit={limit}, offset={offset}")
 
-    if cached_topics:
-        logger.debug(f"Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹Π΅ Π΄Π°Π½Π½Ρ‹Π΅ ΠΎ Ρ‚Π΅ΠΌΠ°Ρ… ΠΈΠ· Redis: {cache_key}")
-        try:
-            import json
+        with local_session() as session:
+            # Π‘Π°Π·ΠΎΠ²Ρ‹ΠΉ запрос для получСния Ρ‚Π΅ΠΌ
+            base_query = select(Topic)
 
-            return json.loads(cached_topics)
-        except Exception as e:
-            logger.error(f"Ошибка ΠΏΡ€ΠΈ дСсСриализации Ρ‚Π΅ΠΌ ΠΈΠ· Redis: {e}")
+            # ДобавляСм Ρ„ΠΈΠ»ΡŒΡ‚Ρ€ ΠΏΠΎ сообщСству, Ссли ΡƒΠΊΠ°Π·Π°Π½
+            if community_id:
+                base_query = base_query.where(Topic.community == community_id)
 
-    # Если Π² кСшС Π½Π΅Ρ‚ Π΄Π°Π½Π½Ρ‹Ρ…, выполняСм ΠΎΠΏΡ‚ΠΈΠΌΠΈΠ·ΠΈΡ€ΠΎΠ²Π°Π½Π½Ρ‹ΠΉ запрос
-    logger.debug(f"ВыполняСм запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ со статистикой: limit={limit}, offset={offset}")
+            # ΠŸΡ€ΠΈΠΌΠ΅Π½ΡΠ΅ΠΌ сортировку Π½Π° основС ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€Π° by
+            if by:
+                if isinstance(by, dict):
+                    # ΠžΠ±Ρ€Π°Π±ΠΎΡ‚ΠΊΠ° словаря ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ΠΎΠ² сортировки
+                    for field, direction in by.items():
+                        column = getattr(Topic, field, None)
+                        if column:
+                            if direction.lower() == "desc":
+                                base_query = base_query.order_by(desc(column))
+                            else:
+                                base_query = base_query.order_by(column)
+                elif by == "popular":
+                    # Π‘ΠΎΡ€Ρ‚ΠΈΡ€ΠΎΠ²ΠΊΠ° ΠΏΠΎ популярности (количСству ΠΏΡƒΠ±Π»ΠΈΠΊΠ°Ρ†ΠΈΠΉ)
+                    # ΠŸΡ€ΠΈΠΌΠ΅Ρ‡Π°Π½ΠΈΠ΅: это Ρ‚Ρ€Π΅Π±ΡƒΠ΅Ρ‚ Π΄ΠΎΠΏΠΎΠ»Π½ΠΈΡ‚Π΅Π»ΡŒΠ½ΠΎΠ³ΠΎ запроса ΠΈΠ»ΠΈ подзапроса
+                    base_query = base_query.order_by(
+                        desc(Topic.id)
+                    )  # Π’Ρ€Π΅ΠΌΠ΅Π½Π½ΠΎ, Π½ΡƒΠΆΠ½ΠΎ Π·Π°ΠΌΠ΅Π½ΠΈΡ‚ΡŒ Π½Π° ΠΏΠΎΠ»Π½ΠΎΡ†Π΅Π½Π½ΡƒΡŽ Ρ€Π΅Π°Π»ΠΈΠ·Π°Ρ†ΠΈΡŽ
+                else:
+                    # По ΡƒΠΌΠΎΠ»Ρ‡Π°Π½ΠΈΡŽ сортируСм ΠΏΠΎ ID Π² ΠΎΠ±Ρ€Π°Ρ‚Π½ΠΎΠΌ порядкС
+                    base_query = base_query.order_by(desc(Topic.id))
+            else:
+                # По ΡƒΠΌΠΎΠ»Ρ‡Π°Π½ΠΈΡŽ сортируСм ΠΏΠΎ ID Π² ΠΎΠ±Ρ€Π°Ρ‚Π½ΠΎΠΌ порядкС
+                base_query = base_query.order_by(desc(Topic.id))
 
-    with local_session() as session:
-        # Π‘Π°Π·ΠΎΠ²Ρ‹ΠΉ запрос для получСния Ρ‚Π΅ΠΌ
-        base_query = select(Topic)
+            # ΠŸΡ€ΠΈΠΌΠ΅Π½ΡΠ΅ΠΌ Π»ΠΈΠΌΠΈΡ‚ ΠΈ смСщСниС
+            base_query = base_query.limit(limit).offset(offset)
 
-        # ДобавляСм Ρ„ΠΈΠ»ΡŒΡ‚Ρ€ ΠΏΠΎ сообщСству, Ссли ΡƒΠΊΠ°Π·Π°Π½
-        if community_id:
-            base_query = base_query.where(Topic.community == community_id)
+            # ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ Ρ‚Π΅ΠΌΡ‹
+            topics = session.execute(base_query).scalars().all()
+            topic_ids = [topic.id
for topic in topics] + # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ статистики ΠΏΠΎ публикациям для Π²Ρ‹Π±Ρ€Π°Π½Π½Ρ‹Ρ… Ρ‚Π΅ΠΌ + shouts_stats_query = f""" + SELECT st.topic, COUNT(DISTINCT s.id) as shouts_count + FROM shout_topic st + JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL + WHERE st.topic IN ({",".join(map(str, topic_ids))}) + GROUP BY st.topic + """ + shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))} - if not topic_ids: - return [] + # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ статистики ΠΏΠΎ подписчикам для Π²Ρ‹Π±Ρ€Π°Π½Π½Ρ‹Ρ… Ρ‚Π΅ΠΌ + followers_stats_query = f""" + SELECT topic, COUNT(DISTINCT follower) as followers_count + FROM topic_followers + WHERE topic IN ({",".join(map(str, topic_ids))}) + GROUP BY topic + """ + followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))} - # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ статистики ΠΏΠΎ публикациям для Π²Ρ‹Π±Ρ€Π°Π½Π½Ρ‹Ρ… Ρ‚Π΅ΠΌ - shouts_stats_query = f""" - SELECT st.topic, COUNT(DISTINCT s.id) as shouts_count - FROM shout_topic st - JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL - WHERE st.topic IN ({",".join(map(str, topic_ids))}) - GROUP BY st.topic - """ - shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))} + # Π€ΠΎΡ€ΠΌΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ с Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ΠΌ статистики + result = [] + for topic in topics: + topic_dict = topic.dict() + topic_dict["stat"] = { + "shouts": shouts_stats.get(topic.id, 0), + "followers": followers_stats.get(topic.id, 0), + } + result.append(topic_dict) - # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ статистики ΠΏΠΎ подписчикам для Π²Ρ‹Π±Ρ€Π°Π½Π½Ρ‹Ρ… Ρ‚Π΅ΠΌ - followers_stats_query = f""" - SELECT topic, COUNT(DISTINCT follower) as followers_count - FROM topic_followers - WHERE topic IN ({",".join(map(str, topic_ids))}) - GROUP BY topic - """ - followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))} + # ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ ΠΊΠ°ΠΆΠ΄ΡƒΡŽ Ρ‚Π΅ΠΌΡƒ ΠΎΡ‚Π΄Π΅Π»ΡŒΠ½ΠΎ для использования Π² Π΄Ρ€ΡƒΠ³ΠΈΡ… функциях + await cache_topic(topic_dict) - # Π€ΠΎΡ€ΠΌΠΈΡ€ΡƒΠ΅ΠΌ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ с Π΄ΠΎΠ±Π°Π²Π»Π΅Π½ΠΈΠ΅ΠΌ статистики - result = [] - for topic in topics: - topic_dict = topic.dict() - topic_dict["stat"] = { - "shouts": shouts_stats.get(topic.id, 0), - "followers": followers_stats.get(topic.id, 0), - } - result.append(topic_dict) + return result - # ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ ΠΊΠ°ΠΆΠ΄ΡƒΡŽ Ρ‚Π΅ΠΌΡƒ ΠΎΡ‚Π΄Π΅Π»ΡŒΠ½ΠΎ для использования Π² Π΄Ρ€ΡƒΠ³ΠΈΡ… функциях - await cache_topic(topic_dict) - - # ΠšΠ΅ΡˆΠΈΡ€ΡƒΠ΅ΠΌ ΠΏΠΎΠ»Π½Ρ‹ΠΉ Ρ€Π΅Π·ΡƒΠ»ΡŒΡ‚Π°Ρ‚ Π² Redis Π±Π΅Π· TTL (Π±ΡƒΠ΄Π΅Ρ‚ ΠΎΠ±Π½ΠΎΠ²Π»ΡΡ‚ΡŒΡΡ Ρ‚ΠΎΠ»ΡŒΠΊΠΎ ΠΏΡ€ΠΈ измСнСниях) - try: - import json - - await redis_operation("SET", cache_key, json.dumps(result)) - except Exception as e: - logger.error(f"Ошибка ΠΏΡ€ΠΈ ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΠΈ Ρ‚Π΅ΠΌ Π² Redis: {e}") - - return result + # Π˜ΡΠΏΠΎΠ»ΡŒΠ·ΡƒΠ΅ΠΌ ΡƒΠ½ΠΈΠ²Π΅Ρ€ΡΠ°Π»ΡŒΠ½ΡƒΡŽ Ρ„ΡƒΠ½ΠΊΡ†ΠΈΡŽ для ΠΊΠ΅ΡˆΠΈΡ€ΠΎΠ²Π°Π½ΠΈΡ запросов + return await cached_query(cache_key, fetch_topics_with_stats) # Ѐункция для ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ кСша Ρ‚Π΅ΠΌ -async def invalidate_topics_cache(): +async def invalidate_topics_cache(topic_id=None): """ - Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅Ρ‚ всС кСши Ρ‚Π΅ΠΌ ΠΏΡ€ΠΈ ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΈ Π΄Π°Π½Π½Ρ‹Ρ…. + Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅Ρ‚ кСши Ρ‚Π΅ΠΌ ΠΏΡ€ΠΈ ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΈ Π΄Π°Π½Π½Ρ‹Ρ…. + + Args: + topic_id: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹ΠΉ ID Ρ‚Π΅ΠΌΡ‹ для Ρ‚ΠΎΡ‡Π΅Ρ‡Π½ΠΎΠΉ ΠΈΠ½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΠΈ. + Если Π½Π΅ ΡƒΠΊΠ°Π·Π°Π½, ΠΈΠ½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΡŽΡ‚ΡΡ всС кСши Ρ‚Π΅ΠΌ. 
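+    ΠŸΡ€ΠΈ ΡƒΠΊΠ°Π·Π°Π½Π½ΠΎΠΌ topic_id ΡƒΠ΄Π°Π»ΡΡŽΡ‚ΡΡ Ρ‚ΠΎΡ‡Π΅Ρ‡Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ этой Ρ‚Π΅ΠΌΡ‹ ΠΈ ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΎΠ½Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ topics:stats:*, ΠΈΠ½Π°Ρ‡Π΅ выполняСтся инвалидация ΠΏΠΎ прСфиксу "topics".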
""" - logger.debug("Π˜Π½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡ кСша Ρ‚Π΅ΠΌ") + if topic_id: + # ВочСчная инвалидация ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Ρ‚Π΅ΠΌΡ‹ + logger.debug(f"Π˜Π½Π²Π°Π»ΠΈΠ΄Π°Ρ†ΠΈΡ кСша для Ρ‚Π΅ΠΌΡ‹ #{topic_id}") + specific_keys = [ + f"topic:id:{topic_id}", + f"topic:authors:{topic_id}", + f"topic:followers:{topic_id}", + f"topic_shouts_{topic_id}", + ] - # ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ всС ΠΊΠ»ΡŽΡ‡ΠΈ, Π½Π°Ρ‡ΠΈΠ½Π°ΡŽΡ‰ΠΈΠ΅ΡΡ с "topics:" - topic_keys = await redis.execute("KEYS", "topics:*") + # ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅ΠΌ slug Ρ‚Π΅ΠΌΡ‹, Ссли Π΅ΡΡ‚ΡŒ + with local_session() as session: + topic = session.query(Topic).filter(Topic.id == topic_id).first() + if topic and topic.slug: + specific_keys.append(f"topic:slug:{topic.slug}") - if topic_keys: - # УдаляСм всС Π½Π°ΠΉΠ΄Π΅Π½Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ - await redis.execute("DEL", *topic_keys) - logger.debug(f"Π£Π΄Π°Π»Π΅Π½ΠΎ {len(topic_keys)} ΠΊΠ»ΡŽΡ‡Π΅ΠΉ кСша Ρ‚Π΅ΠΌ") + # УдаляСм ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½Ρ‹Π΅ ΠΊΠ»ΡŽΡ‡ΠΈ + for key in specific_keys: + try: + await redis.execute("DEL", key) + logger.debug(f"Π£Π΄Π°Π»Π΅Π½ ΠΊΠ»ΡŽΡ‡ кСша {key}") + except Exception as e: + logger.error(f"Ошибка ΠΏΡ€ΠΈ ΡƒΠ΄Π°Π»Π΅Π½ΠΈΠΈ ΠΊΠ»ΡŽΡ‡Π° {key}: {e}") + + # Π’Π°ΠΊΠΆΠ΅ ΠΈΡ‰Π΅ΠΌ ΠΈ удаляСм ΠΊΠ»ΡŽΡ‡ΠΈ ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΉ, содСрТащих Π΄Π°Π½Π½Ρ‹Π΅ ΠΎΠ± этой Ρ‚Π΅ΠΌΠ΅ + collection_keys = await redis.execute("KEYS", "topics:stats:*") + if collection_keys: + await redis.execute("DEL", *collection_keys) + logger.debug(f"Π£Π΄Π°Π»Π΅Π½ΠΎ {len(collection_keys)} ΠΊΠΎΠ»Π»Π΅ΠΊΡ†ΠΈΠΎΠ½Π½Ρ‹Ρ… ΠΊΠ»ΡŽΡ‡Π΅ΠΉ Ρ‚Π΅ΠΌ") + else: + # ΠžΠ±Ρ‰Π°Ρ инвалидация всСх кСшСй Ρ‚Π΅ΠΌ + logger.debug("Полная инвалидация кСша Ρ‚Π΅ΠΌ") + await invalidate_cache_by_prefix("topics") # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ всСх Ρ‚Π΅ΠΌ @@ -188,23 +204,24 @@ async def get_topics_all(_, _info): # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ ΠΈ статистикой @query.field("get_topics_paginated") -async def get_topics_paginated(_, _info, limit=100, offset=0): +async def get_topics_paginated(_, _info, limit=100, offset=0, by=None): """ ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ список Ρ‚Π΅ΠΌ с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ ΠΈ статистикой. Args: limit: МаксимальноС количСство Π²ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌΡ‹Ρ… Ρ‚Π΅ΠΌ offset: Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ + by: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹Π΅ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€Ρ‹ сортировки Returns: list: Бписок Ρ‚Π΅ΠΌ с ΠΈΡ… статистикой """ - return await get_topics_with_stats(limit, offset) + return await get_topics_with_stats(limit, offset, None, by) # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ ΠΏΠΎ сообщСству @query.field("get_topics_by_community") -async def get_topics_by_community(_, _info, community_id: int, limit=100, offset=0): +async def get_topics_by_community(_, _info, community_id: int, limit=100, offset=0, by=None): """ ΠŸΠΎΠ»ΡƒΡ‡Π°Π΅Ρ‚ список Ρ‚Π΅ΠΌ, ΠΏΡ€ΠΈΠ½Π°Π΄Π»Π΅ΠΆΠ°Ρ‰ΠΈΡ… ΡƒΠΊΠ°Π·Π°Π½Π½ΠΎΠΌΡƒ сообщСству с ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠ΅ΠΉ ΠΈ статистикой. 
@@ -212,11 +229,12 @@ async def get_topics_by_community(_, _info, community_id: int, limit=100, offset community_id: ID сообщСства limit: МаксимальноС количСство Π²ΠΎΠ·Π²Ρ€Π°Ρ‰Π°Π΅ΠΌΡ‹Ρ… Ρ‚Π΅ΠΌ offset: Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ + by: ΠžΠΏΡ†ΠΈΠΎΠ½Π°Π»ΡŒΠ½Ρ‹Π΅ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€Ρ‹ сортировки Returns: list: Бписок Ρ‚Π΅ΠΌ с ΠΈΡ… статистикой """ - return await get_topics_with_stats(limit, offset, community_id) + return await get_topics_with_stats(limit, offset, community_id, by) # Запрос Π½Π° ΠΏΠΎΠ»ΡƒΡ‡Π΅Π½ΠΈΠ΅ Ρ‚Π΅ΠΌ ΠΏΠΎ Π°Π²Ρ‚ΠΎΡ€Ρƒ @@ -268,14 +286,18 @@ async def update_topic(_, _info, topic_input): if not topic: return {"error": "topic not found"} else: + old_slug = topic.slug Topic.update(topic, topic_input) session.add(topic) session.commit() - # Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅ΠΌ кСш всСх Ρ‚Π΅ΠΌ ΠΈ ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Ρ‚Π΅ΠΌΡ‹ - await invalidate_topics_cache() - await redis.execute("DEL", f"topic:slug:{slug}") - await redis.execute("DEL", f"topic:id:{topic.id}") + # Π˜Π½Π²Π°Π»ΠΈΠ΄ΠΈΡ€ΡƒΠ΅ΠΌ кСш Ρ‚ΠΎΠ»ΡŒΠΊΠΎ для этой ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½ΠΎΠΉ Ρ‚Π΅ΠΌΡ‹ + await invalidate_topics_cache(topic.id) + + # Если slug измСнился, удаляСм старый ΠΊΠ»ΡŽΡ‡ + if old_slug != topic.slug: + await redis.execute("DEL", f"topic:slug:{old_slug}") + logger.debug(f"Π£Π΄Π°Π»Π΅Π½ ΠΊΠ»ΡŽΡ‡ кСша для старого slug: {old_slug}") return {"topic": topic} diff --git a/services/db.py b/services/db.py index b81873ba..e9a6b58c 100644 --- a/services/db.py +++ b/services/db.py @@ -17,7 +17,7 @@ from sqlalchemy import ( exc, func, inspect, - text + text, ) from sqlalchemy.orm import Session, configure_mappers, declarative_base from sqlalchemy.sql.schema import Table From 369ff757b00f99bcdd5f535618235ad6f898b2d6 Mon Sep 17 00:00:00 2001 From: Untone Date: Sat, 22 Mar 2025 13:37:43 +0300 Subject: [PATCH 21/27] [0.4.16] - 2025-03-22 - Added hierarchical comments pagination: - Created new GraphQL query `load_comments_branch` for efficient loading of hierarchical comments - Ability to load root comments with their first N replies - Added pagination for both root and child comments - Using existing `commented` field in `Stat` type to display number of replies - Added special `first_replies` field to store first replies to a comment - Optimized SQL queries for efficient loading of comment hierarchies - Implemented flexible comment sorting system (by time, rating) --- CHANGELOG.md | 10 ++ docs/comments-pagination.md | 165 +++++++++++++++++++++++++++++++ docs/features.md | 13 ++- resolvers/__init__.py | 2 + resolvers/reaction.py | 191 +++++++++++++++++++++++++++++++++++- schema/query.graphql | 3 + 6 files changed, 378 insertions(+), 6 deletions(-) create mode 100644 docs/comments-pagination.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 987e2303..b9b5f937 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ +#### [0.4.16] - 2025-03-22 +- Added hierarchical comments pagination: + - Created new GraphQL query `load_comments_branch` for efficient loading of hierarchical comments + - Ability to load root comments with their first N replies + - Added pagination for both root and child comments + - Using existing `commented` field in `Stat` type to display number of replies + - Added special `first_replies` field to store first replies to a comment + - Optimized SQL queries for efficient loading of comment hierarchies + - Implemented flexible comment sorting system (by time, rating) + #### [0.4.15] - 2025-03-22 - Upgraded caching system described `docs/caching.md` - Module `cache/memorycache.py` removed diff --git 
a/docs/comments-pagination.md b/docs/comments-pagination.md new file mode 100644 index 00000000..3b34a0de --- /dev/null +++ b/docs/comments-pagination.md @@ -0,0 +1,165 @@ +# ΠŸΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΡ ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² + +## ΠžΠ±Π·ΠΎΡ€ + +Π Π΅Π°Π»ΠΈΠ·ΠΎΠ²Π°Π½Π° систСма ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² ΠΏΠΎ Π²Π΅Ρ‚ΠΊΠ°ΠΌ, которая позволяСт эффСктивно Π·Π°Π³Ρ€ΡƒΠΆΠ°Ρ‚ΡŒ ΠΈ ΠΎΡ‚ΠΎΠ±Ρ€Π°ΠΆΠ°Ρ‚ΡŒ Π²Π»ΠΎΠΆΠ΅Π½Π½Ρ‹Π΅ Π²Π΅Ρ‚ΠΊΠΈ обсуТдСний. ΠžΡΠ½ΠΎΠ²Π½Ρ‹Π΅ прСимущСства: + +1. Π—Π°Π³Ρ€ΡƒΠ·ΠΊΠ° Ρ‚ΠΎΠ»ΡŒΠΊΠΎ Π½Π΅ΠΎΠ±Ρ…ΠΎΠ΄ΠΈΠΌΡ‹Ρ… ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π², Π° Π½Π΅ всСго Π΄Π΅Ρ€Π΅Π²Π° +2. Π‘Π½ΠΈΠΆΠ΅Π½ΠΈΠ΅ Π½Π°Π³Ρ€ΡƒΠ·ΠΊΠΈ Π½Π° сСрвСр ΠΈ ΠΊΠ»ΠΈΠ΅Π½Ρ‚ +3. Π’ΠΎΠ·ΠΌΠΎΠΆΠ½ΠΎΡΡ‚ΡŒ эффСктивной Π½Π°Π²ΠΈΠ³Π°Ρ†ΠΈΠΈ ΠΏΠΎ большим обсуТдСниям +4. ΠŸΡ€Π΅Π΄Π·Π°Π³Ρ€ΡƒΠ·ΠΊΠ° ΠΏΠ΅Ρ€Π²Ρ‹Ρ… N ΠΎΡ‚Π²Π΅Ρ‚ΠΎΠ² для ΡƒΠ»ΡƒΡ‡ΡˆΠ΅Π½ΠΈΡ UX + +## API для иСрархичСской Π·Π°Π³Ρ€ΡƒΠ·ΠΊΠΈ ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² + +### GraphQL запрос `load_comments_branch` + +```graphql +query LoadCommentsBranch( + $shout: Int!, + $parentId: Int, + $limit: Int, + $offset: Int, + $sort: ReactionSort, + $childrenLimit: Int, + $childrenOffset: Int +) { + load_comments_branch( + shout: $shout, + parent_id: $parentId, + limit: $limit, + offset: $offset, + sort: $sort, + children_limit: $childrenLimit, + children_offset: $childrenOffset + ) { + id + body + created_at + created_by { + id + name + slug + pic + } + kind + reply_to + stat { + rating + commented + } + first_replies { + id + body + created_at + created_by { + id + name + slug + pic + } + kind + reply_to + stat { + rating + commented + } + } + } +} +``` + +### ΠŸΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€Ρ‹ запроса + +| ΠŸΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ | Π’ΠΈΠΏ | По ΡƒΠΌΠΎΠ»Ρ‡Π°Π½ΠΈΡŽ | ОписаниС | +|----------|-----|--------------|----------| +| shout | Int! | - | ID ΡΡ‚Π°Ρ‚ΡŒΠΈ, ΠΊ ΠΊΠΎΡ‚ΠΎΡ€ΠΎΠΉ относятся ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠΈ | +| parent_id | Int | null | ID Ρ€ΠΎΠ΄ΠΈΡ‚Π΅Π»ΡŒΡΠΊΠΎΠ³ΠΎ коммСнтария. 
Если null, Π·Π°Π³Ρ€ΡƒΠΆΠ°ΡŽΡ‚ΡΡ ΠΊΠΎΡ€Π½Π΅Π²Ρ‹Π΅ ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠΈ | +| limit | Int | 10 | МаксимальноС количСство ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² для Π·Π°Π³Ρ€ΡƒΠ·ΠΊΠΈ | +| offset | Int | 0 | Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ | +| sort | ReactionSort | newest | ΠŸΠΎΡ€ΡΠ΄ΠΎΠΊ сортировки: newest, oldest, like | +| children_limit | Int | 3 | МаксимальноС количСство Π΄ΠΎΡ‡Π΅Ρ€Π½ΠΈΡ… ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² для ΠΊΠ°ΠΆΠ΄ΠΎΠ³ΠΎ Ρ€ΠΎΠ΄ΠΈΡ‚Π΅Π»ΡŒΡΠΊΠΎΠ³ΠΎ | +| children_offset | Int | 0 | Π‘ΠΌΠ΅Ρ‰Π΅Π½ΠΈΠ΅ для ΠΏΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΠΈ Π΄ΠΎΡ‡Π΅Ρ€Π½ΠΈΡ… ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² | + +### Поля Π² ΠΎΡ‚Π²Π΅Ρ‚Π΅ + +ΠšΠ°ΠΆΠ΄Ρ‹ΠΉ ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠΉ содСрТит ΡΠ»Π΅Π΄ΡƒΡŽΡ‰ΠΈΠ΅ основныС поля: + +- `id`: ID коммСнтария +- `body`: ВСкст коммСнтария +- `created_at`: ВрСмя создания +- `created_by`: Π˜Π½Ρ„ΠΎΡ€ΠΌΠ°Ρ†ΠΈΡ ΠΎΠ± Π°Π²Ρ‚ΠΎΡ€Π΅ +- `kind`: Π’ΠΈΠΏ Ρ€Π΅Π°ΠΊΡ†ΠΈΠΈ (COMMENT) +- `reply_to`: ID Ρ€ΠΎΠ΄ΠΈΡ‚Π΅Π»ΡŒΡΠΊΠΎΠ³ΠΎ коммСнтария (null для ΠΊΠΎΡ€Π½Π΅Π²Ρ‹Ρ…) +- `first_replies`: ΠŸΠ΅Ρ€Π²Ρ‹Π΅ N Π΄ΠΎΡ‡Π΅Ρ€Π½ΠΈΡ… ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² +- `stat`: Бтатистика коммСнтария, Π²ΠΊΠ»ΡŽΡ‡Π°ΡŽΡ‰Π°Ρ: + - `commented`: ΠšΠΎΠ»ΠΈΡ‡Π΅ΡΡ‚Π²ΠΎ ΠΎΡ‚Π²Π΅Ρ‚ΠΎΠ² Π½Π° ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠΉ + - `rating`: Π Π΅ΠΉΡ‚ΠΈΠ½Π³ коммСнтария + +## ΠŸΡ€ΠΈΠΌΠ΅Ρ€Ρ‹ использования + +### Π—Π°Π³Ρ€ΡƒΠ·ΠΊΠ° ΠΊΠΎΡ€Π½Π΅Π²Ρ‹Ρ… ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² с ΠΏΠ΅Ρ€Π²Ρ‹ΠΌΠΈ ΠΎΡ‚Π²Π΅Ρ‚Π°ΠΌΠΈ + +```javascript +const { data } = await client.query({ + query: LOAD_COMMENTS_BRANCH, + variables: { + shout: 222, + limit: 10, + offset: 0, + sort: "newest", + childrenLimit: 3 + } +}); +``` + +### Π—Π°Π³Ρ€ΡƒΠ·ΠΊΠ° ΠΎΡ‚Π²Π΅Ρ‚ΠΎΠ² Π½Π° ΠΊΠΎΠ½ΠΊΡ€Π΅Ρ‚Π½Ρ‹ΠΉ ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠΉ + +```javascript +const { data } = await client.query({ + query: LOAD_COMMENTS_BRANCH, + variables: { + shout: 222, + parentId: 123, // ID коммСнтария, для ΠΊΠΎΡ‚ΠΎΡ€ΠΎΠ³ΠΎ Π·Π°Π³Ρ€ΡƒΠΆΠ°Π΅ΠΌ ΠΎΡ‚Π²Π΅Ρ‚Ρ‹ + limit: 10, + offset: 0, + sort: "oldest" // Π‘ΠΎΡ€Ρ‚ΠΈΡ€ΡƒΠ΅ΠΌ ΠΎΡ‚Π²Π΅Ρ‚Ρ‹ ΠΎΡ‚ старых ΠΊ Π½ΠΎΠ²Ρ‹ΠΌ + } +}); +``` + +### ΠŸΠ°Π³ΠΈΠ½Π°Ρ†ΠΈΡ Π΄ΠΎΡ‡Π΅Ρ€Π½ΠΈΡ… ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² + +Для Π·Π°Π³Ρ€ΡƒΠ·ΠΊΠΈ Π΄ΠΎΠΏΠΎΠ»Π½ΠΈΡ‚Π΅Π»ΡŒΠ½Ρ‹Ρ… ΠΎΡ‚Π²Π΅Ρ‚ΠΎΠ² Π½Π° ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠΉ: + +```javascript +const { data } = await client.query({ + query: LOAD_COMMENTS_BRANCH, + variables: { + shout: 222, + parentId: 123, + limit: 10, + offset: 0, + childrenLimit: 5, + childrenOffset: 3 // ΠŸΡ€ΠΎΠΏΡƒΡΠΊΠ°Π΅ΠΌ ΠΏΠ΅Ρ€Π²Ρ‹Π΅ 3 коммСнтария (ΡƒΠΆΠ΅ Π·Π°Π³Ρ€ΡƒΠΆΠ΅Π½Π½Ρ‹Π΅) + } +}); +``` + +## Π Π΅ΠΊΠΎΠΌΠ΅Π½Π΄Π°Ρ†ΠΈΠΈ ΠΏΠΎ клиСнтской Ρ€Π΅Π°Π»ΠΈΠ·Π°Ρ†ΠΈΠΈ + +1. Для эффСктивной Ρ€Π°Π±ΠΎΡ‚Ρ‹ со слоТными Π²Π΅Ρ‚ΠΊΠ°ΠΌΠΈ обсуТдСний рСкомСндуСтся: + + - Π‘Π½Π°Ρ‡Π°Π»Π° Π·Π°Π³Ρ€ΡƒΠΆΠ°Ρ‚ΡŒ Ρ‚ΠΎΠ»ΡŒΠΊΠΎ ΠΊΠΎΡ€Π½Π΅Π²Ρ‹Π΅ ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠΈ с ΠΏΠ΅Ρ€Π²Ρ‹ΠΌΠΈ N ΠΎΡ‚Π²Π΅Ρ‚Π°ΠΌΠΈ + - ΠŸΡ€ΠΈ Π½Π°Π»ΠΈΡ‡ΠΈΠΈ Π΄ΠΎΠΏΠΎΠ»Π½ΠΈΡ‚Π΅Π»ΡŒΠ½Ρ‹Ρ… ΠΎΡ‚Π²Π΅Ρ‚ΠΎΠ² (ΠΊΠΎΠ³Π΄Π° `stat.commented > first_replies.length`) + Π΄ΠΎΠ±Π°Π²ΠΈΡ‚ΡŒ ΠΊΠ½ΠΎΠΏΠΊΡƒ "ΠŸΠΎΠΊΠ°Π·Π°Ρ‚ΡŒ всС ΠΎΡ‚Π²Π΅Ρ‚Ρ‹" + - ΠŸΡ€ΠΈ Π½Π°ΠΆΠ°Ρ‚ΠΈΠΈ Π½Π° ΠΊΠ½ΠΎΠΏΠΊΡƒ Π·Π°Π³Ρ€ΡƒΠΆΠ°Ρ‚ΡŒ Π΄ΠΎΠΏΠΎΠ»Π½ΠΈΡ‚Π΅Π»ΡŒΠ½Ρ‹Π΅ ΠΎΡ‚Π²Π΅Ρ‚Ρ‹ с ΠΏΠΎΠΌΠΎΡ‰ΡŒΡŽ запроса с ΡƒΠΊΠ°Π·Π°Π½Π½Ρ‹ΠΌ `parentId` + +2. Для сортировки: + - По ΡƒΠΌΠΎΠ»Ρ‡Π°Π½ΠΈΡŽ ΠΈΡΠΏΠΎΠ»ΡŒΠ·ΠΎΠ²Π°Ρ‚ΡŒ `newest` для отобраТСния свСТих обсуТдСний + - ΠŸΡ€Π΅Π΄ΡƒΡΠΌΠΎΡ‚Ρ€Π΅Ρ‚ΡŒ ΠΏΠ΅Ρ€Π΅ΠΊΠ»ΡŽΡ‡Π°Ρ‚Π΅Π»ΡŒ сортировки для всСго Π΄Π΅Ρ€Π΅Π²Π° ΠΊΠΎΠΌΠΌΠ΅Π½Ρ‚Π°Ρ€ΠΈΠ΅Π² + - ΠŸΡ€ΠΈ ΠΈΠ·ΠΌΠ΅Π½Π΅Π½ΠΈΠΈ сортировки ΠΏΠ΅Ρ€Π΅Π·Π°Π³Ρ€ΡƒΠΆΠ°Ρ‚ΡŒ Π΄Π°Π½Π½Ρ‹Π΅ с Π½ΠΎΠ²Ρ‹ΠΌ ΠΏΠ°Ρ€Π°ΠΌΠ΅Ρ‚Ρ€ΠΎΠΌ `sort` + +3. 
For better performance:
+   - Cache query results on the client
+   - Use optimistic updates when adding/editing comments
+   - Load comments in batches when needed (lazy loading)
\ No newline at end of file
diff --git a/docs/features.md b/docs/features.md
index e0ed3526..37ff05fc 100644
--- a/docs/features.md
+++ b/docs/features.md
@@ -34,4 +34,15 @@
 - Supported methods: GET, POST, OPTIONS
 - Credentials support is enabled
 - Allowed headers: Authorization, Content-Type, X-Requested-With, DNT, Cache-Control
-- Preflight responses are cached for 20 days (1728000 seconds)
\ No newline at end of file
+- Preflight responses are cached for 20 days (1728000 seconds)
+
+## Branch-based comments pagination
+
+- Efficient loading of comments that respects their hierarchical structure
+- Dedicated `load_comments_branch` query for optimized loading of a comment branch
+- Ability to load an article's root comments together with the first replies to them
+- Flexible pagination for both root and child comments
+- The `stat.commented` field is used to display the number of replies to a comment
+- A dedicated `first_replies` field stores the first replies to a comment
+- Support for different sort orders (newest, oldest, popular)
+- Optimized SQL queries to minimize database load
\ No newline at end of file
diff --git a/resolvers/__init__.py b/resolvers/__init__.py
index 4d2f8d69..699bc4c4 100644
--- a/resolvers/__init__.py
+++ b/resolvers/__init__.py
@@ -37,6 +37,7 @@ from resolvers.reaction import (
     create_reaction,
     delete_reaction,
     load_comment_ratings,
+    load_comments_branch,
     load_reactions_by,
     load_shout_comments,
     load_shout_ratings,
@@ -107,6 +108,7 @@ __all__ = [
     "load_shout_comments",
     "load_shout_ratings",
     "load_comment_ratings",
+    "load_comments_branch",
     # notifier
     "load_notifications",
     "notifications_seen_thread",
diff --git a/resolvers/reaction.py b/resolvers/reaction.py
index 89c4f9ac..35d2d536 100644
--- a/resolvers/reaction.py
+++ b/resolvers/reaction.py
@@ -612,24 +612,22 @@ async def load_shout_comments(_, info, shout: int, limit=50, offset=0):
 @query.field("load_comment_ratings")
 async def load_comment_ratings(_, info, comment: int, limit=50, offset=0):
     """
-    Load ratings for a specified comment with pagination and statistics.
+    Load ratings for a specified comment with pagination.
 
     :param info: GraphQL context info.
     :param comment: Comment ID.
     :param limit: Number of ratings to load.
     :param offset: Pagination offset.
-    :return: List of reactions.
+    :return: List of ratings.
     """
     q = query_reactions()
 
-    q = add_reaction_stat_columns(q)
-
     # Filter, group, sort, limit, offset
     q = q.filter(
         and_(
             Reaction.deleted_at.is_(None),
             Reaction.reply_to == comment,
-            Reaction.kind == ReactionKind.COMMENT.value,
+            Reaction.kind.in_(RATING_REACTIONS),
         )
     )
     q = q.group_by(Reaction.id, Author.id, Shout.id)
@@ -637,3 +635,186 @@ async def load_comment_ratings(_, info, comment: int, limit=50, offset=0):
 
     # Retrieve and return reactions
     return get_reactions_with_stat(q, limit, offset)
+
+
+@query.field("load_comments_branch")
+async def load_comments_branch(
+    _,
+    _info,
+    shout: int,
+    parent_id: int | None = None,
+    limit=10,
+    offset=0,
+    sort="newest",
+    children_limit=3,
+    children_offset=0,
+):
+    """
+    Loads hierarchical comments with pagination for both root and child comments.
+
+    :param info: GraphQL context info.
+    :param shout: Article ID.
+    :param parent_id: Parent comment ID (None for root comments).
+    :param limit: Number of comments to load.
+    :param offset: Pagination offset.
+    :param sort: Sort order ('newest', 'oldest', 'like').
+    :param children_limit: Maximum number of child comments.
+    :param children_offset: Offset for child comments.
+    :return: List of comments with their children.
+    """
+    # Build the base query
+    q = query_reactions()
+    q = add_reaction_stat_columns(q)
+
+    # Filter by article and reaction kind (comments)
+    q = q.filter(
+        and_(
+            Reaction.deleted_at.is_(None),
+            Reaction.shout == shout,
+            Reaction.kind == ReactionKind.COMMENT.value,
+        )
+    )
+
+    # Filter by parent ID
+    if parent_id is None:
+        # Load only root comments
+        q = q.filter(Reaction.reply_to.is_(None))
+    else:
+        # Load only direct replies to the given comment
+        q = q.filter(Reaction.reply_to == parent_id)
+
+    # Sorting and grouping
+    q = q.group_by(Reaction.id, Author.id, Shout.id)
+
+    # Determine the sort order
+    order_by_stmt = None
+    if sort.lower() == "oldest":
+        order_by_stmt = asc(Reaction.created_at)
+    elif sort.lower() == "like":
+        order_by_stmt = desc("rating_stat")
+    else:  # "newest" by default
+        order_by_stmt = desc(Reaction.created_at)
+
+    q = q.order_by(order_by_stmt)
+
+    # Execute the query to fetch the comments
+    comments = get_reactions_with_stat(q, limit, offset)
+
+    # If comments were found, load their children and reply counts
+    if comments:
+        # Load the reply count for each comment
+        await load_replies_count(comments)
+
+        # Load the child comments
+        await load_first_replies(comments, children_limit, children_offset, sort)
+
+    return comments
+
+
+async def load_replies_count(comments):
+    """
+    Loads the reply count for a list of comments and updates the stat.commented field.
+
+    :param comments: List of comments for which to load reply counts.
+    """
+    if not comments:
+        return
+
+    comment_ids = [comment["id"] for comment in comments]
+
+    # Query to count the replies
+    q = (
+        select(Reaction.reply_to.label("parent_id"), func.count().label("count"))
+        .where(
+            and_(
+                Reaction.reply_to.in_(comment_ids),
+                Reaction.deleted_at.is_(None),
+                Reaction.kind == ReactionKind.COMMENT.value,
+            )
+        )
+        .group_by(Reaction.reply_to)
+    )
+
+    # Execute the query
+    with local_session() as session:
+        result = session.execute(q).fetchall()
+
+    # Build a {parent_id: count} dictionary
+    replies_count = {row[0]: row[1] for row in result}
+
+    # Attach the values to the comments
+    for comment in comments:
+        if "stat" not in comment:
+            comment["stat"] = {}
+
+        # Update the comment counter in stat
+        comment["stat"]["commented"] = replies_count.get(comment["id"], 0)
+
+
+async def load_first_replies(comments, limit, offset, sort="newest"):
+    """
+    Loads the first N replies for each comment.
+
+    :param comments: List of comments for which to load replies.
+    :param limit: Maximum number of replies per comment.
+    :param offset: Pagination offset for child comments.
+    :param sort: Sort order for the replies.
+    """
+    if not comments or limit <= 0:
+        return
+
+    # Collect the comment IDs
+    comment_ids = [comment["id"] for comment in comments]
+
+    # Base query for loading the replies
+    q = query_reactions()
+    q = add_reaction_stat_columns(q)
+
+    # Filtering: only replies to the given comments
+    q = q.filter(
+        and_(
+            Reaction.reply_to.in_(comment_ids),
+            Reaction.deleted_at.is_(None),
+            Reaction.kind == ReactionKind.COMMENT.value,
+        )
+    )
+
+    # Grouping
+    q = q.group_by(Reaction.id, Author.id, Shout.id)
+
+    # Determine the sort order
+    order_by_stmt = None
+    if sort.lower() == "oldest":
+        order_by_stmt = asc(Reaction.created_at)
+    elif sort.lower() == "like":
+        order_by_stmt = desc("rating_stat")
+    else:  # "newest" by default
+        order_by_stmt = desc(Reaction.created_at)
+
+    q = q.order_by(order_by_stmt, Reaction.reply_to)
+
+    # Execute the query
+    replies = get_reactions_with_stat(q)
+
+    # Group replies by parent ID
+    replies_by_parent = {}
+    for reply in replies:
+        parent_id = reply.get("reply_to")
+        if parent_id not in replies_by_parent:
+            replies_by_parent[parent_id] = []
+        replies_by_parent[parent_id].append(reply)
+
+    # Attach replies to their parent comments, applying offset and limit
+    for comment in comments:
+        comment_id = comment["id"]
+        if comment_id in replies_by_parent:
+            parent_replies = replies_by_parent[comment_id]
+            # Apply offset and limit
+            comment["first_replies"] = parent_replies[offset : offset + limit]
+        else:
+            comment["first_replies"] = []
+
+    # Load reply counts for the child comments
+    all_replies = [reply for replies in replies_by_parent.values() for reply in replies]
+    if all_replies:
+        await load_replies_count(all_replies)
diff --git a/schema/query.graphql b/schema/query.graphql
index f39fba5c..ce839aed 100644
--- a/schema/query.graphql
+++ b/schema/query.graphql
@@ -26,6 +26,9 @@ type Query {
     load_shout_ratings(shout: Int!, 
limit: Int, offset: Int): [Reaction]
     load_comment_ratings(comment: Int!, limit: Int, offset: Int): [Reaction]
 
+    # branched comments pagination
+    load_comments_branch(shout: Int!, parent_id: Int, limit: Int, offset: Int, sort: ReactionSort, children_limit: Int, children_offset: Int): [Reaction]
+
     # reader
     get_shout(slug: String, shout_id: Int): Shout
     load_shouts_by(options: LoadShoutsOptions): [Shout]

From fe9984e2d8c94fb081ee1a7fa23506a2d587d677 Mon Sep 17 00:00:00 2001
From: Untone
Date: Sat, 22 Mar 2025 18:39:14 +0300
Subject: [PATCH 22/27] type-fix

---
 schema/type.graphql | 1 +
 1 file changed, 1 insertion(+)

diff --git a/schema/type.graphql b/schema/type.graphql
index 7a8344da..ee327e16 100644
--- a/schema/type.graphql
+++ b/schema/type.graphql
@@ -55,6 +55,7 @@ type Reaction {
     stat: Stat
     oid: String
     # old_thread: String
+    first_replies: [Reaction]
 }
 
 type MediaItem {

From 81a8bf3c58d608315d72e9732b57d95061f1b0b1 Mon Sep 17 00:00:00 2001
From: Untone
Date: Sat, 22 Mar 2025 18:44:31 +0300
Subject: [PATCH 23/27] query-type-fix

---
 CHANGELOG.md        |  3 +--
 resolvers/author.py | 16 ----------------
 2 files changed, 1 insertion(+), 18 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b9b5f937..37d617e8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -41,8 +41,7 @@
 - Implemented persistent Redis caching for author queries without TTL (invalidated only on changes)
 - Optimized author retrieval with separate endpoints:
   - `get_authors_all` - returns all non-deleted authors without statistics
-  - `get_authors_paginated` - returns authors with statistics and pagination support
-  - `load_authors_by` - optimized to use caching and efficient sorting
+  - `load_authors_by` - optimized to use caching and efficient sorting and pagination
 - Improved SQL queries with optimized JOIN conditions and efficient filtering
 - Added pre-aggregation of statistics (shouts count, followers count) in single efficient queries
 - Implemented robust cache invalidation on author updates

diff --git a/resolvers/author.py b/resolvers/author.py
index e4cfd794..91bad2e5 100644
--- a/resolvers/author.py
+++ b/resolvers/author.py
@@ -232,22 +232,6 @@ async def get_authors_all(_, _info):
     return await get_all_authors()
 
 
-@query.field("get_authors_paginated")
-async def get_authors_paginated(_, _info, limit=50, offset=0, by=None):
-    """
-    Returns a list of authors with pagination and statistics.
-
-    Args:
-        limit: Maximum number of authors to return
-        offset: Pagination offset
-        by: Sort parameter (new/active)
-
-    Returns:
-        list: List of authors with their statistics
-    """
-    return await get_authors_with_stats(limit, offset, by)
-
-
 @query.field("get_author")
 async def get_author(_, _info, slug="", author_id=0):
     author_dict = None

From 3c56fdfaeaa8a8fe8478ee3b88d4eae1a46024c6 Mon Sep 17 00:00:00 2001
From: Untone
Date: Sat, 22 Mar 2025 18:49:15 +0300
Subject: [PATCH 24/27] get_topics_paginated-fix

---
 CHANGELOG.md         |  1 -
 resolvers/topic.py   | 17 -----------------
 schema/query.graphql |  2 +-
 3 files changed, 1 insertion(+), 19 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 37d617e8..4554ff14 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -53,7 +53,6 @@
 - Implemented persistent Redis caching for topic queries (no TTL, invalidated only on changes)
 - Optimized topic retrieval with separate endpoints for different use cases:
   - `get_topics_all` - returns all topics without statistics for lightweight listing
-  - `get_topics_paginated` - returns topics with statistics and pagination support
   - `get_topics_by_community` - adds pagination and optimized filtering by community
 - Added SQLAlchemy-managed indexes directly in ORM models for automatic schema maintenance
 - Created `sync_indexes()` function for automatic index synchronization during app startup

diff --git a/resolvers/topic.py b/resolvers/topic.py
index 855d8a82..c0f8836e 100644
--- a/resolvers/topic.py
+++ b/resolvers/topic.py
@@ -202,23 +202,6 @@ async def get_topics_all(_, _info):
     return await get_all_topics()
 
 
-# Query to fetch topics with pagination and statistics
-@query.field("get_topics_paginated")
-async def get_topics_paginated(_, _info, limit=100, offset=0, by=None):
-    """
-    Returns a list of topics with pagination and statistics.
-
-    Args:
-        limit: Maximum number of topics to return
-        offset: Pagination offset
-        by: Optional sort parameters
-
-    Returns:
-        list: List of topics with their statistics
-    """
-    return await get_topics_with_stats(limit, offset, None, by)
-
-
 # Query to fetch topics by community
 @query.field("get_topics_by_community")
 async def get_topics_by_community(_, _info, community_id: int, limit=100, offset=0, by=None):

diff --git a/schema/query.graphql b/schema/query.graphql
index ce839aed..e07954ae 100644
--- a/schema/query.graphql
+++ b/schema/query.graphql
@@ -60,7 +60,7 @@ type Query {
    get_topic(slug: String!): Topic
    get_topics_all: [Topic]
    get_topics_by_author(slug: String, user: String, author_id: Int): [Topic]
-   get_topics_by_community(slug: String, community_id: Int): [Topic]
+   get_topics_by_community(community_id: Int!, limit: Int, offset: Int): [Topic]
 
    # notifier
    load_notifications(after: Int!, limit: Int, offset: Int): NotificationsResult!
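With `get_topics_paginated` removed, community topic listings go through the reworked `get_topics_by_community(community_id: Int!, limit, offset)` query. A minimal client-side sketch, assuming the API is served over HTTP POST at a `/graphql` path and that `Topic` exposes `id` and `slug` (the endpoint and field selection are assumptions, not confirmed by these patches):

```python
# Hypothetical client call for the reworked get_topics_by_community query.
# The endpoint URL, response envelope, and selected fields are illustrative assumptions.
import httpx

TOPICS_BY_COMMUNITY = """
query GetTopicsByCommunity($communityId: Int!, $limit: Int, $offset: Int) {
  get_topics_by_community(community_id: $communityId, limit: $limit, offset: $offset) {
    id
    slug
  }
}
"""

def fetch_topics(community_id: int, limit: int = 100, offset: int = 0) -> list:
    # Defaults mirror the resolver signature (limit=100, offset=0)
    response = httpx.post(
        "http://localhost:8000/graphql",  # assumed local endpoint
        json={
            "query": TOPICS_BY_COMMUNITY,
            "variables": {"communityId": community_id, "limit": limit, "offset": offset},
        },
    )
    response.raise_for_status()
    return response.json()["data"]["get_topics_by_community"]

if __name__ == "__main__":
    # Page through a community's topics 100 at a time
    print(fetch_topics(community_id=1, limit=100, offset=0))
```

Note that `community_id` is now required (`Int!`), so callers that previously filtered by `slug` must migrate to the numeric ID.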
From a5eaf4bb6510fad954a65feefbdb0aa218300662 Mon Sep 17 00:00:00 2001
From: Untone
Date: Wed, 26 Mar 2025 08:25:18 +0300
Subject: [PATCH 25/27] commented->comments_count

---
 CHANGELOG.md                |  4 ++--
 docs/comments-pagination.md |  8 ++++----
 docs/features.md            |  2 +-
 resolvers/reaction.py       | 12 ++++++------
 resolvers/reader.py         |  2 +-
 schema/type.graphql         |  2 +-
 6 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4554ff14..80c7577e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,7 +3,7 @@
 - Created new GraphQL query `load_comments_branch` for efficient loading of hierarchical comments
 - Ability to load root comments with their first N replies
 - Added pagination for both root and child comments
-  - Using existing `commented` field in `Stat` type to display number of replies
+  - Using existing `comments_count` field in `Stat` type to display number of replies
 - Added special `first_replies` field to store first replies to a comment
 - Optimized SQL queries for efficient loading of comment hierarchies
 - Implemented flexible comment sorting system (by time, rating)
@@ -150,7 +150,7 @@
 #### [0.4.4]
 - `followers_stat` removed for shout
 - sqlite3 support added
-- `rating_stat` and `commented_stat` fixes
+- `rating_stat` and `comments_count` fixes

diff --git a/docs/comments-pagination.md b/docs/comments-pagination.md
index 3b34a0de..0f8f7261 100644
--- a/docs/comments-pagination.md
+++ b/docs/comments-pagination.md
@@ -45,7 +45,7 @@ query LoadCommentsBranch(
     reply_to
     stat {
       rating
-      commented
+      comments_count
     }
     first_replies {
       id
@@ -61,7 +61,7 @@ query LoadCommentsBranch(
     reply_to
     stat {
       rating
-      commented
+      comments_count
     }
   }
 }
@@ -92,7 +92,7 @@ query LoadCommentsBranch(
 - `reply_to`: ID of the parent comment (null for root comments)
 - `first_replies`: First N child comments
 - `stat`: Comment statistics, including:
-  - `commented`: Number of replies to the comment
+  - `comments_count`: Number of replies to the comment
   - `rating`: Comment rating
 
 ## Usage examples
@@ -150,7 +150,7 @@ const { data } = await client.query({
 
 1. 
For efficient handling of complex discussion threads, it is recommended to:
 
    - First load only the root comments with their first N replies
-   - If there are more replies (when `stat.commented > first_replies.length`),
+   - If there are more replies (when `stat.comments_count > first_replies.length`),
      add a "Show all replies" button
    - When the button is clicked, load the additional replies via a query with the corresponding `parentId`

diff --git a/docs/features.md b/docs/features.md
index 37ff05fc..4837f870 100644
--- a/docs/features.md
+++ b/docs/features.md
@@ -42,7 +42,7 @@
 - Dedicated `load_comments_branch` query for optimized loading of a comment branch
 - Ability to load an article's root comments together with the first replies to them
 - Flexible pagination for both root and child comments
-- The `stat.commented` field is used to display the number of replies to a comment
+- The `stat.comments_count` field is used to display the number of replies to a comment
 - A dedicated `first_replies` field stores the first replies to a comment
 - Support for different sort orders (newest, oldest, popular)
 - Optimized SQL queries to minimize database load
\ No newline at end of file

diff --git a/resolvers/reaction.py b/resolvers/reaction.py
index 35d2d536..24d5f941 100644
--- a/resolvers/reaction.py
+++ b/resolvers/reaction.py
@@ -81,7 +81,7 @@ def get_reactions_with_stat(q, limit, offset):
 
     with local_session() as session:
         result_rows = session.execute(q)
-        for reaction, author, shout, commented_stat, rating_stat in result_rows:
+        for reaction, author, shout, comments_count, rating_stat in result_rows:
             # Skip reactions with a missing shout or author
             if not shout or not author:
                 logger.error(f"Skipping reaction with missing shout or author: {reaction.dict()}")
@@ -89,7 +89,7 @@ def get_reactions_with_stat(q, limit, offset):
 
             reaction.created_by = author.dict()
             reaction.shout = shout.dict()
-            reaction.stat = {"rating": rating_stat, "comments": commented_stat}
+            reaction.stat = {"rating": rating_stat, "comments_count": comments_count}
             reactions.append(reaction)
 
     return reactions
@@ -393,7 +393,7 @@ async def update_reaction(_, info, reaction):
             result = session.execute(reaction_query).unique().first()
 
             if result:
-                r, author, _shout, commented_stat, rating_stat = result
+                r, author, _shout, comments_count, rating_stat = result
                 if not r or not author:
                     return {"error": "Invalid reaction ID or unauthorized"}
 
@@ -408,7 +408,7 @@ async def update_reaction(_, info, reaction):
                     session.commit()
 
                     r.stat = {
-                        "commented": commented_stat,
+                        "comments_count": comments_count,
                         "rating": rating_stat,
                     }
 
@@ -713,7 +713,7 @@ async def load_comments_branch(
 
 async def load_replies_count(comments):
     """
-    Loads the reply count for a list of comments and updates the stat.commented field.
+    Loads the reply count for a list of comments and updates the stat.comments_count field.
 
     :param comments: List of comments for which to load reply counts.
     """
@@ -748,7 +748,7 @@ async def load_replies_count(comments):
             comment["stat"] = {}
 
         # Update the comment counter in stat
-        comment["stat"]["commented"] = replies_count.get(comment["id"], 0)
+        comment["stat"]["comments_count"] = replies_count.get(comment["id"], 0)
 
 
 async def load_first_replies(comments, limit, offset, sort="newest"):

diff --git a/resolvers/reader.py b/resolvers/reader.py
index 003a50cd..a8d6b026 100644
--- a/resolvers/reader.py
+++ b/resolvers/reader.py
@@ -225,7 +225,7 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
             elif isinstance(row.stat, dict):
                 stat = row.stat
             viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
-            shout_dict["stat"] = {**stat, "viewed": viewed, "commented": stat.get("comments_count", 0)}
+            shout_dict["stat"] = {**stat, "viewed": viewed}
 
             # Handle main_topic and topics
             topics = None

diff --git a/schema/type.graphql b/schema/type.graphql
index ee327e16..21243b81 100644
--- a/schema/type.graphql
+++ b/schema/type.graphql
@@ -137,7 +137,7 @@ type Draft {
 
 type Stat {
     rating: Int
-    commented: Int
+    comments_count: Int
     viewed: Int
     last_commented_at: Int
 }

From 4f599e097fd06dedfdf0470dae1a5d3d059cbfc2 Mon Sep 17 00:00:00 2001
From: Untone
Date: Wed, 26 Mar 2025 08:54:10 +0300
Subject: [PATCH 26/27] [0.4.17] - 2025-03-26
 - Fixed `'Reaction' object is not subscriptable` error in hierarchical
 comments:   - Modified `get_reactions_with_stat()` to convert Reaction
 objects to dictionaries   - Added default values for limit/offset parameters
 - Fixed `load_first_replies()` implementation with proper parameter passing
 - Added doctest with example usage   - Limited child comments to 100 per
 parent for performance

---
 CHANGELOG.md          |  8 ++++++++
 resolvers/reaction.py | 22 ++++++++++++++--------
 2 files changed, 22 insertions(+), 8 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 80c7577e..d25f8dcf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,11 @@
+#### [0.4.17] - 2025-03-26
+- Fixed `'Reaction' object is not subscriptable` error in hierarchical comments:
+  - Modified `get_reactions_with_stat()` to convert Reaction objects to dictionaries
+  - Added default values for limit/offset parameters
+  - Fixed `load_first_replies()` implementation with proper parameter passing
+  - Added doctest with example usage
+  - Limited child comments to 100 per parent for performance
+
 #### [0.4.16] - 2025-03-22
 - Added hierarchical comments pagination:
   - Created new GraphQL query `load_comments_branch` for efficient loading of hierarchical comments

diff --git a/resolvers/reaction.py b/resolvers/reaction.py
index 24d5f941..4a3fe56a 100644
--- a/resolvers/reaction.py
+++ b/resolvers/reaction.py
@@ -67,14 +67,17 @@ def add_reaction_stat_columns(q):
     return q
 
 
-def get_reactions_with_stat(q, limit, offset):
+def get_reactions_with_stat(q, limit=10, offset=0):
     """
     Execute the reaction query and retrieve reactions with statistics.
 
     :param q: Query with reactions and statistics.
     :param limit: Number of reactions to load.
     :param offset: Pagination offset.
-    :return: List of reactions.
+    :return: List of reactions as dictionaries.
+
+    >>> get_reactions_with_stat(q, 10, 0)  # doctest: +SKIP
+    [{'id': 1, 'body': 'Comment text', 'stat': {'rating': 5, 'comments_count': 3}, ...}]
     """
     q = q.limit(limit).offset(offset)
 
     reactions = []
@@ -87,10 +90,12 @@ def get_reactions_with_stat(q, limit=10, offset=0):
             logger.error(f"Skipping reaction with missing shout or author: {reaction.dict()}")
             continue
 
-        reaction.created_by = author.dict()
-        reaction.shout = shout.dict()
-        reaction.stat = {"rating": rating_stat, "comments_count": comments_count}
-        reactions.append(reaction)
+        # Convert the Reaction into a dictionary for key-based access
+        reaction_dict = reaction.dict()
+        reaction_dict["created_by"] = author.dict()
+        reaction_dict["shout"] = shout.dict()
+        reaction_dict["stat"] = {"rating": rating_stat, "comments_count": comments_count}
+        reactions.append(reaction_dict)
 
     return reactions
 
@@ -793,8 +798,9 @@ async def load_first_replies(comments, limit, offset, sort="newest"):
 
     q = q.order_by(order_by_stmt, Reaction.reply_to)
 
-    # Execute the query
-    replies = get_reactions_with_stat(q)
+    # Execute the query with an explicit limit: load at most 100 replies
+    # in one batch so each parent comment stays within bounds
+    replies = get_reactions_with_stat(q, limit=100, offset=0)
 
     # Group replies by parent ID
     replies_by_parent = {}

From abbc074474168d0219aacc668289e8a95f1db34d Mon Sep 17 00:00:00 2001
From: Untone
Date: Mon, 31 Mar 2025 14:39:02 +0300
Subject: [PATCH 27/27] updateby-fix

---
 resolvers/draft.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/resolvers/draft.py b/resolvers/draft.py
index 4424ff3e..0e04ffd8 100644
--- a/resolvers/draft.py
+++ b/resolvers/draft.py
@@ -148,7 +148,11 @@ async def update_draft(_, info, draft_id: int, draft_input):
             return {"error": "Draft not found"}
 
         Draft.update(draft, draft_input)
-        draft.updated_at = int(time.time())
+        # Set updated_at and updated_by from the authenticated user
+        current_time = int(time.time())
+        draft.updated_at = current_time
+        draft.updated_by = author_id
+
         session.commit()
 
         return {"draft": draft}
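With this last patch, a successful `update_draft` call records both when and by whom a draft was changed. A minimal end-to-end sketch, assuming the mutation is exposed as `update_draft(draft_id: Int!, draft_input: DraftInput): CommonResult` with a `draft` payload, that the server listens locally at `/graphql`, and that auth uses a bearer token - all of these shapes are assumptions for illustration, not confirmed by the patches:

```python
# Hypothetical smoke check for the updated_at/updated_by fix in update_draft.
# Endpoint, auth scheme, and mutation/response shapes are illustrative assumptions.
import httpx

UPDATE_DRAFT = """
mutation UpdateDraft($draftId: Int!, $draftInput: DraftInput!) {
  update_draft(draft_id: $draftId, draft_input: $draftInput) {
    error
    draft { id updated_at updated_by }
  }
}
"""

def update_draft_title(draft_id: int, title: str, token: str) -> dict:
    response = httpx.post(
        "http://localhost:8000/graphql",  # assumed local endpoint
        headers={"Authorization": f"Bearer {token}"},  # assumed auth scheme
        json={"query": UPDATE_DRAFT, "variables": {"draftId": draft_id, "draftInput": {"title": title}}},
    )
    response.raise_for_status()
    payload = response.json()["data"]["update_draft"]
    # After PATCH 27, draft.updated_by should equal the calling author's id
    # and draft.updated_at should hold the server-side commit time.
    return payload

if __name__ == "__main__":
    print(update_draft_title(draft_id=1, title="New title", token="<token>"))
```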