From fcbbe4fcaccabf803c7bf22a0136e86df55feda6 Mon Sep 17 00:00:00 2001
From: Untone
Date: Fri, 1 Nov 2024 20:02:46 +0300
Subject: [PATCH] fixed-shouts-load

---
 CHECKS              |   5 -
 resolvers/reader.py | 303 +++++++++++++++++++++++---------------------
 2 files changed, 158 insertions(+), 150 deletions(-)
 delete mode 100644 CHECKS

diff --git a/CHECKS b/CHECKS
deleted file mode 100644
index bdfcc6fe..00000000
--- a/CHECKS
+++ /dev/null
@@ -1,5 +0,0 @@
-WAIT=10
-TIMEOUT=10
-ATTEMPTS=3
-
-/
diff --git a/resolvers/reader.py b/resolvers/reader.py
index d6e44ab2..c76d3847 100644
--- a/resolvers/reader.py
+++ b/resolvers/reader.py
@@ -61,17 +61,90 @@ def query_with_stat(info):
     :return: Query with a statistics subquery.
     """
-    q = select(Shout)
+    q = select(Shout).distinct().group_by(Shout.id)
+    # Create aliases for all tables
+    main_author = aliased(Author)
+    main_topic_join = aliased(ShoutTopic)
+    main_topic = aliased(Topic)
+
+    # main_author
+    q = q.join(main_author, main_author.id == Shout.created_by)
+    q = q.add_columns(
+        func.json_object(
+            'id', main_author.id,
+            'name', main_author.name,
+            'slug', main_author.slug,
+            'pic', main_author.pic
+        ).label("main_author")
+    )
+
+    if has_field(info, "main_topic"):
+        main_topic = aliased(Topic)
+        q = q.join(
+            main_topic_join,
+            and_(
+                main_topic_join.shout == Shout.id,
+                main_topic_join.main.is_(True)
+            )
+        ).join(
+            main_topic,
+            main_topic.id == main_topic_join.topic
+        )
+        q = q.add_columns(
+            func.json_object(
+                'id', main_topic.id,
+                'title', main_topic.title,
+                'slug', main_topic.slug,
+                'is_main', main_topic_join.main
+            ).label("main_topic")
+        )
+
+    if has_field(info, "topics"):
+        topics_subquery = (
+            select(
+                func.json_group_array(
+                    func.json_object(
+                        'id', Topic.id,
+                        'title', Topic.title,
+                        'slug', Topic.slug,
+                        'is_main', ShoutTopic.main
+                    )
+                ).label("topics")
+            )
+            .outerjoin(Topic, ShoutTopic.topic == Topic.id)
+            .where(ShoutTopic.shout == Shout.id)
+            .subquery()
+        )
+        q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
+        q = q.add_columns(topics_subquery.c.topics)
+
+    if has_field(info, "authors"):
+        authors_subquery = (
+            select(
+                func.json_group_array(
+                    func.json_object(
+                        'id', Author.id,
+                        'name', Author.name,
+                        'slug', Author.slug,
+                        'pic', Author.pic,
+                        'caption', ShoutAuthor.caption
+                    )
+                ).label("authors")
+            )
+            .outerjoin(Author, ShoutAuthor.author == Author.id)
+            .where(ShoutAuthor.shout == Shout.id)
+            .subquery()
+        )
+        q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
+        q = q.add_columns(authors_subquery.c.authors)
+
     if has_field(info, "stat"):
         stats_subquery = (
             select(
                 Reaction.shout,
-                func.count(
-                    case(
-                        (Reaction.kind == ReactionKind.COMMENT.value, 1),
-                        else_=None,
-                    )
+                func.count(func.distinct(Reaction.id)).filter(
+                    Reaction.kind == ReactionKind.COMMENT.value
                 ).label("comments_count"),
                 func.coalesce(
                     func.sum(
                         case(
@@ -79,17 +152,20 @@ def query_with_stat(info):
                             (Reaction.reply_to.is_not(None), 0),
                             (Reaction.kind == ReactionKind.LIKE.value, 1),
                             (Reaction.kind == ReactionKind.DISLIKE.value, -1),
-                            else_=0,
+                            else_=0
                         )
                     ),
-                    0,
+                    0
                 ).label("rating"),
-                func.max(
-                    case(
-                        (Reaction.reply_to.is_(None), Reaction.created_at),
-                        else_=None,
-                    )
-                ).label("last_reacted_at"),
+                func.coalesce(
+                    func.max(
+                        case(
+                            (Reaction.reply_to.is_(None), Reaction.created_at),
+                            else_=None
+                        )
+                    ),
+                    0
+                ).label("last_reacted_at")
             )
             .where(Reaction.deleted_at.is_(None))
             .group_by(Reaction.shout)
@@ -97,70 +173,17 @@ def query_with_stat(info):
         )
         q = q.outerjoin(stats_subquery, stats_subquery.c.shout == Shout.id)
-        q = q.add_columns(stats_subquery.c.comments_count, stats_subquery.c.rating, stats_subquery.c.last_reacted_at)
-
-    if has_field(info, "created_by"):
-        q = q.outerjoin(Author, Shout.created_by == Author.id)
+        # aggregate in one column
         q = q.add_columns(
-            Author.id.label("main_author_id"),
-            Author.name.label("main_author_name"),
-            Author.slug.label("main_author_slug"),
-            Author.pic.label("main_author_pic"),
-        )
-
-    if has_field(info, "main_topic"):
-        q = q.outerjoin(ShoutTopic, and_(ShoutTopic.shout == Shout.id, ShoutTopic.main.is_(True)))
-        q = q.outerjoin(Topic, ShoutTopic.topic == Topic.id)
-        q = q.add_columns(
-            Topic.id.label("main_topic_id"), Topic.title.label("main_topic_title"), Topic.slug.label("main_topic_slug")
-        )
-
-    if has_field(info, "authors"):
-        topics_subquery = (
-            select(
-                ShoutTopic.shout,
-                Topic.id.label("topic_id"),
-                Topic.title.label("topic_title"),
-                Topic.slug.label("topic_slug"),
-                ShoutTopic.main.label("is_main"),
-            )
-            .outerjoin(Topic, ShoutTopic.topic == Topic.id)
-            .where(ShoutTopic.shout == Shout.id)
-            .subquery()
-        )
-        q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
-        q = q.add_columns(
-            topics_subquery.c.topic_id,
-            topics_subquery.c.topic_title,
-            topics_subquery.c.topic_slug,
-            topics_subquery.c.is_main,
-        )
-
-        authors_subquery = (
-            select(
-                ShoutAuthor.shout,
-                Author.id.label("author_id"),
-                Author.name.label("author_name"),
-                Author.slug.label("author_slug"),
-                Author.pic.label("author_pic"),
-                ShoutAuthor.caption.label("author_caption"),
-            )
-            .outerjoin(Author, ShoutAuthor.author == Author.id)
-            .where(ShoutAuthor.shout == Shout.id)
-            .subquery()
-        )
-        q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
-        q = q.add_columns(
-            authors_subquery.c.author_id,
-            authors_subquery.c.author_name,
-            authors_subquery.c.author_slug,
-            authors_subquery.c.author_pic,
-            authors_subquery.c.author_caption,
+            func.json_object(
+                'comments_count', stats_subquery.c.comments_count,
+                'rating', stats_subquery.c.rating,
+                'last_reacted_at', stats_subquery.c.last_reacted_at,
+            ).label("stat")
         )

     # Filter for published shouts
     q = q.where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
-
     return q
@@ -168,83 +191,73 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
     """
     fetching publications with pagination applied
     """
-    q = q.limit(limit).offset(offset)
+    shouts = []
+    try:
+        logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}")
+        q = q.limit(limit).offset(offset)
+
+        with local_session() as session:
+            logger.info("Executing query...")
+            shouts_result = session.execute(q).all()
+            logger.info(f"Query executed, got {len(shouts_result)} results")
+
+            if not shouts_result:
+                logger.warning("No results found")
+                return []

-    includes_authors = has_field(info, "authors")
-    includes_topics = has_field(info, "topics")
-    includes_stat = has_field(info, "stat")
-    includes_media = has_field(info, "media")
+            shouts = []

-    with local_session() as session:
-        shouts_result = session.execute(q).all()
-        if not shouts_result:
-            return []
+            for idx, row in enumerate(shouts_result):
+                try:
+                    # logger.debug(row)
+                    shout = None
+                    if hasattr(row, "Shout"):
+                        shout = row.Shout
+                    else:
+                        logger.warning(f"Row {idx} has no Shout attribute: {row}")
+                        continue
+
+                    if shout:
+                        shout_id = int(f"{shout.id}")
+                        # logger.info(f"Processing shout ID: {shout_id}")
+                        shout_dict = shout.dict()
+                        if has_field(info, "created_by") and shout_dict.get("created_by"):
+                            main_author_id = shout_dict.get("created_by")
+                            a = session.query(Author).filter(Author.id == main_author_id).first()
+                            shout_dict["created_by"] = {
+                                "id": main_author_id,
+                                "name": a.name,
+                                "slug": a.slug,
+                                "pic": a.pic
+                            }
+                        # logger.info({ **shout_dict, "body": "", "media": []})
+                        stat = json.loads(row.stat) if hasattr(row, "stat") else {}
+                        viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
+                        stat["viewed"] = viewed
+                        if stat:
+                            shout_dict["stat"] = {
+                                **stat,
+                                "commented": stat.get("comments_count", 0)
+                            }
-    shouts = []
-
-    for row in shouts_result:
-        logger.debug(row)
-        shout_id = row.Shout.id
-        shout_dict = row.Shout.dict()
-        shout_dict.update(
-            {
-                "authors": [],
-                "topics": set(),
-                "media": json.dumps(shout_dict.get("media", [])) if includes_media else [],
-                "stat": {
-                    "viewed": ViewedStorage.get_shout(shout_id=shout_id) or 0,
-                    "commented": row.comments_count or 0,
-                    "rating": row.rating or 0,
-                    "last_reacted_at": row.last_reacted_at,
-                }
-                if includes_stat and hasattr(row, "comments_count")
-                else {},
-            }
-        )
-        if includes_authors and hasattr(row, "main_author_id"):
-            shout_dict["created_by"] = {
-                "id": row.main_author_id,
-                "name": row.main_author_name or "Аноним",
-                "slug": row.main_author_slug or "",
-                "pic": row.main_author_pic or "",
-            }
-        if includes_topics and hasattr(row, "main_topic_id"):
-            shout_dict["main_topic"] = {
-                "id": row.main_topic_id or 0,
-                "title": row.main_topic_title or "",
-                "slug": row.main_topic_slug or "",
-            }
-
-        if includes_authors and hasattr(row, "author_id"):
-            author = {
-                "id": row.author_id,
-                "name": row.author_name,
-                "slug": row.author_slug,
-                "pic": row.author_pic,
-                "caption": row.author_caption,
-            }
-            if not filter(lambda x: x["id"] == author["id"], shout_dict["authors"]):
-                shout_dict["authors"].append(author)
-
-        if includes_topics and hasattr(row, "topic_id"):
-            topic = {
-                "id": row.topic_id,
-                "title": row.topic_title,
-                "slug": row.topic_slug,
-                "is_main": row.is_main,
-            }
-            if not filter(lambda x: x["id"] == topic["id"], shout_dict["topics"]):
-                shout_dict["topics"].add(frozenset(topic.items()))
-
-        if includes_topics:
-            shout_dict["topics"] = sorted(
-                [dict(t) for t in shout_dict["topics"]], key=lambda x: (not x.get("is_main", False), x["id"])
-            )
-
-        shouts.append(shout_dict)
+                        if has_field(info, "main_topic") and hasattr(row, "main_topic"):
+                            shout_dict["main_topic"] = json.loads(row.main_topic)
+                        if has_field(info, "authors") and hasattr(row, "authors"):
+                            shout_dict["authors"] = json.loads(row.authors)
+                        if has_field(info, "topics") and hasattr(row, "topics"):
+                            shout_dict["topics"] = json.loads(row.topics)
+
+                        shouts.append(shout_dict)
+
+                except Exception as row_error:
+                    logger.error(f"Error processing row {idx}: {row_error}", exc_info=True)
+                    continue
+    except Exception as e:
+        logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True)
+        raise
+    finally:
         return shouts
-    return []


 def apply_filters(q, filters):
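Usage sketch (not part of the patch): after this change, query_with_stat attaches JSON-encoded main_author, main_topic, topics, authors and stat columns only for the fields requested via info, and get_shouts_with_links decodes those payloads into plain dicts. A minimal wiring, assuming a hypothetical load_shouts_by resolver and the apply_filters(q, filters) helper already defined in resolvers/reader.py, might look like this:

# Hypothetical wiring for illustration only; the resolver name and the
# shape of `filters` are assumptions, not part of this commit.
from resolvers.reader import apply_filters, get_shouts_with_links, query_with_stat


async def load_shouts_by(_, info, filters=None, limit=20, offset=0):
    # Build the base query; JSON columns (main_author, main_topic, topics,
    # authors, stat) are added only for fields actually requested in `info`.
    q = query_with_stat(info)
    if filters:
        q = apply_filters(q, filters)
    # Paginate, execute, and fold each row into a plain dict with the JSON
    # payloads already parsed by get_shouts_with_links.
    return get_shouts_with_links(info, q, limit=limit, offset=offset)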