json-distinct-fix
All checks were successful
Deploy on push / deploy (push) Successful in 6s

This commit is contained in:
Untone 2024-11-01 20:24:09 +03:00
parent a0f29eb5b8
commit 3e50902f07
2 changed files with 48 additions and 66 deletions

View File

@@ -7,7 +7,7 @@ from orm.author import Author
from orm.reaction import Reaction, ReactionKind from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic from orm.topic import Topic
from services.db import local_session, json_builder, json_array_builder from services.db import json_array_builder, json_builder, local_session
from services.schema import query from services.schema import query
from services.search import search_text from services.search import search_text
from services.viewed import ViewedStorage from services.viewed import ViewedStorage
@@ -60,8 +60,7 @@ def query_with_stat(info):
:param info: Информация о контексте GraphQL :param info: Информация о контексте GraphQL
:return: Запрос с подзапросом статистики. :return: Запрос с подзапросом статистики.
""" """
q = select(Shout).distinct(Shout.id).group_by(Shout.id)
q = select(Shout).distinct().group_by(Shout.id)
# Создаем алиасы для всех таблиц # Создаем алиасы для всех таблиц
main_author = aliased(Author) main_author = aliased(Author)
@@ -72,31 +71,17 @@ def query_with_stat(info):
q = q.join(main_author, main_author.id == Shout.created_by) q = q.join(main_author, main_author.id == Shout.created_by)
q = q.add_columns( q = q.add_columns(
json_builder( json_builder(
'id', main_author.id, "id", main_author.id, "name", main_author.name, "slug", main_author.slug, "pic", main_author.pic
'name', main_author.name,
'slug', main_author.slug,
'pic', main_author.pic
).label("main_author") ).label("main_author")
) )
if has_field(info, "main_topic"): if has_field(info, "main_topic"):
main_topic = aliased(Topic) q = q.join(main_topic_join, and_(main_topic_join.shout == Shout.id, main_topic_join.main.is_(True))).join(
q = q.join( main_topic, main_topic.id == main_topic_join.topic
main_topic_join,
and_(
main_topic_join.shout == Shout.id,
main_topic_join.main.is_(True)
)
).join(
main_topic,
main_topic.id == main_topic_join.topic
) )
q = q.add_columns( q = q.add_columns(
json_builder( json_builder(
'id', main_topic.id, "id", main_topic.id, "title", main_topic.title, "slug", main_topic.slug, "is_main", main_topic_join.main
'title', main_topic.title,
'slug', main_topic.slug,
'is_main', main_topic_join.main
).label("main_topic") ).label("main_topic")
) )
@@ -104,12 +89,7 @@ def query_with_stat(info):
topics_subquery = ( topics_subquery = (
select( select(
json_array_builder( json_array_builder(
json_builder( json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main)
'id', Topic.id,
'title', Topic.title,
'slug', Topic.slug,
'is_main', ShoutTopic.main
)
).label("topics") ).label("topics")
) )
.outerjoin(Topic, ShoutTopic.topic == Topic.id) .outerjoin(Topic, ShoutTopic.topic == Topic.id)
@@ -124,11 +104,16 @@ def query_with_stat(info):
select( select(
json_array_builder( json_array_builder(
json_builder( json_builder(
'id', Author.id, "id",
'name', Author.name, Author.id,
'slug', Author.slug, "name",
'pic', Author.pic, Author.name,
'caption', ShoutAuthor.caption "slug",
Author.slug,
"pic",
Author.pic,
"caption",
ShoutAuthor.caption,
) )
).label("authors") ).label("authors")
) )
@@ -138,34 +123,28 @@ def query_with_stat(info):
) )
q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id) q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
q = q.add_columns(authors_subquery.c.authors) q = q.add_columns(authors_subquery.c.authors)
if has_field(info, "stat"): if has_field(info, "stat"):
stats_subquery = ( stats_subquery = (
select( select(
Reaction.shout, Reaction.shout,
func.count(func.distinct(Reaction.id)).filter( func.count(func.distinct(Reaction.id))
Reaction.kind == ReactionKind.COMMENT.value .filter(Reaction.kind == ReactionKind.COMMENT.value)
).label("comments_count"), .label("comments_count"),
func.coalesce( func.coalesce(
func.sum( func.sum(
case( case(
(Reaction.reply_to.is_not(None), 0), (Reaction.reply_to.is_not(None), 0),
(Reaction.kind == ReactionKind.LIKE.value, 1), (Reaction.kind == ReactionKind.LIKE.value, 1),
(Reaction.kind == ReactionKind.DISLIKE.value, -1), (Reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0 else_=0,
) )
), ),
0 0,
).label("rating"), ).label("rating"),
func.coalesce( func.coalesce(func.max(case((Reaction.reply_to.is_(None), Reaction.created_at), else_=None)), 0).label(
func.max( "last_reacted_at"
case( ),
(Reaction.reply_to.is_(None), Reaction.created_at),
else_=None
)
),
0
).label("last_reacted_at")
) )
.where(Reaction.deleted_at.is_(None)) .where(Reaction.deleted_at.is_(None))
.group_by(Reaction.shout) .group_by(Reaction.shout)
@@ -176,9 +155,12 @@ def query_with_stat(info):
# aggregate in one column # aggregate in one column
q = q.add_columns( q = q.add_columns(
json_builder( json_builder(
'comments_count', stats_subquery.c.comments_count, "comments_count",
'rating', stats_subquery.c.rating, stats_subquery.c.comments_count,
'last_reacted_at', stats_subquery.c.last_reacted_at, "rating",
stats_subquery.c.rating,
"last_reacted_at",
stats_subquery.c.last_reacted_at,
).label("stat") ).label("stat")
) )
@@ -195,12 +177,12 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
try: try:
logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}") logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}")
q = q.limit(limit).offset(offset) q = q.limit(limit).offset(offset)
with local_session() as session: with local_session() as session:
logger.info("Executing query...") logger.info("Executing query...")
shouts_result = session.execute(q).all() shouts_result = session.execute(q).all()
logger.info(f"Query executed, got {len(shouts_result)} results") logger.info(f"Query executed, got {len(shouts_result)} results")
if not shouts_result: if not shouts_result:
logger.warning("No results found") logger.warning("No results found")
return [] return []
@@ -216,7 +198,7 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
else: else:
logger.warning(f"Row {idx} has no Shout attribute: {row}") logger.warning(f"Row {idx} has no Shout attribute: {row}")
continue continue
if shout: if shout:
shout_id = int(f"{shout.id}") shout_id = int(f"{shout.id}")
# logger.info(f"Processing shout ID: {shout_id}") # logger.info(f"Processing shout ID: {shout_id}")
@@ -228,17 +210,14 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
"id": main_author_id, "id": main_author_id,
"name": a.id, "name": a.id,
"slug": a.slug, "slug": a.slug,
"pic": a.pic "pic": a.pic,
} }
# logger.info({ **shout_dict, "body": "", "media": []}) # logger.info({ **shout_dict, "body": "", "media": []})
stat = json.loads(row.stat) if hasattr(row, "stat") else {} stat = json.loads(row.stat) if hasattr(row, "stat") else {}
viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0 viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
stat["viewed"] = viewed stat["viewed"] = viewed
if stat: if stat:
shout_dict["stat"] = { shout_dict["stat"] = {**stat, "commented": stat.get("comments_count", 0)}
**stat,
"commented": stat.get("comments_count", 0)
}
if has_field(info, "main_topic") and hasattr(row, "main_topic"): if has_field(info, "main_topic") and hasattr(row, "main_topic"):
shout_dict["main_topic"] = json.loads(row.main_topic) shout_dict["main_topic"] = json.loads(row.main_topic)
@@ -246,17 +225,16 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
shout_dict["authors"] = json.loads(row.authors) shout_dict["authors"] = json.loads(row.authors)
if has_field(info, "topics") and hasattr(row, "topics"): if has_field(info, "topics") and hasattr(row, "topics"):
shout_dict["topics"] = json.loads(row.topics) shout_dict["topics"] = json.loads(row.topics)
shouts.append(shout_dict) shouts.append(shout_dict)
except Exception as row_error: except Exception as row_error:
logger.error(f"Error processing row {idx}: {row_error}", exc_info=True) logger.error(f"Error processing row {idx}: {row_error}", exc_info=True)
continue continue
except Exception as e: except Exception as e:
logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True) logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True)
raise raise
finally: finally:
return shouts return shouts

View File

@@ -5,6 +5,7 @@ import traceback
import warnings import warnings
from typing import Any, Callable, Dict, TypeVar from typing import Any, Callable, Dict, TypeVar
import sqlalchemy
from sqlalchemy import JSON, Column, Engine, Integer, create_engine, event, exc, func, inspect from sqlalchemy import JSON, Column, Engine, Integer, create_engine, event, exc, func, inspect
from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, configure_mappers from sqlalchemy.orm import Session, configure_mappers
@@ -155,12 +156,15 @@ def get_json_builder():
Возвращает подходящие функции для построения JSON объектов в зависимости от драйвера БД Возвращает подходящие функции для построения JSON объектов в зависимости от драйвера БД
""" """
dialect = engine.dialect.name dialect = engine.dialect.name
if dialect.startswith('postgres'): json_cast = lambda x: x # noqa: E731
return func.json_build_object, func.json_agg if dialect.startswith("postgres"):
elif dialect.startswith('sqlite') or dialect.startswith('mysql'): json_cast = lambda x: func.cast(x, sqlalchemy.Text) # noqa: E731
return func.json_object, func.json_group_array return func.json_build_object, func.json_agg, json_cast
elif dialect.startswith("sqlite") or dialect.startswith("mysql"):
return func.json_object, func.json_group_array, json_cast
else: else:
raise NotImplementedError(f"JSON builder not implemented for dialect {dialect}") raise NotImplementedError(f"JSON builder not implemented for dialect {dialect}")
# Используем их в коде # Используем их в коде
json_builder, json_array_builder = get_json_builder() json_builder, json_array_builder, json_cast = get_json_builder()