fixed-shouts-load
All checks were successful
Deploy on push / deploy (push) Successful in 6s

Untone 2024-11-01 20:02:46 +03:00
parent 4ef5d172a0
commit fcbbe4fcac
2 changed files with 158 additions and 150 deletions

CHECKS (deleted file, 5 lines removed)

@@ -1,5 +0,0 @@
-WAIT=10
-TIMEOUT=10
-ATTEMPTS=3
-/
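The removed CHECKS file looks like a Dokku/herokuish zero-downtime check list: WAIT, TIMEOUT and ATTEMPTS tune the probing, and each remaining line names a path that must respond before traffic is switched to the new container. A hedged reading of the deleted entries (the annotations are mine, not part of the file):

    WAIT=10       # seconds to wait before the first probe
    TIMEOUT=10    # seconds allowed per request
    ATTEMPTS=3    # retries before the deploy is marked failed
    /             # GET the root path and expect a successful response

With the file gone, deploys presumably fall back to the platform's default check behaviour.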


@@ -61,17 +61,90 @@ def query_with_stat(info):
     :return: Query with a statistics subquery.
     """
-    q = select(Shout)
+    q = select(Shout).distinct().group_by(Shout.id)
+
+    # Create aliases for all tables
+    main_author = aliased(Author)
+    main_topic_join = aliased(ShoutTopic)
+    main_topic = aliased(Topic)
+
+    # main_author
+    q = q.join(main_author, main_author.id == Shout.created_by)
+    q = q.add_columns(
+        func.json_object(
+            'id', main_author.id,
+            'name', main_author.name,
+            'slug', main_author.slug,
+            'pic', main_author.pic
+        ).label("main_author")
+    )
+
+    if has_field(info, "main_topic"):
+        main_topic = aliased(Topic)
+        q = q.join(
+            main_topic_join,
+            and_(
+                main_topic_join.shout == Shout.id,
+                main_topic_join.main.is_(True)
+            )
+        ).join(
+            main_topic,
+            main_topic.id == main_topic_join.topic
+        )
+        q = q.add_columns(
+            func.json_object(
+                'id', main_topic.id,
+                'title', main_topic.title,
+                'slug', main_topic.slug,
+                'is_main', main_topic_join.main
+            ).label("main_topic")
+        )
+
+    if has_field(info, "topics"):
+        topics_subquery = (
+            select(
+                func.json_group_array(
+                    func.json_object(
+                        'id', Topic.id,
+                        'title', Topic.title,
+                        'slug', Topic.slug,
+                        'is_main', ShoutTopic.main
+                    )
+                ).label("topics")
+            )
+            .outerjoin(Topic, ShoutTopic.topic == Topic.id)
+            .where(ShoutTopic.shout == Shout.id)
+            .subquery()
+        )
+        q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
+        q = q.add_columns(topics_subquery.c.topics)
+
+    if has_field(info, "authors"):
+        authors_subquery = (
+            select(
+                func.json_group_array(
+                    func.json_object(
+                        'id', Author.id,
+                        'name', Author.name,
+                        'slug', Author.slug,
+                        'pic', Author.pic,
+                        'caption', ShoutAuthor.caption
+                    )
+                ).label("authors")
+            )
+            .outerjoin(Author, ShoutAuthor.author == Author.id)
+            .where(ShoutAuthor.shout == Shout.id)
+            .subquery()
+        )
+        q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
+        q = q.add_columns(authors_subquery.c.authors)
+
     if has_field(info, "stat"):
         stats_subquery = (
             select(
                 Reaction.shout,
-                func.count(
-                    case(
-                        (Reaction.kind == ReactionKind.COMMENT.value, 1),
-                        else_=None,
-                    )
+                func.count(func.distinct(Reaction.id)).filter(
+                    Reaction.kind == ReactionKind.COMMENT.value
                 ).label("comments_count"),
                 func.coalesce(
                     func.sum(
@@ -79,17 +152,20 @@ def query_with_stat(info):
                             (Reaction.reply_to.is_not(None), 0),
                             (Reaction.kind == ReactionKind.LIKE.value, 1),
                             (Reaction.kind == ReactionKind.DISLIKE.value, -1),
-                            else_=0,
+                            else_=0
                         )
                     ),
-                    0,
+                    0
                 ).label("rating"),
-                func.max(
-                    case(
-                        (Reaction.reply_to.is_(None), Reaction.created_at),
-                        else_=None,
-                    )
-                ).label("last_reacted_at"),
+                func.coalesce(
+                    func.max(
+                        case(
+                            (Reaction.reply_to.is_(None), Reaction.created_at),
+                            else_=None
+                        )
+                    ),
+                    0
+                ).label("last_reacted_at")
             )
             .where(Reaction.deleted_at.is_(None))
             .group_by(Reaction.shout)
@@ -97,70 +173,17 @@ def query_with_stat(info):
         )
         q = q.outerjoin(stats_subquery, stats_subquery.c.shout == Shout.id)
-        q = q.add_columns(stats_subquery.c.comments_count, stats_subquery.c.rating, stats_subquery.c.last_reacted_at)
-    if has_field(info, "created_by"):
-        q = q.outerjoin(Author, Shout.created_by == Author.id)
-        q = q.add_columns(
-            Author.id.label("main_author_id"),
-            Author.name.label("main_author_name"),
-            Author.slug.label("main_author_slug"),
-            Author.pic.label("main_author_pic"),
-        )
-    if has_field(info, "main_topic"):
-        q = q.outerjoin(ShoutTopic, and_(ShoutTopic.shout == Shout.id, ShoutTopic.main.is_(True)))
-        q = q.outerjoin(Topic, ShoutTopic.topic == Topic.id)
-        q = q.add_columns(
-            Topic.id.label("main_topic_id"), Topic.title.label("main_topic_title"), Topic.slug.label("main_topic_slug")
-        )
-    if has_field(info, "authors"):
-        topics_subquery = (
-            select(
-                ShoutTopic.shout,
-                Topic.id.label("topic_id"),
-                Topic.title.label("topic_title"),
-                Topic.slug.label("topic_slug"),
-                ShoutTopic.main.label("is_main"),
-            )
-            .outerjoin(Topic, ShoutTopic.topic == Topic.id)
-            .where(ShoutTopic.shout == Shout.id)
-            .subquery()
-        )
-        q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
-        q = q.add_columns(
-            topics_subquery.c.topic_id,
-            topics_subquery.c.topic_title,
-            topics_subquery.c.topic_slug,
-            topics_subquery.c.is_main,
-        )
-        authors_subquery = (
-            select(
-                ShoutAuthor.shout,
-                Author.id.label("author_id"),
-                Author.name.label("author_name"),
-                Author.slug.label("author_slug"),
-                Author.pic.label("author_pic"),
-                ShoutAuthor.caption.label("author_caption"),
-            )
-            .outerjoin(Author, ShoutAuthor.author == Author.id)
-            .where(ShoutAuthor.shout == Shout.id)
-            .subquery()
-        )
-        q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
-        q = q.add_columns(
-            authors_subquery.c.author_id,
-            authors_subquery.c.author_name,
-            authors_subquery.c.author_slug,
-            authors_subquery.c.author_pic,
-            authors_subquery.c.author_caption,
-        )
+        # aggregate in one column
+        q = q.add_columns(
+            func.json_object(
+                'comments_count', stats_subquery.c.comments_count,
+                'rating', stats_subquery.c.rating,
+                'last_reacted_at', stats_subquery.c.last_reacted_at,
+            ).label("stat")
+        )

     # Filter: published and not deleted
     q = q.where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
     return q
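The change above replaces the per-column labels (main_author_id, topic_title, ...) with func.json_object / func.json_group_array, so each related collection arrives as a single JSON string per shout row instead of a joined row fan-out that must be deduplicated in Python. A minimal standalone sketch of that pattern, assuming an SQLite backend (json_object and json_group_array are SQLite JSON1 functions) and illustrative Post/Tag models rather than the project's own:

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, func, select
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Post(Base):
        __tablename__ = "post"
        id = Column(Integer, primary_key=True)
        title = Column(String)

    class Tag(Base):
        __tablename__ = "tag"
        id = Column(Integer, primary_key=True)
        post = Column(Integer, ForeignKey("post.id"))
        name = Column(String)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(Post(id=1, title="hello"))
        session.add_all([Tag(post=1, name="a"), Tag(post=1, name="b")])
        session.commit()

        # Correlated scalar subquery: one JSON array per post instead of one row per tag.
        tags_json = (
            select(func.json_group_array(func.json_object("id", Tag.id, "name", Tag.name)))
            .where(Tag.post == Post.id)
            .scalar_subquery()
        )
        for title, tags in session.execute(select(Post.title, tags_json.label("tags"))):
            print(title, tags)  # tags is a JSON string: '[{"id":1,"name":"a"}, ...]'

The consumer then only has to json.loads() one value per requested field, which is what the reworked get_shouts_with_links below does.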
@@ -168,83 +191,73 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
     """
     Fetching publications with pagination applied.
     """
-    q = q.limit(limit).offset(offset)
-    includes_authors = has_field(info, "authors")
-    includes_topics = has_field(info, "topics")
-    includes_stat = has_field(info, "stat")
-    includes_media = has_field(info, "media")
-    with local_session() as session:
-        shouts_result = session.execute(q).all()
-        if not shouts_result:
-            return []
-        shouts = []
-        for row in shouts_result:
-            logger.debug(row)
-            shout_id = row.Shout.id
-            shout_dict = row.Shout.dict()
-            shout_dict.update(
-                {
-                    "authors": [],
-                    "topics": set(),
-                    "media": json.dumps(shout_dict.get("media", [])) if includes_media else [],
-                    "stat": {
-                        "viewed": ViewedStorage.get_shout(shout_id=shout_id) or 0,
-                        "commented": row.comments_count or 0,
-                        "rating": row.rating or 0,
-                        "last_reacted_at": row.last_reacted_at,
-                    }
-                    if includes_stat and hasattr(row, "comments_count")
-                    else {},
-                }
-            )
-            if includes_authors and hasattr(row, "main_author_id"):
-                shout_dict["created_by"] = {
-                    "id": row.main_author_id,
-                    "name": row.main_author_name or "Аноним",
-                    "slug": row.main_author_slug or "",
-                    "pic": row.main_author_pic or "",
-                }
-            if includes_topics and hasattr(row, "main_topic_id"):
-                shout_dict["main_topic"] = {
-                    "id": row.main_topic_id or 0,
-                    "title": row.main_topic_title or "",
-                    "slug": row.main_topic_slug or "",
-                }
-            if includes_authors and hasattr(row, "author_id"):
-                author = {
-                    "id": row.author_id,
-                    "name": row.author_name,
-                    "slug": row.author_slug,
-                    "pic": row.author_pic,
-                    "caption": row.author_caption,
-                }
-                if not filter(lambda x: x["id"] == author["id"], shout_dict["authors"]):
-                    shout_dict["authors"].append(author)
-            if includes_topics and hasattr(row, "topic_id"):
-                topic = {
-                    "id": row.topic_id,
-                    "title": row.topic_title,
-                    "slug": row.topic_slug,
-                    "is_main": row.is_main,
-                }
-                if not filter(lambda x: x["id"] == topic["id"], shout_dict["topics"]):
-                    shout_dict["topics"].add(frozenset(topic.items()))
-            if includes_topics:
-                shout_dict["topics"] = sorted(
-                    [dict(t) for t in shout_dict["topics"]], key=lambda x: (not x.get("is_main", False), x["id"])
-                )
-            shouts.append(shout_dict)
-    return shouts
+    shouts = []
+    try:
+        logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}")
+        q = q.limit(limit).offset(offset)
+
+        with local_session() as session:
+            logger.info("Executing query...")
+            shouts_result = session.execute(q).all()
+            logger.info(f"Query executed, got {len(shouts_result)} results")
+
+            if not shouts_result:
+                logger.warning("No results found")
+                return []
+
+            shouts = []
+            for idx, row in enumerate(shouts_result):
+                try:
+                    # logger.debug(row)
+                    shout = None
+                    if hasattr(row, "Shout"):
+                        shout = row.Shout
+                    else:
+                        logger.warning(f"Row {idx} has no Shout attribute: {row}")
+                        continue
+
+                    if shout:
+                        shout_id = int(f"{shout.id}")
+                        # logger.info(f"Processing shout ID: {shout_id}")
+                        shout_dict = shout.dict()
+
+                        if has_field(info, "created_by") and shout_dict.get("created_by"):
+                            main_author_id = shout_dict.get("created_by")
+                            a = session.query(Author).filter(Author.id == main_author_id).first()
+                            shout_dict["created_by"] = {
+                                "id": main_author_id,
+                                "name": a.name,
+                                "slug": a.slug,
+                                "pic": a.pic
+                            }
+
+                        # logger.info({ **shout_dict, "body": "", "media": []})
+                        stat = json.loads(row.stat) if hasattr(row, "stat") else {}
+                        viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
+                        stat["viewed"] = viewed
+                        if stat:
+                            shout_dict["stat"] = {
+                                **stat,
+                                "commented": stat.get("comments_count", 0)
+                            }
+
+                        if has_field(info, "main_topic") and hasattr(row, "main_topic"):
+                            shout_dict["main_topic"] = json.loads(row.main_topic)
+                        if has_field(info, "authors") and hasattr(row, "authors"):
+                            shout_dict["authors"] = json.loads(row.authors)
+                        if has_field(info, "topics") and hasattr(row, "topics"):
+                            shout_dict["topics"] = json.loads(row.topics)
+
+                        shouts.append(shout_dict)
+                except Exception as row_error:
+                    logger.error(f"Error processing row {idx}: {row_error}", exc_info=True)
+                    continue
+    except Exception as e:
+        logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True)
+        raise
+    finally:
+        return shouts
+
+    return []

 def apply_filters(q, filters):
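For orientation, a hypothetical caller wiring the two helpers together; the resolver name and the shape of options are assumptions for illustration and are not part of this commit:

    # Hypothetical resolver sketch (names other than query_with_stat,
    # apply_filters and get_shouts_with_links are assumed, not from the commit).
    def load_shouts(info, options=None):
        options = options or {}
        q = query_with_stat(info)  # adds main_author / main_topic / topics / authors / stat JSON columns on demand
        q = apply_filters(q, options.get("filters", {}))
        return get_shouts_with_links(
            info,
            q,
            limit=options.get("limit", 20),
            offset=options.get("offset", 0),
        )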