cache-invalidate-fix-2

Untone 2025-01-25 11:57:10 +03:00
parent 88525276c2
commit d59710309d
2 changed files with 43 additions and 16 deletions

View File

@@ -324,6 +324,21 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
         if publish:
             logger.info(f"publishing shout#{shout_id} with input: {shout_input}")
             shout_input["published_at"] = current_time
+            # Check that the shout is linked to its author
+            logger.info(f"Checking author link for shout#{shout_id} and author#{author_id}")
+            author_link = session.query(ShoutAuthor).filter(
+                and_(ShoutAuthor.shout == shout_id, ShoutAuthor.author == author_id)
+            ).first()
+            if not author_link:
+                logger.info(f"Adding missing author link for shout#{shout_id}")
+                sa = ShoutAuthor(shout=shout_id, author=author_id)
+                session.add(sa)
+                session.flush()
+                logger.info(f"Author link added successfully")
+            else:
+                logger.info(f"Author link already exists")
         Shout.update(shout_by_id, shout_input)
         session.add(shout_by_id)
         session.commit()
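
The added block above follows a simple check-then-insert pattern: when a draft is being published, it first verifies that a ShoutAuthor row linking the shout to its author exists, and creates one only if it is missing. A minimal standalone sketch of that pattern, assuming the project's ShoutAuthor model and an open SQLAlchemy session (the helper name ensure_author_link is illustrative, not part of the commit):

    from sqlalchemy import and_

    def ensure_author_link(session, shout_id: int, author_id: int) -> None:
        # Look for an existing link between the shout and the author.
        author_link = (
            session.query(ShoutAuthor)
            .filter(and_(ShoutAuthor.shout == shout_id, ShoutAuthor.author == author_id))
            .first()
        )
        if author_link is None:
            # Create the missing link; flush() makes it visible to later
            # queries in the same transaction without committing yet.
            session.add(ShoutAuthor(shout=shout_id, author=author_id))
            session.flush()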

View File

@@ -64,6 +64,7 @@ def query_with_stat(info):
     Adds the statistics subquery
     """
+    logger.info("Starting query_with_stat")
     q = (
         select(Shout)
         .where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
@@ -88,7 +89,10 @@ def query_with_stat(info):
         ).label("main_author")
     )
+    logger.info(f"Checking fields - main_topic: {has_field(info, 'main_topic')}, authors: {has_field(info, 'authors')}, topics: {has_field(info, 'topics')}")
     if has_field(info, "main_topic"):
+        logger.info("Adding main_topic join")
         main_topic_join = aliased(ShoutTopic)
         main_topic = aliased(Topic)
         q = q.join(main_topic_join, and_(main_topic_join.shout == Shout.id, main_topic_join.main.is_(True)))
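
For context, the main_topic branch joins the ShoutTopic association table through aliased() so that this "is main" join stays independent of any other ShoutTopic joins built elsewhere in the query. A rough sketch of the pattern, reusing the model names from the diff (the follow-up join to the aliased Topic is assumed, since it is not visible in this hunk):

    from sqlalchemy import and_, select
    from sqlalchemy.orm import aliased

    # Alias both tables so this join does not collide with other uses
    # of ShoutTopic/Topic in the same statement.
    main_topic_join = aliased(ShoutTopic)
    main_topic = aliased(Topic)

    q = select(Shout).join(
        main_topic_join,
        and_(main_topic_join.shout == Shout.id, main_topic_join.main.is_(True)),
    )
    # Assumed continuation: join the aliased Topic to read its id/title/slug.
    q = q.join(main_topic, main_topic.id == main_topic_join.topic)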
@@ -99,23 +103,8 @@ def query_with_stat(info):
             ).label("main_topic")
         )
-    if has_field(info, "topics"):
-        topics_subquery = (
-            select(
-                ShoutTopic.shout,
-                json_array_builder(
-                    json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main)
-                ).label("topics"),
-            )
-            .outerjoin(Topic, ShoutTopic.topic == Topic.id)
-            .where(ShoutTopic.shout == Shout.id)
-            .group_by(ShoutTopic.shout)
-            .subquery()
-        )
-        q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
-        q = q.add_columns(topics_subquery.c.topics)
     if has_field(info, "authors"):
+        logger.info("Adding authors subquery")
         authors_subquery = (
             select(
                 ShoutAuthor.shout,
@@ -144,7 +133,25 @@ def query_with_stat(info):
         q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
         q = q.add_columns(authors_subquery.c.authors)
+    if has_field(info, "topics"):
+        logger.info("Adding topics subquery")
+        topics_subquery = (
+            select(
+                ShoutTopic.shout,
+                json_array_builder(
+                    json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main)
+                ).label("topics"),
+            )
+            .outerjoin(Topic, ShoutTopic.topic == Topic.id)
+            .where(ShoutTopic.shout == Shout.id)
+            .group_by(ShoutTopic.shout)
+            .subquery()
+        )
+        q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
+        q = q.add_columns(topics_subquery.c.topics)
     if has_field(info, "stat"):
+        logger.info("Adding stats subquery")
         # Reaction statistics subquery
         stats_subquery = (
             select(
@@ -234,16 +241,21 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
                 shout_dict["stat"] = {**stat, "viewed": viewed, "commented": stat.get("comments_count", 0)}
             if has_field(info, "main_topic") and hasattr(row, "main_topic"):
+                logger.debug(f"Processing main_topic for shout {shout_id}")
                 shout_dict["main_topic"] = (
                     json.loads(row.main_topic) if isinstance(row.main_topic, str) else row.main_topic
                 )
             if has_field(info, "authors") and hasattr(row, "authors"):
+                logger.debug(f"Processing authors for shout {shout_id}, authors data: {row.authors}")
                 shout_dict["authors"] = (
                     json.loads(row.authors) if isinstance(row.authors, str) else row.authors
                 )
             if has_field(info, "topics") and hasattr(row, "topics"):
+                logger.debug(f"Processing topics for shout {shout_id}, topics data: {row.topics}")
                 shout_dict["topics"] = json.loads(row.topics) if isinstance(row.topics, str) else row.topics
+            logger.debug(f"Final shout_dict for {shout_id}: {shout_dict}")
             shouts.append(shout_dict)
         except Exception as row_error: